mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
Compare commits
31 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2ba6d77ee7 | ||
|
|
67a0ff825b | ||
|
|
6518379981 | ||
|
|
0c6ab50e35 | ||
|
|
f73ce37e18 | ||
|
|
973dcf065e | ||
|
|
8f5aaa8492 | ||
|
|
93ba8d840a | ||
|
|
485e454680 | ||
|
|
c3b6abf020 | ||
|
|
f3ccf1cab0 | ||
|
|
0deee0ec6b | ||
|
|
6b428948d4 | ||
|
|
6986257d4f | ||
|
|
a3c37cba21 | ||
|
|
b5f2ff1397 | ||
|
|
c84931c6ae | ||
|
|
63abf2d022 | ||
|
|
a62d2d05ad | ||
|
|
967cc16748 | ||
|
|
ab4b8cc2d5 | ||
|
|
387ccbad3a | ||
|
|
26cdfdd824 | ||
|
|
68e77384ac | ||
|
|
68da88c1f3 | ||
|
|
2b481ab71e | ||
|
|
05e6d81948 | ||
|
|
e426cd00bd | ||
|
|
09e3881b7d | ||
|
|
10124ac4a8 | ||
|
|
1987983f88 |
4
.github/actions/bitcoin/action.yml
vendored
4
.github/actions/bitcoin/action.yml
vendored
@@ -5,7 +5,7 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: "27.0"
|
default: 24.0.1
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
@@ -37,4 +37,4 @@ runs:
|
|||||||
|
|
||||||
- name: Bitcoin Regtest Daemon
|
- name: Bitcoin Regtest Daemon
|
||||||
shell: bash
|
shell: bash
|
||||||
run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/bitcoin/run.sh -txindex -daemon
|
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/bitcoin/run.sh -daemon
|
||||||
|
|||||||
@@ -42,8 +42,8 @@ runs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cargo install svm-rs
|
cargo install svm-rs
|
||||||
svm install 0.8.26
|
svm install 0.8.25
|
||||||
svm use 0.8.26
|
svm use 0.8.25
|
||||||
|
|
||||||
# - name: Cache Rust
|
# - name: Cache Rust
|
||||||
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
|
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
|
||||||
|
|||||||
2
.github/actions/monero-wallet-rpc/action.yml
vendored
2
.github/actions/monero-wallet-rpc/action.yml
vendored
@@ -5,7 +5,7 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.4
|
default: v0.18.3.1
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
|
|||||||
4
.github/actions/monero/action.yml
vendored
4
.github/actions/monero/action.yml
vendored
@@ -5,7 +5,7 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.4
|
default: v0.18.3.1
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
@@ -43,4 +43,4 @@ runs:
|
|||||||
|
|
||||||
- name: Monero Regtest Daemon
|
- name: Monero Regtest Daemon
|
||||||
shell: bash
|
shell: bash
|
||||||
run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/monero/run.sh --detach
|
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/monero/run.sh --detach
|
||||||
|
|||||||
8
.github/actions/test-dependencies/action.yml
vendored
8
.github/actions/test-dependencies/action.yml
vendored
@@ -5,12 +5,12 @@ inputs:
|
|||||||
monero-version:
|
monero-version:
|
||||||
description: "Monero version to download and run as a regtest node"
|
description: "Monero version to download and run as a regtest node"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.4
|
default: v0.18.3.1
|
||||||
|
|
||||||
bitcoin-version:
|
bitcoin-version:
|
||||||
description: "Bitcoin version to download and run as a regtest node"
|
description: "Bitcoin version to download and run as a regtest node"
|
||||||
required: false
|
required: false
|
||||||
default: "27.1"
|
default: 24.0.1
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
@@ -19,9 +19,9 @@ runs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Install Foundry
|
- name: Install Foundry
|
||||||
uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
|
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
|
||||||
with:
|
with:
|
||||||
version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9
|
version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
|
||||||
cache: false
|
cache: false
|
||||||
|
|
||||||
- name: Run a Monero Regtest Node
|
- name: Run a Monero Regtest Node
|
||||||
|
|||||||
2
.github/nightly-version
vendored
2
.github/nightly-version
vendored
@@ -1 +1 @@
|
|||||||
nightly-2025-02-01
|
nightly-2024-02-07
|
||||||
|
|||||||
35
.github/workflows/coins-tests.yml
vendored
Normal file
35
.github/workflows/coins-tests.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
name: coins/ Tests
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- develop
|
||||||
|
paths:
|
||||||
|
- "common/**"
|
||||||
|
- "crypto/**"
|
||||||
|
- "coins/**"
|
||||||
|
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- "common/**"
|
||||||
|
- "crypto/**"
|
||||||
|
- "coins/**"
|
||||||
|
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-coins:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||||
|
|
||||||
|
- name: Test Dependencies
|
||||||
|
uses: ./.github/actions/test-dependencies
|
||||||
|
|
||||||
|
- name: Run Tests
|
||||||
|
run: |
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||||
|
-p bitcoin-serai \
|
||||||
|
-p ethereum-serai \
|
||||||
|
-p monero-generators \
|
||||||
|
-p monero-serai
|
||||||
5
.github/workflows/common-tests.yml
vendored
5
.github/workflows/common-tests.yml
vendored
@@ -27,8 +27,5 @@ jobs:
|
|||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||||
-p std-shims \
|
-p std-shims \
|
||||||
-p zalloc \
|
-p zalloc \
|
||||||
-p patchable-async-sleep \
|
|
||||||
-p serai-db \
|
-p serai-db \
|
||||||
-p serai-env \
|
-p serai-env
|
||||||
-p serai-task \
|
|
||||||
-p simple-request
|
|
||||||
|
|||||||
6
.github/workflows/coordinator-tests.yml
vendored
6
.github/workflows/coordinator-tests.yml
vendored
@@ -7,7 +7,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "coordinator/**"
|
- "coordinator/**"
|
||||||
- "orchestration/**"
|
- "orchestration/**"
|
||||||
@@ -18,7 +18,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "coordinator/**"
|
- "coordinator/**"
|
||||||
- "orchestration/**"
|
- "orchestration/**"
|
||||||
@@ -37,4 +37,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run coordinator Docker tests
|
- name: Run coordinator Docker tests
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests
|
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||||
|
|||||||
4
.github/workflows/crypto-tests.yml
vendored
4
.github/workflows/crypto-tests.yml
vendored
@@ -35,10 +35,6 @@ jobs:
|
|||||||
-p multiexp \
|
-p multiexp \
|
||||||
-p schnorr-signatures \
|
-p schnorr-signatures \
|
||||||
-p dleq \
|
-p dleq \
|
||||||
-p generalized-bulletproofs \
|
|
||||||
-p generalized-bulletproofs-circuit-abstraction \
|
|
||||||
-p ec-divisors \
|
|
||||||
-p generalized-bulletproofs-ec-gadgets \
|
|
||||||
-p dkg \
|
-p dkg \
|
||||||
-p modular-frost \
|
-p modular-frost \
|
||||||
-p frost-schnorrkel
|
-p frost-schnorrkel
|
||||||
|
|||||||
2
.github/workflows/full-stack-tests.yml
vendored
2
.github/workflows/full-stack-tests.yml
vendored
@@ -19,4 +19,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run Full Stack Docker tests
|
- name: Run Full Stack Docker tests
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests
|
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||||
|
|||||||
31
.github/workflows/lint.yml
vendored
31
.github/workflows/lint.yml
vendored
@@ -73,15 +73,6 @@ jobs:
|
|||||||
- name: Run rustfmt
|
- name: Run rustfmt
|
||||||
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
||||||
|
|
||||||
- name: Install foundry
|
|
||||||
uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
|
|
||||||
with:
|
|
||||||
version: nightly-41d4e5437107f6f42c7711123890147bc736a609
|
|
||||||
cache: false
|
|
||||||
|
|
||||||
- name: Run forge fmt
|
|
||||||
run: FOUNDRY_FMT_SORT_INPUTS=false FOUNDRY_FMT_LINE_LENGTH=100 FOUNDRY_FMT_TAB_WIDTH=2 FOUNDRY_FMT_BRACKET_SPACING=true FOUNDRY_FMT_INT_TYPES=preserve forge fmt --check $(find . -iname "*.sol")
|
|
||||||
|
|
||||||
machete:
|
machete:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
@@ -90,25 +81,3 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
cargo install cargo-machete
|
cargo install cargo-machete
|
||||||
cargo machete
|
cargo machete
|
||||||
|
|
||||||
slither:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
- name: Slither
|
|
||||||
run: |
|
|
||||||
python3 -m pip install solc-select
|
|
||||||
solc-select install 0.8.26
|
|
||||||
solc-select use 0.8.26
|
|
||||||
|
|
||||||
python3 -m pip install slither-analyzer
|
|
||||||
|
|
||||||
slither --include-paths ./networks/ethereum/schnorr/contracts/Schnorr.sol
|
|
||||||
slither --include-paths ./networks/ethereum/schnorr/contracts ./networks/ethereum/schnorr/contracts/tests/Schnorr.sol
|
|
||||||
slither processor/ethereum/deployer/contracts/Deployer.sol
|
|
||||||
slither processor/ethereum/erc20/contracts/IERC20.sol
|
|
||||||
|
|
||||||
cp networks/ethereum/schnorr/contracts/Schnorr.sol processor/ethereum/router/contracts/
|
|
||||||
cp processor/ethereum/erc20/contracts/IERC20.sol processor/ethereum/router/contracts/
|
|
||||||
cd processor/ethereum/router/contracts
|
|
||||||
slither Router.sol
|
|
||||||
|
|||||||
2
.github/workflows/message-queue-tests.yml
vendored
2
.github/workflows/message-queue-tests.yml
vendored
@@ -33,4 +33,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run message-queue Docker tests
|
- name: Run message-queue Docker tests
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests
|
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||||
|
|||||||
34
.github/workflows/monero-tests.yaml
vendored
34
.github/workflows/monero-tests.yaml
vendored
@@ -5,12 +5,12 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- develop
|
- develop
|
||||||
paths:
|
paths:
|
||||||
- "networks/monero/**"
|
- "coins/monero/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
|
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- "networks/monero/**"
|
- "coins/monero/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
@@ -26,19 +26,7 @@ jobs:
|
|||||||
uses: ./.github/actions/test-dependencies
|
uses: ./.github/actions/test-dependencies
|
||||||
|
|
||||||
- name: Run Unit Tests Without Features
|
- name: Run Unit Tests Without Features
|
||||||
run: |
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
|
|
||||||
|
|
||||||
# Doesn't run unit tests with features as the tests workflow will
|
# Doesn't run unit tests with features as the tests workflow will
|
||||||
|
|
||||||
@@ -47,7 +35,7 @@ jobs:
|
|||||||
# Test against all supported protocol versions
|
# Test against all supported protocol versions
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
version: [v0.17.3.2, v0.18.3.4]
|
version: [v0.17.3.2, v0.18.2.0]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||||
@@ -58,15 +46,11 @@ jobs:
|
|||||||
monero-version: ${{ matrix.version }}
|
monero-version: ${{ matrix.version }}
|
||||||
|
|
||||||
- name: Run Integration Tests Without Features
|
- name: Run Integration Tests Without Features
|
||||||
run: |
|
# Runs with the binaries feature so the binaries build
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
|
# https://github.com/rust-lang/cargo/issues/8396
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
|
|
||||||
|
|
||||||
- name: Run Integration Tests
|
- name: Run Integration Tests
|
||||||
# Don't run if the the tests workflow also will
|
# Don't run if the the tests workflow also will
|
||||||
if: ${{ matrix.version != 'v0.18.3.4' }}
|
if: ${{ matrix.version != 'v0.18.2.0' }}
|
||||||
run: |
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
|
|
||||||
|
|||||||
259
.github/workflows/msrv.yml
vendored
259
.github/workflows/msrv.yml
vendored
@@ -1,259 +0,0 @@
|
|||||||
name: Weekly MSRV Check
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * 0"
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
msrv-common:
|
|
||||||
name: Run cargo msrv on common
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on common
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path common/zalloc/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/std-shims/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/env/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/db/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/task/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/request/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path common/patchable-async-sleep/Cargo.toml
|
|
||||||
|
|
||||||
msrv-crypto:
|
|
||||||
name: Run cargo msrv on crypto
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on crypto
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path crypto/transcript/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path crypto/ff-group-tests/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/dalek-ff-group/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/ed448/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path crypto/multiexp/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path crypto/dleq/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/ciphersuite/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/schnorr/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/generalized-bulletproofs/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/circuit-abstraction/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/divisors/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/ec-gadgets/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/embedwards25519/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/evrf/secq256k1/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path crypto/dkg/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/frost/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path crypto/schnorrkel/Cargo.toml
|
|
||||||
|
|
||||||
msrv-networks:
|
|
||||||
name: Run cargo msrv on networks
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on networks
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path networks/bitcoin/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path networks/ethereum/build-contracts/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/ethereum/schnorr/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/ethereum/alloy-simple-request-transport/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/ethereum/relayer/Cargo.toml --features parity-db
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path networks/monero/io/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/generators/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/ringct/mlsag/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/ringct/clsag/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/ringct/borromean/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/ringct/bulletproofs/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/rpc/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/rpc/simple-request/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/wallet/address/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/wallet/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path networks/monero/verify-chain/Cargo.toml
|
|
||||||
|
|
||||||
msrv-message-queue:
|
|
||||||
name: Run cargo msrv on message-queue
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on message-queue
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path message-queue/Cargo.toml --features parity-db
|
|
||||||
|
|
||||||
msrv-processor:
|
|
||||||
name: Run cargo msrv on processor
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on processor
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path processor/view-keys/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/messages/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/scanner/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/scheduler/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/scheduler/smart-contract/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/scheduler/utxo/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/scheduler/utxo/standard/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/scheduler/utxo/transaction-chaining/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/key-gen/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/frost-attempt-manager/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/signers/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/bin/Cargo.toml --features parity-db
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/bitcoin/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/test-primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/erc20/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/deployer/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/router/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path processor/ethereum/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path processor/monero/Cargo.toml
|
|
||||||
|
|
||||||
msrv-coordinator:
|
|
||||||
name: Run cargo msrv on coordinator
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on coordinator
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path coordinator/tributary-sdk/tendermint/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/tributary-sdk/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/cosign/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/substrate/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/tributary/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/p2p/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/p2p/libp2p/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path coordinator/Cargo.toml
|
|
||||||
|
|
||||||
msrv-substrate:
|
|
||||||
name: Run cargo msrv on substrate
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on substrate
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path substrate/primitives/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/coins/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/coins/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/dex/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/economic-security/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/genesis-liquidity/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/genesis-liquidity/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/in-instructions/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/in-instructions/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/validator-sets/pallet/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/validator-sets/primitives/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/emissions/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/emissions/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/signals/primitives/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/signals/pallet/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/abi/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/client/Cargo.toml
|
|
||||||
|
|
||||||
cargo msrv verify --manifest-path substrate/runtime/Cargo.toml
|
|
||||||
cargo msrv verify --manifest-path substrate/node/Cargo.toml
|
|
||||||
|
|
||||||
msrv-orchestration:
|
|
||||||
name: Run cargo msrv on orchestration
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on message-queue
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path orchestration/Cargo.toml
|
|
||||||
|
|
||||||
msrv-mini:
|
|
||||||
name: Run cargo msrv on mini
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install cargo msrv
|
|
||||||
run: cargo install --locked cargo-msrv
|
|
||||||
|
|
||||||
- name: Run cargo msrv on mini
|
|
||||||
run: |
|
|
||||||
cargo msrv verify --manifest-path mini/Cargo.toml
|
|
||||||
49
.github/workflows/networks-tests.yml
vendored
49
.github/workflows/networks-tests.yml
vendored
@@ -1,49 +0,0 @@
|
|||||||
name: networks/ Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "networks/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "networks/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-networks:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Test Dependencies
|
|
||||||
uses: ./.github/actions/test-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: |
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
|
||||||
-p bitcoin-serai \
|
|
||||||
-p build-solidity-contracts \
|
|
||||||
-p ethereum-schnorr-contract \
|
|
||||||
-p alloy-simple-request-transport \
|
|
||||||
-p serai-ethereum-relayer \
|
|
||||||
-p monero-io \
|
|
||||||
-p monero-generators \
|
|
||||||
-p monero-primitives \
|
|
||||||
-p monero-mlsag \
|
|
||||||
-p monero-clsag \
|
|
||||||
-p monero-borromean \
|
|
||||||
-p monero-bulletproofs \
|
|
||||||
-p monero-serai \
|
|
||||||
-p monero-rpc \
|
|
||||||
-p monero-simple-request-rpc \
|
|
||||||
-p monero-address \
|
|
||||||
-p monero-wallet \
|
|
||||||
-p monero-serai-verify-chain
|
|
||||||
6
.github/workflows/no-std.yml
vendored
6
.github/workflows/no-std.yml
vendored
@@ -7,14 +7,14 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "tests/no-std/**"
|
- "tests/no-std/**"
|
||||||
|
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "tests/no-std/**"
|
- "tests/no-std/**"
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
@@ -32,4 +32,4 @@ jobs:
|
|||||||
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
||||||
|
|
||||||
- name: Verify no-std builds
|
- name: Verify no-std builds
|
||||||
run: CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf -p serai-no-std-tests
|
run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf
|
||||||
|
|||||||
37
.github/workflows/pages.yml
vendored
37
.github/workflows/pages.yml
vendored
@@ -1,7 +1,6 @@
|
|||||||
# MIT License
|
# MIT License
|
||||||
#
|
#
|
||||||
# Copyright (c) 2022 just-the-docs
|
# Copyright (c) 2022 just-the-docs
|
||||||
# Copyright (c) 2022-2024 Luke Parker
|
|
||||||
#
|
#
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
# of this software and associated documentation files (the "Software"), to deal
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
@@ -21,21 +20,31 @@
|
|||||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
# SOFTWARE.
|
# SOFTWARE.
|
||||||
|
|
||||||
name: Deploy Rust docs and Jekyll site to Pages
|
# This workflow uses actions that are not certified by GitHub.
|
||||||
|
# They are provided by a third-party and are governed by
|
||||||
|
# separate terms of service, privacy policy, and support
|
||||||
|
# documentation.
|
||||||
|
|
||||||
|
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
|
||||||
|
name: Deploy Jekyll site to Pages
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- "develop"
|
- "develop"
|
||||||
|
paths:
|
||||||
|
- "docs/**"
|
||||||
|
|
||||||
|
# Allows you to run this workflow manually from the Actions tab
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
|
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
pages: write
|
pages: write
|
||||||
id-token: write
|
id-token: write
|
||||||
|
|
||||||
# Only allow one concurrent deployment
|
# Allow one concurrent deployment
|
||||||
concurrency:
|
concurrency:
|
||||||
group: "pages"
|
group: "pages"
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
@@ -44,6 +53,9 @@ jobs:
|
|||||||
# Build job
|
# Build job
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
working-directory: docs
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
@@ -57,24 +69,11 @@ jobs:
|
|||||||
id: pages
|
id: pages
|
||||||
uses: actions/configure-pages@v3
|
uses: actions/configure-pages@v3
|
||||||
- name: Build with Jekyll
|
- name: Build with Jekyll
|
||||||
run: cd ${{ github.workspace }}/docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
|
run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
|
||||||
env:
|
env:
|
||||||
JEKYLL_ENV: production
|
JEKYLL_ENV: production
|
||||||
|
|
||||||
- name: Get nightly version to use
|
|
||||||
id: nightly
|
|
||||||
shell: bash
|
|
||||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
- name: Buld Rust docs
|
|
||||||
run: |
|
|
||||||
rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-docs
|
|
||||||
RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --all-features
|
|
||||||
mv target/doc docs/_site/rust
|
|
||||||
|
|
||||||
- name: Upload artifact
|
- name: Upload artifact
|
||||||
uses: actions/upload-pages-artifact@v3
|
uses: actions/upload-pages-artifact@v1
|
||||||
with:
|
with:
|
||||||
path: "docs/_site/"
|
path: "docs/_site/"
|
||||||
|
|
||||||
@@ -88,4 +87,4 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Deploy to GitHub Pages
|
- name: Deploy to GitHub Pages
|
||||||
id: deployment
|
id: deployment
|
||||||
uses: actions/deploy-pages@v4
|
uses: actions/deploy-pages@v2
|
||||||
|
|||||||
6
.github/workflows/processor-tests.yml
vendored
6
.github/workflows/processor-tests.yml
vendored
@@ -7,7 +7,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
- "orchestration/**"
|
- "orchestration/**"
|
||||||
@@ -18,7 +18,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
- "orchestration/**"
|
- "orchestration/**"
|
||||||
@@ -37,4 +37,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run processor Docker tests
|
- name: Run processor Docker tests
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests
|
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||||
|
|||||||
2
.github/workflows/reproducible-runtime.yml
vendored
2
.github/workflows/reproducible-runtime.yml
vendored
@@ -33,4 +33,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run Reproducible Runtime tests
|
- name: Run Reproducible Runtime tests
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests
|
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||||
|
|||||||
40
.github/workflows/tests.yml
vendored
40
.github/workflows/tests.yml
vendored
@@ -7,7 +7,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
- "coordinator/**"
|
- "coordinator/**"
|
||||||
@@ -17,7 +17,7 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- "common/**"
|
- "common/**"
|
||||||
- "crypto/**"
|
- "crypto/**"
|
||||||
- "networks/**"
|
- "coins/**"
|
||||||
- "message-queue/**"
|
- "message-queue/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
- "coordinator/**"
|
- "coordinator/**"
|
||||||
@@ -39,35 +39,10 @@ jobs:
|
|||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||||
-p serai-message-queue \
|
-p serai-message-queue \
|
||||||
-p serai-processor-messages \
|
-p serai-processor-messages \
|
||||||
-p serai-processor-key-gen \
|
-p serai-processor \
|
||||||
-p serai-processor-view-keys \
|
|
||||||
-p serai-processor-frost-attempt-manager \
|
|
||||||
-p serai-processor-primitives \
|
|
||||||
-p serai-processor-scanner \
|
|
||||||
-p serai-processor-scheduler-primitives \
|
|
||||||
-p serai-processor-utxo-scheduler-primitives \
|
|
||||||
-p serai-processor-utxo-scheduler \
|
|
||||||
-p serai-processor-transaction-chaining-scheduler \
|
|
||||||
-p serai-processor-smart-contract-scheduler \
|
|
||||||
-p serai-processor-signers \
|
|
||||||
-p serai-processor-bin \
|
|
||||||
-p serai-bitcoin-processor \
|
|
||||||
-p serai-processor-ethereum-primitives \
|
|
||||||
-p serai-processor-ethereum-test-primitives \
|
|
||||||
-p serai-processor-ethereum-deployer \
|
|
||||||
-p serai-processor-ethereum-router \
|
|
||||||
-p serai-processor-ethereum-erc20 \
|
|
||||||
-p serai-ethereum-processor \
|
|
||||||
-p serai-monero-processor \
|
|
||||||
-p tendermint-machine \
|
-p tendermint-machine \
|
||||||
-p tributary-sdk \
|
-p tributary-chain \
|
||||||
-p serai-cosign \
|
|
||||||
-p serai-coordinator-substrate \
|
|
||||||
-p serai-coordinator-tributary \
|
|
||||||
-p serai-coordinator-p2p \
|
|
||||||
-p serai-coordinator-libp2p-p2p \
|
|
||||||
-p serai-coordinator \
|
-p serai-coordinator \
|
||||||
-p serai-orchestrator \
|
|
||||||
-p serai-docker-tests
|
-p serai-docker-tests
|
||||||
|
|
||||||
test-substrate:
|
test-substrate:
|
||||||
@@ -87,16 +62,9 @@ jobs:
|
|||||||
-p serai-dex-pallet \
|
-p serai-dex-pallet \
|
||||||
-p serai-validator-sets-primitives \
|
-p serai-validator-sets-primitives \
|
||||||
-p serai-validator-sets-pallet \
|
-p serai-validator-sets-pallet \
|
||||||
-p serai-genesis-liquidity-primitives \
|
|
||||||
-p serai-genesis-liquidity-pallet \
|
|
||||||
-p serai-emissions-primitives \
|
|
||||||
-p serai-emissions-pallet \
|
|
||||||
-p serai-economic-security-pallet \
|
|
||||||
-p serai-in-instructions-primitives \
|
-p serai-in-instructions-primitives \
|
||||||
-p serai-in-instructions-pallet \
|
-p serai-in-instructions-pallet \
|
||||||
-p serai-signals-primitives \
|
|
||||||
-p serai-signals-pallet \
|
-p serai-signals-pallet \
|
||||||
-p serai-abi \
|
|
||||||
-p serai-runtime \
|
-p serai-runtime \
|
||||||
-p serai-node
|
-p serai-node
|
||||||
|
|
||||||
|
|||||||
6499
Cargo.lock
generated
6499
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
119
Cargo.toml
119
Cargo.toml
@@ -2,10 +2,9 @@
|
|||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
# Version patches
|
# Version patches
|
||||||
"patches/parking_lot_core",
|
|
||||||
"patches/parking_lot",
|
|
||||||
"patches/zstd",
|
"patches/zstd",
|
||||||
"patches/rocksdb",
|
"patches/rocksdb",
|
||||||
|
"patches/proc-macro-crate",
|
||||||
|
|
||||||
# std patches
|
# std patches
|
||||||
"patches/matches",
|
"patches/matches",
|
||||||
@@ -17,10 +16,8 @@ members = [
|
|||||||
|
|
||||||
"common/std-shims",
|
"common/std-shims",
|
||||||
"common/zalloc",
|
"common/zalloc",
|
||||||
"common/patchable-async-sleep",
|
|
||||||
"common/db",
|
"common/db",
|
||||||
"common/env",
|
"common/env",
|
||||||
"common/task",
|
|
||||||
"common/request",
|
"common/request",
|
||||||
|
|
||||||
"crypto/transcript",
|
"crypto/transcript",
|
||||||
@@ -31,75 +28,25 @@ members = [
|
|||||||
"crypto/ciphersuite",
|
"crypto/ciphersuite",
|
||||||
|
|
||||||
"crypto/multiexp",
|
"crypto/multiexp",
|
||||||
|
|
||||||
"crypto/schnorr",
|
"crypto/schnorr",
|
||||||
"crypto/dleq",
|
"crypto/dleq",
|
||||||
|
|
||||||
"crypto/evrf/secq256k1",
|
|
||||||
"crypto/evrf/embedwards25519",
|
|
||||||
"crypto/evrf/generalized-bulletproofs",
|
|
||||||
"crypto/evrf/circuit-abstraction",
|
|
||||||
"crypto/evrf/divisors",
|
|
||||||
"crypto/evrf/ec-gadgets",
|
|
||||||
|
|
||||||
"crypto/dkg",
|
"crypto/dkg",
|
||||||
"crypto/frost",
|
"crypto/frost",
|
||||||
"crypto/schnorrkel",
|
"crypto/schnorrkel",
|
||||||
|
|
||||||
"networks/bitcoin",
|
"coins/bitcoin",
|
||||||
|
"coins/ethereum",
|
||||||
"networks/ethereum/build-contracts",
|
"coins/monero/generators",
|
||||||
"networks/ethereum/schnorr",
|
"coins/monero",
|
||||||
"networks/ethereum/alloy-simple-request-transport",
|
|
||||||
"networks/ethereum/relayer",
|
|
||||||
|
|
||||||
"networks/monero/io",
|
|
||||||
"networks/monero/generators",
|
|
||||||
"networks/monero/primitives",
|
|
||||||
"networks/monero/ringct/mlsag",
|
|
||||||
"networks/monero/ringct/clsag",
|
|
||||||
"networks/monero/ringct/borromean",
|
|
||||||
"networks/monero/ringct/bulletproofs",
|
|
||||||
"networks/monero",
|
|
||||||
"networks/monero/rpc",
|
|
||||||
"networks/monero/rpc/simple-request",
|
|
||||||
"networks/monero/wallet/address",
|
|
||||||
"networks/monero/wallet",
|
|
||||||
"networks/monero/verify-chain",
|
|
||||||
|
|
||||||
"message-queue",
|
"message-queue",
|
||||||
|
|
||||||
"processor/messages",
|
"processor/messages",
|
||||||
|
"processor",
|
||||||
|
|
||||||
"processor/key-gen",
|
"coordinator/tributary/tendermint",
|
||||||
"processor/view-keys",
|
|
||||||
"processor/frost-attempt-manager",
|
|
||||||
|
|
||||||
"processor/primitives",
|
|
||||||
"processor/scanner",
|
|
||||||
"processor/scheduler/primitives",
|
|
||||||
"processor/scheduler/utxo/primitives",
|
|
||||||
"processor/scheduler/utxo/standard",
|
|
||||||
"processor/scheduler/utxo/transaction-chaining",
|
|
||||||
"processor/scheduler/smart-contract",
|
|
||||||
"processor/signers",
|
|
||||||
|
|
||||||
"processor/bin",
|
|
||||||
"processor/bitcoin",
|
|
||||||
"processor/ethereum/primitives",
|
|
||||||
"processor/ethereum/test-primitives",
|
|
||||||
"processor/ethereum/deployer",
|
|
||||||
"processor/ethereum/router",
|
|
||||||
"processor/ethereum/erc20",
|
|
||||||
"processor/ethereum",
|
|
||||||
"processor/monero",
|
|
||||||
|
|
||||||
"coordinator/tributary-sdk/tendermint",
|
|
||||||
"coordinator/tributary-sdk",
|
|
||||||
"coordinator/cosign",
|
|
||||||
"coordinator/substrate",
|
|
||||||
"coordinator/tributary",
|
"coordinator/tributary",
|
||||||
"coordinator/p2p",
|
|
||||||
"coordinator/p2p/libp2p",
|
|
||||||
"coordinator",
|
"coordinator",
|
||||||
|
|
||||||
"substrate/primitives",
|
"substrate/primitives",
|
||||||
@@ -107,22 +54,12 @@ members = [
|
|||||||
"substrate/coins/primitives",
|
"substrate/coins/primitives",
|
||||||
"substrate/coins/pallet",
|
"substrate/coins/pallet",
|
||||||
|
|
||||||
"substrate/dex/pallet",
|
"substrate/in-instructions/primitives",
|
||||||
|
"substrate/in-instructions/pallet",
|
||||||
|
|
||||||
"substrate/validator-sets/primitives",
|
"substrate/validator-sets/primitives",
|
||||||
"substrate/validator-sets/pallet",
|
"substrate/validator-sets/pallet",
|
||||||
|
|
||||||
"substrate/genesis-liquidity/primitives",
|
|
||||||
"substrate/genesis-liquidity/pallet",
|
|
||||||
|
|
||||||
"substrate/emissions/primitives",
|
|
||||||
"substrate/emissions/pallet",
|
|
||||||
|
|
||||||
"substrate/economic-security/pallet",
|
|
||||||
|
|
||||||
"substrate/in-instructions/primitives",
|
|
||||||
"substrate/in-instructions/pallet",
|
|
||||||
|
|
||||||
"substrate/signals/primitives",
|
"substrate/signals/primitives",
|
||||||
"substrate/signals/pallet",
|
"substrate/signals/pallet",
|
||||||
|
|
||||||
@@ -141,9 +78,9 @@ members = [
|
|||||||
|
|
||||||
"tests/docker",
|
"tests/docker",
|
||||||
"tests/message-queue",
|
"tests/message-queue",
|
||||||
# TODO "tests/processor",
|
"tests/processor",
|
||||||
# TODO "tests/coordinator",
|
"tests/coordinator",
|
||||||
# TODO "tests/full-stack",
|
"tests/full-stack",
|
||||||
"tests/reproducible-runtime",
|
"tests/reproducible-runtime",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -151,32 +88,18 @@ members = [
|
|||||||
# to the extensive operations required for Bulletproofs
|
# to the extensive operations required for Bulletproofs
|
||||||
[profile.dev.package]
|
[profile.dev.package]
|
||||||
subtle = { opt-level = 3 }
|
subtle = { opt-level = 3 }
|
||||||
|
curve25519-dalek = { opt-level = 3 }
|
||||||
|
|
||||||
ff = { opt-level = 3 }
|
ff = { opt-level = 3 }
|
||||||
group = { opt-level = 3 }
|
group = { opt-level = 3 }
|
||||||
|
|
||||||
crypto-bigint = { opt-level = 3 }
|
crypto-bigint = { opt-level = 3 }
|
||||||
secp256k1 = { opt-level = 3 }
|
|
||||||
curve25519-dalek = { opt-level = 3 }
|
|
||||||
dalek-ff-group = { opt-level = 3 }
|
dalek-ff-group = { opt-level = 3 }
|
||||||
minimal-ed448 = { opt-level = 3 }
|
minimal-ed448 = { opt-level = 3 }
|
||||||
|
|
||||||
multiexp = { opt-level = 3 }
|
multiexp = { opt-level = 3 }
|
||||||
|
|
||||||
secq256k1 = { opt-level = 3 }
|
monero-serai = { opt-level = 3 }
|
||||||
embedwards25519 = { opt-level = 3 }
|
|
||||||
generalized-bulletproofs = { opt-level = 3 }
|
|
||||||
generalized-bulletproofs-circuit-abstraction = { opt-level = 3 }
|
|
||||||
ec-divisors = { opt-level = 3 }
|
|
||||||
generalized-bulletproofs-ec-gadgets = { opt-level = 3 }
|
|
||||||
|
|
||||||
dkg = { opt-level = 3 }
|
|
||||||
|
|
||||||
monero-generators = { opt-level = 3 }
|
|
||||||
monero-borromean = { opt-level = 3 }
|
|
||||||
monero-bulletproofs = { opt-level = 3 }
|
|
||||||
monero-mlsag = { opt-level = 3 }
|
|
||||||
monero-clsag = { opt-level = 3 }
|
|
||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
panic = "unwind"
|
panic = "unwind"
|
||||||
@@ -185,12 +108,15 @@ panic = "unwind"
|
|||||||
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
|
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
|
||||||
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
||||||
|
|
||||||
parking_lot_core = { path = "patches/parking_lot_core" }
|
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
|
||||||
parking_lot = { path = "patches/parking_lot" }
|
dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
|
||||||
|
|
||||||
# wasmtime pulls in an old version for this
|
# wasmtime pulls in an old version for this
|
||||||
zstd = { path = "patches/zstd" }
|
zstd = { path = "patches/zstd" }
|
||||||
# Needed for WAL compression
|
# Needed for WAL compression
|
||||||
rocksdb = { path = "patches/rocksdb" }
|
rocksdb = { path = "patches/rocksdb" }
|
||||||
|
# proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3
|
||||||
|
proc-macro-crate = { path = "patches/proc-macro-crate" }
|
||||||
|
|
||||||
# is-terminal now has an std-based solution with an equivalent API
|
# is-terminal now has an std-based solution with an equivalent API
|
||||||
is-terminal = { path = "patches/is-terminal" }
|
is-terminal = { path = "patches/is-terminal" }
|
||||||
@@ -207,8 +133,6 @@ directories-next = { path = "patches/directories-next" }
|
|||||||
|
|
||||||
[workspace.lints.clippy]
|
[workspace.lints.clippy]
|
||||||
unwrap_or_default = "allow"
|
unwrap_or_default = "allow"
|
||||||
map_unwrap_or = "allow"
|
|
||||||
needless_continue = "allow"
|
|
||||||
borrow_as_ptr = "deny"
|
borrow_as_ptr = "deny"
|
||||||
cast_lossless = "deny"
|
cast_lossless = "deny"
|
||||||
cast_possible_truncation = "deny"
|
cast_possible_truncation = "deny"
|
||||||
@@ -236,9 +160,11 @@ manual_instant_elapsed = "deny"
|
|||||||
manual_let_else = "deny"
|
manual_let_else = "deny"
|
||||||
manual_ok_or = "deny"
|
manual_ok_or = "deny"
|
||||||
manual_string_new = "deny"
|
manual_string_new = "deny"
|
||||||
|
map_unwrap_or = "deny"
|
||||||
match_bool = "deny"
|
match_bool = "deny"
|
||||||
match_same_arms = "deny"
|
match_same_arms = "deny"
|
||||||
missing_fields_in_debug = "deny"
|
missing_fields_in_debug = "deny"
|
||||||
|
needless_continue = "deny"
|
||||||
needless_pass_by_value = "deny"
|
needless_pass_by_value = "deny"
|
||||||
ptr_cast_constness = "deny"
|
ptr_cast_constness = "deny"
|
||||||
range_minus_one = "deny"
|
range_minus_one = "deny"
|
||||||
@@ -246,7 +172,6 @@ range_plus_one = "deny"
|
|||||||
redundant_closure_for_method_calls = "deny"
|
redundant_closure_for_method_calls = "deny"
|
||||||
redundant_else = "deny"
|
redundant_else = "deny"
|
||||||
string_add_assign = "deny"
|
string_add_assign = "deny"
|
||||||
string_slice = "deny"
|
|
||||||
unchecked_duration_subtraction = "deny"
|
unchecked_duration_subtraction = "deny"
|
||||||
uninlined_format_args = "deny"
|
uninlined_format_args = "deny"
|
||||||
unnecessary_box_returns = "deny"
|
unnecessary_box_returns = "deny"
|
||||||
|
|||||||
2
LICENSE
2
LICENSE
@@ -5,4 +5,4 @@ a full copy of the AGPL-3.0 License is included in the root of this repository
|
|||||||
as a reference text. This copy should be provided with any distribution of a
|
as a reference text. This copy should be provided with any distribution of a
|
||||||
crate licensed under the AGPL-3.0, as per its terms.
|
crate licensed under the AGPL-3.0, as per its terms.
|
||||||
|
|
||||||
The GitHub actions/workflows (`.github`) are licensed under the MIT license.
|
The GitHub actions (`.github/actions`) are licensed under the MIT license.
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ wallet.
|
|||||||
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
||||||
needed for Bitcoin-Monero atomic swaps.
|
needed for Bitcoin-Monero atomic swaps.
|
||||||
|
|
||||||
- `networks`: Various libraries intended for usage in Serai yet also by the
|
- `coins`: Various coin libraries intended for usage in Serai yet also by the
|
||||||
wider community. This means they will always support the functionality Serai
|
wider community. This means they will always support the functionality Serai
|
||||||
needs, yet won't disadvantage other use cases when possible.
|
needs, yet won't disadvantage other use cases when possible.
|
||||||
|
|
||||||
|
|||||||
6
audits/Cypher Stack coins bitcoin August 2023/README.md
Normal file
6
audits/Cypher Stack coins bitcoin August 2023/README.md
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
# Cypher Stack /coins/bitcoin Audit, August 2023
|
||||||
|
|
||||||
|
This audit was over the /coins/bitcoin folder. It is encompassing up to commit
|
||||||
|
5121ca75199dff7bd34230880a1fdd793012068c.
|
||||||
|
|
||||||
|
Please see https://github.com/cypherstack/serai-btc-audit for provenance.
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
# Cypher Stack /networks/bitcoin Audit, August 2023
|
|
||||||
|
|
||||||
This audit was over the `/networks/bitcoin` folder (at the time located at
|
|
||||||
`/coins/bitcoin`). It is encompassing up to commit
|
|
||||||
5121ca75199dff7bd34230880a1fdd793012068c.
|
|
||||||
|
|
||||||
Please see https://github.com/cypherstack/serai-btc-audit for provenance.
|
|
||||||
Binary file not shown.
@@ -1,427 +0,0 @@
|
|||||||
Attribution-ShareAlike 4.0 International
|
|
||||||
|
|
||||||
=======================================================================
|
|
||||||
|
|
||||||
Creative Commons Corporation ("Creative Commons") is not a law firm and
|
|
||||||
does not provide legal services or legal advice. Distribution of
|
|
||||||
Creative Commons public licenses does not create a lawyer-client or
|
|
||||||
other relationship. Creative Commons makes its licenses and related
|
|
||||||
information available on an "as-is" basis. Creative Commons gives no
|
|
||||||
warranties regarding its licenses, any material licensed under their
|
|
||||||
terms and conditions, or any related information. Creative Commons
|
|
||||||
disclaims all liability for damages resulting from their use to the
|
|
||||||
fullest extent possible.
|
|
||||||
|
|
||||||
Using Creative Commons Public Licenses
|
|
||||||
|
|
||||||
Creative Commons public licenses provide a standard set of terms and
|
|
||||||
conditions that creators and other rights holders may use to share
|
|
||||||
original works of authorship and other material subject to copyright
|
|
||||||
and certain other rights specified in the public license below. The
|
|
||||||
following considerations are for informational purposes only, are not
|
|
||||||
exhaustive, and do not form part of our licenses.
|
|
||||||
|
|
||||||
Considerations for licensors: Our public licenses are
|
|
||||||
intended for use by those authorized to give the public
|
|
||||||
permission to use material in ways otherwise restricted by
|
|
||||||
copyright and certain other rights. Our licenses are
|
|
||||||
irrevocable. Licensors should read and understand the terms
|
|
||||||
and conditions of the license they choose before applying it.
|
|
||||||
Licensors should also secure all rights necessary before
|
|
||||||
applying our licenses so that the public can reuse the
|
|
||||||
material as expected. Licensors should clearly mark any
|
|
||||||
material not subject to the license. This includes other CC-
|
|
||||||
licensed material, or material used under an exception or
|
|
||||||
limitation to copyright. More considerations for licensors:
|
|
||||||
wiki.creativecommons.org/Considerations_for_licensors
|
|
||||||
|
|
||||||
Considerations for the public: By using one of our public
|
|
||||||
licenses, a licensor grants the public permission to use the
|
|
||||||
licensed material under specified terms and conditions. If
|
|
||||||
the licensor's permission is not necessary for any reason--for
|
|
||||||
example, because of any applicable exception or limitation to
|
|
||||||
copyright--then that use is not regulated by the license. Our
|
|
||||||
licenses grant only permissions under copyright and certain
|
|
||||||
other rights that a licensor has authority to grant. Use of
|
|
||||||
the licensed material may still be restricted for other
|
|
||||||
reasons, including because others have copyright or other
|
|
||||||
rights in the material. A licensor may make special requests,
|
|
||||||
such as asking that all changes be marked or described.
|
|
||||||
Although not required by our licenses, you are encouraged to
|
|
||||||
respect those requests where reasonable. More considerations
|
|
||||||
for the public:
|
|
||||||
wiki.creativecommons.org/Considerations_for_licensees
|
|
||||||
|
|
||||||
=======================================================================
|
|
||||||
|
|
||||||
Creative Commons Attribution-ShareAlike 4.0 International Public
|
|
||||||
License
|
|
||||||
|
|
||||||
By exercising the Licensed Rights (defined below), You accept and agree
|
|
||||||
to be bound by the terms and conditions of this Creative Commons
|
|
||||||
Attribution-ShareAlike 4.0 International Public License ("Public
|
|
||||||
License"). To the extent this Public License may be interpreted as a
|
|
||||||
contract, You are granted the Licensed Rights in consideration of Your
|
|
||||||
acceptance of these terms and conditions, and the Licensor grants You
|
|
||||||
such rights in consideration of benefits the Licensor receives from
|
|
||||||
making the Licensed Material available under these terms and
|
|
||||||
conditions.
|
|
||||||
|
|
||||||
|
|
||||||
Section 1 -- Definitions.
|
|
||||||
|
|
||||||
a. Adapted Material means material subject to Copyright and Similar
|
|
||||||
Rights that is derived from or based upon the Licensed Material
|
|
||||||
and in which the Licensed Material is translated, altered,
|
|
||||||
arranged, transformed, or otherwise modified in a manner requiring
|
|
||||||
permission under the Copyright and Similar Rights held by the
|
|
||||||
Licensor. For purposes of this Public License, where the Licensed
|
|
||||||
Material is a musical work, performance, or sound recording,
|
|
||||||
Adapted Material is always produced where the Licensed Material is
|
|
||||||
synched in timed relation with a moving image.
|
|
||||||
|
|
||||||
b. Adapter's License means the license You apply to Your Copyright
|
|
||||||
and Similar Rights in Your contributions to Adapted Material in
|
|
||||||
accordance with the terms and conditions of this Public License.
|
|
||||||
|
|
||||||
c. BY-SA Compatible License means a license listed at
|
|
||||||
creativecommons.org/compatiblelicenses, approved by Creative
|
|
||||||
Commons as essentially the equivalent of this Public License.
|
|
||||||
|
|
||||||
d. Copyright and Similar Rights means copyright and/or similar rights
|
|
||||||
closely related to copyright including, without limitation,
|
|
||||||
performance, broadcast, sound recording, and Sui Generis Database
|
|
||||||
Rights, without regard to how the rights are labeled or
|
|
||||||
categorized. For purposes of this Public License, the rights
|
|
||||||
specified in Section 2(b)(1)-(2) are not Copyright and Similar
|
|
||||||
Rights.
|
|
||||||
|
|
||||||
e. Effective Technological Measures means those measures that, in the
|
|
||||||
absence of proper authority, may not be circumvented under laws
|
|
||||||
fulfilling obligations under Article 11 of the WIPO Copyright
|
|
||||||
Treaty adopted on December 20, 1996, and/or similar international
|
|
||||||
agreements.
|
|
||||||
|
|
||||||
f. Exceptions and Limitations means fair use, fair dealing, and/or
|
|
||||||
any other exception or limitation to Copyright and Similar Rights
|
|
||||||
that applies to Your use of the Licensed Material.
|
|
||||||
|
|
||||||
g. License Elements means the license attributes listed in the name
|
|
||||||
of a Creative Commons Public License. The License Elements of this
|
|
||||||
Public License are Attribution and ShareAlike.
|
|
||||||
|
|
||||||
h. Licensed Material means the artistic or literary work, database,
|
|
||||||
or other material to which the Licensor applied this Public
|
|
||||||
License.
|
|
||||||
|
|
||||||
i. Licensed Rights means the rights granted to You subject to the
|
|
||||||
terms and conditions of this Public License, which are limited to
|
|
||||||
all Copyright and Similar Rights that apply to Your use of the
|
|
||||||
Licensed Material and that the Licensor has authority to license.
|
|
||||||
|
|
||||||
j. Licensor means the individual(s) or entity(ies) granting rights
|
|
||||||
under this Public License.
|
|
||||||
|
|
||||||
k. Share means to provide material to the public by any means or
|
|
||||||
process that requires permission under the Licensed Rights, such
|
|
||||||
as reproduction, public display, public performance, distribution,
|
|
||||||
dissemination, communication, or importation, and to make material
|
|
||||||
available to the public including in ways that members of the
|
|
||||||
public may access the material from a place and at a time
|
|
||||||
individually chosen by them.
|
|
||||||
|
|
||||||
l. Sui Generis Database Rights means rights other than copyright
|
|
||||||
resulting from Directive 96/9/EC of the European Parliament and of
|
|
||||||
the Council of 11 March 1996 on the legal protection of databases,
|
|
||||||
as amended and/or succeeded, as well as other essentially
|
|
||||||
equivalent rights anywhere in the world.
|
|
||||||
|
|
||||||
m. You means the individual or entity exercising the Licensed Rights
|
|
||||||
under this Public License. Your has a corresponding meaning.
|
|
||||||
|
|
||||||
|
|
||||||
Section 2 -- Scope.
|
|
||||||
|
|
||||||
a. License grant.
|
|
||||||
|
|
||||||
1. Subject to the terms and conditions of this Public License,
|
|
||||||
the Licensor hereby grants You a worldwide, royalty-free,
|
|
||||||
non-sublicensable, non-exclusive, irrevocable license to
|
|
||||||
exercise the Licensed Rights in the Licensed Material to:
|
|
||||||
|
|
||||||
a. reproduce and Share the Licensed Material, in whole or
|
|
||||||
in part; and
|
|
||||||
|
|
||||||
b. produce, reproduce, and Share Adapted Material.
|
|
||||||
|
|
||||||
2. Exceptions and Limitations. For the avoidance of doubt, where
|
|
||||||
Exceptions and Limitations apply to Your use, this Public
|
|
||||||
License does not apply, and You do not need to comply with
|
|
||||||
its terms and conditions.
|
|
||||||
|
|
||||||
3. Term. The term of this Public License is specified in Section
|
|
||||||
6(a).
|
|
||||||
|
|
||||||
4. Media and formats; technical modifications allowed. The
|
|
||||||
Licensor authorizes You to exercise the Licensed Rights in
|
|
||||||
all media and formats whether now known or hereafter created,
|
|
||||||
and to make technical modifications necessary to do so. The
|
|
||||||
Licensor waives and/or agrees not to assert any right or
|
|
||||||
authority to forbid You from making technical modifications
|
|
||||||
necessary to exercise the Licensed Rights, including
|
|
||||||
technical modifications necessary to circumvent Effective
|
|
||||||
Technological Measures. For purposes of this Public License,
|
|
||||||
simply making modifications authorized by this Section 2(a)
|
|
||||||
(4) never produces Adapted Material.
|
|
||||||
|
|
||||||
5. Downstream recipients.
|
|
||||||
|
|
||||||
a. Offer from the Licensor -- Licensed Material. Every
|
|
||||||
recipient of the Licensed Material automatically
|
|
||||||
receives an offer from the Licensor to exercise the
|
|
||||||
Licensed Rights under the terms and conditions of this
|
|
||||||
Public License.
|
|
||||||
|
|
||||||
b. Additional offer from the Licensor -- Adapted Material.
|
|
||||||
Every recipient of Adapted Material from You
|
|
||||||
automatically receives an offer from the Licensor to
|
|
||||||
exercise the Licensed Rights in the Adapted Material
|
|
||||||
under the conditions of the Adapter's License You apply.
|
|
||||||
|
|
||||||
c. No downstream restrictions. You may not offer or impose
|
|
||||||
any additional or different terms or conditions on, or
|
|
||||||
apply any Effective Technological Measures to, the
|
|
||||||
Licensed Material if doing so restricts exercise of the
|
|
||||||
Licensed Rights by any recipient of the Licensed
|
|
||||||
Material.
|
|
||||||
|
|
||||||
6. No endorsement. Nothing in this Public License constitutes or
|
|
||||||
may be construed as permission to assert or imply that You
|
|
||||||
are, or that Your use of the Licensed Material is, connected
|
|
||||||
with, or sponsored, endorsed, or granted official status by,
|
|
||||||
the Licensor or others designated to receive attribution as
|
|
||||||
provided in Section 3(a)(1)(A)(i).
|
|
||||||
|
|
||||||
b. Other rights.
|
|
||||||
|
|
||||||
1. Moral rights, such as the right of integrity, are not
|
|
||||||
licensed under this Public License, nor are publicity,
|
|
||||||
privacy, and/or other similar personality rights; however, to
|
|
||||||
the extent possible, the Licensor waives and/or agrees not to
|
|
||||||
assert any such rights held by the Licensor to the limited
|
|
||||||
extent necessary to allow You to exercise the Licensed
|
|
||||||
Rights, but not otherwise.
|
|
||||||
|
|
||||||
2. Patent and trademark rights are not licensed under this
|
|
||||||
Public License.
|
|
||||||
|
|
||||||
3. To the extent possible, the Licensor waives any right to
|
|
||||||
collect royalties from You for the exercise of the Licensed
|
|
||||||
Rights, whether directly or through a collecting society
|
|
||||||
under any voluntary or waivable statutory or compulsory
|
|
||||||
licensing scheme. In all other cases the Licensor expressly
|
|
||||||
reserves any right to collect such royalties.
|
|
||||||
|
|
||||||
|
|
||||||
Section 3 -- License Conditions.
|
|
||||||
|
|
||||||
Your exercise of the Licensed Rights is expressly made subject to the
|
|
||||||
following conditions.
|
|
||||||
|
|
||||||
a. Attribution.
|
|
||||||
|
|
||||||
1. If You Share the Licensed Material (including in modified
|
|
||||||
form), You must:
|
|
||||||
|
|
||||||
a. retain the following if it is supplied by the Licensor
|
|
||||||
with the Licensed Material:
|
|
||||||
|
|
||||||
i. identification of the creator(s) of the Licensed
|
|
||||||
Material and any others designated to receive
|
|
||||||
attribution, in any reasonable manner requested by
|
|
||||||
the Licensor (including by pseudonym if
|
|
||||||
designated);
|
|
||||||
|
|
||||||
ii. a copyright notice;
|
|
||||||
|
|
||||||
iii. a notice that refers to this Public License;
|
|
||||||
|
|
||||||
iv. a notice that refers to the disclaimer of
|
|
||||||
warranties;
|
|
||||||
|
|
||||||
v. a URI or hyperlink to the Licensed Material to the
|
|
||||||
extent reasonably practicable;
|
|
||||||
|
|
||||||
b. indicate if You modified the Licensed Material and
|
|
||||||
retain an indication of any previous modifications; and
|
|
||||||
|
|
||||||
c. indicate the Licensed Material is licensed under this
|
|
||||||
Public License, and include the text of, or the URI or
|
|
||||||
hyperlink to, this Public License.
|
|
||||||
|
|
||||||
2. You may satisfy the conditions in Section 3(a)(1) in any
|
|
||||||
reasonable manner based on the medium, means, and context in
|
|
||||||
which You Share the Licensed Material. For example, it may be
|
|
||||||
reasonable to satisfy the conditions by providing a URI or
|
|
||||||
hyperlink to a resource that includes the required
|
|
||||||
information.
|
|
||||||
|
|
||||||
3. If requested by the Licensor, You must remove any of the
|
|
||||||
information required by Section 3(a)(1)(A) to the extent
|
|
||||||
reasonably practicable.
|
|
||||||
|
|
||||||
b. ShareAlike.
|
|
||||||
|
|
||||||
In addition to the conditions in Section 3(a), if You Share
|
|
||||||
Adapted Material You produce, the following conditions also apply.
|
|
||||||
|
|
||||||
1. The Adapter's License You apply must be a Creative Commons
|
|
||||||
license with the same License Elements, this version or
|
|
||||||
later, or a BY-SA Compatible License.
|
|
||||||
|
|
||||||
2. You must include the text of, or the URI or hyperlink to, the
|
|
||||||
Adapter's License You apply. You may satisfy this condition
|
|
||||||
in any reasonable manner based on the medium, means, and
|
|
||||||
context in which You Share Adapted Material.
|
|
||||||
|
|
||||||
3. You may not offer or impose any additional or different terms
|
|
||||||
or conditions on, or apply any Effective Technological
|
|
||||||
Measures to, Adapted Material that restrict exercise of the
|
|
||||||
rights granted under the Adapter's License You apply.
|
|
||||||
|
|
||||||
|
|
||||||
Section 4 -- Sui Generis Database Rights.
|
|
||||||
|
|
||||||
Where the Licensed Rights include Sui Generis Database Rights that
|
|
||||||
apply to Your use of the Licensed Material:
|
|
||||||
|
|
||||||
a. for the avoidance of doubt, Section 2(a)(1) grants You the right
|
|
||||||
to extract, reuse, reproduce, and Share all or a substantial
|
|
||||||
portion of the contents of the database;
|
|
||||||
|
|
||||||
b. if You include all or a substantial portion of the database
|
|
||||||
contents in a database in which You have Sui Generis Database
|
|
||||||
Rights, then the database in which You have Sui Generis Database
|
|
||||||
Rights (but not its individual contents) is Adapted Material,
|
|
||||||
|
|
||||||
including for purposes of Section 3(b); and
|
|
||||||
c. You must comply with the conditions in Section 3(a) if You Share
|
|
||||||
all or a substantial portion of the contents of the database.
|
|
||||||
|
|
||||||
For the avoidance of doubt, this Section 4 supplements and does not
|
|
||||||
replace Your obligations under this Public License where the Licensed
|
|
||||||
Rights include other Copyright and Similar Rights.
|
|
||||||
|
|
||||||
|
|
||||||
Section 5 -- Disclaimer of Warranties and Limitation of Liability.
|
|
||||||
|
|
||||||
a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
|
|
||||||
EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
|
|
||||||
AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
|
|
||||||
ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
|
|
||||||
IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
|
|
||||||
WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
|
|
||||||
ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
|
|
||||||
KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
|
|
||||||
ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
|
|
||||||
|
|
||||||
b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE
|
|
||||||
TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
|
|
||||||
NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
|
|
||||||
INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
|
|
||||||
COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
|
|
||||||
USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
|
|
||||||
ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
|
|
||||||
DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR
|
|
||||||
IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
|
|
||||||
|
|
||||||
c. The disclaimer of warranties and limitation of liability provided
|
|
||||||
above shall be interpreted in a manner that, to the extent
|
|
||||||
possible, most closely approximates an absolute disclaimer and
|
|
||||||
waiver of all liability.
|
|
||||||
|
|
||||||
|
|
||||||
Section 6 -- Term and Termination.
|
|
||||||
|
|
||||||
a. This Public License applies for the term of the Copyright and
|
|
||||||
Similar Rights licensed here. However, if You fail to comply with
|
|
||||||
this Public License, then Your rights under this Public License
|
|
||||||
terminate automatically.
|
|
||||||
|
|
||||||
b. Where Your right to use the Licensed Material has terminated under
|
|
||||||
Section 6(a), it reinstates:
|
|
||||||
|
|
||||||
1. automatically as of the date the violation is cured, provided
|
|
||||||
it is cured within 30 days of Your discovery of the
|
|
||||||
violation; or
|
|
||||||
|
|
||||||
2. upon express reinstatement by the Licensor.
|
|
||||||
|
|
||||||
For the avoidance of doubt, this Section 6(b) does not affect any
|
|
||||||
right the Licensor may have to seek remedies for Your violations
|
|
||||||
of this Public License.
|
|
||||||
|
|
||||||
c. For the avoidance of doubt, the Licensor may also offer the
|
|
||||||
Licensed Material under separate terms or conditions or stop
|
|
||||||
distributing the Licensed Material at any time; however, doing so
|
|
||||||
will not terminate this Public License.
|
|
||||||
|
|
||||||
d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
|
|
||||||
License.
|
|
||||||
|
|
||||||
|
|
||||||
Section 7 -- Other Terms and Conditions.
|
|
||||||
|
|
||||||
a. The Licensor shall not be bound by any additional or different
|
|
||||||
terms or conditions communicated by You unless expressly agreed.
|
|
||||||
|
|
||||||
b. Any arrangements, understandings, or agreements regarding the
|
|
||||||
Licensed Material not stated herein are separate from and
|
|
||||||
independent of the terms and conditions of this Public License.
|
|
||||||
|
|
||||||
|
|
||||||
Section 8 -- Interpretation.
|
|
||||||
|
|
||||||
a. For the avoidance of doubt, this Public License does not, and
|
|
||||||
shall not be interpreted to, reduce, limit, restrict, or impose
|
|
||||||
conditions on any use of the Licensed Material that could lawfully
|
|
||||||
be made without permission under this Public License.
|
|
||||||
|
|
||||||
b. To the extent possible, if any provision of this Public License is
|
|
||||||
deemed unenforceable, it shall be automatically reformed to the
|
|
||||||
minimum extent necessary to make it enforceable. If the provision
|
|
||||||
cannot be reformed, it shall be severed from this Public License
|
|
||||||
without affecting the enforceability of the remaining terms and
|
|
||||||
conditions.
|
|
||||||
|
|
||||||
c. No term or condition of this Public License will be waived and no
|
|
||||||
failure to comply consented to unless expressly agreed to by the
|
|
||||||
Licensor.
|
|
||||||
|
|
||||||
d. Nothing in this Public License constitutes or may be interpreted
|
|
||||||
as a limitation upon, or waiver of, any privileges and immunities
|
|
||||||
that apply to the Licensor or You, including from the legal
|
|
||||||
processes of any jurisdiction or authority.
|
|
||||||
|
|
||||||
|
|
||||||
=======================================================================
|
|
||||||
|
|
||||||
Creative Commons is not a party to its public
|
|
||||||
licenses. Notwithstanding, Creative Commons may elect to apply one of
|
|
||||||
its public licenses to material it publishes and in those instances
|
|
||||||
will be considered the “Licensor.” The text of the Creative Commons
|
|
||||||
public licenses is dedicated to the public domain under the CC0 Public
|
|
||||||
Domain Dedication. Except for the limited purpose of indicating that
|
|
||||||
material is shared under a Creative Commons public license or as
|
|
||||||
otherwise permitted by the Creative Commons policies published at
|
|
||||||
creativecommons.org/policies, Creative Commons does not authorize the
|
|
||||||
use of the trademark "Creative Commons" or any other trademark or logo
|
|
||||||
of Creative Commons without its prior written consent including,
|
|
||||||
without limitation, in connection with any unauthorized modifications
|
|
||||||
to any of its public licenses or any other arrangements,
|
|
||||||
understandings, or agreements concerning use of licensed material. For
|
|
||||||
the avoidance of doubt, this paragraph does not form part of the
|
|
||||||
public licenses.
|
|
||||||
|
|
||||||
Creative Commons may be contacted at creativecommons.org.
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
# Trail of Bits Ethereum Contracts Audit, June 2025
|
|
||||||
|
|
||||||
This audit included:
|
|
||||||
- Our Schnorr contract and associated library (/networks/ethereum/schnorr)
|
|
||||||
- Our Ethereum primitives library (/processor/ethereum/primitives)
|
|
||||||
- Our Deployer contract and associated library (/processor/ethereum/deployer)
|
|
||||||
- Our ERC20 library (/processor/ethereum/erc20)
|
|
||||||
- Our Router contract and associated library (/processor/ethereum/router)
|
|
||||||
|
|
||||||
It is encompassing up to commit 4e0c58464fc4673623938335f06e2e9ea96ca8dd.
|
|
||||||
|
|
||||||
Please see
|
|
||||||
https://github.com/trailofbits/publications/blob/30c4fa3ebf39ff8e4d23ba9567344ec9691697b5/reviews/2025-04-serai-dex-security-review.pdf
|
|
||||||
for provenance.
|
|
||||||
@@ -3,10 +3,10 @@ name = "bitcoin-serai"
|
|||||||
version = "0.3.0"
|
version = "0.3.0"
|
||||||
description = "A Bitcoin library for FROST-signing transactions"
|
description = "A Bitcoin library for FROST-signing transactions"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/networks/bitcoin"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.80"
|
rust-version = "1.74"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -18,14 +18,16 @@ workspace = true
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }
|
std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }
|
||||||
|
|
||||||
thiserror = { version = "2", default-features = false }
|
thiserror = { version = "1", default-features = false, optional = true }
|
||||||
|
|
||||||
zeroize = { version = "^1.5", default-features = false }
|
zeroize = { version = "^1.5", default-features = false }
|
||||||
rand_core = { version = "0.6", default-features = false }
|
rand_core = { version = "0.6", default-features = false }
|
||||||
|
|
||||||
bitcoin = { version = "0.32", default-features = false }
|
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
|
||||||
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
||||||
|
|
||||||
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }
|
||||||
|
|
||||||
hex = { version = "0.4", default-features = false, optional = true }
|
hex = { version = "0.4", default-features = false, optional = true }
|
||||||
@@ -34,7 +36,7 @@ serde_json = { version = "1", default-features = false, optional = true }
|
|||||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
secp256k1 = { version = "0.29", default-features = false, features = ["std"] }
|
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
||||||
|
|
||||||
@@ -44,7 +46,7 @@ tokio = { version = "1", features = ["macros"] }
|
|||||||
std = [
|
std = [
|
||||||
"std-shims/std",
|
"std-shims/std",
|
||||||
|
|
||||||
"thiserror/std",
|
"thiserror",
|
||||||
|
|
||||||
"zeroize/std",
|
"zeroize/std",
|
||||||
"rand_core/std",
|
"rand_core/std",
|
||||||
@@ -53,6 +55,8 @@ std = [
|
|||||||
"bitcoin/serde",
|
"bitcoin/serde",
|
||||||
|
|
||||||
"k256/std",
|
"k256/std",
|
||||||
|
|
||||||
|
"transcript/std",
|
||||||
"frost",
|
"frost",
|
||||||
|
|
||||||
"hex/std",
|
"hex/std",
|
||||||
@@ -40,12 +40,14 @@ mod frost_crypto {
|
|||||||
|
|
||||||
use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};
|
use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};
|
||||||
|
|
||||||
|
use transcript::Transcript;
|
||||||
|
|
||||||
use k256::{elliptic_curve::ops::Reduce, U256, Scalar};
|
use k256::{elliptic_curve::ops::Reduce, U256, Scalar};
|
||||||
|
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::{Ciphersuite, Secp256k1},
|
curve::{Ciphersuite, Secp256k1},
|
||||||
Participant, ThresholdKeys, ThresholdView, FrostError,
|
Participant, ThresholdKeys, ThresholdView, FrostError,
|
||||||
algorithm::{Hram as HramTrait, Algorithm, IetfSchnorr as FrostSchnorr},
|
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
||||||
};
|
};
|
||||||
|
|
||||||
use super::*;
|
use super::*;
|
||||||
@@ -80,17 +82,16 @@ mod frost_crypto {
|
|||||||
///
|
///
|
||||||
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Schnorr(FrostSchnorr<Secp256k1, Hram>);
|
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
||||||
impl Schnorr {
|
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
||||||
/// Construct a Schnorr algorithm continuing the specified transcript.
|
/// Construct a Schnorr algorithm continuing the specified transcript.
|
||||||
#[allow(clippy::new_without_default)]
|
pub fn new(transcript: T) -> Schnorr<T> {
|
||||||
pub fn new() -> Schnorr {
|
Schnorr(FrostSchnorr::new(transcript))
|
||||||
Schnorr(FrostSchnorr::ietf())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Algorithm<Secp256k1> for Schnorr {
|
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
||||||
type Transcript = <FrostSchnorr<Secp256k1, Hram> as Algorithm<Secp256k1>>::Transcript;
|
type Transcript = T;
|
||||||
type Addendum = ();
|
type Addendum = ();
|
||||||
type Signature = [u8; 64];
|
type Signature = [u8; 64];
|
||||||
|
|
||||||
@@ -195,13 +195,13 @@ impl Rpc {
|
|||||||
// If this was already successfully published, consider this having succeeded
|
// If this was already successfully published, consider this having succeeded
|
||||||
if let RpcError::RequestError(Error { code, .. }) = e {
|
if let RpcError::RequestError(Error { code, .. }) = e {
|
||||||
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
|
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
|
||||||
return Ok(tx.compute_txid());
|
return Ok(tx.txid());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e)?
|
Err(e)?
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if txid != tx.compute_txid() {
|
if txid != tx.txid() {
|
||||||
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
|
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
|
||||||
}
|
}
|
||||||
Ok(txid)
|
Ok(txid)
|
||||||
@@ -215,7 +215,7 @@ impl Rpc {
|
|||||||
let tx: Transaction = encode::deserialize(&bytes)
|
let tx: Transaction = encode::deserialize(&bytes)
|
||||||
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
|
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
|
||||||
|
|
||||||
let mut tx_hash = *tx.compute_txid().as_raw_hash().as_byte_array();
|
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
|
||||||
tx_hash.reverse();
|
tx_hash.reverse();
|
||||||
if hash != &tx_hash {
|
if hash != &tx_hash {
|
||||||
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
|
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
|
||||||
@@ -3,6 +3,7 @@ use rand_core::OsRng;
|
|||||||
use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};
|
use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};
|
||||||
|
|
||||||
use k256::Scalar;
|
use k256::Scalar;
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::Secp256k1,
|
curve::Secp256k1,
|
||||||
Participant,
|
Participant,
|
||||||
@@ -24,7 +25,8 @@ fn test_algorithm() {
|
|||||||
*keys = keys.offset(Scalar::from(offset));
|
*keys = keys.offset(Scalar::from(offset));
|
||||||
}
|
}
|
||||||
|
|
||||||
let algo = Schnorr::new();
|
let algo =
|
||||||
|
Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
|
||||||
let sig = sign(
|
let sig = sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
&algo,
|
&algo,
|
||||||
@@ -37,7 +39,7 @@ fn test_algorithm() {
|
|||||||
.verify_schnorr(
|
.verify_schnorr(
|
||||||
&Signature::from_slice(&sig)
|
&Signature::from_slice(&sig)
|
||||||
.expect("couldn't convert produced signature to secp256k1::Signature"),
|
.expect("couldn't convert produced signature to secp256k1::Signature"),
|
||||||
&Message::from_digest_slice(Hash::hash(MESSAGE).as_ref()).unwrap(),
|
&Message::from(Hash::hash(MESSAGE)),
|
||||||
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
@@ -4,7 +4,7 @@ use std_shims::{
|
|||||||
io::{self, Write},
|
io::{self, Write},
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
use std::io::{Read, BufReader};
|
use std_shims::io::Read;
|
||||||
|
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
||||||
@@ -18,11 +18,11 @@ use frost::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
consensus::encode::serialize, key::TweakedPublicKey, OutPoint, ScriptBuf, TxOut, Transaction,
|
consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
|
||||||
Block,
|
TxOut, Transaction, Block,
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
use bitcoin::{hashes::Hash, consensus::encode::Decodable, TapTweakHash};
|
use bitcoin::consensus::encode::Decodable;
|
||||||
|
|
||||||
use crate::crypto::x_only;
|
use crate::crypto::x_only;
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
@@ -33,40 +33,12 @@ mod send;
|
|||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
pub use send::*;
|
pub use send::*;
|
||||||
|
|
||||||
/// Tweak keys to ensure they're usable with Bitcoin's Taproot upgrade.
|
/// Tweak keys to ensure they're usable with Bitcoin.
|
||||||
///
|
///
|
||||||
/// This adds an unspendable script path to the key, preventing any outputs received to this key
|
/// Taproot keys, which these keys are used as, must be even. This offsets the keys until they're
|
||||||
/// from being spent via a script. To have keys which have spendable script paths, further offsets
|
/// even.
|
||||||
/// from this position must be used.
|
|
||||||
///
|
|
||||||
/// After adding an unspendable script path, the key is incremented until its even. This means the
|
|
||||||
/// existence of the unspendable script path may not provable, without an understanding of the
|
|
||||||
/// algorithm used here.
|
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
||||||
// Adds the unspendable script path per
|
|
||||||
// https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki#cite_note-23
|
|
||||||
let keys = {
|
|
||||||
use k256::elliptic_curve::{
|
|
||||||
bigint::{Encoding, U256},
|
|
||||||
ops::Reduce,
|
|
||||||
group::GroupEncoding,
|
|
||||||
};
|
|
||||||
let tweak_hash = TapTweakHash::hash(&keys.group_key().to_bytes().as_slice()[1 ..]);
|
|
||||||
/*
|
|
||||||
https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki#cite_ref-13-0 states how the
|
|
||||||
bias is negligible. This reduction shouldn't ever occur, yet if it did, the script path
|
|
||||||
would be unusable due to a check the script path hash is less than the order. That doesn't
|
|
||||||
impact us as we don't want the script path to be usable.
|
|
||||||
*/
|
|
||||||
keys.offset(<Secp256k1 as Ciphersuite>::F::reduce(U256::from_be_bytes(
|
|
||||||
*tweak_hash.to_raw_hash().as_ref(),
|
|
||||||
)))
|
|
||||||
};
|
|
||||||
|
|
||||||
// This doesn't risk re-introducing a script path as you'd have to find a preimage for the tweak
|
|
||||||
// hash with whatever increment, or manipulate the key so that the tweak hash and increment
|
|
||||||
// equals the desired offset, yet manipulating the key would change the tweak hash
|
|
||||||
let (_, offset) = make_even(keys.group_key());
|
let (_, offset) = make_even(keys.group_key());
|
||||||
keys.offset(Scalar::from(offset))
|
keys.offset(Scalar::from(offset))
|
||||||
}
|
}
|
||||||
@@ -74,12 +46,12 @@ pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
|||||||
/// Return the Taproot address payload for a public key.
|
/// Return the Taproot address payload for a public key.
|
||||||
///
|
///
|
||||||
/// If the key is odd, this will return None.
|
/// If the key is odd, this will return None.
|
||||||
pub fn p2tr_script_buf(key: ProjectivePoint) -> Option<ScriptBuf> {
|
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
|
||||||
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(ScriptBuf::new_p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
|
Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A spendable output.
|
/// A spendable output.
|
||||||
@@ -117,17 +89,11 @@ impl ReceivedOutput {
|
|||||||
/// Read a ReceivedOutput from a generic satisfying Read.
|
/// Read a ReceivedOutput from a generic satisfying Read.
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
||||||
let offset = Secp256k1::read_F(r)?;
|
Ok(ReceivedOutput {
|
||||||
let output;
|
offset: Secp256k1::read_F(r)?,
|
||||||
let outpoint;
|
output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
|
||||||
{
|
outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
|
||||||
let mut buf_r = BufReader::with_capacity(0, r);
|
})
|
||||||
output =
|
|
||||||
TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))?;
|
|
||||||
outpoint =
|
|
||||||
OutPoint::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid OutPoint"))?;
|
|
||||||
}
|
|
||||||
Ok(ReceivedOutput { offset, output, outpoint })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write a ReceivedOutput to a generic satisfying Write.
|
/// Write a ReceivedOutput to a generic satisfying Write.
|
||||||
@@ -158,7 +124,7 @@ impl Scanner {
|
|||||||
/// Returns None if this key can't be scanned for.
|
/// Returns None if this key can't be scanned for.
|
||||||
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
||||||
let mut scripts = HashMap::new();
|
let mut scripts = HashMap::new();
|
||||||
scripts.insert(p2tr_script_buf(key)?, Scalar::ZERO);
|
scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
|
||||||
Some(Scanner { key, scripts })
|
Some(Scanner { key, scripts })
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -170,17 +136,14 @@ impl Scanner {
|
|||||||
///
|
///
|
||||||
/// This means offsets are surjective, not bijective, and the order offsets are registered in
|
/// This means offsets are surjective, not bijective, and the order offsets are registered in
|
||||||
/// may determine the validity of future offsets.
|
/// may determine the validity of future offsets.
|
||||||
///
|
|
||||||
/// The offsets registered must be securely generated. Arbitrary offsets may introduce a script
|
|
||||||
/// path into the output, allowing the output to be spent by satisfaction of an arbitrary script
|
|
||||||
/// (not by the signature of the key).
|
|
||||||
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
|
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
|
||||||
// This loop will terminate as soon as an even point is found, with any point having a ~50%
|
// This loop will terminate as soon as an even point is found, with any point having a ~50%
|
||||||
// chance of being even
|
// chance of being even
|
||||||
// That means this should terminate within a very small amount of iterations
|
// That means this should terminate within a very small amount of iterations
|
||||||
loop {
|
loop {
|
||||||
match p2tr_script_buf(self.key + (ProjectivePoint::GENERATOR * offset)) {
|
match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
|
||||||
Some(script) => {
|
Some(address) => {
|
||||||
|
let script = address.script_pubkey();
|
||||||
if self.scripts.contains_key(&script) {
|
if self.scripts.contains_key(&script) {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
@@ -203,7 +166,7 @@ impl Scanner {
|
|||||||
res.push(ReceivedOutput {
|
res.push(ReceivedOutput {
|
||||||
offset: *offset,
|
offset: *offset,
|
||||||
output: output.clone(),
|
output: output.clone(),
|
||||||
outpoint: OutPoint::new(tx.compute_txid(), vout),
|
outpoint: OutPoint::new(tx.txid(), vout),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -7,7 +7,9 @@ use thiserror::Error;
|
|||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use k256::Scalar;
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
|
||||||
|
use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
|
||||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
@@ -16,12 +18,12 @@ use bitcoin::{
|
|||||||
absolute::LockTime,
|
absolute::LockTime,
|
||||||
script::{PushBytesBuf, ScriptBuf},
|
script::{PushBytesBuf, ScriptBuf},
|
||||||
transaction::{Version, Transaction},
|
transaction::{Version, Transaction},
|
||||||
OutPoint, Sequence, Witness, TxIn, Amount, TxOut,
|
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
crypto::Schnorr,
|
crypto::Schnorr,
|
||||||
wallet::{ReceivedOutput, p2tr_script_buf},
|
wallet::{ReceivedOutput, address_payload},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
@@ -44,7 +46,7 @@ pub enum TransactionError {
|
|||||||
#[error("fee was too low to pass the default minimum fee rate")]
|
#[error("fee was too low to pass the default minimum fee rate")]
|
||||||
TooLowFee,
|
TooLowFee,
|
||||||
#[error("not enough funds for these payments")]
|
#[error("not enough funds for these payments")]
|
||||||
NotEnoughFunds { inputs: u64, payments: u64, fee: u64 },
|
NotEnoughFunds,
|
||||||
#[error("transaction was too large")]
|
#[error("transaction was too large")]
|
||||||
TooLargeTransaction,
|
TooLargeTransaction,
|
||||||
}
|
}
|
||||||
@@ -59,11 +61,7 @@ pub struct SignableTransaction {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SignableTransaction {
|
impl SignableTransaction {
|
||||||
fn calculate_weight_vbytes(
|
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
||||||
inputs: usize,
|
|
||||||
payments: &[(ScriptBuf, u64)],
|
|
||||||
change: Option<&ScriptBuf>,
|
|
||||||
) -> (u64, u64) {
|
|
||||||
// Expand this a full transaction in order to use the bitcoin library's weight function
|
// Expand this a full transaction in order to use the bitcoin library's weight function
|
||||||
let mut tx = Transaction {
|
let mut tx = Transaction {
|
||||||
version: Version(2),
|
version: Version(2),
|
||||||
@@ -88,42 +86,16 @@ impl SignableTransaction {
|
|||||||
// The script pub key is not of a fixed size and does have to be used here
|
// The script pub key is not of a fixed size and does have to be used here
|
||||||
.map(|payment| TxOut {
|
.map(|payment| TxOut {
|
||||||
value: Amount::from_sat(payment.1),
|
value: Amount::from_sat(payment.1),
|
||||||
script_pubkey: payment.0.clone(),
|
script_pubkey: payment.0.script_pubkey(),
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
};
|
};
|
||||||
if let Some(change) = change {
|
if let Some(change) = change {
|
||||||
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
||||||
// the value is fixed size (so any value could be used here)
|
// the value is fixed size (so any value could be used here)
|
||||||
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() });
|
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
|
||||||
}
|
}
|
||||||
|
u64::from(tx.weight())
|
||||||
let weight = tx.weight();
|
|
||||||
|
|
||||||
// Now calculate the size in vbytes
|
|
||||||
|
|
||||||
/*
|
|
||||||
"Virtual transaction size" is weight ceildiv 4 per
|
|
||||||
https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
|
|
||||||
|
|
||||||
https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04
|
|
||||||
/src/policy/policy.cpp#L295-L298
|
|
||||||
implements this almost as expected, with an additional consideration to signature operations
|
|
||||||
|
|
||||||
Signature operations (the second argument of the following call) do not count Taproot
|
|
||||||
signatures per https://github.com/bitcoin/bips/blob/master/bip-0342.mediawiki#cite_ref-11-0
|
|
||||||
|
|
||||||
We don't risk running afoul of the Taproot signature limit as it allows at least one per
|
|
||||||
input, which is all we use
|
|
||||||
*/
|
|
||||||
(
|
|
||||||
weight.to_wu(),
|
|
||||||
u64::try_from(bitcoin::policy::get_virtual_tx_size(
|
|
||||||
i64::try_from(weight.to_wu()).unwrap(),
|
|
||||||
0i64,
|
|
||||||
))
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
|
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
|
||||||
@@ -149,10 +121,10 @@ impl SignableTransaction {
|
|||||||
/// If data is specified, an OP_RETURN output will be added with it.
|
/// If data is specified, an OP_RETURN output will be added with it.
|
||||||
pub fn new(
|
pub fn new(
|
||||||
mut inputs: Vec<ReceivedOutput>,
|
mut inputs: Vec<ReceivedOutput>,
|
||||||
payments: &[(ScriptBuf, u64)],
|
payments: &[(Address, u64)],
|
||||||
change: Option<ScriptBuf>,
|
change: Option<&Address>,
|
||||||
data: Option<Vec<u8>>,
|
data: Option<Vec<u8>>,
|
||||||
fee_per_vbyte: u64,
|
fee_per_weight: u64,
|
||||||
) -> Result<SignableTransaction, TransactionError> {
|
) -> Result<SignableTransaction, TransactionError> {
|
||||||
if inputs.is_empty() {
|
if inputs.is_empty() {
|
||||||
Err(TransactionError::NoInputs)?;
|
Err(TransactionError::NoInputs)?;
|
||||||
@@ -187,7 +159,10 @@ impl SignableTransaction {
|
|||||||
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
||||||
let mut tx_outs = payments
|
let mut tx_outs = payments
|
||||||
.iter()
|
.iter()
|
||||||
.map(|payment| TxOut { value: Amount::from_sat(payment.1), script_pubkey: payment.0.clone() })
|
.map(|payment| TxOut {
|
||||||
|
value: Amount::from_sat(payment.1),
|
||||||
|
script_pubkey: payment.0.script_pubkey(),
|
||||||
|
})
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
// Add the OP_RETURN output
|
// Add the OP_RETURN output
|
||||||
@@ -201,33 +176,49 @@ impl SignableTransaction {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
let (mut weight, vbytes) = Self::calculate_weight_vbytes(tx_ins.len(), payments, None);
|
let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
|
||||||
|
let mut needed_fee = fee_per_weight * weight;
|
||||||
|
|
||||||
let mut needed_fee = fee_per_vbyte * vbytes;
|
// "Virtual transaction size" is weight ceildiv 4 per
|
||||||
|
// https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
|
||||||
|
|
||||||
|
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
|
||||||
|
// src/policy/policy.cpp#L295-L298
|
||||||
|
// implements this as expected
|
||||||
|
|
||||||
|
// Technically, it takes whatever's greater, the weight or the amount of signature operations
|
||||||
|
// multiplied by DEFAULT_BYTES_PER_SIGOP (20)
|
||||||
|
// We only use 1 signature per input, and our inputs have a weight exceeding 20
|
||||||
|
// Accordingly, our inputs' weight will always be greater than the cost of the signature ops
|
||||||
|
let vsize = weight.div_ceil(4);
|
||||||
|
debug_assert_eq!(
|
||||||
|
u64::try_from(bitcoin::policy::get_virtual_tx_size(
|
||||||
|
weight.try_into().unwrap(),
|
||||||
|
tx_ins.len().try_into().unwrap()
|
||||||
|
))
|
||||||
|
.unwrap(),
|
||||||
|
vsize
|
||||||
|
);
|
||||||
// Technically, if there isn't change, this TX may still pay enough of a fee to pass the
|
// Technically, if there isn't change, this TX may still pay enough of a fee to pass the
|
||||||
// minimum fee. Such edge cases aren't worth programming when they go against intent, as the
|
// minimum fee. Such edge cases aren't worth programming when they go against intent, as the
|
||||||
// specified fee rate is too low to be valid
|
// specified fee rate is too low to be valid
|
||||||
// bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
|
// bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
|
||||||
if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vbytes) / 1000) {
|
if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
|
||||||
Err(TransactionError::TooLowFee)?;
|
Err(TransactionError::TooLowFee)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if input_sat < (payment_sat + needed_fee) {
|
if input_sat < (payment_sat + needed_fee) {
|
||||||
Err(TransactionError::NotEnoughFunds {
|
Err(TransactionError::NotEnoughFunds)?;
|
||||||
inputs: input_sat,
|
|
||||||
payments: payment_sat,
|
|
||||||
fee: needed_fee,
|
|
||||||
})?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// If there's a change address, check if there's change to give it
|
// If there's a change address, check if there's change to give it
|
||||||
if let Some(change) = change {
|
if let Some(change) = change {
|
||||||
let (weight_with_change, vbytes_with_change) =
|
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
|
||||||
Self::calculate_weight_vbytes(tx_ins.len(), payments, Some(&change));
|
let fee_with_change = fee_per_weight * weight_with_change;
|
||||||
let fee_with_change = fee_per_vbyte * vbytes_with_change;
|
|
||||||
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
||||||
if value >= DUST {
|
if value >= DUST {
|
||||||
tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change });
|
tx_outs
|
||||||
|
.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
|
||||||
weight = weight_with_change;
|
weight = weight_with_change;
|
||||||
needed_fee = fee_with_change;
|
needed_fee = fee_with_change;
|
||||||
}
|
}
|
||||||
@@ -257,28 +248,54 @@ impl SignableTransaction {
|
|||||||
|
|
||||||
/// Returns the TX ID of the transaction this will create.
|
/// Returns the TX ID of the transaction this will create.
|
||||||
pub fn txid(&self) -> [u8; 32] {
|
pub fn txid(&self) -> [u8; 32] {
|
||||||
let mut res = self.tx.compute_txid().to_byte_array();
|
let mut res = self.tx.txid().to_byte_array();
|
||||||
res.reverse();
|
res.reverse();
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the transaction, sans witness, this will create if signed.
|
/// Returns the outputs this transaction will create.
|
||||||
pub fn transaction(&self) -> &Transaction {
|
pub fn outputs(&self) -> &[TxOut] {
|
||||||
&self.tx
|
&self.tx.output
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a multisig machine for this transaction.
|
/// Create a multisig machine for this transaction.
|
||||||
///
|
///
|
||||||
/// Returns None if the wrong keys are used.
|
/// Returns None if the wrong keys are used.
|
||||||
pub fn multisig(self, keys: &ThresholdKeys<Secp256k1>) -> Option<TransactionMachine> {
|
pub fn multisig(
|
||||||
|
self,
|
||||||
|
keys: &ThresholdKeys<Secp256k1>,
|
||||||
|
mut transcript: RecommendedTranscript,
|
||||||
|
) -> Option<TransactionMachine> {
|
||||||
|
transcript.domain_separate(b"bitcoin_transaction");
|
||||||
|
transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());
|
||||||
|
|
||||||
|
// Transcript the inputs and outputs
|
||||||
|
let tx = &self.tx;
|
||||||
|
for input in &tx.input {
|
||||||
|
transcript.append_message(b"input_hash", input.previous_output.txid);
|
||||||
|
transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
|
||||||
|
}
|
||||||
|
for payment in &tx.output {
|
||||||
|
transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
|
||||||
|
transcript.append_message(b"output_amount", payment.value.to_sat().to_le_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
let mut sigs = vec![];
|
let mut sigs = vec![];
|
||||||
for i in 0 .. self.tx.input.len() {
|
for i in 0 .. tx.input.len() {
|
||||||
|
let mut transcript = transcript.clone();
|
||||||
|
// This unwrap is safe since any transaction with this many inputs violates the maximum
|
||||||
|
// size allowed under standards, which this lib will error on creation of
|
||||||
|
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
||||||
|
|
||||||
let offset = keys.clone().offset(self.offsets[i]);
|
let offset = keys.clone().offset(self.offsets[i]);
|
||||||
if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey {
|
if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
|
|
||||||
sigs.push(AlgorithmMachine::new(Schnorr::new(), keys.clone().offset(self.offsets[i])));
|
sigs.push(AlgorithmMachine::new(
|
||||||
|
Schnorr::new(transcript),
|
||||||
|
keys.clone().offset(self.offsets[i]),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(TransactionMachine { tx: self, sigs })
|
Some(TransactionMachine { tx: self, sigs })
|
||||||
@@ -291,7 +308,7 @@ impl SignableTransaction {
|
|||||||
/// This will panic if either `cache` is called or the message isn't empty.
|
/// This will panic if either `cache` is called or the message isn't empty.
|
||||||
pub struct TransactionMachine {
|
pub struct TransactionMachine {
|
||||||
tx: SignableTransaction,
|
tx: SignableTransaction,
|
||||||
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr>>,
|
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PreprocessMachine for TransactionMachine {
|
impl PreprocessMachine for TransactionMachine {
|
||||||
@@ -320,7 +337,7 @@ impl PreprocessMachine for TransactionMachine {
|
|||||||
|
|
||||||
pub struct TransactionSignMachine {
|
pub struct TransactionSignMachine {
|
||||||
tx: SignableTransaction,
|
tx: SignableTransaction,
|
||||||
sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr>>,
|
sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SignMachine<Transaction> for TransactionSignMachine {
|
impl SignMachine<Transaction> for TransactionSignMachine {
|
||||||
@@ -358,7 +375,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
|
|||||||
msg: &[u8],
|
msg: &[u8],
|
||||||
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
||||||
if !msg.is_empty() {
|
if !msg.is_empty() {
|
||||||
panic!("message was passed to the TransactionSignMachine when it generates its own");
|
panic!("message was passed to the TransactionMachine when it generates its own");
|
||||||
}
|
}
|
||||||
|
|
||||||
let commitments = (0 .. self.sigs.len())
|
let commitments = (0 .. self.sigs.len())
|
||||||
@@ -400,7 +417,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
|
|||||||
|
|
||||||
pub struct TransactionSignatureMachine {
|
pub struct TransactionSignatureMachine {
|
||||||
tx: Transaction,
|
tx: Transaction,
|
||||||
sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr>>,
|
sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
|
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
|
||||||
@@ -1,11 +1,14 @@
|
|||||||
use std::sync::LazyLock;
|
use std::sync::OnceLock;
|
||||||
|
|
||||||
use bitcoin_serai::rpc::Rpc;
|
use bitcoin_serai::rpc::Rpc;
|
||||||
|
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
#[allow(dead_code)]
|
static SEQUENTIAL_CELL: OnceLock<Mutex<()>> = OnceLock::new();
|
||||||
pub(crate) static SEQUENTIAL: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
|
#[allow(non_snake_case)]
|
||||||
|
pub fn SEQUENTIAL() -> &'static Mutex<()> {
|
||||||
|
SEQUENTIAL_CELL.get_or_init(|| Mutex::new(()))
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub(crate) async fn rpc() -> Rpc {
|
pub(crate) async fn rpc() -> Rpc {
|
||||||
@@ -31,7 +34,7 @@ macro_rules! async_sequential {
|
|||||||
$(
|
$(
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn $name() {
|
async fn $name() {
|
||||||
let guard = runner::SEQUENTIAL.lock().await;
|
let guard = runner::SEQUENTIAL().lock().await;
|
||||||
let local = tokio::task::LocalSet::new();
|
let local = tokio::task::LocalSet::new();
|
||||||
local.run_until(async move {
|
local.run_until(async move {
|
||||||
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
|
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
|
||||||
@@ -2,6 +2,8 @@ use std::collections::HashMap;
|
|||||||
|
|
||||||
use rand_core::{RngCore, OsRng};
|
use rand_core::{RngCore, OsRng};
|
||||||
|
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::{
|
elliptic_curve::{
|
||||||
group::{ff::Field, Group},
|
group::{ff::Field, Group},
|
||||||
@@ -20,10 +22,11 @@ use bitcoin_serai::{
|
|||||||
hashes::Hash as HashTrait,
|
hashes::Hash as HashTrait,
|
||||||
blockdata::opcodes::all::OP_RETURN,
|
blockdata::opcodes::all::OP_RETURN,
|
||||||
script::{PushBytesBuf, Instruction, Instructions, Script},
|
script::{PushBytesBuf, Instruction, Instructions, Script},
|
||||||
|
address::NetworkChecked,
|
||||||
OutPoint, Amount, TxOut, Transaction, Network, Address,
|
OutPoint, Amount, TxOut, Transaction, Network, Address,
|
||||||
},
|
},
|
||||||
wallet::{
|
wallet::{
|
||||||
tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
|
tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
|
||||||
},
|
},
|
||||||
rpc::Rpc,
|
rpc::Rpc,
|
||||||
};
|
};
|
||||||
@@ -45,7 +48,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
"generatetoaddress",
|
"generatetoaddress",
|
||||||
serde_json::json!([
|
serde_json::json!([
|
||||||
1,
|
1,
|
||||||
Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap()
|
Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
|
||||||
]),
|
]),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
@@ -66,7 +69,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
|
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
|
||||||
|
|
||||||
assert_eq!(outputs.len(), 1);
|
assert_eq!(outputs.len(), 1);
|
||||||
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].compute_txid(), 0));
|
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
|
||||||
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
|
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -92,11 +95,46 @@ fn sign(
|
|||||||
) -> Transaction {
|
) -> Transaction {
|
||||||
let mut machines = HashMap::new();
|
let mut machines = HashMap::new();
|
||||||
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
|
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
|
||||||
machines.insert(i, tx.clone().multisig(&keys[&i].clone()).unwrap());
|
machines.insert(
|
||||||
|
i,
|
||||||
|
tx.clone()
|
||||||
|
.multisig(&keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
|
||||||
|
.unwrap(),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
sign_without_caching(&mut OsRng, machines, &[])
|
sign_without_caching(&mut OsRng, machines, &[])
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_tweak_keys() {
|
||||||
|
let mut even = false;
|
||||||
|
let mut odd = false;
|
||||||
|
|
||||||
|
// Generate keys until we get an even set and an odd set
|
||||||
|
while !(even && odd) {
|
||||||
|
let mut keys = key_gen(&mut OsRng).drain().next().unwrap().1;
|
||||||
|
if is_even(keys.group_key()) {
|
||||||
|
// Tweaking should do nothing
|
||||||
|
assert_eq!(tweak_keys(&keys).group_key(), keys.group_key());
|
||||||
|
|
||||||
|
even = true;
|
||||||
|
} else {
|
||||||
|
let tweaked = tweak_keys(&keys).group_key();
|
||||||
|
assert_ne!(tweaked, keys.group_key());
|
||||||
|
// Tweaking should produce an even key
|
||||||
|
assert!(is_even(tweaked));
|
||||||
|
|
||||||
|
// Verify it uses the smallest possible offset
|
||||||
|
while keys.group_key().to_encoded_point(true).tag() == Tag::CompressedOddY {
|
||||||
|
keys = keys.offset(Scalar::ONE);
|
||||||
|
}
|
||||||
|
assert_eq!(tweaked, keys.group_key());
|
||||||
|
|
||||||
|
odd = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async_sequential! {
|
async_sequential! {
|
||||||
async fn test_scanner() {
|
async fn test_scanner() {
|
||||||
// Test Scanners are creatable for even keys.
|
// Test Scanners are creatable for even keys.
|
||||||
@@ -155,7 +193,7 @@ async_sequential! {
|
|||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
assert_eq!(output.offset(), Scalar::ZERO);
|
||||||
|
|
||||||
let inputs = vec![output];
|
let inputs = vec![output];
|
||||||
let addr = || p2tr_script_buf(key).unwrap();
|
let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
|
||||||
let payments = vec![(addr(), 1000)];
|
let payments = vec![(addr(), 1000)];
|
||||||
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
||||||
@@ -168,7 +206,7 @@ async_sequential! {
|
|||||||
// No change
|
// No change
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
||||||
// Consolidation TX
|
// Consolidation TX
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
|
||||||
// Data
|
// Data
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
||||||
// No outputs
|
// No outputs
|
||||||
@@ -191,14 +229,14 @@ async_sequential! {
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
|
SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
|
||||||
Err(TransactionError::TooLowFee),
|
Err(TransactionError::TooLowFee),
|
||||||
);
|
);
|
||||||
|
|
||||||
assert!(matches!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
|
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
|
||||||
Err(TransactionError::NotEnoughFunds { .. }),
|
Err(TransactionError::NotEnoughFunds),
|
||||||
));
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
|
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
|
||||||
@@ -223,19 +261,20 @@ async_sequential! {
|
|||||||
|
|
||||||
// Declare payments, change, fee
|
// Declare payments, change, fee
|
||||||
let payments = [
|
let payments = [
|
||||||
(p2tr_script_buf(key).unwrap(), 1005),
|
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
|
||||||
(p2tr_script_buf(offset_key).unwrap(), 1007)
|
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
|
||||||
];
|
];
|
||||||
|
|
||||||
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
||||||
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
||||||
let change_addr = p2tr_script_buf(change_key).unwrap();
|
let change_addr =
|
||||||
|
Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
|
||||||
|
|
||||||
// Create and sign the TX
|
// Create and sign the TX
|
||||||
let tx = SignableTransaction::new(
|
let tx = SignableTransaction::new(
|
||||||
vec![output.clone(), offset_output.clone()],
|
vec![output.clone(), offset_output.clone()],
|
||||||
&payments,
|
&payments,
|
||||||
Some(change_addr.clone()),
|
Some(&change_addr),
|
||||||
None,
|
None,
|
||||||
FEE
|
FEE
|
||||||
).unwrap();
|
).unwrap();
|
||||||
@@ -248,7 +287,7 @@ async_sequential! {
|
|||||||
// Ensure we can scan it
|
// Ensure we can scan it
|
||||||
let outputs = scanner.scan_transaction(&tx);
|
let outputs = scanner.scan_transaction(&tx);
|
||||||
for (o, output) in outputs.iter().enumerate() {
|
for (o, output) in outputs.iter().enumerate() {
|
||||||
assert_eq!(output.outpoint(), &OutPoint::new(tx.compute_txid(), u32::try_from(o).unwrap()));
|
assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
|
||||||
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
|
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -260,13 +299,13 @@ async_sequential! {
|
|||||||
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
output,
|
output,
|
||||||
&TxOut { script_pubkey: payment.0.clone(), value: Amount::from_sat(payment.1) },
|
&TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
|
||||||
);
|
);
|
||||||
assert_eq!(scanned.value(), payment.1 );
|
assert_eq!(scanned.value(), payment.1 );
|
||||||
}
|
}
|
||||||
|
|
||||||
// Make sure the change is correct
|
// Make sure the change is correct
|
||||||
assert_eq!(needed_fee, u64::try_from(tx.vsize()).unwrap() * FEE);
|
assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
|
||||||
let input_value = output.value() + offset_output.value();
|
let input_value = output.value() + offset_output.value();
|
||||||
let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
|
let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
|
||||||
assert_eq!(input_value - output_value, needed_fee);
|
assert_eq!(input_value - output_value, needed_fee);
|
||||||
@@ -275,13 +314,13 @@ async_sequential! {
|
|||||||
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx.output[2],
|
tx.output[2],
|
||||||
TxOut { script_pubkey: change_addr, value: Amount::from_sat(change_amount) },
|
TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
|
||||||
);
|
);
|
||||||
|
|
||||||
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
||||||
// effectively test
|
// effectively test
|
||||||
rpc.send_raw_transaction(&tx).await.unwrap();
|
rpc.send_raw_transaction(&tx).await.unwrap();
|
||||||
let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array();
|
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
|
||||||
hash.reverse();
|
hash.reverse();
|
||||||
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
||||||
assert_eq!(expected_id, hash);
|
assert_eq!(expected_id, hash);
|
||||||
@@ -305,7 +344,7 @@ async_sequential! {
|
|||||||
&SignableTransaction::new(
|
&SignableTransaction::new(
|
||||||
vec![output],
|
vec![output],
|
||||||
&[],
|
&[],
|
||||||
Some(p2tr_script_buf(key).unwrap()),
|
Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
|
||||||
Some(data.clone()),
|
Some(data.clone()),
|
||||||
FEE
|
FEE
|
||||||
).unwrap()
|
).unwrap()
|
||||||
7
coins/ethereum/.gitignore
vendored
Normal file
7
coins/ethereum/.gitignore
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Solidity build outputs
|
||||||
|
cache
|
||||||
|
artifacts
|
||||||
|
|
||||||
|
# Auto-generated ABI files
|
||||||
|
src/abi/schnorr.rs
|
||||||
|
src/abi/router.rs
|
||||||
45
coins/ethereum/Cargo.toml
Normal file
45
coins/ethereum/Cargo.toml
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
[package]
|
||||||
|
name = "ethereum-serai"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
|
||||||
|
license = "AGPL-3.0-only"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
publish = false
|
||||||
|
rust-version = "1.74"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
thiserror = { version = "1", default-features = false }
|
||||||
|
eyre = { version = "0.6", default-features = false }
|
||||||
|
|
||||||
|
sha3 = { version = "0.10", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
|
group = { version = "0.13", default-features = false }
|
||||||
|
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
||||||
|
|
||||||
|
ethers-core = { version = "2", default-features = false }
|
||||||
|
ethers-providers = { version = "2", default-features = false }
|
||||||
|
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
rand_core = { version = "0.6", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
|
hex = { version = "0.4", default-features = false, features = ["std"] }
|
||||||
|
serde = { version = "1", default-features = false, features = ["std"] }
|
||||||
|
serde_json = { version = "1", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
|
sha2 = { version = "0.10", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
|
tokio = { version = "1", features = ["macros"] }
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
AGPL-3.0-only license
|
AGPL-3.0-only license
|
||||||
|
|
||||||
Copyright (c) 2023-2025 Luke Parker
|
Copyright (c) 2022-2023 Luke Parker
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
This program is free software: you can redistribute it and/or modify
|
||||||
it under the terms of the GNU Affero General Public License Version 3 as
|
it under the terms of the GNU Affero General Public License Version 3 as
|
||||||
9
coins/ethereum/README.md
Normal file
9
coins/ethereum/README.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Ethereum
|
||||||
|
|
||||||
|
This package contains Ethereum-related functionality, specifically deploying and
|
||||||
|
interacting with Serai contracts.
|
||||||
|
|
||||||
|
### Dependencies
|
||||||
|
|
||||||
|
- solc
|
||||||
|
- [Foundry](https://github.com/foundry-rs/foundry)
|
||||||
42
coins/ethereum/build.rs
Normal file
42
coins/ethereum/build.rs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
use std::process::Command;
|
||||||
|
|
||||||
|
use ethers_contract::Abigen;
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
println!("cargo:rerun-if-changed=contracts/*");
|
||||||
|
println!("cargo:rerun-if-changed=artifacts/*");
|
||||||
|
|
||||||
|
for line in String::from_utf8(Command::new("solc").args(["--version"]).output().unwrap().stdout)
|
||||||
|
.unwrap()
|
||||||
|
.lines()
|
||||||
|
{
|
||||||
|
if let Some(version) = line.strip_prefix("Version: ") {
|
||||||
|
let version = version.split('+').next().unwrap();
|
||||||
|
assert_eq!(version, "0.8.25");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[rustfmt::skip]
|
||||||
|
let args = [
|
||||||
|
"--base-path", ".",
|
||||||
|
"-o", "./artifacts", "--overwrite",
|
||||||
|
"--bin", "--abi",
|
||||||
|
"--optimize",
|
||||||
|
"./contracts/Schnorr.sol", "./contracts/Router.sol",
|
||||||
|
];
|
||||||
|
assert!(Command::new("solc").args(args).status().unwrap().success());
|
||||||
|
|
||||||
|
Abigen::new("Schnorr", "./artifacts/Schnorr.abi")
|
||||||
|
.unwrap()
|
||||||
|
.generate()
|
||||||
|
.unwrap()
|
||||||
|
.write_to_file("./src/abi/schnorr.rs")
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
Abigen::new("Router", "./artifacts/Router.abi")
|
||||||
|
.unwrap()
|
||||||
|
.generate()
|
||||||
|
.unwrap()
|
||||||
|
.write_to_file("./src/abi/router.rs")
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
90
coins/ethereum/contracts/Router.sol
Normal file
90
coins/ethereum/contracts/Router.sol
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
import "./Schnorr.sol";
|
||||||
|
|
||||||
|
contract Router is Schnorr {
|
||||||
|
// Contract initializer
|
||||||
|
// TODO: Replace with a MuSig of the genesis validators
|
||||||
|
address public initializer;
|
||||||
|
|
||||||
|
// Nonce is incremented for each batch of transactions executed
|
||||||
|
uint256 public nonce;
|
||||||
|
|
||||||
|
// fixed parity for the public keys used in this contract
|
||||||
|
uint8 constant public KEY_PARITY = 27;
|
||||||
|
|
||||||
|
// current public key's x-coordinate
|
||||||
|
// note: this key must always use the fixed parity defined above
|
||||||
|
bytes32 public seraiKey;
|
||||||
|
|
||||||
|
struct OutInstruction {
|
||||||
|
address to;
|
||||||
|
uint256 value;
|
||||||
|
bytes data;
|
||||||
|
}
|
||||||
|
|
||||||
|
struct Signature {
|
||||||
|
bytes32 c;
|
||||||
|
bytes32 s;
|
||||||
|
}
|
||||||
|
|
||||||
|
// success is a uint256 representing a bitfield of transaction successes
|
||||||
|
event Executed(uint256 nonce, bytes32 batch, uint256 success);
|
||||||
|
|
||||||
|
// error types
|
||||||
|
error NotInitializer();
|
||||||
|
error AlreadyInitialized();
|
||||||
|
error InvalidKey();
|
||||||
|
error TooManyTransactions();
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
initializer = msg.sender;
|
||||||
|
}
|
||||||
|
|
||||||
|
// initSeraiKey can be called by the contract initializer to set the first
|
||||||
|
// public key, only if the public key has yet to be set.
|
||||||
|
function initSeraiKey(bytes32 _seraiKey) external {
|
||||||
|
if (msg.sender != initializer) revert NotInitializer();
|
||||||
|
if (seraiKey != 0) revert AlreadyInitialized();
|
||||||
|
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
||||||
|
seraiKey = _seraiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// updateSeraiKey validates the given Schnorr signature against the current public key,
|
||||||
|
// and if successful, updates the contract's public key to the given one.
|
||||||
|
function updateSeraiKey(
|
||||||
|
bytes32 _seraiKey,
|
||||||
|
Signature memory sig
|
||||||
|
) public {
|
||||||
|
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
||||||
|
bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey));
|
||||||
|
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
||||||
|
seraiKey = _seraiKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
// execute accepts a list of transactions to execute as well as a Schnorr signature.
|
||||||
|
// if signature verification passes, the given transactions are executed.
|
||||||
|
// if signature verification fails, this function will revert.
|
||||||
|
function execute(
|
||||||
|
OutInstruction[] calldata transactions,
|
||||||
|
Signature memory sig
|
||||||
|
) public {
|
||||||
|
if (transactions.length > 256) revert TooManyTransactions();
|
||||||
|
|
||||||
|
bytes32 message = keccak256(abi.encode("execute", nonce, transactions));
|
||||||
|
// This prevents re-entrancy from causing double spends yet does allow
|
||||||
|
// out-of-order execution via re-entrancy
|
||||||
|
nonce++;
|
||||||
|
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
||||||
|
|
||||||
|
uint256 successes;
|
||||||
|
for(uint256 i = 0; i < transactions.length; i++) {
|
||||||
|
(bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data);
|
||||||
|
assembly {
|
||||||
|
successes := or(successes, shl(i, success))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
emit Executed(nonce, message, successes);
|
||||||
|
}
|
||||||
|
}
|
||||||
39
coins/ethereum/contracts/Schnorr.sol
Normal file
39
coins/ethereum/contracts/Schnorr.sol
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
// see https://github.com/noot/schnorr-verify for implementation details
|
||||||
|
contract Schnorr {
|
||||||
|
// secp256k1 group order
|
||||||
|
uint256 constant public Q =
|
||||||
|
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
||||||
|
|
||||||
|
error InvalidSOrA();
|
||||||
|
error InvalidSignature();
|
||||||
|
|
||||||
|
// parity := public key y-coord parity (27 or 28)
|
||||||
|
// px := public key x-coord
|
||||||
|
// message := 32-byte hash of the message
|
||||||
|
// c := schnorr signature challenge
|
||||||
|
// s := schnorr signature
|
||||||
|
function verify(
|
||||||
|
uint8 parity,
|
||||||
|
bytes32 px,
|
||||||
|
bytes32 message,
|
||||||
|
bytes32 c,
|
||||||
|
bytes32 s
|
||||||
|
) public view returns (bool) {
|
||||||
|
// ecrecover = (m, v, r, s);
|
||||||
|
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
||||||
|
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
|
||||||
|
|
||||||
|
if (sa == 0) revert InvalidSOrA();
|
||||||
|
// the ecrecover precompile implementation checks that the `r` and `s`
|
||||||
|
// inputs are non-zero (in this case, `px` and `ca`), thus we don't need to
|
||||||
|
// check if they're zero.
|
||||||
|
address R = ecrecover(sa, parity, px, ca);
|
||||||
|
if (R == address(0)) revert InvalidSignature();
|
||||||
|
return c == keccak256(
|
||||||
|
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
6
coins/ethereum/src/abi/mod.rs
Normal file
6
coins/ethereum/src/abi/mod.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
#[rustfmt::skip]
|
||||||
|
#[allow(clippy::all)]
|
||||||
|
pub(crate) mod schnorr;
|
||||||
|
#[rustfmt::skip]
|
||||||
|
#[allow(clippy::all)]
|
||||||
|
pub(crate) mod router;
|
||||||
91
coins/ethereum/src/crypto.rs
Normal file
91
coins/ethereum/src/crypto.rs
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
|
use group::ff::PrimeField;
|
||||||
|
use k256::{
|
||||||
|
elliptic_curve::{
|
||||||
|
bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint,
|
||||||
|
},
|
||||||
|
ProjectivePoint, Scalar, U256,
|
||||||
|
};
|
||||||
|
|
||||||
|
use frost::{
|
||||||
|
algorithm::{Hram, SchnorrSignature},
|
||||||
|
curve::Secp256k1,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
|
||||||
|
Keccak256::digest(data).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] {
|
||||||
|
let encoded_point = point.to_encoded_point(false);
|
||||||
|
// Last 20 bytes of the hash of the concatenated x and y coordinates
|
||||||
|
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
|
||||||
|
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub struct PublicKey {
|
||||||
|
pub A: ProjectivePoint,
|
||||||
|
pub px: Scalar,
|
||||||
|
pub parity: u8,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PublicKey {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
|
||||||
|
let affine = A.to_affine();
|
||||||
|
let parity = u8::from(bool::from(affine.y_is_odd())) + 27;
|
||||||
|
if parity != 27 {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let x_coord = affine.x();
|
||||||
|
let x_coord_scalar = <Scalar as Reduce<U256>>::reduce_bytes(&x_coord);
|
||||||
|
// Return None if a reduction would occur
|
||||||
|
if x_coord_scalar.to_repr() != x_coord {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(PublicKey { A, px: x_coord_scalar, parity })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Default)]
|
||||||
|
pub struct EthereumHram {}
|
||||||
|
impl Hram<Secp256k1> for EthereumHram {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
||||||
|
let a_encoded_point = A.to_encoded_point(true);
|
||||||
|
let mut a_encoded = a_encoded_point.as_ref().to_owned();
|
||||||
|
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
|
||||||
|
assert!((a_encoded[0] == 27) || (a_encoded[0] == 28));
|
||||||
|
let mut data = address(R).to_vec();
|
||||||
|
data.append(&mut a_encoded);
|
||||||
|
data.extend(m);
|
||||||
|
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Signature {
|
||||||
|
pub(crate) c: Scalar,
|
||||||
|
pub(crate) s: Scalar,
|
||||||
|
}
|
||||||
|
impl Signature {
|
||||||
|
pub fn new(
|
||||||
|
public_key: &PublicKey,
|
||||||
|
chain_id: U256,
|
||||||
|
m: &[u8],
|
||||||
|
signature: SchnorrSignature<Secp256k1>,
|
||||||
|
) -> Option<Signature> {
|
||||||
|
let c = EthereumHram::hram(
|
||||||
|
&signature.R,
|
||||||
|
&public_key.A,
|
||||||
|
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
||||||
|
);
|
||||||
|
if !signature.verify(public_key.A, c) {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
Some(Signature { c, s: signature.s })
|
||||||
|
}
|
||||||
|
}
|
||||||
16
coins/ethereum/src/lib.rs
Normal file
16
coins/ethereum/src/lib.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
pub mod crypto;
|
||||||
|
|
||||||
|
pub(crate) mod abi;
|
||||||
|
pub mod schnorr;
|
||||||
|
pub mod router;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum Error {
|
||||||
|
#[error("failed to verify Schnorr signature")]
|
||||||
|
InvalidSignature,
|
||||||
|
}
|
||||||
30
coins/ethereum/src/router.rs
Normal file
30
coins/ethereum/src/router.rs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
pub use crate::abi::router::*;
|
||||||
|
|
||||||
|
/*
|
||||||
|
use crate::crypto::{ProcessedSignature, PublicKey};
|
||||||
|
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
|
||||||
|
use eyre::Result;
|
||||||
|
use std::{convert::From, fs::File, sync::Arc};
|
||||||
|
|
||||||
|
pub async fn router_update_public_key<M: Middleware + 'static>(
|
||||||
|
contract: &Router<M>,
|
||||||
|
public_key: &PublicKey,
|
||||||
|
signature: &ProcessedSignature,
|
||||||
|
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
|
||||||
|
let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into());
|
||||||
|
let pending_tx = tx.send().await?;
|
||||||
|
let receipt = pending_tx.await?;
|
||||||
|
Ok(receipt)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn router_execute<M: Middleware + 'static>(
|
||||||
|
contract: &Router<M>,
|
||||||
|
txs: Vec<Rtransaction>,
|
||||||
|
signature: &ProcessedSignature,
|
||||||
|
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
|
||||||
|
let tx = contract.execute(txs, signature.into()).send();
|
||||||
|
let pending_tx = tx.send().await?;
|
||||||
|
let receipt = pending_tx.await?;
|
||||||
|
Ok(receipt)
|
||||||
|
}
|
||||||
|
*/
|
||||||
34
coins/ethereum/src/schnorr.rs
Normal file
34
coins/ethereum/src/schnorr.rs
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
use eyre::{eyre, Result};
|
||||||
|
|
||||||
|
use group::ff::PrimeField;
|
||||||
|
|
||||||
|
use ethers_providers::{Provider, Http};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Error,
|
||||||
|
crypto::{keccak256, PublicKey, Signature},
|
||||||
|
};
|
||||||
|
pub use crate::abi::schnorr::*;
|
||||||
|
|
||||||
|
pub async fn call_verify(
|
||||||
|
contract: &Schnorr<Provider<Http>>,
|
||||||
|
public_key: &PublicKey,
|
||||||
|
message: &[u8],
|
||||||
|
signature: &Signature,
|
||||||
|
) -> Result<()> {
|
||||||
|
if contract
|
||||||
|
.verify(
|
||||||
|
public_key.parity,
|
||||||
|
public_key.px.to_repr().into(),
|
||||||
|
keccak256(message),
|
||||||
|
signature.c.to_repr().into(),
|
||||||
|
signature.s.to_repr().into(),
|
||||||
|
)
|
||||||
|
.call()
|
||||||
|
.await?
|
||||||
|
{
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(eyre!(Error::InvalidSignature))
|
||||||
|
}
|
||||||
|
}
|
||||||
132
coins/ethereum/src/tests/crypto.rs
Normal file
132
coins/ethereum/src/tests/crypto.rs
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use sha2::Sha256;
|
||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
|
use group::Group;
|
||||||
|
use k256::{
|
||||||
|
ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey},
|
||||||
|
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint},
|
||||||
|
U256, Scalar, AffinePoint, ProjectivePoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
use frost::{
|
||||||
|
curve::Secp256k1,
|
||||||
|
algorithm::{Hram, IetfSchnorr},
|
||||||
|
tests::{algorithm_machines, sign},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{crypto::*, tests::key_gen};
|
||||||
|
|
||||||
|
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
||||||
|
if r.is_zero().into() || s.is_zero().into() || !((v == 27) || (v == 28)) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into());
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
if let Some(R) = Option::<AffinePoint>::from(R) {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let R = ProjectivePoint::from(R);
|
||||||
|
|
||||||
|
let r = r.invert().unwrap();
|
||||||
|
let u1 = ProjectivePoint::GENERATOR * (-message * r);
|
||||||
|
let u2 = R * (s * r);
|
||||||
|
let key: ProjectivePoint = u1 + u2;
|
||||||
|
if !bool::from(key.is_identity()) {
|
||||||
|
return Some(address(&key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ecrecover() {
|
||||||
|
let private = SigningKey::random(&mut OsRng);
|
||||||
|
let public = VerifyingKey::from(&private);
|
||||||
|
|
||||||
|
// Sign the signature
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let (sig, recovery_id) = private
|
||||||
|
.as_nonzero_scalar()
|
||||||
|
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// Sanity check the signature verifies
|
||||||
|
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
||||||
|
{
|
||||||
|
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform the ecrecover
|
||||||
|
assert_eq!(
|
||||||
|
ecrecover(
|
||||||
|
hash_to_scalar(MESSAGE),
|
||||||
|
u8::from(recovery_id.unwrap().is_y_odd()) + 27,
|
||||||
|
*sig.r(),
|
||||||
|
*sig.s()
|
||||||
|
)
|
||||||
|
.unwrap(),
|
||||||
|
address(&ProjectivePoint::from(public.as_affine()))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run the sign test with the EthereumHram
|
||||||
|
#[test]
|
||||||
|
fn test_signing() {
|
||||||
|
let (keys, _) = key_gen();
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let _sig =
|
||||||
|
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn preprocess_signature_for_ecrecover(
|
||||||
|
R: ProjectivePoint,
|
||||||
|
public_key: &PublicKey,
|
||||||
|
chain_id: U256,
|
||||||
|
m: &[u8],
|
||||||
|
s: Scalar,
|
||||||
|
) -> (u8, Scalar, Scalar) {
|
||||||
|
let c = EthereumHram::hram(
|
||||||
|
&R,
|
||||||
|
&public_key.A,
|
||||||
|
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
||||||
|
);
|
||||||
|
let sa = -(s * public_key.px);
|
||||||
|
let ca = -(c * public_key.px);
|
||||||
|
(public_key.parity, sa, ca)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ecrecover_hack() {
|
||||||
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let hashed_message = keccak256(MESSAGE);
|
||||||
|
let chain_id = U256::ONE;
|
||||||
|
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
&algo,
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, &algo, &keys),
|
||||||
|
full_message,
|
||||||
|
);
|
||||||
|
|
||||||
|
let (parity, sa, ca) =
|
||||||
|
preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s);
|
||||||
|
let q = ecrecover(sa, parity, public_key.px, ca).unwrap();
|
||||||
|
assert_eq!(q, address(&sig.R));
|
||||||
|
}
|
||||||
92
coins/ethereum/src/tests/mod.rs
Normal file
92
coins/ethereum/src/tests/mod.rs
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
use std::{sync::Arc, time::Duration, fs::File, collections::HashMap};
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use group::ff::PrimeField;
|
||||||
|
use k256::{Scalar, ProjectivePoint};
|
||||||
|
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
|
||||||
|
|
||||||
|
use ethers_core::{
|
||||||
|
types::{H160, Signature as EthersSignature},
|
||||||
|
abi::Abi,
|
||||||
|
};
|
||||||
|
use ethers_contract::ContractFactory;
|
||||||
|
use ethers_providers::{Middleware, Provider, Http};
|
||||||
|
|
||||||
|
use crate::crypto::PublicKey;
|
||||||
|
|
||||||
|
mod crypto;
|
||||||
|
mod schnorr;
|
||||||
|
mod router;
|
||||||
|
|
||||||
|
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
|
||||||
|
let mut keys = frost_key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
|
let mut group_key = keys[&Participant::new(1).unwrap()].group_key();
|
||||||
|
|
||||||
|
let mut offset = Scalar::ZERO;
|
||||||
|
while PublicKey::new(group_key).is_none() {
|
||||||
|
offset += Scalar::ONE;
|
||||||
|
group_key += ProjectivePoint::GENERATOR;
|
||||||
|
}
|
||||||
|
for keys in keys.values_mut() {
|
||||||
|
*keys = keys.offset(offset);
|
||||||
|
}
|
||||||
|
let public_key = PublicKey::new(group_key).unwrap();
|
||||||
|
|
||||||
|
(keys, public_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
|
||||||
|
// to fund the deployer, not create/pass a wallet
|
||||||
|
// TODO: Deterministic deployments across chains
|
||||||
|
pub async fn deploy_contract(
|
||||||
|
chain_id: u32,
|
||||||
|
client: Arc<Provider<Http>>,
|
||||||
|
wallet: &k256::ecdsa::SigningKey,
|
||||||
|
name: &str,
|
||||||
|
) -> eyre::Result<H160> {
|
||||||
|
let abi: Abi =
|
||||||
|
serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap();
|
||||||
|
|
||||||
|
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
|
||||||
|
let hex_bin =
|
||||||
|
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
|
||||||
|
let bin = hex::decode(hex_bin).unwrap();
|
||||||
|
let factory = ContractFactory::new(abi, bin.into(), client.clone());
|
||||||
|
|
||||||
|
let mut deployment_tx = factory.deploy(())?.tx;
|
||||||
|
deployment_tx.set_chain_id(chain_id);
|
||||||
|
deployment_tx.set_gas(1_000_000);
|
||||||
|
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
|
||||||
|
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
|
||||||
|
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
|
||||||
|
|
||||||
|
let sig_hash = deployment_tx.sighash();
|
||||||
|
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
|
||||||
|
|
||||||
|
// EIP-155 v
|
||||||
|
let mut v = u64::from(rid.to_byte());
|
||||||
|
assert!((v == 0) || (v == 1));
|
||||||
|
v += u64::from((chain_id * 2) + 35);
|
||||||
|
|
||||||
|
let r = sig.r().to_repr();
|
||||||
|
let r_ref: &[u8] = r.as_ref();
|
||||||
|
let s = sig.s().to_repr();
|
||||||
|
let s_ref: &[u8] = s.as_ref();
|
||||||
|
let deployment_tx =
|
||||||
|
deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v });
|
||||||
|
|
||||||
|
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
|
||||||
|
|
||||||
|
let mut receipt;
|
||||||
|
while {
|
||||||
|
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
|
||||||
|
receipt.is_none()
|
||||||
|
} {
|
||||||
|
tokio::time::sleep(Duration::from_secs(6)).await;
|
||||||
|
}
|
||||||
|
let receipt = receipt.unwrap();
|
||||||
|
assert!(receipt.status == Some(1.into()));
|
||||||
|
|
||||||
|
Ok(receipt.contract_address.unwrap())
|
||||||
|
}
|
||||||
109
coins/ethereum/src/tests/router.rs
Normal file
109
coins/ethereum/src/tests/router.rs
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
use std::{convert::TryFrom, sync::Arc, collections::HashMap};
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use group::ff::PrimeField;
|
||||||
|
use frost::{
|
||||||
|
curve::Secp256k1,
|
||||||
|
Participant, ThresholdKeys,
|
||||||
|
algorithm::IetfSchnorr,
|
||||||
|
tests::{algorithm_machines, sign},
|
||||||
|
};
|
||||||
|
|
||||||
|
use ethers_core::{
|
||||||
|
types::{H160, U256, Bytes},
|
||||||
|
abi::AbiEncode,
|
||||||
|
utils::{Anvil, AnvilInstance},
|
||||||
|
};
|
||||||
|
use ethers_providers::{Middleware, Provider, Http};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
crypto::{keccak256, PublicKey, EthereumHram, Signature},
|
||||||
|
router::{self, *},
|
||||||
|
tests::{key_gen, deploy_contract},
|
||||||
|
};
|
||||||
|
|
||||||
|
async fn setup_test() -> (
|
||||||
|
u32,
|
||||||
|
AnvilInstance,
|
||||||
|
Router<Provider<Http>>,
|
||||||
|
HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
||||||
|
PublicKey,
|
||||||
|
) {
|
||||||
|
let anvil = Anvil::new().spawn();
|
||||||
|
|
||||||
|
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
||||||
|
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
||||||
|
let wallet = anvil.keys()[0].clone().into();
|
||||||
|
let client = Arc::new(provider);
|
||||||
|
|
||||||
|
let contract_address =
|
||||||
|
deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap();
|
||||||
|
let contract = Router::new(contract_address, client.clone());
|
||||||
|
|
||||||
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
|
// Set the key to the threshold keys
|
||||||
|
let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000);
|
||||||
|
let pending_tx = tx.send().await.unwrap();
|
||||||
|
let receipt = pending_tx.await.unwrap().unwrap();
|
||||||
|
assert!(receipt.status == Some(1.into()));
|
||||||
|
|
||||||
|
(chain_id, anvil, contract, keys, public_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_deploy_contract() {
|
||||||
|
setup_test().await;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn hash_and_sign(
|
||||||
|
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
||||||
|
public_key: &PublicKey,
|
||||||
|
chain_id: U256,
|
||||||
|
message: &[u8],
|
||||||
|
) -> Signature {
|
||||||
|
let hashed_message = keccak256(message);
|
||||||
|
|
||||||
|
let mut chain_id_bytes = [0; 32];
|
||||||
|
chain_id.to_big_endian(&mut chain_id_bytes);
|
||||||
|
let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat();
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
&algo,
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, &algo, keys),
|
||||||
|
full_message,
|
||||||
|
);
|
||||||
|
|
||||||
|
Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_router_execute() {
|
||||||
|
let (chain_id, _anvil, contract, keys, public_key) = setup_test().await;
|
||||||
|
|
||||||
|
let to = H160([0u8; 20]);
|
||||||
|
let value = U256([0u64; 4]);
|
||||||
|
let data = Bytes::from([0]);
|
||||||
|
let tx = OutInstruction { to, value, data: data.clone() };
|
||||||
|
|
||||||
|
let nonce_call = contract.nonce();
|
||||||
|
let nonce = nonce_call.call().await.unwrap();
|
||||||
|
|
||||||
|
let encoded =
|
||||||
|
("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode();
|
||||||
|
let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded);
|
||||||
|
|
||||||
|
let tx = contract
|
||||||
|
.execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() })
|
||||||
|
.gas(300_000);
|
||||||
|
let pending_tx = tx.send().await.unwrap();
|
||||||
|
let receipt = dbg!(pending_tx.await.unwrap().unwrap());
|
||||||
|
assert!(receipt.status == Some(1.into()));
|
||||||
|
|
||||||
|
println!("gas used: {:?}", receipt.cumulative_gas_used);
|
||||||
|
println!("logs: {:?}", receipt.logs);
|
||||||
|
}
|
||||||
67
coins/ethereum/src/tests/schnorr.rs
Normal file
67
coins/ethereum/src/tests/schnorr.rs
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
use std::{convert::TryFrom, sync::Arc};
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar};
|
||||||
|
|
||||||
|
use ethers_core::utils::{keccak256, Anvil, AnvilInstance};
|
||||||
|
use ethers_providers::{Middleware, Provider, Http};
|
||||||
|
|
||||||
|
use frost::{
|
||||||
|
curve::Secp256k1,
|
||||||
|
algorithm::IetfSchnorr,
|
||||||
|
tests::{algorithm_machines, sign},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
crypto::*,
|
||||||
|
schnorr::*,
|
||||||
|
tests::{key_gen, deploy_contract},
|
||||||
|
};
|
||||||
|
|
||||||
|
async fn setup_test() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
|
||||||
|
let anvil = Anvil::new().spawn();
|
||||||
|
|
||||||
|
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
||||||
|
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
||||||
|
let wallet = anvil.keys()[0].clone().into();
|
||||||
|
let client = Arc::new(provider);
|
||||||
|
|
||||||
|
let contract_address =
|
||||||
|
deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap();
|
||||||
|
let contract = Schnorr::new(contract_address, client.clone());
|
||||||
|
(chain_id, anvil, contract)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_deploy_contract() {
|
||||||
|
setup_test().await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_ecrecover_hack() {
|
||||||
|
let (chain_id, _anvil, contract) = setup_test().await;
|
||||||
|
let chain_id = U256::from(chain_id);
|
||||||
|
|
||||||
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let hashed_message = keccak256(MESSAGE);
|
||||||
|
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
&algo,
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, &algo, &keys),
|
||||||
|
full_message,
|
||||||
|
);
|
||||||
|
let sig = Signature::new(&public_key, chain_id, MESSAGE, sig).unwrap();
|
||||||
|
|
||||||
|
call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap();
|
||||||
|
// Test an invalid signature fails
|
||||||
|
let mut sig = sig;
|
||||||
|
sig.s += Scalar::ONE;
|
||||||
|
assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err());
|
||||||
|
}
|
||||||
113
coins/monero/Cargo.toml
Normal file
113
coins/monero/Cargo.toml
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
[package]
|
||||||
|
name = "monero-serai"
|
||||||
|
version = "0.1.4-alpha"
|
||||||
|
description = "A modern Monero transaction library"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.74"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
|
async-trait = { version = "0.1", default-features = false }
|
||||||
|
thiserror = { version = "1", default-features = false, optional = true }
|
||||||
|
|
||||||
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
|
subtle = { version = "^2.4", default-features = false }
|
||||||
|
|
||||||
|
rand_core = { version = "0.6", default-features = false }
|
||||||
|
# Used to send transactions
|
||||||
|
rand = { version = "0.8", default-features = false }
|
||||||
|
rand_chacha = { version = "0.3", default-features = false }
|
||||||
|
# Used to select decoys
|
||||||
|
rand_distr = { version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
sha3 = { version = "0.10", default-features = false }
|
||||||
|
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
|
||||||
|
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
||||||
|
|
||||||
|
# Used for the hash to curve, along with the more complicated proofs
|
||||||
|
group = { version = "0.13", default-features = false }
|
||||||
|
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
||||||
|
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }
|
||||||
|
|
||||||
|
# Needed for multisig
|
||||||
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
||||||
|
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }
|
||||||
|
|
||||||
|
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
async-lock = { version = "3", default-features = false, optional = true }
|
||||||
|
|
||||||
|
hex-literal = "0.4"
|
||||||
|
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
||||||
|
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
|
||||||
|
serde_json = { version = "1", default-features = false, features = ["alloc"] }
|
||||||
|
|
||||||
|
base58-monero = { version = "2", default-features = false, features = ["check"] }
|
||||||
|
|
||||||
|
# Used for the provided HTTP RPC
|
||||||
|
digest_auth = { version = "0.3", default-features = false, optional = true }
|
||||||
|
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
|
||||||
|
tokio = { version = "1", default-features = false, optional = true }
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
||||||
|
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
tokio = { version = "1", features = ["sync", "macros"] }
|
||||||
|
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
std = [
|
||||||
|
"std-shims/std",
|
||||||
|
|
||||||
|
"thiserror",
|
||||||
|
|
||||||
|
"zeroize/std",
|
||||||
|
"subtle/std",
|
||||||
|
|
||||||
|
"rand_core/std",
|
||||||
|
"rand/std",
|
||||||
|
"rand_chacha/std",
|
||||||
|
"rand_distr/std",
|
||||||
|
|
||||||
|
"sha3/std",
|
||||||
|
"pbkdf2/std",
|
||||||
|
|
||||||
|
"multiexp/std",
|
||||||
|
|
||||||
|
"transcript/std",
|
||||||
|
"dleq/std",
|
||||||
|
|
||||||
|
"monero-generators/std",
|
||||||
|
|
||||||
|
"async-lock?/std",
|
||||||
|
|
||||||
|
"hex/std",
|
||||||
|
"serde/std",
|
||||||
|
"serde_json/std",
|
||||||
|
|
||||||
|
"base58-monero/std",
|
||||||
|
]
|
||||||
|
|
||||||
|
cache-distribution = ["async-lock"]
|
||||||
|
http-rpc = ["digest_auth", "simple-request", "tokio"]
|
||||||
|
multisig = ["transcript", "frost", "dleq", "std"]
|
||||||
|
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
|
||||||
|
experimental = []
|
||||||
|
|
||||||
|
default = ["std", "http-rpc"]
|
||||||
49
coins/monero/README.md
Normal file
49
coins/monero/README.md
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# monero-serai
|
||||||
|
|
||||||
|
A modern Monero transaction library intended for usage in wallets. It prides
|
||||||
|
itself on accuracy, correctness, and removing common pit falls developers may
|
||||||
|
face.
|
||||||
|
|
||||||
|
monero-serai also offers the following features:
|
||||||
|
|
||||||
|
- Featured Addresses
|
||||||
|
- A FROST-based multisig orders of magnitude more performant than Monero's
|
||||||
|
|
||||||
|
### Purpose and support
|
||||||
|
|
||||||
|
monero-serai was written for Serai, a decentralized exchange aiming to support
|
||||||
|
Monero. Despite this, monero-serai is intended to be a widely usable library,
|
||||||
|
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
|
||||||
|
yet will not deprive functionality from other users.
|
||||||
|
|
||||||
|
Various legacy transaction formats are not currently implemented, yet we are
|
||||||
|
willing to add support for them. There aren't active development efforts around
|
||||||
|
them however.
|
||||||
|
|
||||||
|
### Caveats
|
||||||
|
|
||||||
|
This library DOES attempt to do the following:
|
||||||
|
|
||||||
|
- Create on-chain transactions identical to how wallet2 would (unless told not
|
||||||
|
to)
|
||||||
|
- Not be detectable as monero-serai when scanning outputs
|
||||||
|
- Not reveal spent outputs to the connected RPC node
|
||||||
|
|
||||||
|
This library DOES NOT attempt to do the following:
|
||||||
|
|
||||||
|
- Have identical RPC behavior when creating transactions
|
||||||
|
- Be a wallet
|
||||||
|
|
||||||
|
This means that monero-serai shouldn't be fingerprintable on-chain. It also
|
||||||
|
shouldn't be fingerprintable if a targeted attack occurs to detect if the
|
||||||
|
receiving wallet is monero-serai or wallet2. It also should be generally safe
|
||||||
|
for usage with remote nodes.
|
||||||
|
|
||||||
|
It won't hide from remote nodes it's monero-serai however, potentially
|
||||||
|
allowing a remote node to profile you. The implications of this are left to the
|
||||||
|
user to consider.
|
||||||
|
|
||||||
|
It also won't act as a wallet, just as a transaction library. wallet2 has
|
||||||
|
several *non-transaction-level* policies, such as always attempting to use two
|
||||||
|
inputs to create transactions. These are considered out of scope to
|
||||||
|
monero-serai.
|
||||||
67
coins/monero/build.rs
Normal file
67
coins/monero/build.rs
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
use std::{
|
||||||
|
io::Write,
|
||||||
|
env,
|
||||||
|
path::Path,
|
||||||
|
fs::{File, remove_file},
|
||||||
|
};
|
||||||
|
|
||||||
|
use dalek_ff_group::EdwardsPoint;
|
||||||
|
|
||||||
|
use monero_generators::bulletproofs_generators;
|
||||||
|
|
||||||
|
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
|
||||||
|
for generator in points {
|
||||||
|
generators_string.extend(
|
||||||
|
format!(
|
||||||
|
"
|
||||||
|
dalek_ff_group::EdwardsPoint(
|
||||||
|
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
|
||||||
|
),
|
||||||
|
",
|
||||||
|
generator.compress().to_bytes()
|
||||||
|
)
|
||||||
|
.chars(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generators(prefix: &'static str, path: &str) {
|
||||||
|
let generators = bulletproofs_generators(prefix.as_bytes());
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut G_str = String::new();
|
||||||
|
serialize(&mut G_str, &generators.G);
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut H_str = String::new();
|
||||||
|
serialize(&mut H_str, &generators.H);
|
||||||
|
|
||||||
|
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
|
||||||
|
let _ = remove_file(&path);
|
||||||
|
File::create(&path)
|
||||||
|
.unwrap()
|
||||||
|
.write_all(
|
||||||
|
format!(
|
||||||
|
"
|
||||||
|
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
||||||
|
pub fn GENERATORS() -> &'static Generators {{
|
||||||
|
GENERATORS_CELL.get_or_init(|| Generators {{
|
||||||
|
G: vec![
|
||||||
|
{G_str}
|
||||||
|
],
|
||||||
|
H: vec![
|
||||||
|
{H_str}
|
||||||
|
],
|
||||||
|
}})
|
||||||
|
}}
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.as_bytes(),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
println!("cargo:rerun-if-changed=build.rs");
|
||||||
|
|
||||||
|
generators("bulletproof", "generators.rs");
|
||||||
|
generators("bulletproof_plus", "generators_plus.rs");
|
||||||
|
}
|
||||||
@@ -1,12 +1,11 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "monero-generators"
|
name = "monero-generators"
|
||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
description = "Monero's hash to point function and generators"
|
description = "Monero's hash_to_point and generators"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/generators"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.80"
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -21,27 +20,15 @@ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-fe
|
|||||||
subtle = { version = "^2.4", default-features = false }
|
subtle = { version = "^2.4", default-features = false }
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false }
|
sha3 = { version = "0.10", default-features = false }
|
||||||
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
group = { version = "0.13", default-features = false }
|
||||||
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
||||||
|
|
||||||
monero-io = { path = "../io", version = "0.1", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
hex = "0.4"
|
hex = "0.4"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = [
|
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
|
||||||
"std-shims/std",
|
|
||||||
|
|
||||||
"subtle/std",
|
|
||||||
|
|
||||||
"sha3/std",
|
|
||||||
|
|
||||||
"group/alloc",
|
|
||||||
"dalek-ff-group/std",
|
|
||||||
|
|
||||||
"monero-io/std"
|
|
||||||
]
|
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2024 Luke Parker
|
Copyright (c) 2022-2023 Luke Parker
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
7
coins/monero/generators/README.md
Normal file
7
coins/monero/generators/README.md
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Monero Generators
|
||||||
|
|
||||||
|
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
||||||
|
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
||||||
|
`hash_to_point` here, is included, as needed to generate generators.
|
||||||
|
|
||||||
|
This library is usable under no-std when the `std` feature is disabled.
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
use subtle::ConditionallySelectable;
|
use subtle::ConditionallySelectable;
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
||||||
|
|
||||||
use group::ff::{Field, PrimeField};
|
use group::ff::{Field, PrimeField};
|
||||||
use dalek_ff_group::FieldElement;
|
use dalek_ff_group::FieldElement;
|
||||||
|
|
||||||
use monero_io::decompress_point;
|
use crate::hash;
|
||||||
|
|
||||||
use crate::keccak256;
|
/// Decompress canonically encoded ed25519 point
|
||||||
|
/// It does not check if the point is in the prime order subgroup
|
||||||
|
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
|
||||||
|
CompressedEdwardsY(bytes)
|
||||||
|
.decompress()
|
||||||
|
// Ban points which are either unreduced or -0
|
||||||
|
.filter(|point| point.compress().to_bytes() == bytes)
|
||||||
|
}
|
||||||
|
|
||||||
/// Monero's `hash_to_ec` function.
|
/// Monero's hash to point function, as named `hash_to_ec`.
|
||||||
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let A = FieldElement::from(486662u64);
|
let A = FieldElement::from(486662u64);
|
||||||
|
|
||||||
let v = FieldElement::from_square(keccak256(&bytes)).double();
|
let v = FieldElement::from_square(hash(&bytes)).double();
|
||||||
let w = v + FieldElement::ONE;
|
let w = v + FieldElement::ONE;
|
||||||
let x = w.square() + (-A.square() * v);
|
let x = w.square() + (-A.square() * v);
|
||||||
|
|
||||||
79
coins/monero/generators/src/lib.rs
Normal file
79
coins/monero/generators/src/lib.rs
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
||||||
|
//!
|
||||||
|
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
||||||
|
//! `hash_to_point` here, is included, as needed to generate generators.
|
||||||
|
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
|
use std_shims::{sync::OnceLock, vec::Vec};
|
||||||
|
|
||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
|
||||||
|
|
||||||
|
use group::{Group, GroupEncoding};
|
||||||
|
use dalek_ff_group::EdwardsPoint;
|
||||||
|
|
||||||
|
mod varint;
|
||||||
|
use varint::write_varint;
|
||||||
|
|
||||||
|
mod hash_to_point;
|
||||||
|
pub use hash_to_point::{hash_to_point, decompress_point};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
fn hash(data: &[u8]) -> [u8; 32] {
|
||||||
|
Keccak256::digest(data).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
|
||||||
|
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn H() -> DalekPoint {
|
||||||
|
*H_CELL.get_or_init(|| {
|
||||||
|
decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
|
||||||
|
/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
|
||||||
|
H_POW_2_CELL.get_or_init(|| {
|
||||||
|
let mut res = [H(); 64];
|
||||||
|
for i in 1 .. 64 {
|
||||||
|
res[i] = res[i - 1] + res[i - 1];
|
||||||
|
}
|
||||||
|
res
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const MAX_M: usize = 16;
|
||||||
|
const N: usize = 64;
|
||||||
|
const MAX_MN: usize = MAX_M * N;
|
||||||
|
|
||||||
|
/// Container struct for Bulletproofs(+) generators.
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub struct Generators {
|
||||||
|
pub G: Vec<EdwardsPoint>,
|
||||||
|
pub H: Vec<EdwardsPoint>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
||||||
|
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
||||||
|
let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
|
||||||
|
for i in 0 .. MAX_MN {
|
||||||
|
let i = 2 * i;
|
||||||
|
|
||||||
|
let mut even = H().compress().to_bytes().to_vec();
|
||||||
|
even.extend(dst);
|
||||||
|
let mut odd = even.clone();
|
||||||
|
|
||||||
|
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
|
||||||
|
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
|
||||||
|
res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
|
||||||
|
res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
use crate::{decompress_point, hash_to_point};
|
use crate::{decompress_point, hash_to_point};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_vectors() {
|
fn crypto_tests() {
|
||||||
// tests.txt file copied from monero repo
|
// tests.txt file copied from monero repo
|
||||||
// https://github.com/monero-project/monero/
|
// https://github.com/monero-project/monero/
|
||||||
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
|
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
|
||||||
@@ -21,6 +21,7 @@ fn test_vectors() {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
|
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
|
||||||
|
|
||||||
assert_eq!(actual.is_some(), expected);
|
assert_eq!(actual.is_some(), expected);
|
||||||
}
|
}
|
||||||
"hash_to_ec" => {
|
"hash_to_ec" => {
|
||||||
@@ -28,6 +29,7 @@ fn test_vectors() {
|
|||||||
let expected = words.next().unwrap();
|
let expected = words.next().unwrap();
|
||||||
|
|
||||||
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
|
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
|
||||||
|
|
||||||
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
|
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
|
||||||
}
|
}
|
||||||
_ => unreachable!("unknown command"),
|
_ => unreachable!("unknown command"),
|
||||||
1
coins/monero/generators/src/tests/mod.rs
Normal file
1
coins/monero/generators/src/tests/mod.rs
Normal file
@@ -0,0 +1 @@
|
|||||||
|
mod hash_to_point;
|
||||||
16
coins/monero/generators/src/varint.rs
Normal file
16
coins/monero/generators/src/varint.rs
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
use std_shims::io::{self, Write};
|
||||||
|
|
||||||
|
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
||||||
|
let mut varint = *varint;
|
||||||
|
while {
|
||||||
|
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
||||||
|
varint >>= 7;
|
||||||
|
if varint != 0 {
|
||||||
|
b |= VARINT_CONTINUATION_MASK;
|
||||||
|
}
|
||||||
|
w.write_all(&[b])?;
|
||||||
|
varint != 0
|
||||||
|
} {}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
321
coins/monero/src/bin/reserialize_chain.rs
Normal file
321
coins/monero/src/bin/reserialize_chain.rs
Normal file
@@ -0,0 +1,321 @@
|
|||||||
|
#[cfg(feature = "binaries")]
|
||||||
|
mod binaries {
|
||||||
|
pub(crate) use std::sync::Arc;
|
||||||
|
|
||||||
|
pub(crate) use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
pub(crate) use multiexp::BatchVerifier;
|
||||||
|
|
||||||
|
pub(crate) use serde::Deserialize;
|
||||||
|
pub(crate) use serde_json::json;
|
||||||
|
|
||||||
|
pub(crate) use monero_serai::{
|
||||||
|
Commitment,
|
||||||
|
ringct::RctPrunable,
|
||||||
|
transaction::{Input, Transaction},
|
||||||
|
block::Block,
|
||||||
|
rpc::{RpcError, Rpc, HttpRpc},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) use monero_generators::decompress_point;
|
||||||
|
|
||||||
|
pub(crate) use tokio::task::JoinHandle;
|
||||||
|
|
||||||
|
pub(crate) async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
|
||||||
|
let hash = loop {
|
||||||
|
match rpc.get_block_hash(block_i).await {
|
||||||
|
Ok(hash) => break hash,
|
||||||
|
Err(RpcError::ConnectionError(e)) => {
|
||||||
|
println!("get_block_hash ConnectionError: {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: Grab the JSON to also check it was deserialized correctly
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockResponse {
|
||||||
|
blob: String,
|
||||||
|
}
|
||||||
|
let res: BlockResponse = loop {
|
||||||
|
match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
|
||||||
|
Ok(res) => break res,
|
||||||
|
Err(RpcError::ConnectionError(e)) => {
|
||||||
|
println!("get_block ConnectionError: {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let blob = hex::decode(res.blob).expect("node returned non-hex block");
|
||||||
|
let block = Block::read(&mut blob.as_slice())
|
||||||
|
.unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
|
||||||
|
assert_eq!(block.hash(), hash, "hash differs");
|
||||||
|
assert_eq!(block.serialize(), blob, "serialization differs");
|
||||||
|
|
||||||
|
let txs_len = 1 + block.txs.len();
|
||||||
|
|
||||||
|
if !block.txs.is_empty() {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionResponse {
|
||||||
|
tx_hash: String,
|
||||||
|
as_hex: String,
|
||||||
|
}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionsResponse {
|
||||||
|
#[serde(default)]
|
||||||
|
missed_tx: Vec<String>,
|
||||||
|
txs: Vec<TransactionResponse>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
|
||||||
|
let mut all_txs = vec![];
|
||||||
|
while !hashes_hex.is_empty() {
|
||||||
|
let txs: TransactionsResponse = loop {
|
||||||
|
match rpc
|
||||||
|
.rpc_call(
|
||||||
|
"get_transactions",
|
||||||
|
Some(json!({
|
||||||
|
"txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(txs) => break txs,
|
||||||
|
Err(RpcError::ConnectionError(e)) => {
|
||||||
|
println!("get_transactions ConnectionError: {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Err(e) => panic!("couldn't call get_transactions: {e:?}"),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
assert!(txs.missed_tx.is_empty());
|
||||||
|
all_txs.extend(txs.txs);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut batch = BatchVerifier::new(block.txs.len());
|
||||||
|
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
|
||||||
|
assert_eq!(
|
||||||
|
tx_res.tx_hash,
|
||||||
|
hex::encode(tx_hash),
|
||||||
|
"node returned a transaction with different hash"
|
||||||
|
);
|
||||||
|
|
||||||
|
let tx = Transaction::read(
|
||||||
|
&mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
|
||||||
|
)
|
||||||
|
.expect("couldn't deserialize transaction");
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
hex::encode(tx.serialize()),
|
||||||
|
tx_res.as_hex,
|
||||||
|
"Transaction serialization was different"
|
||||||
|
);
|
||||||
|
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
|
||||||
|
|
||||||
|
if matches!(tx.rct_signatures.prunable, RctPrunable::Null) {
|
||||||
|
assert_eq!(tx.prefix.version, 1);
|
||||||
|
assert!(!tx.signatures.is_empty());
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let sig_hash = tx.signature_hash();
|
||||||
|
// Verify all proofs we support proving for
|
||||||
|
// This is due to having debug_asserts calling verify within their proving, and CLSAG
|
||||||
|
// multisig explicitly calling verify as part of its signing process
|
||||||
|
// Accordingly, making sure our signature_hash algorithm is correct is great, and further
|
||||||
|
// making sure the verification functions are valid is appreciated
|
||||||
|
match tx.rct_signatures.prunable {
|
||||||
|
RctPrunable::Null |
|
||||||
|
RctPrunable::AggregateMlsagBorromean { .. } |
|
||||||
|
RctPrunable::MlsagBorromean { .. } => {}
|
||||||
|
RctPrunable::MlsagBulletproofs { bulletproofs, .. } => {
|
||||||
|
assert!(bulletproofs.batch_verify(
|
||||||
|
&mut rand_core::OsRng,
|
||||||
|
&mut batch,
|
||||||
|
(),
|
||||||
|
&tx.rct_signatures.base.commitments
|
||||||
|
));
|
||||||
|
}
|
||||||
|
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
||||||
|
assert!(bulletproofs.batch_verify(
|
||||||
|
&mut rand_core::OsRng,
|
||||||
|
&mut batch,
|
||||||
|
(),
|
||||||
|
&tx.rct_signatures.base.commitments
|
||||||
|
));
|
||||||
|
|
||||||
|
for (i, clsag) in clsags.into_iter().enumerate() {
|
||||||
|
let (amount, key_offsets, image) = match &tx.prefix.inputs[i] {
|
||||||
|
Input::Gen(_) => panic!("Input::Gen"),
|
||||||
|
Input::ToKey { amount, key_offsets, key_image } => (amount, key_offsets, key_image),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut running_sum = 0;
|
||||||
|
let mut actual_indexes = vec![];
|
||||||
|
for offset in key_offsets {
|
||||||
|
running_sum += offset;
|
||||||
|
actual_indexes.push(running_sum);
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_outs(
|
||||||
|
rpc: &Rpc<HttpRpc>,
|
||||||
|
amount: u64,
|
||||||
|
indexes: &[u64],
|
||||||
|
) -> Vec<[EdwardsPoint; 2]> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Out {
|
||||||
|
key: String,
|
||||||
|
mask: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Outs {
|
||||||
|
outs: Vec<Out>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let outs: Outs = loop {
|
||||||
|
match rpc
|
||||||
|
.rpc_call(
|
||||||
|
"get_outs",
|
||||||
|
Some(json!({
|
||||||
|
"get_txid": true,
|
||||||
|
"outputs": indexes.iter().map(|o| json!({
|
||||||
|
"amount": amount,
|
||||||
|
"index": o
|
||||||
|
})).collect::<Vec<_>>()
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
{
|
||||||
|
Ok(outs) => break outs,
|
||||||
|
Err(RpcError::ConnectionError(e)) => {
|
||||||
|
println!("get_outs ConnectionError: {e}");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let rpc_point = |point: &str| {
|
||||||
|
decompress_point(
|
||||||
|
hex::decode(point)
|
||||||
|
.expect("invalid hex for ring member")
|
||||||
|
.try_into()
|
||||||
|
.expect("invalid point len for ring member"),
|
||||||
|
)
|
||||||
|
.expect("invalid point for ring member")
|
||||||
|
};
|
||||||
|
|
||||||
|
outs
|
||||||
|
.outs
|
||||||
|
.iter()
|
||||||
|
.map(|out| {
|
||||||
|
let mask = rpc_point(&out.mask);
|
||||||
|
if amount != 0 {
|
||||||
|
assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
|
||||||
|
}
|
||||||
|
[rpc_point(&out.key), mask]
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
clsag
|
||||||
|
.verify(
|
||||||
|
&get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
|
||||||
|
image,
|
||||||
|
&pseudo_outs[i],
|
||||||
|
&sig_hash,
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert!(batch.verify_vartime());
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "binaries")]
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() {
|
||||||
|
use binaries::*;
|
||||||
|
|
||||||
|
let args = std::env::args().collect::<Vec<String>>();
|
||||||
|
|
||||||
|
// Read start block as the first arg
|
||||||
|
let mut block_i = args[1].parse::<usize>().expect("invalid start block");
|
||||||
|
|
||||||
|
// How many blocks to work on at once
|
||||||
|
let async_parallelism: usize =
|
||||||
|
args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");
|
||||||
|
|
||||||
|
// Read further args as RPC URLs
|
||||||
|
let default_nodes = vec![
|
||||||
|
"http://xmr-node.cakewallet.com:18081".to_string(),
|
||||||
|
"https://node.sethforprivacy.com".to_string(),
|
||||||
|
];
|
||||||
|
let mut specified_nodes = vec![];
|
||||||
|
{
|
||||||
|
let mut i = 0;
|
||||||
|
loop {
|
||||||
|
let Some(node) = args.get(3 + i) else { break };
|
||||||
|
specified_nodes.push(node.clone());
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };
|
||||||
|
|
||||||
|
let rpc = |url: String| async move {
|
||||||
|
HttpRpc::new(url.clone())
|
||||||
|
.await
|
||||||
|
.unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
|
||||||
|
};
|
||||||
|
let main_rpc = rpc(nodes[0].clone()).await;
|
||||||
|
let mut rpcs = vec![];
|
||||||
|
for i in 0 .. async_parallelism {
|
||||||
|
rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone()).await));
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut rpc_i = 0;
|
||||||
|
let mut handles: Vec<JoinHandle<()>> = vec![];
|
||||||
|
let mut height = 0;
|
||||||
|
loop {
|
||||||
|
let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
|
||||||
|
if new_height == height {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
height = new_height;
|
||||||
|
|
||||||
|
while block_i < height {
|
||||||
|
if handles.len() >= async_parallelism {
|
||||||
|
// Guarantee one handle is complete
|
||||||
|
handles.swap_remove(0).await.unwrap();
|
||||||
|
|
||||||
|
// Remove all of the finished handles
|
||||||
|
let mut i = 0;
|
||||||
|
while i < handles.len() {
|
||||||
|
if handles[i].is_finished() {
|
||||||
|
handles.swap_remove(i).await.unwrap();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
|
||||||
|
rpc_i = (rpc_i + 1) % rpcs.len();
|
||||||
|
block_i += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(feature = "binaries"))]
|
||||||
|
fn main() {
|
||||||
|
panic!("To run binaries, please build with `--feature binaries`.");
|
||||||
|
}
|
||||||
130
coins/monero/src/block.rs
Normal file
130
coins/monero/src/block.rs
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
hash,
|
||||||
|
merkle::merkle_root,
|
||||||
|
serialize::*,
|
||||||
|
transaction::{Input, Transaction},
|
||||||
|
};
|
||||||
|
|
||||||
|
const CORRECT_BLOCK_HASH_202612: [u8; 32] =
|
||||||
|
hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a");
|
||||||
|
const EXISTING_BLOCK_HASH_202612: [u8; 32] =
|
||||||
|
hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698");
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct BlockHeader {
|
||||||
|
pub major_version: u8,
|
||||||
|
pub minor_version: u8,
|
||||||
|
pub timestamp: u64,
|
||||||
|
pub previous: [u8; 32],
|
||||||
|
pub nonce: u32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BlockHeader {
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
write_varint(&self.major_version, w)?;
|
||||||
|
write_varint(&self.minor_version, w)?;
|
||||||
|
write_varint(&self.timestamp, w)?;
|
||||||
|
w.write_all(&self.previous)?;
|
||||||
|
w.write_all(&self.nonce.to_le_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
|
||||||
|
Ok(BlockHeader {
|
||||||
|
major_version: read_varint(r)?,
|
||||||
|
minor_version: read_varint(r)?,
|
||||||
|
timestamp: read_varint(r)?,
|
||||||
|
previous: read_bytes(r)?,
|
||||||
|
nonce: read_bytes(r).map(u32::from_le_bytes)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct Block {
|
||||||
|
pub header: BlockHeader,
|
||||||
|
pub miner_tx: Transaction,
|
||||||
|
pub txs: Vec<[u8; 32]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Block {
|
||||||
|
pub fn number(&self) -> Option<u64> {
|
||||||
|
match self.miner_tx.prefix.inputs.first() {
|
||||||
|
Some(Input::Gen(number)) => Some(*number),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
self.header.write(w)?;
|
||||||
|
self.miner_tx.write(w)?;
|
||||||
|
write_varint(&self.txs.len(), w)?;
|
||||||
|
for tx in &self.txs {
|
||||||
|
w.write_all(tx)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tx_merkle_root(&self) -> [u8; 32] {
|
||||||
|
merkle_root(self.miner_tx.hash(), &self.txs)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize the block as required for the proof of work hash.
|
||||||
|
///
|
||||||
|
/// This is distinct from the serialization required for the block hash. To get the block hash,
|
||||||
|
/// use the [`Block::hash`] function.
|
||||||
|
pub fn serialize_hashable(&self) -> Vec<u8> {
|
||||||
|
let mut blob = self.header.serialize();
|
||||||
|
blob.extend_from_slice(&self.tx_merkle_root());
|
||||||
|
write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
|
||||||
|
|
||||||
|
blob
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn hash(&self) -> [u8; 32] {
|
||||||
|
let mut hashable = self.serialize_hashable();
|
||||||
|
// Monero pre-appends a VarInt of the block hashing blobs length before getting the block hash
|
||||||
|
// but doesn't do this when getting the proof of work hash :)
|
||||||
|
let mut hashing_blob = Vec::with_capacity(8 + hashable.len());
|
||||||
|
write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
|
||||||
|
hashing_blob.append(&mut hashable);
|
||||||
|
|
||||||
|
let hash = hash(&hashing_blob);
|
||||||
|
if hash == CORRECT_BLOCK_HASH_202612 {
|
||||||
|
return EXISTING_BLOCK_HASH_202612;
|
||||||
|
};
|
||||||
|
|
||||||
|
hash
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
|
||||||
|
let header = BlockHeader::read(r)?;
|
||||||
|
|
||||||
|
let miner_tx = Transaction::read(r)?;
|
||||||
|
if !matches!(miner_tx.prefix.inputs.as_slice(), &[Input::Gen(_)]) {
|
||||||
|
Err(io::Error::other("Miner transaction has incorrect input type."))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Block {
|
||||||
|
header,
|
||||||
|
miner_tx,
|
||||||
|
txs: (0_usize .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
229
coins/monero/src/lib.rs
Normal file
229
coins/monero/src/lib.rs
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
|
#[cfg(not(feature = "std"))]
|
||||||
|
#[macro_use]
|
||||||
|
extern crate alloc;
|
||||||
|
|
||||||
|
use std_shims::{sync::OnceLock, io};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
pub use monero_generators::{H, decompress_point};
|
||||||
|
|
||||||
|
mod merkle;
|
||||||
|
|
||||||
|
mod serialize;
|
||||||
|
use serialize::{read_byte, read_u16};
|
||||||
|
|
||||||
|
/// UnreducedScalar struct with functionality for recovering incorrectly reduced scalars.
|
||||||
|
mod unreduced_scalar;
|
||||||
|
|
||||||
|
/// Ring Signature structs and functionality.
|
||||||
|
pub mod ring_signatures;
|
||||||
|
|
||||||
|
/// RingCT structs and functionality.
|
||||||
|
pub mod ringct;
|
||||||
|
use ringct::RctType;
|
||||||
|
|
||||||
|
/// Transaction structs.
|
||||||
|
pub mod transaction;
|
||||||
|
/// Block structs.
|
||||||
|
pub mod block;
|
||||||
|
|
||||||
|
/// Monero daemon RPC interface.
|
||||||
|
pub mod rpc;
|
||||||
|
/// Wallet functionality, enabling scanning and sending transactions.
|
||||||
|
pub mod wallet;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
pub const DEFAULT_LOCK_WINDOW: usize = 10;
|
||||||
|
pub const COINBASE_LOCK_WINDOW: usize = 60;
|
||||||
|
pub const BLOCK_TIME: usize = 120;
|
||||||
|
|
||||||
|
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub(crate) fn INV_EIGHT() -> Scalar {
|
||||||
|
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Monero protocol version.
|
||||||
|
///
|
||||||
|
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
|
||||||
|
/// transactions supported. Accordingly, v16 should be used during v15.
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||||
|
#[allow(non_camel_case_types)]
|
||||||
|
pub enum Protocol {
|
||||||
|
v14,
|
||||||
|
v16,
|
||||||
|
Custom {
|
||||||
|
ring_len: usize,
|
||||||
|
bp_plus: bool,
|
||||||
|
optimal_rct_type: RctType,
|
||||||
|
view_tags: bool,
|
||||||
|
v16_fee: bool,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Protocol {
|
||||||
|
/// Amount of ring members under this protocol version.
|
||||||
|
pub fn ring_len(&self) -> usize {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => 11,
|
||||||
|
Protocol::v16 => 16,
|
||||||
|
Protocol::Custom { ring_len, .. } => *ring_len,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
|
||||||
|
///
|
||||||
|
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
|
||||||
|
pub fn bp_plus(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => false,
|
||||||
|
Protocol::v16 => true,
|
||||||
|
Protocol::Custom { bp_plus, .. } => *bp_plus,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Make this an Option when we support pre-RCT protocols
|
||||||
|
pub fn optimal_rct_type(&self) -> RctType {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => RctType::Clsag,
|
||||||
|
Protocol::v16 => RctType::BulletproofsPlus,
|
||||||
|
Protocol::Custom { optimal_rct_type, .. } => *optimal_rct_type,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether or not the specified version uses view tags.
|
||||||
|
pub fn view_tags(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => false,
|
||||||
|
Protocol::v16 => true,
|
||||||
|
Protocol::Custom { view_tags, .. } => *view_tags,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Whether or not the specified version uses the fee algorithm from Monero
|
||||||
|
/// hard fork version 16 (released in v18 binaries).
|
||||||
|
pub fn v16_fee(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => false,
|
||||||
|
Protocol::v16 => true,
|
||||||
|
Protocol::Custom { v16_fee, .. } => *v16_fee,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
Protocol::v14 => w.write_all(&[0, 14]),
|
||||||
|
Protocol::v16 => w.write_all(&[0, 16]),
|
||||||
|
Protocol::Custom { ring_len, bp_plus, optimal_rct_type, view_tags, v16_fee } => {
|
||||||
|
// Custom, version 0
|
||||||
|
w.write_all(&[1, 0])?;
|
||||||
|
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
|
||||||
|
w.write_all(&[u8::from(*bp_plus)])?;
|
||||||
|
w.write_all(&[optimal_rct_type.to_byte()])?;
|
||||||
|
w.write_all(&[u8::from(*view_tags)])?;
|
||||||
|
w.write_all(&[u8::from(*v16_fee)])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Protocol> {
|
||||||
|
Ok(match read_byte(r)? {
|
||||||
|
// Monero protocol
|
||||||
|
0 => match read_byte(r)? {
|
||||||
|
14 => Protocol::v14,
|
||||||
|
16 => Protocol::v16,
|
||||||
|
_ => Err(io::Error::other("unrecognized monero protocol"))?,
|
||||||
|
},
|
||||||
|
// Custom
|
||||||
|
1 => match read_byte(r)? {
|
||||||
|
0 => Protocol::Custom {
|
||||||
|
ring_len: read_u16(r)?.into(),
|
||||||
|
bp_plus: match read_byte(r)? {
|
||||||
|
0 => false,
|
||||||
|
1 => true,
|
||||||
|
_ => Err(io::Error::other("invalid bool serialization"))?,
|
||||||
|
},
|
||||||
|
optimal_rct_type: RctType::from_byte(read_byte(r)?)
|
||||||
|
.ok_or_else(|| io::Error::other("invalid RctType serialization"))?,
|
||||||
|
view_tags: match read_byte(r)? {
|
||||||
|
0 => false,
|
||||||
|
1 => true,
|
||||||
|
_ => Err(io::Error::other("invalid bool serialization"))?,
|
||||||
|
},
|
||||||
|
v16_fee: match read_byte(r)? {
|
||||||
|
0 => false,
|
||||||
|
1 => true,
|
||||||
|
_ => Err(io::Error::other("invalid bool serialization"))?,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
_ => Err(io::Error::other("unrecognized custom protocol serialization"))?,
|
||||||
|
},
|
||||||
|
_ => Err(io::Error::other("unrecognized protocol serialization"))?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Transparent structure representing a Pedersen commitment's contents.
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||||
|
pub struct Commitment {
|
||||||
|
pub mask: Scalar,
|
||||||
|
pub amount: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl core::fmt::Debug for Commitment {
|
||||||
|
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||||
|
fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Commitment {
|
||||||
|
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
|
||||||
|
pub fn zero() -> Commitment {
|
||||||
|
Commitment { mask: Scalar::ONE, amount: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new(mask: Scalar, amount: u64) -> Commitment {
|
||||||
|
Commitment { mask, amount }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
|
||||||
|
pub fn calculate(&self) -> EdwardsPoint {
|
||||||
|
(&self.mask * ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
|
||||||
|
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
|
||||||
|
let mut r = [0; 64];
|
||||||
|
rng.fill_bytes(&mut r);
|
||||||
|
Scalar::from_bytes_mod_order_wide(&r)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
|
||||||
|
Keccak256::digest(data).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Hash the provided data to a scalar via keccak256(data) % l.
|
||||||
|
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
let scalar = Scalar::from_bytes_mod_order(hash(data));
|
||||||
|
// Monero will explicitly error in this case
|
||||||
|
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
||||||
|
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
||||||
|
// not generate/verify a proof we believe to be valid when it isn't
|
||||||
|
assert!(scalar != Scalar::ZERO, "ZERO HASH: {data:?}");
|
||||||
|
scalar
|
||||||
|
}
|
||||||
@@ -1,11 +1,11 @@
|
|||||||
use std_shims::vec::Vec;
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use crate::primitives::keccak256;
|
use crate::hash;
|
||||||
|
|
||||||
pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
|
pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
|
||||||
match leafs.len() {
|
match leafs.len() {
|
||||||
0 => root,
|
0 => root,
|
||||||
1 => keccak256([root, leafs[0]].concat()),
|
1 => hash(&[root, leafs[0]].concat()),
|
||||||
_ => {
|
_ => {
|
||||||
let mut hashes = Vec::with_capacity(1 + leafs.len());
|
let mut hashes = Vec::with_capacity(1 + leafs.len());
|
||||||
hashes.push(root);
|
hashes.push(root);
|
||||||
@@ -29,7 +29,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
|
|||||||
let mut paired_hashes = Vec::with_capacity(overage);
|
let mut paired_hashes = Vec::with_capacity(overage);
|
||||||
while let Some(left) = rightmost.next() {
|
while let Some(left) = rightmost.next() {
|
||||||
let right = rightmost.next().unwrap();
|
let right = rightmost.next().unwrap();
|
||||||
paired_hashes.push(keccak256([left.as_ref(), &right].concat()));
|
paired_hashes.push(hash(&[left.as_ref(), &right].concat()));
|
||||||
}
|
}
|
||||||
drop(rightmost);
|
drop(rightmost);
|
||||||
|
|
||||||
@@ -42,7 +42,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
|
|||||||
while hashes.len() > 1 {
|
while hashes.len() > 1 {
|
||||||
let mut i = 0;
|
let mut i = 0;
|
||||||
while i < hashes.len() {
|
while i < hashes.len() {
|
||||||
new_hashes.push(keccak256([hashes[i], hashes[i + 1]].concat()));
|
new_hashes.push(hash(&[hashes[i], hashes[i + 1]].concat()));
|
||||||
i += 2;
|
i += 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
72
coins/monero/src/ring_signatures.rs
Normal file
72
coins/monero/src/ring_signatures.rs
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
use std_shims::{
|
||||||
|
io::{self, *},
|
||||||
|
vec::Vec,
|
||||||
|
};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
|
use curve25519_dalek::{EdwardsPoint, Scalar};
|
||||||
|
|
||||||
|
use monero_generators::hash_to_point;
|
||||||
|
|
||||||
|
use crate::{serialize::*, hash_to_scalar};
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
|
pub struct Signature {
|
||||||
|
c: Scalar,
|
||||||
|
r: Scalar,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Signature {
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
write_scalar(&self.c, w)?;
|
||||||
|
write_scalar(&self.r, w)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Signature> {
|
||||||
|
Ok(Signature { c: read_scalar(r)?, r: read_scalar(r)? })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
|
pub struct RingSignature {
|
||||||
|
sigs: Vec<Signature>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RingSignature {
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
for sig in &self.sigs {
|
||||||
|
sig.write(w)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(members: usize, r: &mut R) -> io::Result<RingSignature> {
|
||||||
|
Ok(RingSignature { sigs: read_raw_vec(Signature::read, members, r)? })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn verify(&self, msg: &[u8; 32], ring: &[EdwardsPoint], key_image: &EdwardsPoint) -> bool {
|
||||||
|
if ring.len() != self.sigs.len() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut buf = Vec::with_capacity(32 + (32 * 2 * ring.len()));
|
||||||
|
buf.extend_from_slice(msg);
|
||||||
|
|
||||||
|
let mut sum = Scalar::ZERO;
|
||||||
|
|
||||||
|
for (ring_member, sig) in ring.iter().zip(&self.sigs) {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let Li = EdwardsPoint::vartime_double_scalar_mul_basepoint(&sig.c, ring_member, &sig.r);
|
||||||
|
buf.extend_from_slice(Li.compress().as_bytes());
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let Ri = (sig.r * hash_to_point(ring_member.compress().to_bytes())) + (sig.c * key_image);
|
||||||
|
buf.extend_from_slice(Ri.compress().as_bytes());
|
||||||
|
|
||||||
|
sum += sig.c;
|
||||||
|
}
|
||||||
|
|
||||||
|
sum == hash_to_scalar(&buf)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,35 +1,26 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
|
||||||
#![doc = include_str!("../README.md")]
|
|
||||||
#![deny(missing_docs)]
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
#![allow(non_snake_case)]
|
|
||||||
|
|
||||||
use core::fmt::Debug;
|
use core::fmt::Debug;
|
||||||
use std_shims::io::{self, Read, Write};
|
use std_shims::io::{self, Read, Write};
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
|
||||||
|
|
||||||
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use monero_io::*;
|
|
||||||
use monero_generators::H_pow_2;
|
use monero_generators::H_pow_2;
|
||||||
use monero_primitives::{keccak256_to_scalar, UnreducedScalar};
|
|
||||||
|
|
||||||
// 64 Borromean ring signatures, as needed for a 64-bit range proof.
|
use crate::{hash_to_scalar, unreduced_scalar::UnreducedScalar, serialize::*};
|
||||||
//
|
|
||||||
// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
|
/// 64 Borromean ring signatures.
|
||||||
// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
|
///
|
||||||
// algorithm which was in use.
|
/// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
/// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
|
||||||
struct BorromeanSignatures {
|
/// algorithm which was in use.
|
||||||
s0: [UnreducedScalar; 64],
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
s1: [UnreducedScalar; 64],
|
pub struct BorromeanSignatures {
|
||||||
ee: Scalar,
|
pub s0: [UnreducedScalar; 64],
|
||||||
|
pub s1: [UnreducedScalar; 64],
|
||||||
|
pub ee: Scalar,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanSignatures {
|
impl BorromeanSignatures {
|
||||||
// Read a set of BorromeanSignatures.
|
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
|
||||||
fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
|
|
||||||
Ok(BorromeanSignatures {
|
Ok(BorromeanSignatures {
|
||||||
s0: read_array(UnreducedScalar::read, r)?,
|
s0: read_array(UnreducedScalar::read, r)?,
|
||||||
s1: read_array(UnreducedScalar::read, r)?,
|
s1: read_array(UnreducedScalar::read, r)?,
|
||||||
@@ -37,8 +28,7 @@ impl BorromeanSignatures {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
// Write the set of BorromeanSignatures.
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
for s0 in &self.s0 {
|
for s0 in &self.s0 {
|
||||||
s0.write(w)?;
|
s0.write(w)?;
|
||||||
}
|
}
|
||||||
@@ -60,41 +50,36 @@ impl BorromeanSignatures {
|
|||||||
);
|
);
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||||
&keccak256_to_scalar(LL.compress().as_bytes()),
|
&hash_to_scalar(LL.compress().as_bytes()),
|
||||||
&keys_b[i],
|
&keys_b[i],
|
||||||
&self.s1[i].recover_monero_slide_scalar(),
|
&self.s1[i].recover_monero_slide_scalar(),
|
||||||
);
|
);
|
||||||
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
keccak256_to_scalar(transcript) == self.ee
|
hash_to_scalar(&transcript) == self.ee
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A range proof premised on Borromean ring signatures.
|
/// A range proof premised on Borromean ring signatures.
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
pub struct BorromeanRange {
|
pub struct BorromeanRange {
|
||||||
sigs: BorromeanSignatures,
|
pub sigs: BorromeanSignatures,
|
||||||
bit_commitments: [EdwardsPoint; 64],
|
pub bit_commitments: [EdwardsPoint; 64],
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanRange {
|
impl BorromeanRange {
|
||||||
/// Read a BorromeanRange proof.
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
|
||||||
Ok(BorromeanRange {
|
Ok(BorromeanRange {
|
||||||
sigs: BorromeanSignatures::read(r)?,
|
sigs: BorromeanSignatures::read(r)?,
|
||||||
bit_commitments: read_array(read_point, r)?,
|
bit_commitments: read_array(read_point, r)?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write the BorromeanRange proof.
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.sigs.write(w)?;
|
self.sigs.write(w)?;
|
||||||
write_raw_vec(write_point, &self.bit_commitments, w)
|
write_raw_vec(write_point, &self.bit_commitments, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Verify the commitment contains a 64-bit value.
|
|
||||||
#[must_use]
|
|
||||||
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
||||||
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
||||||
return false;
|
return false;
|
||||||
151
coins/monero/src/ringct/bulletproofs/core.rs
Normal file
151
coins/monero/src/ringct/bulletproofs/core.rs
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
use std_shims::{vec::Vec, sync::OnceLock};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use subtle::{Choice, ConditionallySelectable};
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;
|
||||||
|
|
||||||
|
use group::{ff::Field, Group};
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use multiexp::multiexp as multiexp_const;
|
||||||
|
|
||||||
|
pub(crate) use monero_generators::Generators;
|
||||||
|
|
||||||
|
use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
|
||||||
|
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
pub(crate) fn INV_EIGHT() -> Scalar {
|
||||||
|
Scalar(DALEK_INV_EIGHT())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
pub(crate) fn H() -> EdwardsPoint {
|
||||||
|
EdwardsPoint(DALEK_H())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
Scalar(dalek_hash(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Components common between variants
|
||||||
|
pub(crate) const MAX_M: usize = 16;
|
||||||
|
pub(crate) const LOG_N: usize = 6; // 2 << 6 == N
|
||||||
|
pub(crate) const N: usize = 64;
|
||||||
|
|
||||||
|
pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
|
||||||
|
multiexp_const(pairs) * INV_EIGHT()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn vector_exponent(
|
||||||
|
generators: &Generators,
|
||||||
|
a: &ScalarVector,
|
||||||
|
b: &ScalarVector,
|
||||||
|
) -> EdwardsPoint {
|
||||||
|
debug_assert_eq!(a.len(), b.len());
|
||||||
|
(a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
|
||||||
|
let slice =
|
||||||
|
&[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
|
||||||
|
.concat();
|
||||||
|
*cache = hash_to_scalar(slice);
|
||||||
|
*cache
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
|
||||||
|
let mut logM = 0;
|
||||||
|
let mut M;
|
||||||
|
while {
|
||||||
|
M = 1 << logM;
|
||||||
|
(M <= MAX_M) && (M < outputs)
|
||||||
|
} {
|
||||||
|
logM += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
(logM + LOG_N, M, M * N)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
|
||||||
|
let (_, M, MN) = MN(commitments.len());
|
||||||
|
|
||||||
|
let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
|
||||||
|
let mut aL = ScalarVector::new(MN);
|
||||||
|
let mut aR = ScalarVector::new(MN);
|
||||||
|
|
||||||
|
for j in 0 .. M {
|
||||||
|
for i in (0 .. N).rev() {
|
||||||
|
let bit =
|
||||||
|
if j < sv.len() { Choice::from((sv[j][i / 8] >> (i % 8)) & 1) } else { Choice::from(0) };
|
||||||
|
aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
|
||||||
|
aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
(aL, aR)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
|
||||||
|
commitments: C,
|
||||||
|
) -> (Scalar, Vec<EdwardsPoint>) {
|
||||||
|
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
|
||||||
|
(hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
generators: &Generators,
|
||||||
|
aL: &ScalarVector,
|
||||||
|
aR: &ScalarVector,
|
||||||
|
) -> (Scalar, EdwardsPoint) {
|
||||||
|
let ar = Scalar::random(rng);
|
||||||
|
(ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn LR_statements(
|
||||||
|
a: &ScalarVector,
|
||||||
|
G_i: &[EdwardsPoint],
|
||||||
|
b: &ScalarVector,
|
||||||
|
H_i: &[EdwardsPoint],
|
||||||
|
cL: Scalar,
|
||||||
|
U: EdwardsPoint,
|
||||||
|
) -> Vec<(Scalar, EdwardsPoint)> {
|
||||||
|
let mut res = a
|
||||||
|
.0
|
||||||
|
.iter()
|
||||||
|
.copied()
|
||||||
|
.zip(G_i.iter().copied())
|
||||||
|
.chain(b.0.iter().copied().zip(H_i.iter().copied()))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
res.push((cL, U));
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
|
||||||
|
pub(crate) fn TWO_N() -> &'static ScalarVector {
|
||||||
|
TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
|
||||||
|
let mut products = vec![Scalar::ZERO; 1 << w.len()];
|
||||||
|
products[0] = winv[0];
|
||||||
|
products[1] = w[0];
|
||||||
|
for j in 1 .. w.len() {
|
||||||
|
let mut slots = (1 << (j + 1)) - 1;
|
||||||
|
while slots > 0 {
|
||||||
|
products[slots] = products[slots / 2] * w[j];
|
||||||
|
products[slots - 1] = products[slots / 2] * winv[j];
|
||||||
|
slots = slots.saturating_sub(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sanity check as if the above failed to populate, it'd be critical
|
||||||
|
for w in &products {
|
||||||
|
debug_assert!(!bool::from(w.is_zero()));
|
||||||
|
}
|
||||||
|
|
||||||
|
products
|
||||||
|
}
|
||||||
229
coins/monero/src/ringct/bulletproofs/mod.rs
Normal file
229
coins/monero/src/ringct/bulletproofs/mod.rs
Normal file
@@ -0,0 +1,229 @@
|
|||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, Zeroizing};
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
use multiexp::BatchVerifier;
|
||||||
|
|
||||||
|
use crate::{Commitment, wallet::TransactionError, serialize::*};
|
||||||
|
|
||||||
|
pub(crate) mod scalar_vector;
|
||||||
|
pub(crate) mod core;
|
||||||
|
use self::core::LOG_N;
|
||||||
|
|
||||||
|
pub(crate) mod original;
|
||||||
|
use self::original::OriginalStruct;
|
||||||
|
|
||||||
|
pub(crate) mod plus;
|
||||||
|
use self::plus::*;
|
||||||
|
|
||||||
|
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
|
||||||
|
|
||||||
|
/// Bulletproofs enum, supporting the original and plus formulations.
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub enum Bulletproofs {
|
||||||
|
Original(OriginalStruct),
|
||||||
|
Plus(AggregateRangeProof),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bulletproofs {
|
||||||
|
fn bp_fields(plus: bool) -> usize {
|
||||||
|
if plus {
|
||||||
|
6
|
||||||
|
} else {
|
||||||
|
9
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
||||||
|
// src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
|
||||||
|
pub(crate) fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut LR_len = 0;
|
||||||
|
let mut n_padded_outputs = 1;
|
||||||
|
while n_padded_outputs < n_outputs {
|
||||||
|
LR_len += 1;
|
||||||
|
n_padded_outputs = 1 << LR_len;
|
||||||
|
}
|
||||||
|
LR_len += LOG_N;
|
||||||
|
|
||||||
|
let mut bp_clawback = 0;
|
||||||
|
if n_padded_outputs > 2 {
|
||||||
|
let fields = Bulletproofs::bp_fields(plus);
|
||||||
|
let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
|
||||||
|
let size = (fields + (2 * LR_len)) * 32;
|
||||||
|
bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
(bp_clawback, LR_len)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let (bp_clawback, LR_len) = Bulletproofs::calculate_bp_clawback(plus, outputs);
|
||||||
|
32 * (Bulletproofs::bp_fields(plus) + (2 * LR_len)) + 2 + bp_clawback
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Prove the list of commitments are within [0 .. 2^64).
|
||||||
|
pub fn prove<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
outputs: &[Commitment],
|
||||||
|
plus: bool,
|
||||||
|
) -> Result<Bulletproofs, TransactionError> {
|
||||||
|
if outputs.is_empty() {
|
||||||
|
Err(TransactionError::NoOutputs)?;
|
||||||
|
}
|
||||||
|
if outputs.len() > MAX_OUTPUTS {
|
||||||
|
Err(TransactionError::TooManyOutputs)?;
|
||||||
|
}
|
||||||
|
Ok(if !plus {
|
||||||
|
Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
|
||||||
|
} else {
|
||||||
|
use dalek_ff_group::EdwardsPoint as DfgPoint;
|
||||||
|
Bulletproofs::Plus(
|
||||||
|
AggregateRangeStatement::new(outputs.iter().map(|com| DfgPoint(com.calculate())).collect())
|
||||||
|
.unwrap()
|
||||||
|
.prove(rng, &Zeroizing::new(AggregateRangeWitness::new(outputs).unwrap()))
|
||||||
|
.unwrap(),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify the given Bulletproofs.
|
||||||
|
#[must_use]
|
||||||
|
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
||||||
|
match self {
|
||||||
|
Bulletproofs::Original(bp) => bp.verify(rng, commitments),
|
||||||
|
Bulletproofs::Plus(bp) => {
|
||||||
|
let mut verifier = BatchVerifier::new(1);
|
||||||
|
// If this commitment is torsioned (which is allowed), this won't be a well-formed
|
||||||
|
// dfg::EdwardsPoint (expected to be of prime-order)
|
||||||
|
// The actual BP+ impl will perform a torsion clear though, making this safe
|
||||||
|
// TODO: Have AggregateRangeStatement take in dalek EdwardsPoint for clarity on this
|
||||||
|
let Some(statement) = AggregateRangeStatement::new(
|
||||||
|
commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
|
||||||
|
) else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
if !statement.verify(rng, &mut verifier, (), bp.clone()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
verifier.verify_vartime()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
|
||||||
|
/// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
|
||||||
|
/// Returns true if the Bulletproofs are sane, regardless of their validity.
|
||||||
|
#[must_use]
|
||||||
|
pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
|
&self,
|
||||||
|
rng: &mut R,
|
||||||
|
verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
|
||||||
|
id: ID,
|
||||||
|
commitments: &[EdwardsPoint],
|
||||||
|
) -> bool {
|
||||||
|
match self {
|
||||||
|
Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
||||||
|
Bulletproofs::Plus(bp) => {
|
||||||
|
let Some(statement) = AggregateRangeStatement::new(
|
||||||
|
commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
|
||||||
|
) else {
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
statement.verify(rng, verifier, id, bp.clone())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
|
||||||
|
&self,
|
||||||
|
w: &mut W,
|
||||||
|
specific_write_vec: F,
|
||||||
|
) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
Bulletproofs::Original(bp) => {
|
||||||
|
write_point(&bp.A, w)?;
|
||||||
|
write_point(&bp.S, w)?;
|
||||||
|
write_point(&bp.T1, w)?;
|
||||||
|
write_point(&bp.T2, w)?;
|
||||||
|
write_scalar(&bp.taux, w)?;
|
||||||
|
write_scalar(&bp.mu, w)?;
|
||||||
|
specific_write_vec(&bp.L, w)?;
|
||||||
|
specific_write_vec(&bp.R, w)?;
|
||||||
|
write_scalar(&bp.a, w)?;
|
||||||
|
write_scalar(&bp.b, w)?;
|
||||||
|
write_scalar(&bp.t, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
Bulletproofs::Plus(bp) => {
|
||||||
|
write_point(&bp.A.0, w)?;
|
||||||
|
write_point(&bp.wip.A.0, w)?;
|
||||||
|
write_point(&bp.wip.B.0, w)?;
|
||||||
|
write_scalar(&bp.wip.r_answer.0, w)?;
|
||||||
|
write_scalar(&bp.wip.s_answer.0, w)?;
|
||||||
|
write_scalar(&bp.wip.delta_answer.0, w)?;
|
||||||
|
specific_write_vec(&bp.wip.L.iter().copied().map(|L| L.0).collect::<Vec<_>>(), w)?;
|
||||||
|
specific_write_vec(&bp.wip.R.iter().copied().map(|R| R.0).collect::<Vec<_>>(), w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
self.write_core(w, |points, w| write_vec(write_point, points, w))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read Bulletproofs.
|
||||||
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
||||||
|
Ok(Bulletproofs::Original(OriginalStruct {
|
||||||
|
A: read_point(r)?,
|
||||||
|
S: read_point(r)?,
|
||||||
|
T1: read_point(r)?,
|
||||||
|
T2: read_point(r)?,
|
||||||
|
taux: read_scalar(r)?,
|
||||||
|
mu: read_scalar(r)?,
|
||||||
|
L: read_vec(read_point, r)?,
|
||||||
|
R: read_vec(read_point, r)?,
|
||||||
|
a: read_scalar(r)?,
|
||||||
|
b: read_scalar(r)?,
|
||||||
|
t: read_scalar(r)?,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read Bulletproofs+.
|
||||||
|
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
||||||
|
use dalek_ff_group::{Scalar as DfgScalar, EdwardsPoint as DfgPoint};
|
||||||
|
|
||||||
|
Ok(Bulletproofs::Plus(AggregateRangeProof {
|
||||||
|
A: DfgPoint(read_point(r)?),
|
||||||
|
wip: WipProof {
|
||||||
|
A: DfgPoint(read_point(r)?),
|
||||||
|
B: DfgPoint(read_point(r)?),
|
||||||
|
r_answer: DfgScalar(read_scalar(r)?),
|
||||||
|
s_answer: DfgScalar(read_scalar(r)?),
|
||||||
|
delta_answer: DfgScalar(read_scalar(r)?),
|
||||||
|
L: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
|
||||||
|
R: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
|
||||||
|
},
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
}
|
||||||
322
coins/monero/src/ringct/bulletproofs/original.rs
Normal file
322
coins/monero/src/ringct/bulletproofs/original.rs
Normal file
@@ -0,0 +1,322 @@
|
|||||||
|
use std_shims::{vec::Vec, sync::OnceLock};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
|
||||||
|
|
||||||
|
use group::{ff::Field, Group};
|
||||||
|
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use multiexp::{BatchVerifier, multiexp};
|
||||||
|
|
||||||
|
use crate::{Commitment, ringct::bulletproofs::core::*};
|
||||||
|
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
|
||||||
|
|
||||||
|
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
|
||||||
|
pub(crate) fn IP12() -> Scalar {
|
||||||
|
*IP12_CELL.get_or_init(|| ScalarVector(vec![Scalar::ONE; N]).inner_product(TWO_N()))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hadamard_fold(
|
||||||
|
l: &[EdwardsPoint],
|
||||||
|
r: &[EdwardsPoint],
|
||||||
|
a: Scalar,
|
||||||
|
b: Scalar,
|
||||||
|
) -> Vec<EdwardsPoint> {
|
||||||
|
let mut res = Vec::with_capacity(l.len() / 2);
|
||||||
|
for i in 0 .. l.len() {
|
||||||
|
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct OriginalStruct {
|
||||||
|
pub(crate) A: DalekPoint,
|
||||||
|
pub(crate) S: DalekPoint,
|
||||||
|
pub(crate) T1: DalekPoint,
|
||||||
|
pub(crate) T2: DalekPoint,
|
||||||
|
pub(crate) taux: DalekScalar,
|
||||||
|
pub(crate) mu: DalekScalar,
|
||||||
|
pub(crate) L: Vec<DalekPoint>,
|
||||||
|
pub(crate) R: Vec<DalekPoint>,
|
||||||
|
pub(crate) a: DalekScalar,
|
||||||
|
pub(crate) b: DalekScalar,
|
||||||
|
pub(crate) t: DalekScalar,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OriginalStruct {
|
||||||
|
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
commitments: &[Commitment],
|
||||||
|
) -> OriginalStruct {
|
||||||
|
let (logMN, M, MN) = MN(commitments.len());
|
||||||
|
|
||||||
|
let (aL, aR) = bit_decompose(commitments);
|
||||||
|
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
||||||
|
let (mut cache, _) = hash_commitments(commitments_points.clone());
|
||||||
|
|
||||||
|
let (sL, sR) =
|
||||||
|
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
|
||||||
|
|
||||||
|
let generators = GENERATORS();
|
||||||
|
let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
|
||||||
|
let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);
|
||||||
|
|
||||||
|
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
|
||||||
|
let mut cache = hash_to_scalar(&y.to_bytes());
|
||||||
|
let z = cache;
|
||||||
|
|
||||||
|
let l0 = aL - z;
|
||||||
|
let l1 = sL;
|
||||||
|
|
||||||
|
let mut zero_twos = Vec::with_capacity(MN);
|
||||||
|
let zpow = ScalarVector::powers(z, M + 2);
|
||||||
|
for j in 0 .. M {
|
||||||
|
for i in 0 .. N {
|
||||||
|
zero_twos.push(zpow[j + 2] * TWO_N()[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let yMN = ScalarVector::powers(y, MN);
|
||||||
|
let r0 = ((aR + z) * &yMN) + &ScalarVector(zero_twos);
|
||||||
|
let r1 = yMN * &sR;
|
||||||
|
|
||||||
|
let (T1, T2, x, mut taux) = {
|
||||||
|
let t1 = l0.clone().inner_product(&r1) + r0.clone().inner_product(&l1);
|
||||||
|
let t2 = l1.clone().inner_product(&r1);
|
||||||
|
|
||||||
|
let mut tau1 = Scalar::random(&mut *rng);
|
||||||
|
let mut tau2 = Scalar::random(&mut *rng);
|
||||||
|
|
||||||
|
let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
|
||||||
|
let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);
|
||||||
|
|
||||||
|
let x =
|
||||||
|
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
|
||||||
|
|
||||||
|
let taux = (tau2 * (x * x)) + (tau1 * x);
|
||||||
|
|
||||||
|
tau1.zeroize();
|
||||||
|
tau2.zeroize();
|
||||||
|
(T1, T2, x, taux)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mu = (x * rho) + alpha;
|
||||||
|
alpha.zeroize();
|
||||||
|
rho.zeroize();
|
||||||
|
|
||||||
|
for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
|
||||||
|
taux += zpow[i + 2] * gamma;
|
||||||
|
}
|
||||||
|
|
||||||
|
let l = l0 + &(l1 * x);
|
||||||
|
let r = r0 + &(r1 * x);
|
||||||
|
|
||||||
|
let t = l.clone().inner_product(&r);
|
||||||
|
|
||||||
|
let x_ip =
|
||||||
|
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
|
||||||
|
|
||||||
|
let mut a = l;
|
||||||
|
let mut b = r;
|
||||||
|
|
||||||
|
let yinv = y.invert().unwrap();
|
||||||
|
let yinvpow = ScalarVector::powers(yinv, MN);
|
||||||
|
|
||||||
|
let mut G_proof = generators.G[.. a.len()].to_vec();
|
||||||
|
let mut H_proof = generators.H[.. a.len()].to_vec();
|
||||||
|
H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
|
||||||
|
let U = H() * x_ip;
|
||||||
|
|
||||||
|
let mut L = Vec::with_capacity(logMN);
|
||||||
|
let mut R = Vec::with_capacity(logMN);
|
||||||
|
|
||||||
|
while a.len() != 1 {
|
||||||
|
let (aL, aR) = a.split();
|
||||||
|
let (bL, bR) = b.split();
|
||||||
|
|
||||||
|
let cL = aL.clone().inner_product(&bR);
|
||||||
|
let cR = aR.clone().inner_product(&bL);
|
||||||
|
|
||||||
|
let (G_L, G_R) = G_proof.split_at(aL.len());
|
||||||
|
let (H_L, H_R) = H_proof.split_at(aL.len());
|
||||||
|
|
||||||
|
let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
|
||||||
|
let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
|
||||||
|
L.push(L_i);
|
||||||
|
R.push(R_i);
|
||||||
|
|
||||||
|
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
|
||||||
|
let winv = w.invert().unwrap();
|
||||||
|
|
||||||
|
a = (aL * w) + &(aR * winv);
|
||||||
|
b = (bL * winv) + &(bR * w);
|
||||||
|
|
||||||
|
if a.len() != 1 {
|
||||||
|
G_proof = hadamard_fold(G_L, G_R, winv, w);
|
||||||
|
H_proof = hadamard_fold(H_L, H_R, w, winv);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let res = OriginalStruct {
|
||||||
|
A: *A,
|
||||||
|
S: *S,
|
||||||
|
T1: *T1,
|
||||||
|
T2: *T2,
|
||||||
|
taux: *taux,
|
||||||
|
mu: *mu,
|
||||||
|
L: L.drain(..).map(|L| *L).collect(),
|
||||||
|
R: R.drain(..).map(|R| *R).collect(),
|
||||||
|
a: *a[0],
|
||||||
|
b: *b[0],
|
||||||
|
t: *t,
|
||||||
|
};
|
||||||
|
debug_assert!(res.verify(rng, &commitments_points));
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
|
&self,
|
||||||
|
rng: &mut R,
|
||||||
|
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
||||||
|
id: ID,
|
||||||
|
commitments: &[DalekPoint],
|
||||||
|
) -> bool {
|
||||||
|
// Verify commitments are valid
|
||||||
|
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify L and R are properly sized
|
||||||
|
if self.L.len() != self.R.len() {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let (logMN, M, MN) = MN(commitments.len());
|
||||||
|
if self.L.len() != logMN {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Rebuild all challenges
|
||||||
|
let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
|
||||||
|
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);
|
||||||
|
|
||||||
|
let z = hash_to_scalar(&y.to_bytes());
|
||||||
|
cache = z;
|
||||||
|
|
||||||
|
let x = hash_cache(
|
||||||
|
&mut cache,
|
||||||
|
&[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
|
||||||
|
);
|
||||||
|
|
||||||
|
let x_ip = hash_cache(
|
||||||
|
&mut cache,
|
||||||
|
&[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut w = Vec::with_capacity(logMN);
|
||||||
|
let mut winv = Vec::with_capacity(logMN);
|
||||||
|
for (L, R) in self.L.iter().zip(&self.R) {
|
||||||
|
w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
|
||||||
|
winv.push(cache.invert().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the proof from * INV_EIGHT to its actual form
|
||||||
|
let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());
|
||||||
|
|
||||||
|
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
|
||||||
|
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
|
||||||
|
let T1 = normalize(&self.T1);
|
||||||
|
let T2 = normalize(&self.T2);
|
||||||
|
let A = normalize(&self.A);
|
||||||
|
let S = normalize(&self.S);
|
||||||
|
|
||||||
|
let commitments = commitments.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
||||||
|
|
||||||
|
// Verify it
|
||||||
|
let mut proof = Vec::with_capacity(4 + commitments.len());
|
||||||
|
|
||||||
|
let zpow = ScalarVector::powers(z, M + 3);
|
||||||
|
let ip1y = ScalarVector::powers(y, M * N).sum();
|
||||||
|
let mut k = -(zpow[2] * ip1y);
|
||||||
|
for j in 1 ..= M {
|
||||||
|
k -= zpow[j + 2] * IP12();
|
||||||
|
}
|
||||||
|
let y1 = Scalar(self.t) - ((z * ip1y) + k);
|
||||||
|
proof.push((-y1, H()));
|
||||||
|
|
||||||
|
proof.push((-Scalar(self.taux), G));
|
||||||
|
|
||||||
|
for (j, commitment) in commitments.iter().enumerate() {
|
||||||
|
proof.push((zpow[j + 2], *commitment));
|
||||||
|
}
|
||||||
|
|
||||||
|
proof.push((x, T1));
|
||||||
|
proof.push((x * x, T2));
|
||||||
|
verifier.queue(&mut *rng, id, proof);
|
||||||
|
|
||||||
|
proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
|
||||||
|
let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
|
||||||
|
proof.push((z3, H()));
|
||||||
|
proof.push((-Scalar(self.mu), G));
|
||||||
|
|
||||||
|
proof.push((Scalar::ONE, A));
|
||||||
|
proof.push((x, S));
|
||||||
|
|
||||||
|
{
|
||||||
|
let ypow = ScalarVector::powers(y, MN);
|
||||||
|
let yinv = y.invert().unwrap();
|
||||||
|
let yinvpow = ScalarVector::powers(yinv, MN);
|
||||||
|
|
||||||
|
let w_cache = challenge_products(&w, &winv);
|
||||||
|
|
||||||
|
let generators = GENERATORS();
|
||||||
|
for i in 0 .. MN {
|
||||||
|
let g = (Scalar(self.a) * w_cache[i]) + z;
|
||||||
|
proof.push((-g, generators.G[i]));
|
||||||
|
|
||||||
|
let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
|
||||||
|
h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
|
||||||
|
proof.push((-h, generators.H[i]));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for i in 0 .. logMN {
|
||||||
|
proof.push((w[i] * w[i], L[i]));
|
||||||
|
proof.push((winv[i] * winv[i], R[i]));
|
||||||
|
}
|
||||||
|
verifier.queue(rng, id, proof);
|
||||||
|
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
||||||
|
&self,
|
||||||
|
rng: &mut R,
|
||||||
|
commitments: &[DalekPoint],
|
||||||
|
) -> bool {
|
||||||
|
let mut verifier = BatchVerifier::new(1);
|
||||||
|
if self.verify_core(rng, &mut verifier, (), commitments) {
|
||||||
|
verifier.verify_vartime()
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
|
&self,
|
||||||
|
rng: &mut R,
|
||||||
|
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
||||||
|
id: ID,
|
||||||
|
commitments: &[DalekPoint],
|
||||||
|
) -> bool {
|
||||||
|
self.verify_core(rng, verifier, id, commitments)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,85 +1,93 @@
|
|||||||
use std_shims::{vec, vec::Vec};
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
|
|
||||||
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
|
use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
|
||||||
|
use group::{
|
||||||
use monero_primitives::{INV_EIGHT, Commitment, keccak256_to_scalar};
|
ff::{Field, PrimeField},
|
||||||
|
Group, GroupEncoding,
|
||||||
|
};
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
batch_verifier::BulletproofsPlusBatchVerifier,
|
Commitment,
|
||||||
core::{MAX_COMMITMENTS, COMMITMENT_BITS, multiexp, multiexp_vartime},
|
ringct::{
|
||||||
plus::{
|
bulletproofs::core::{MAX_M, N},
|
||||||
ScalarVector, PointVector, GeneratorsList, BpPlusGenerators,
|
bulletproofs::plus::{
|
||||||
|
ScalarVector, PointVector, GeneratorsList, Generators,
|
||||||
transcript::*,
|
transcript::*,
|
||||||
weighted_inner_product::{WipStatement, WipWitness, WipProof},
|
weighted_inner_product::{WipStatement, WipWitness, WipProof},
|
||||||
padded_pow_of_2, u64_decompose,
|
padded_pow_of_2, u64_decompose,
|
||||||
},
|
},
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
// Figure 3 of the Bulletproofs+ Paper
|
// Figure 3
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct AggregateRangeStatement<'a> {
|
pub(crate) struct AggregateRangeStatement {
|
||||||
generators: BpPlusGenerators,
|
generators: Generators,
|
||||||
V: &'a [EdwardsPoint],
|
V: Vec<EdwardsPoint>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Zeroize for AggregateRangeStatement {
|
||||||
|
fn zeroize(&mut self) {
|
||||||
|
self.V.zeroize();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct AggregateRangeWitness(Vec<Commitment>);
|
pub(crate) struct AggregateRangeWitness {
|
||||||
|
values: Vec<u64>,
|
||||||
|
gammas: Vec<Scalar>,
|
||||||
|
}
|
||||||
|
|
||||||
impl AggregateRangeWitness {
|
impl AggregateRangeWitness {
|
||||||
pub(crate) fn new(commitments: Vec<Commitment>) -> Option<Self> {
|
pub(crate) fn new(commitments: &[Commitment]) -> Option<Self> {
|
||||||
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
|
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(AggregateRangeWitness(commitments))
|
let mut values = Vec::with_capacity(commitments.len());
|
||||||
|
let mut gammas = Vec::with_capacity(commitments.len());
|
||||||
|
for commitment in commitments {
|
||||||
|
values.push(commitment.amount);
|
||||||
|
gammas.push(Scalar(commitment.mask));
|
||||||
|
}
|
||||||
|
Some(AggregateRangeWitness { values, gammas })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Internal structure representing a Bulletproof+, as defined by Monero..
|
|
||||||
#[doc(hidden)]
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct AggregateRangeProof {
|
pub struct AggregateRangeProof {
|
||||||
pub(crate) A: EdwardsPoint,
|
pub(crate) A: EdwardsPoint,
|
||||||
pub(crate) wip: WipProof,
|
pub(crate) wip: WipProof,
|
||||||
}
|
}
|
||||||
|
|
||||||
struct AHatComputation {
|
impl AggregateRangeStatement {
|
||||||
y: Scalar,
|
pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
|
||||||
d_descending_y_plus_z: ScalarVector,
|
if V.is_empty() || (V.len() > MAX_M) {
|
||||||
y_mn_plus_one: Scalar,
|
|
||||||
z: Scalar,
|
|
||||||
z_pow: ScalarVector,
|
|
||||||
A_hat: EdwardsPoint,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> AggregateRangeStatement<'a> {
|
|
||||||
pub(crate) fn new(V: &'a [EdwardsPoint]) -> Option<Self> {
|
|
||||||
if V.is_empty() || (V.len() > MAX_COMMITMENTS) {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Self { generators: BpPlusGenerators::new(), V })
|
Some(Self { generators: Generators::new(), V })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
|
fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
|
||||||
let y = keccak256_to_scalar(
|
let y = hash_to_scalar(&[transcript.to_repr().as_ref(), A.to_bytes().as_ref()].concat());
|
||||||
[transcript.to_bytes().as_ref(), A.compress().to_bytes().as_ref()].concat(),
|
let z = hash_to_scalar(y.to_bytes().as_ref());
|
||||||
);
|
|
||||||
let z = keccak256_to_scalar(y.to_bytes().as_ref());
|
|
||||||
*transcript = z;
|
*transcript = z;
|
||||||
(y, z)
|
(y, z)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn d_j(j: usize, m: usize) -> ScalarVector {
|
fn d_j(j: usize, m: usize) -> ScalarVector {
|
||||||
let mut d_j = Vec::with_capacity(m * COMMITMENT_BITS);
|
let mut d_j = Vec::with_capacity(m * N);
|
||||||
for _ in 0 .. (j - 1) * COMMITMENT_BITS {
|
for _ in 0 .. (j - 1) * N {
|
||||||
d_j.push(Scalar::ZERO);
|
d_j.push(Scalar::ZERO);
|
||||||
}
|
}
|
||||||
d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS).0);
|
d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), N).0);
|
||||||
for _ in 0 .. (m - j) * COMMITMENT_BITS {
|
for _ in 0 .. (m - j) * N {
|
||||||
d_j.push(Scalar::ZERO);
|
d_j.push(Scalar::ZERO);
|
||||||
}
|
}
|
||||||
ScalarVector(d_j)
|
ScalarVector(d_j)
|
||||||
@@ -87,26 +95,23 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
|
|
||||||
fn compute_A_hat(
|
fn compute_A_hat(
|
||||||
mut V: PointVector,
|
mut V: PointVector,
|
||||||
generators: &BpPlusGenerators,
|
generators: &Generators,
|
||||||
transcript: &mut Scalar,
|
transcript: &mut Scalar,
|
||||||
mut A: EdwardsPoint,
|
mut A: EdwardsPoint,
|
||||||
) -> AHatComputation {
|
) -> (Scalar, ScalarVector, Scalar, Scalar, ScalarVector, EdwardsPoint) {
|
||||||
let (y, z) = Self::transcript_A(transcript, A);
|
let (y, z) = Self::transcript_A(transcript, A);
|
||||||
A = A.mul_by_cofactor();
|
A = A.mul_by_cofactor();
|
||||||
|
|
||||||
while V.len() < padded_pow_of_2(V.len()) {
|
while V.len() < padded_pow_of_2(V.len()) {
|
||||||
V.0.push(EdwardsPoint::identity());
|
V.0.push(EdwardsPoint::identity());
|
||||||
}
|
}
|
||||||
let mn = V.len() * COMMITMENT_BITS;
|
let mn = V.len() * N;
|
||||||
|
|
||||||
// 2, 4, 6, 8... powers of z, of length equivalent to the amount of commitments
|
|
||||||
let mut z_pow = Vec::with_capacity(V.len());
|
let mut z_pow = Vec::with_capacity(V.len());
|
||||||
// z**2
|
|
||||||
z_pow.push(z * z);
|
|
||||||
|
|
||||||
let mut d = ScalarVector::new(mn);
|
let mut d = ScalarVector::new(mn);
|
||||||
for j in 1 ..= V.len() {
|
for j in 1 ..= V.len() {
|
||||||
z_pow.push(*z_pow.last().unwrap() * z_pow[0]);
|
z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
|
||||||
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
|
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -132,23 +137,23 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
let neg_z = -z;
|
let neg_z = -z;
|
||||||
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
|
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
|
||||||
for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
|
for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
|
||||||
A_terms.push((neg_z, generators.generator(GeneratorsList::GBold, i)));
|
A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
|
||||||
A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold, i)));
|
A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold1, i)));
|
||||||
}
|
}
|
||||||
A_terms.push((y_mn_plus_one, commitment_accum));
|
A_terms.push((y_mn_plus_one, commitment_accum));
|
||||||
A_terms.push((
|
A_terms.push((
|
||||||
((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * (z * z))),
|
((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * z.square())),
|
||||||
BpPlusGenerators::g(),
|
Generators::g(),
|
||||||
));
|
));
|
||||||
|
|
||||||
AHatComputation {
|
(
|
||||||
y,
|
y,
|
||||||
d_descending_y_plus_z,
|
d_descending_y_plus_z,
|
||||||
y_mn_plus_one,
|
y_mn_plus_one,
|
||||||
z,
|
z,
|
||||||
z_pow: ScalarVector(z_pow),
|
ScalarVector(z_pow),
|
||||||
A_hat: A + multiexp_vartime(&A_terms),
|
A + multiexp_vartime(&A_terms),
|
||||||
}
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
||||||
@@ -157,11 +162,13 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
witness: &AggregateRangeWitness,
|
witness: &AggregateRangeWitness,
|
||||||
) -> Option<AggregateRangeProof> {
|
) -> Option<AggregateRangeProof> {
|
||||||
// Check for consistency with the witness
|
// Check for consistency with the witness
|
||||||
if self.V.len() != witness.0.len() {
|
if self.V.len() != witness.values.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
for (commitment, witness) in self.V.iter().zip(witness.0.iter()) {
|
for (commitment, (value, gamma)) in
|
||||||
if witness.calculate() != *commitment {
|
self.V.iter().zip(witness.values.iter().zip(witness.gammas.iter()))
|
||||||
|
{
|
||||||
|
if Commitment::new(**gamma, *value).calculate() != **commitment {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -174,28 +181,22 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
// Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
|
// Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
|
||||||
// clearing its cofactor without mutating the value
|
// clearing its cofactor without mutating the value
|
||||||
// For some reason, these values are transcripted * INV_EIGHT, not as transmitted
|
// For some reason, these values are transcripted * INV_EIGHT, not as transmitted
|
||||||
let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
|
let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
|
||||||
let mut transcript = initial_transcript(V.iter());
|
let mut transcript = initial_transcript(V.iter());
|
||||||
let mut V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());
|
||||||
|
|
||||||
// Pad V
|
// Pad V
|
||||||
while V.len() < padded_pow_of_2(V.len()) {
|
while V.len() < padded_pow_of_2(V.len()) {
|
||||||
V.push(EdwardsPoint::identity());
|
V.push(EdwardsPoint::identity());
|
||||||
}
|
}
|
||||||
|
|
||||||
let generators = generators.reduce(V.len() * COMMITMENT_BITS);
|
let generators = generators.reduce(V.len() * N);
|
||||||
|
|
||||||
let mut d_js = Vec::with_capacity(V.len());
|
let mut d_js = Vec::with_capacity(V.len());
|
||||||
let mut a_l = ScalarVector(Vec::with_capacity(V.len() * COMMITMENT_BITS));
|
let mut a_l = ScalarVector(Vec::with_capacity(V.len() * N));
|
||||||
for j in 1 ..= V.len() {
|
for j in 1 ..= V.len() {
|
||||||
d_js.push(Self::d_j(j, V.len()));
|
d_js.push(Self::d_j(j, V.len()));
|
||||||
#[allow(clippy::map_unwrap_or)]
|
a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
|
||||||
a_l.0.append(
|
|
||||||
&mut u64_decompose(
|
|
||||||
*witness.0.get(j - 1).map(|commitment| &commitment.amount).unwrap_or(&0),
|
|
||||||
)
|
|
||||||
.0,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let a_r = a_l.clone() - Scalar::ONE;
|
let a_r = a_l.clone() - Scalar::ONE;
|
||||||
@@ -204,26 +205,26 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
|
|
||||||
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
|
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
|
||||||
for (i, a_l) in a_l.0.iter().enumerate() {
|
for (i, a_l) in a_l.0.iter().enumerate() {
|
||||||
A_terms.push((*a_l, generators.generator(GeneratorsList::GBold, i)));
|
A_terms.push((*a_l, generators.generator(GeneratorsList::GBold1, i)));
|
||||||
}
|
}
|
||||||
for (i, a_r) in a_r.0.iter().enumerate() {
|
for (i, a_r) in a_r.0.iter().enumerate() {
|
||||||
A_terms.push((*a_r, generators.generator(GeneratorsList::HBold, i)));
|
A_terms.push((*a_r, generators.generator(GeneratorsList::HBold1, i)));
|
||||||
}
|
}
|
||||||
A_terms.push((alpha, BpPlusGenerators::h()));
|
A_terms.push((alpha, Generators::h()));
|
||||||
let mut A = multiexp(&A_terms);
|
let mut A = multiexp(&A_terms);
|
||||||
A_terms.zeroize();
|
A_terms.zeroize();
|
||||||
|
|
||||||
// Multiply by INV_EIGHT per earlier commentary
|
// Multiply by INV_EIGHT per earlier commentary
|
||||||
A *= INV_EIGHT();
|
A.0 *= crate::INV_EIGHT();
|
||||||
|
|
||||||
let AHatComputation { y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat } =
|
let (y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat) =
|
||||||
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);
|
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);
|
||||||
|
|
||||||
let a_l = a_l - z;
|
let a_l = a_l - z;
|
||||||
let a_r = a_r + &d_descending_y_plus_z;
|
let a_r = a_r + &d_descending_y_plus_z;
|
||||||
let mut alpha = alpha;
|
let mut alpha = alpha;
|
||||||
for j in 1 ..= witness.0.len() {
|
for j in 1 ..= witness.gammas.len() {
|
||||||
alpha += z_pow[j - 1] * witness.0[j - 1].mask * y_mn_plus_one;
|
alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(AggregateRangeProof {
|
Some(AggregateRangeProof {
|
||||||
@@ -234,22 +235,23 @@ impl<'a> AggregateRangeStatement<'a> {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
verifier: &mut BulletproofsPlusBatchVerifier,
|
verifier: &mut BatchVerifier<Id, EdwardsPoint>,
|
||||||
|
id: Id,
|
||||||
proof: AggregateRangeProof,
|
proof: AggregateRangeProof,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let Self { generators, V } = self;
|
let Self { generators, V } = self;
|
||||||
|
|
||||||
let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
|
let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
|
||||||
let mut transcript = initial_transcript(V.iter());
|
let mut transcript = initial_transcript(V.iter());
|
||||||
let V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());
|
||||||
|
|
||||||
let generators = generators.reduce(V.len() * COMMITMENT_BITS);
|
let generators = generators.reduce(V.len() * N);
|
||||||
|
|
||||||
let AHatComputation { y, A_hat, .. } =
|
let (y, _, _, _, _, A_hat) =
|
||||||
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
|
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
|
||||||
WipStatement::new(generators, A_hat, y).verify(rng, verifier, transcript, proof.wip)
|
WipStatement::new(generators, A_hat, y).verify(rng, verifier, id, transcript, proof.wip)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,12 +1,11 @@
|
|||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use std_shims::sync::LazyLock;
|
use group::Group;
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, scalar::Scalar, edwards::EdwardsPoint};
|
pub(crate) use crate::ringct::bulletproofs::scalar_vector::ScalarVector;
|
||||||
|
mod point_vector;
|
||||||
use monero_generators::{H, Generators};
|
pub(crate) use point_vector::PointVector;
|
||||||
|
|
||||||
pub(crate) use crate::{scalar_vector::ScalarVector, point_vector::PointVector};
|
|
||||||
|
|
||||||
pub(crate) mod transcript;
|
pub(crate) mod transcript;
|
||||||
pub(crate) mod weighted_inner_product;
|
pub(crate) mod weighted_inner_product;
|
||||||
@@ -24,50 +23,55 @@ pub(crate) fn padded_pow_of_2(i: usize) -> usize {
|
|||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||||
pub(crate) enum GeneratorsList {
|
pub(crate) enum GeneratorsList {
|
||||||
GBold,
|
GBold1,
|
||||||
HBold,
|
HBold1,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO: Table these
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct BpPlusGenerators {
|
pub(crate) struct Generators {
|
||||||
g_bold: &'static [EdwardsPoint],
|
g_bold1: &'static [EdwardsPoint],
|
||||||
h_bold: &'static [EdwardsPoint],
|
h_bold1: &'static [EdwardsPoint],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
mod generators {
|
||||||
|
use std_shims::sync::OnceLock;
|
||||||
|
use monero_generators::Generators;
|
||||||
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
||||||
|
}
|
||||||
|
|
||||||
impl BpPlusGenerators {
|
impl Generators {
|
||||||
#[allow(clippy::new_without_default)]
|
#[allow(clippy::new_without_default)]
|
||||||
pub(crate) fn new() -> Self {
|
pub(crate) fn new() -> Self {
|
||||||
let gens = &GENERATORS;
|
let gens = generators::GENERATORS();
|
||||||
BpPlusGenerators { g_bold: &gens.G, h_bold: &gens.H }
|
Generators { g_bold1: &gens.G, h_bold1: &gens.H }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn len(&self) -> usize {
|
pub(crate) fn len(&self) -> usize {
|
||||||
self.g_bold.len()
|
self.g_bold1.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn g() -> EdwardsPoint {
|
pub(crate) fn g() -> EdwardsPoint {
|
||||||
*H
|
dalek_ff_group::EdwardsPoint(crate::H())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn h() -> EdwardsPoint {
|
pub(crate) fn h() -> EdwardsPoint {
|
||||||
ED25519_BASEPOINT_POINT
|
EdwardsPoint::generator()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
|
pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
|
||||||
match list {
|
match list {
|
||||||
GeneratorsList::GBold => self.g_bold[i],
|
GeneratorsList::GBold1 => self.g_bold1[i],
|
||||||
GeneratorsList::HBold => self.h_bold[i],
|
GeneratorsList::HBold1 => self.h_bold1[i],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn reduce(&self, generators: usize) -> Self {
|
pub(crate) fn reduce(&self, generators: usize) -> Self {
|
||||||
// Round to the nearest power of 2
|
// Round to the nearest power of 2
|
||||||
let generators = padded_pow_of_2(generators);
|
let generators = padded_pow_of_2(generators);
|
||||||
assert!(generators <= self.g_bold.len());
|
assert!(generators <= self.g_bold1.len());
|
||||||
|
|
||||||
BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] }
|
Generators { g_bold1: &self.g_bold1[.. generators], h_bold1: &self.h_bold1[.. generators] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
use core::ops::{Index, IndexMut};
|
use core::ops::{Index, IndexMut};
|
||||||
use std_shims::vec::Vec;
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
use dalek_ff_group::EdwardsPoint;
|
||||||
|
|
||||||
use crate::scalar_vector::ScalarVector;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
use crate::core::multiexp;
|
use multiexp::multiexp;
|
||||||
|
#[cfg(test)]
|
||||||
|
use crate::ringct::bulletproofs::plus::ScalarVector;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);
|
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);
|
||||||
|
|
||||||
impl Index<usize> for PointVector {
|
impl Index<usize> for PointVector {
|
||||||
@@ -27,15 +27,6 @@ impl IndexMut<usize> for PointVector {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl PointVector {
|
impl PointVector {
|
||||||
pub(crate) fn mul_vec(&self, vector: &ScalarVector) -> Self {
|
|
||||||
assert_eq!(self.len(), vector.len());
|
|
||||||
let mut res = self.clone();
|
|
||||||
for (i, val) in res.0.iter_mut().enumerate() {
|
|
||||||
*val *= vector.0[i];
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
|
pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
|
||||||
debug_assert_eq!(self.len(), vector.len());
|
debug_assert_eq!(self.len(), vector.len());
|
||||||
24
coins/monero/src/ringct/bulletproofs/plus/transcript.rs
Normal file
24
coins/monero/src/ringct/bulletproofs/plus/transcript.rs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
use std_shims::{sync::OnceLock, vec::Vec};
|
||||||
|
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_generators::{hash_to_point as raw_hash_to_point};
|
||||||
|
use crate::{hash, hash_to_scalar as dalek_hash};
|
||||||
|
|
||||||
|
// Monero starts BP+ transcripts with the following constant.
|
||||||
|
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
|
||||||
|
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
|
||||||
|
// Why this uses a hash_to_point is completely unknown.
|
||||||
|
*TRANSCRIPT_CELL
|
||||||
|
.get_or_init(|| raw_hash_to_point(hash(b"bulletproof_plus_transcript")).compress().to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
Scalar(dalek_hash(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
|
||||||
|
let commitments_hash =
|
||||||
|
hash_to_scalar(&commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
|
||||||
|
hash_to_scalar(&[TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
|
||||||
|
}
|
||||||
@@ -1,21 +1,24 @@
|
|||||||
use std_shims::{vec, vec::Vec};
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
use multiexp::{BatchVerifier, multiexp, multiexp_vartime};
|
||||||
|
use group::{
|
||||||
|
ff::{Field, PrimeField},
|
||||||
|
GroupEncoding,
|
||||||
|
};
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use monero_primitives::{INV_EIGHT, keccak256_to_scalar};
|
use crate::ringct::bulletproofs::plus::{
|
||||||
use crate::{
|
ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, transcript::*,
|
||||||
core::{multiexp, multiexp_vartime, challenge_products},
|
|
||||||
batch_verifier::BulletproofsPlusBatchVerifier,
|
|
||||||
plus::{ScalarVector, PointVector, GeneratorsList, BpPlusGenerators, padded_pow_of_2},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
// Figure 1 of the Bulletproofs+ paper
|
// Figure 1
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct WipStatement {
|
pub(crate) struct WipStatement {
|
||||||
generators: BpPlusGenerators,
|
generators: Generators,
|
||||||
P: EdwardsPoint,
|
P: EdwardsPoint,
|
||||||
y: ScalarVector,
|
y: ScalarVector,
|
||||||
}
|
}
|
||||||
@@ -65,7 +68,7 @@ pub(crate) struct WipProof {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl WipStatement {
|
impl WipStatement {
|
||||||
pub(crate) fn new(generators: BpPlusGenerators, P: EdwardsPoint, y: Scalar) -> Self {
|
pub(crate) fn new(generators: Generators, P: EdwardsPoint, y: Scalar) -> Self {
|
||||||
debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));
|
debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));
|
||||||
|
|
||||||
// y ** n
|
// y ** n
|
||||||
@@ -79,26 +82,16 @@ impl WipStatement {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
|
fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
|
||||||
let e = keccak256_to_scalar(
|
let e = hash_to_scalar(
|
||||||
[
|
&[transcript.to_repr().as_ref(), L.to_bytes().as_ref(), R.to_bytes().as_ref()].concat(),
|
||||||
transcript.to_bytes().as_ref(),
|
|
||||||
L.compress().to_bytes().as_ref(),
|
|
||||||
R.compress().to_bytes().as_ref(),
|
|
||||||
]
|
|
||||||
.concat(),
|
|
||||||
);
|
);
|
||||||
*transcript = e;
|
*transcript = e;
|
||||||
e
|
e
|
||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
|
fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
|
||||||
let e = keccak256_to_scalar(
|
let e = hash_to_scalar(
|
||||||
[
|
&[transcript.to_repr().as_ref(), A.to_bytes().as_ref(), B.to_bytes().as_ref()].concat(),
|
||||||
transcript.to_bytes().as_ref(),
|
|
||||||
A.compress().to_bytes().as_ref(),
|
|
||||||
B.compress().to_bytes().as_ref(),
|
|
||||||
]
|
|
||||||
.concat(),
|
|
||||||
);
|
);
|
||||||
*transcript = e;
|
*transcript = e;
|
||||||
e
|
e
|
||||||
@@ -107,6 +100,9 @@ impl WipStatement {
|
|||||||
// Prover's variant of the shared code block to calculate G/H/P when n > 1
|
// Prover's variant of the shared code block to calculate G/H/P when n > 1
|
||||||
// Returns each permutation of G/H since the prover needs to do operation on each permutation
|
// Returns each permutation of G/H since the prover needs to do operation on each permutation
|
||||||
// P is dropped as it's unused in the prover's path
|
// P is dropped as it's unused in the prover's path
|
||||||
|
// TODO: It'd still probably be faster to keep in terms of the original generators, both between
|
||||||
|
// the reduced amount of group operations and the potential tabling of the generators under
|
||||||
|
// multiexp
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn next_G_H(
|
fn next_G_H(
|
||||||
transcript: &mut Scalar,
|
transcript: &mut Scalar,
|
||||||
@@ -123,7 +119,7 @@ impl WipStatement {
|
|||||||
debug_assert_eq!(g_bold1.len(), h_bold1.len());
|
debug_assert_eq!(g_bold1.len(), h_bold1.len());
|
||||||
|
|
||||||
let e = Self::transcript_L_R(transcript, L, R);
|
let e = Self::transcript_L_R(transcript, L, R);
|
||||||
let inv_e = e.invert();
|
let inv_e = e.invert().unwrap();
|
||||||
|
|
||||||
// This vartime is safe as all of these arguments are public
|
// This vartime is safe as all of these arguments are public
|
||||||
let mut new_g_bold = Vec::with_capacity(g_bold1.len());
|
let mut new_g_bold = Vec::with_capacity(g_bold1.len());
|
||||||
@@ -137,12 +133,57 @@ impl WipStatement {
|
|||||||
new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
|
new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
|
||||||
}
|
}
|
||||||
|
|
||||||
let e_square = e * e;
|
let e_square = e.square();
|
||||||
let inv_e_square = inv_e * inv_e;
|
let inv_e_square = inv_e.square();
|
||||||
|
|
||||||
(e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
|
(e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
This has room for optimization worth investigating further. It currently takes
|
||||||
|
an iterative approach. It can be optimized further via divide and conquer.
|
||||||
|
|
||||||
|
Assume there are 4 challenges.
|
||||||
|
|
||||||
|
Iterative approach (current):
|
||||||
|
1. Do the optimal multiplications across challenge column 0 and 1.
|
||||||
|
2. Do the optimal multiplications across that result and column 2.
|
||||||
|
3. Do the optimal multiplications across that result and column 3.
|
||||||
|
|
||||||
|
Divide and conquer (worth investigating further):
|
||||||
|
1. Do the optimal multiplications across challenge column 0 and 1.
|
||||||
|
2. Do the optimal multiplications across challenge column 2 and 3.
|
||||||
|
3. Multiply both results together.
|
||||||
|
|
||||||
|
When there are 4 challenges (n=16), the iterative approach does 28 multiplications
|
||||||
|
versus divide and conquer's 24.
|
||||||
|
*/
|
||||||
|
fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
|
||||||
|
let mut products = vec![Scalar::ONE; 1 << challenges.len()];
|
||||||
|
|
||||||
|
if !challenges.is_empty() {
|
||||||
|
products[0] = challenges[0].1;
|
||||||
|
products[1] = challenges[0].0;
|
||||||
|
|
||||||
|
for (j, challenge) in challenges.iter().enumerate().skip(1) {
|
||||||
|
let mut slots = (1 << (j + 1)) - 1;
|
||||||
|
while slots > 0 {
|
||||||
|
products[slots] = products[slots / 2] * challenge.0;
|
||||||
|
products[slots - 1] = products[slots / 2] * challenge.1;
|
||||||
|
|
||||||
|
slots = slots.saturating_sub(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sanity check since if the above failed to populate, it'd be critical
|
||||||
|
for product in &products {
|
||||||
|
debug_assert!(!bool::from(product.is_zero()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
products
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
@@ -156,27 +197,16 @@ impl WipStatement {
|
|||||||
if generators.len() != witness.a.len() {
|
if generators.len() != witness.a.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let (g, h) = (BpPlusGenerators::g(), BpPlusGenerators::h());
|
let (g, h) = (Generators::g(), Generators::h());
|
||||||
let mut g_bold = vec![];
|
let mut g_bold = vec![];
|
||||||
let mut h_bold = vec![];
|
let mut h_bold = vec![];
|
||||||
for i in 0 .. generators.len() {
|
for i in 0 .. generators.len() {
|
||||||
g_bold.push(generators.generator(GeneratorsList::GBold, i));
|
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
|
||||||
h_bold.push(generators.generator(GeneratorsList::HBold, i));
|
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
|
||||||
}
|
}
|
||||||
let mut g_bold = PointVector(g_bold);
|
let mut g_bold = PointVector(g_bold);
|
||||||
let mut h_bold = PointVector(h_bold);
|
let mut h_bold = PointVector(h_bold);
|
||||||
|
|
||||||
let mut y_inv = {
|
|
||||||
let mut i = 1;
|
|
||||||
let mut to_invert = vec![];
|
|
||||||
while i < g_bold.len() {
|
|
||||||
to_invert.push(y[i - 1]);
|
|
||||||
i *= 2;
|
|
||||||
}
|
|
||||||
Scalar::batch_invert(&mut to_invert);
|
|
||||||
to_invert
|
|
||||||
};
|
|
||||||
|
|
||||||
// Check P has the expected relationship
|
// Check P has the expected relationship
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
{
|
{
|
||||||
@@ -230,7 +260,8 @@ impl WipStatement {
|
|||||||
let c_l = a1.clone().weighted_inner_product(&b2, &y);
|
let c_l = a1.clone().weighted_inner_product(&b2, &y);
|
||||||
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
|
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
|
||||||
|
|
||||||
let y_inv_n_hat = y_inv.pop().unwrap();
|
// TODO: Calculate these with a batch inversion
|
||||||
|
let y_inv_n_hat = y_n_hat.invert().unwrap();
|
||||||
|
|
||||||
let mut L_terms = (a1.clone() * y_inv_n_hat)
|
let mut L_terms = (a1.clone() * y_inv_n_hat)
|
||||||
.0
|
.0
|
||||||
@@ -240,7 +271,7 @@ impl WipStatement {
|
|||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
L_terms.push((c_l, g));
|
L_terms.push((c_l, g));
|
||||||
L_terms.push((d_l, h));
|
L_terms.push((d_l, h));
|
||||||
let L = multiexp(&L_terms) * INV_EIGHT();
|
let L = multiexp(&L_terms) * Scalar(crate::INV_EIGHT());
|
||||||
L_vec.push(L);
|
L_vec.push(L);
|
||||||
L_terms.zeroize();
|
L_terms.zeroize();
|
||||||
|
|
||||||
@@ -252,7 +283,7 @@ impl WipStatement {
|
|||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
R_terms.push((c_r, g));
|
R_terms.push((c_r, g));
|
||||||
R_terms.push((d_r, h));
|
R_terms.push((d_r, h));
|
||||||
let R = multiexp(&R_terms) * INV_EIGHT();
|
let R = multiexp(&R_terms) * Scalar(crate::INV_EIGHT());
|
||||||
R_vec.push(R);
|
R_vec.push(R);
|
||||||
R_terms.zeroize();
|
R_terms.zeroize();
|
||||||
|
|
||||||
@@ -285,33 +316,34 @@ impl WipStatement {
|
|||||||
|
|
||||||
let mut A_terms =
|
let mut A_terms =
|
||||||
vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
||||||
let A = multiexp(&A_terms) * INV_EIGHT();
|
let A = multiexp(&A_terms) * Scalar(crate::INV_EIGHT());
|
||||||
A_terms.zeroize();
|
A_terms.zeroize();
|
||||||
|
|
||||||
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
||||||
let B = multiexp(&B_terms) * INV_EIGHT();
|
let B = multiexp(&B_terms) * Scalar(crate::INV_EIGHT());
|
||||||
B_terms.zeroize();
|
B_terms.zeroize();
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, A, B);
|
let e = Self::transcript_A_B(&mut transcript, A, B);
|
||||||
|
|
||||||
let r_answer = r + (a[0] * e);
|
let r_answer = r + (a[0] * e);
|
||||||
let s_answer = s + (b[0] * e);
|
let s_answer = s + (b[0] * e);
|
||||||
let delta_answer = eta + (delta * e) + (alpha * (e * e));
|
let delta_answer = eta + (delta * e) + (alpha * e.square());
|
||||||
|
|
||||||
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
verifier: &mut BulletproofsPlusBatchVerifier,
|
verifier: &mut BatchVerifier<Id, EdwardsPoint>,
|
||||||
|
id: Id,
|
||||||
mut transcript: Scalar,
|
mut transcript: Scalar,
|
||||||
mut proof: WipProof,
|
mut proof: WipProof,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let verifier_weight = Scalar::random(rng);
|
|
||||||
|
|
||||||
let WipStatement { generators, P, y } = self;
|
let WipStatement { generators, P, y } = self;
|
||||||
|
|
||||||
|
let (g, h) = (Generators::g(), Generators::h());
|
||||||
|
|
||||||
// Verify the L/R lengths
|
// Verify the L/R lengths
|
||||||
{
|
{
|
||||||
let mut lr_len = 0;
|
let mut lr_len = 0;
|
||||||
@@ -327,7 +359,7 @@ impl WipStatement {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let inv_y = {
|
let inv_y = {
|
||||||
let inv_y = y[0].invert();
|
let inv_y = y[0].invert().unwrap();
|
||||||
let mut res = Vec::with_capacity(y.len());
|
let mut res = Vec::with_capacity(y.len());
|
||||||
res.push(inv_y);
|
res.push(inv_y);
|
||||||
while res.len() < y.len() {
|
while res.len() < y.len() {
|
||||||
@@ -336,49 +368,51 @@ impl WipStatement {
|
|||||||
res
|
res
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut e_is = Vec::with_capacity(proof.L.len());
|
let mut P_terms = vec![(Scalar::ONE, P)];
|
||||||
|
P_terms.reserve(6 + (2 * generators.len()) + proof.L.len());
|
||||||
|
|
||||||
|
let mut challenges = Vec::with_capacity(proof.L.len());
|
||||||
|
let product_cache = {
|
||||||
|
let mut es = Vec::with_capacity(proof.L.len());
|
||||||
for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
|
for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
|
||||||
e_is.push(Self::transcript_L_R(&mut transcript, *L, *R));
|
es.push(Self::transcript_L_R(&mut transcript, *L, *R));
|
||||||
*L = L.mul_by_cofactor();
|
*L = L.mul_by_cofactor();
|
||||||
*R = R.mul_by_cofactor();
|
*R = R.mul_by_cofactor();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let mut inv_es = es.clone();
|
||||||
|
let mut scratch = vec![Scalar::ZERO; es.len()];
|
||||||
|
group::ff::BatchInverter::invert_with_external_scratch(&mut inv_es, &mut scratch);
|
||||||
|
drop(scratch);
|
||||||
|
|
||||||
|
debug_assert_eq!(es.len(), inv_es.len());
|
||||||
|
debug_assert_eq!(es.len(), proof.L.len());
|
||||||
|
debug_assert_eq!(es.len(), proof.R.len());
|
||||||
|
for ((e, inv_e), (L, R)) in
|
||||||
|
es.drain(..).zip(inv_es.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
|
||||||
|
{
|
||||||
|
debug_assert_eq!(e.invert().unwrap(), inv_e);
|
||||||
|
|
||||||
|
challenges.push((e, inv_e));
|
||||||
|
|
||||||
|
let e_square = e.square();
|
||||||
|
let inv_e_square = inv_e.square();
|
||||||
|
P_terms.push((e_square, *L));
|
||||||
|
P_terms.push((inv_e_square, *R));
|
||||||
|
}
|
||||||
|
|
||||||
|
Self::challenge_products(&challenges)
|
||||||
|
};
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
|
let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
|
||||||
proof.A = proof.A.mul_by_cofactor();
|
proof.A = proof.A.mul_by_cofactor();
|
||||||
proof.B = proof.B.mul_by_cofactor();
|
proof.B = proof.B.mul_by_cofactor();
|
||||||
let neg_e_square = verifier_weight * -(e * e);
|
let neg_e_square = -e.square();
|
||||||
|
|
||||||
verifier.0.other.push((neg_e_square, P));
|
let mut multiexp = P_terms;
|
||||||
|
multiexp.reserve(4 + (2 * generators.len()));
|
||||||
let mut challenges = Vec::with_capacity(proof.L.len());
|
for (scalar, _) in &mut multiexp {
|
||||||
let product_cache = {
|
*scalar *= neg_e_square;
|
||||||
let mut inv_e_is = e_is.clone();
|
|
||||||
Scalar::batch_invert(&mut inv_e_is);
|
|
||||||
|
|
||||||
debug_assert_eq!(e_is.len(), inv_e_is.len());
|
|
||||||
debug_assert_eq!(e_is.len(), proof.L.len());
|
|
||||||
debug_assert_eq!(e_is.len(), proof.R.len());
|
|
||||||
for ((e_i, inv_e_i), (L, R)) in
|
|
||||||
e_is.drain(..).zip(inv_e_is.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
|
|
||||||
{
|
|
||||||
debug_assert_eq!(e_i.invert(), inv_e_i);
|
|
||||||
|
|
||||||
challenges.push((e_i, inv_e_i));
|
|
||||||
|
|
||||||
let e_i_square = e_i * e_i;
|
|
||||||
let inv_e_i_square = inv_e_i * inv_e_i;
|
|
||||||
verifier.0.other.push((neg_e_square * e_i_square, *L));
|
|
||||||
verifier.0.other.push((neg_e_square * inv_e_i_square, *R));
|
|
||||||
}
|
|
||||||
|
|
||||||
challenge_products(&challenges)
|
|
||||||
};
|
|
||||||
|
|
||||||
while verifier.0.g_bold.len() < generators.len() {
|
|
||||||
verifier.0.g_bold.push(Scalar::ZERO);
|
|
||||||
}
|
|
||||||
while verifier.0.h_bold.len() < generators.len() {
|
|
||||||
verifier.0.h_bold.push(Scalar::ZERO);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let re = proof.r_answer * e;
|
let re = proof.r_answer * e;
|
||||||
@@ -387,18 +421,23 @@ impl WipStatement {
|
|||||||
if i > 0 {
|
if i > 0 {
|
||||||
scalar *= inv_y[i - 1];
|
scalar *= inv_y[i - 1];
|
||||||
}
|
}
|
||||||
verifier.0.g_bold[i] += verifier_weight * scalar;
|
multiexp.push((scalar, generators.generator(GeneratorsList::GBold1, i)));
|
||||||
}
|
}
|
||||||
|
|
||||||
let se = proof.s_answer * e;
|
let se = proof.s_answer * e;
|
||||||
for i in 0 .. generators.len() {
|
for i in 0 .. generators.len() {
|
||||||
verifier.0.h_bold[i] += verifier_weight * (se * product_cache[product_cache.len() - 1 - i]);
|
multiexp.push((
|
||||||
|
se * product_cache[product_cache.len() - 1 - i],
|
||||||
|
generators.generator(GeneratorsList::HBold1, i),
|
||||||
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
verifier.0.other.push((verifier_weight * -e, proof.A));
|
multiexp.push((-e, proof.A));
|
||||||
verifier.0.g += verifier_weight * (proof.r_answer * y[0] * proof.s_answer);
|
multiexp.push((proof.r_answer * y[0] * proof.s_answer, g));
|
||||||
verifier.0.h += verifier_weight * proof.delta_answer;
|
multiexp.push((proof.delta_answer, h));
|
||||||
verifier.0.other.push((-verifier_weight, proof.B));
|
multiexp.push((-Scalar::ONE, proof.B));
|
||||||
|
|
||||||
|
verifier.queue(rng, id, multiexp);
|
||||||
|
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
@@ -2,13 +2,13 @@ use core::{
|
|||||||
borrow::Borrow,
|
borrow::Borrow,
|
||||||
ops::{Index, IndexMut, Add, Sub, Mul},
|
ops::{Index, IndexMut, Add, Sub, Mul},
|
||||||
};
|
};
|
||||||
use std_shims::{vec, vec::Vec};
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
use group::ff::Field;
|
||||||
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
use crate::core::multiexp;
|
use multiexp::multiexp;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
||||||
324
coins/monero/src/ringct/clsag/mod.rs
Normal file
324
coins/monero/src/ringct/clsag/mod.rs
Normal file
@@ -0,0 +1,324 @@
|
|||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use core::ops::Deref;
|
||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
|
use subtle::{ConstantTimeEq, ConditionallySelectable};
|
||||||
|
|
||||||
|
use curve25519_dalek::{
|
||||||
|
constants::ED25519_BASEPOINT_TABLE,
|
||||||
|
scalar::Scalar,
|
||||||
|
traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
|
||||||
|
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
INV_EIGHT, Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys,
|
||||||
|
ringct::hash_to_point, serialize::*,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
mod multisig;
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
pub(crate) use multisig::add_key_image_share;
|
||||||
|
|
||||||
|
/// Errors returned when CLSAG signing fails.
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
|
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||||
|
pub enum ClsagError {
|
||||||
|
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
||||||
|
InternalError(&'static str),
|
||||||
|
#[cfg_attr(feature = "std", error("invalid ring"))]
|
||||||
|
InvalidRing,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid ring member (member {0}, ring size {1})"))]
|
||||||
|
InvalidRingMember(u8, u8),
|
||||||
|
#[cfg_attr(feature = "std", error("invalid commitment"))]
|
||||||
|
InvalidCommitment,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid key image"))]
|
||||||
|
InvalidImage,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid D"))]
|
||||||
|
InvalidD,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid s"))]
|
||||||
|
InvalidS,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid c1"))]
|
||||||
|
InvalidC1,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Input being signed for.
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
|
pub struct ClsagInput {
|
||||||
|
// The actual commitment for the true spend
|
||||||
|
pub(crate) commitment: Commitment,
|
||||||
|
// True spend index, offsets, and ring
|
||||||
|
pub(crate) decoys: Decoys,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ClsagInput {
|
||||||
|
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
|
||||||
|
let n = decoys.len();
|
||||||
|
if n > u8::MAX.into() {
|
||||||
|
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
|
||||||
|
}
|
||||||
|
let n = u8::try_from(n).unwrap();
|
||||||
|
if decoys.i >= n {
|
||||||
|
Err(ClsagError::InvalidRingMember(decoys.i, n))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate the commitment matches
|
||||||
|
if decoys.ring[usize::from(decoys.i)][1] != commitment.calculate() {
|
||||||
|
Err(ClsagError::InvalidCommitment)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ClsagInput { commitment, decoys })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
enum Mode {
|
||||||
|
Sign(usize, EdwardsPoint, EdwardsPoint),
|
||||||
|
Verify(Scalar),
|
||||||
|
}
|
||||||
|
|
||||||
|
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
// Said differences are covered via the above Mode
//
// Returns ((D as serialized (D * 1/8), c * mu_P, c * mu_C), c1) where the first tuple is needed
// to continue signing and c1 is the challenge to test against (verify) or embed (sign).
// NOTE(review): byte layout of `to_hash` mirrors monerod's CLSAG hashing exactly; do not reorder.
fn core(
  ring: &[[EdwardsPoint; 2]],
  I: &EdwardsPoint,
  pseudo_out: &EdwardsPoint,
  msg: &[u8; 32],
  D: &EdwardsPoint,
  s: &[Scalar],
  A_c1: &Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
  let n = ring.len();

  // Precompute tables for the two points multiplied by per-iteration scalars in the loop below
  let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
  // D is serialized (and hashed) multiplied by the inverse of eight
  let D = D * INV_EIGHT();

  // Generate the transcript
  // Instead of generating multiple, a single transcript is created and then edited as needed
  const PREFIX: &[u8] = b"CLSAG_";
  #[rustfmt::skip]
  const AGG_0: &[u8] = b"agg_0";
  #[rustfmt::skip]
  const ROUND: &[u8] = b"round";
  const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();

  // Capacity: the DST block, n keys, n commitments, and the trailing I/D/pseudo_out entries
  let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
  to_hash.extend(PREFIX);
  to_hash.extend(AGG_0);
  // Zero-pad the DST out to a full 32-byte block
  to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);

  // P: the ring's keys; each is also appended to the transcript
  let mut P = Vec::with_capacity(n);
  for member in ring {
    P.push(member[0]);
    to_hash.extend(member[0].compress().to_bytes());
  }

  // C: the ring's commitments, offset by the pseudo-out so the true spend's entry commits to zero
  let mut C = Vec::with_capacity(n);
  for member in ring {
    C.push(member[1] - pseudo_out);
    to_hash.extend(member[1].compress().to_bytes());
  }

  to_hash.extend(I.compress().to_bytes());
  to_hash.extend(D.compress().to_bytes());
  to_hash.extend(pseudo_out.compress().to_bytes());
  // mu_P with agg_0
  let mu_P = hash_to_scalar(&to_hash);
  // mu_C with agg_1 (only the final DST byte differs: '0' -> '1')
  to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
  let mu_C = hash_to_scalar(&to_hash);

  // Truncate it for the round transcript, altering the DST as needed
  to_hash.truncate(((2 * n) + 1) * 32);
  for i in 0 .. ROUND.len() {
    to_hash[PREFIX.len() + i] = ROUND[i];
  }
  // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
  // truncated just to add it back
  to_hash.extend(pseudo_out.compress().to_bytes());
  to_hash.extend(msg);

  // Configure the loop based on if we're signing or verifying
  let start;
  let end;
  let mut c;
  match A_c1 {
    Mode::Sign(r, A, AH) => {
      // Signing starts the ring walk immediately after the real spend's index
      start = r + 1;
      end = r + n;
      to_hash.extend(A.compress().to_bytes());
      to_hash.extend(AH.compress().to_bytes());
      c = hash_to_scalar(&to_hash);
    }

    Mode::Verify(c1) => {
      // Verification walks the full ring from index 0, seeded with the claimed c1
      start = 0;
      end = n;
      c = *c1;
    }
  }

  // Perform the core loop
  let mut c1 = c;
  for i in (start .. end).map(|i| i % n) {
    let c_p = mu_P * c;
    let c_c = mu_C * c;

    let L = (&s[i] * ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
    let PH = hash_to_point(&P[i]);
    // Shouldn't be an issue as all of the variables in this vartime statement are public
    let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);

    // Drop the previous round's L/R (and, on the first signing iteration, A/AH) before appending
    to_hash.truncate(((2 * n) + 3) * 32);
    to_hash.extend(L.compress().to_bytes());
    to_hash.extend(R.compress().to_bytes());
    c = hash_to_scalar(&to_hash);

    // This will only execute once and shouldn't need to be constant time. Making it constant time
    // removes the risk of branch prediction creating timing differences depending on ring index
    // however
    c1.conditional_assign(&c, i.ct_eq(&(n - 1)));
  }

  // This first tuple is needed to continue signing, the latter is the c to be tested/worked with
  ((D, c * mu_P, c * mu_C), c1)
}
|
||||||
|
|
||||||
|
/// CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Clsag {
  // The auxiliary key image commitment, as serialized (multiplied by the inverse of eight)
  pub D: EdwardsPoint,
  // One response scalar per ring member
  pub s: Vec<Scalar>,
  // The initial challenge the verifier re-derives the ring's challenges from
  pub c1: Scalar,
}
|
||||||
|
|
||||||
|
impl Clsag {
  // Sign core is the extension of core as needed for signing, yet is shared between single signer
  // and multisig, hence why it's still core
  //
  // Returns the (incomplete) Clsag with a random response at the real-spend index, the
  // pseudo-out commitment, and the (p, c * z) scalars needed to finish the real response.
  pub(crate) fn sign_core<R: RngCore + CryptoRng>(
    rng: &mut R,
    I: &EdwardsPoint,
    input: &ClsagInput,
    mask: Scalar,
    msg: &[u8; 32],
    A: EdwardsPoint,
    AH: EdwardsPoint,
  ) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
    let r: usize = input.decoys.i.into();

    // Re-commit to the same amount under the new mask
    let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
    // z: the difference between the real commitment's mask and the pseudo-out's mask
    let z = input.commitment.mask - mask;

    let H = hash_to_point(&input.decoys.ring[r][0]);
    // The auxiliary key image for the commitment delta
    let D = H * z;
    // Fill every response with a random scalar; the real spend's response is overwritten by the
    // caller once the challenge is known
    let mut s = Vec::with_capacity(input.decoys.ring.len());
    for _ in 0 .. input.decoys.ring.len() {
      s.push(random_scalar(rng));
    }
    let ((D, p, c), c1) =
      core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH));

    (Clsag { D, s, c1 }, pseudo_out, p, c * z)
  }

  /// Generate CLSAG signatures for the given inputs.
  /// inputs is of the form (private key, key image, input).
  /// sum_outputs is for the sum of the outputs' commitment masks.
  pub fn sign<R: RngCore + CryptoRng>(
    rng: &mut R,
    mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
    sum_outputs: Scalar,
    msg: [u8; 32],
  ) -> Vec<(Clsag, EdwardsPoint)> {
    let mut res = Vec::with_capacity(inputs.len());
    let mut sum_pseudo_outs = Scalar::ZERO;
    for i in 0 .. inputs.len() {
      let mut mask = random_scalar(rng);
      // The last pseudo-out mask is chosen so all pseudo-out masks sum to the outputs' masks,
      // letting the amounts balance
      if i == (inputs.len() - 1) {
        mask = sum_outputs - sum_pseudo_outs;
      } else {
        sum_pseudo_outs += mask;
      }

      let mut nonce = Zeroizing::new(random_scalar(rng));
      let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
        rng,
        &inputs[i].1,
        &inputs[i].2,
        mask,
        &msg,
        // A = nonce * G, AH = nonce * hash_to_point(real spend key)
        nonce.deref() * ED25519_BASEPOINT_TABLE,
        nonce.deref() *
          hash_to_point(&inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
      );
      // Complete the real spend's response: s = nonce - (p * x + c)
      clsag.s[usize::from(inputs[i].2.decoys.i)] =
        (-((p * inputs[i].0.deref()) + c)) + nonce.deref();
      // Wipe the secrets now that they're no longer needed
      inputs[i].0.zeroize();
      nonce.zeroize();

      debug_assert!(clsag
        .verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
        .is_ok());

      res.push((clsag, pseudo_out));
    }

    res
  }

  /// Verify the CLSAG signature against the given Transaction data.
  pub fn verify(
    &self,
    ring: &[[EdwardsPoint; 2]],
    I: &EdwardsPoint,
    pseudo_out: &EdwardsPoint,
    msg: &[u8; 32],
  ) -> Result<(), ClsagError> {
    // Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
    // here
    if ring.is_empty() {
      Err(ClsagError::InvalidRing)?;
    }
    if ring.len() != self.s.len() {
      Err(ClsagError::InvalidS)?;
    }
    if I.is_identity() {
      Err(ClsagError::InvalidImage)?;
    }

    // Undo the 1/8 serialization factor; the result must be a non-identity point
    let D = self.D.mul_by_cofactor();
    if D.is_identity() {
      Err(ClsagError::InvalidD)?;
    }

    // Re-derive the challenge chain; it must close back to the claimed c1
    let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1));
    if c1 != self.c1 {
      Err(ClsagError::InvalidC1)?;
    }
    Ok(())
  }

  // Serialized size of a CLSAG for a ring of the specified length: s (32 each), c1, D
  pub(crate) fn fee_weight(ring_len: usize) -> usize {
    (ring_len * 32) + 32 + 32
  }

  // Write the CLSAG: every s scalar, then c1, then D
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_raw_vec(write_scalar, &self.s, w)?;
    w.write_all(&self.c1.to_bytes())?;
    write_point(&self.D, w)
  }

  // Read a CLSAG for a ring of `decoys` members, in the same order write emits
  pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
    Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
  }
}
|
||||||
305
coins/monero/src/ringct/clsag/multisig.rs
Normal file
305
coins/monero/src/ringct/clsag/multisig.rs
Normal file
@@ -0,0 +1,305 @@
|
|||||||
|
use core::{ops::Deref, fmt::Debug};
|
||||||
|
use std_shims::io::{self, Read, Write};
|
||||||
|
use std::sync::{Arc, RwLock};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
||||||
|
use rand_chacha::ChaCha20Rng;
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
use group::{ff::Field, Group, GroupEncoding};
|
||||||
|
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
use dalek_ff_group as dfg;
|
||||||
|
use dleq::DLEqProof;
|
||||||
|
use frost::{
|
||||||
|
dkg::lagrange,
|
||||||
|
curve::Ed25519,
|
||||||
|
Participant, FrostError, ThresholdKeys, ThresholdView,
|
||||||
|
algorithm::{WriteAddendum, Algorithm},
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::ringct::{
|
||||||
|
hash_to_point,
|
||||||
|
clsag::{ClsagInput, Clsag},
|
||||||
|
};
|
||||||
|
|
||||||
|
// The fixed transcript used for the key image share DLEq proofs. Prover and verifier must use an
// identical transcript, hence a fresh one per proof rather than a shared mutable transcript.
fn dleq_transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"monero_key_image_dleq")
}
|
||||||
|
|
||||||
|
impl ClsagInput {
  // Append this input (real-spend index and full ring) to the multisig transcript.
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    // Doesn't domain separate as this is considered part of the larger CLSAG proof

    // Ring index
    transcript.append_message(b"real_spend", [self.decoys.i]);

    // Ring
    for (i, pair) in self.decoys.ring.iter().enumerate() {
      // Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
      // They're just a unreliable reference to this data which will be included in the message
      // if in use
      transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
      transcript.append_message(b"key", pair[0].compress().to_bytes());
      transcript.append_message(b"commitment", pair[1].compress().to_bytes())
    }

    // Doesn't include the commitment's parts as the above ring + index includes the commitment
    // The only potential malleability would be if the G/H relationship is known breaking the
    // discrete log problem, which breaks everything already
  }
}
|
||||||
|
|
||||||
|
/// CLSAG input and the mask to use for it.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagDetails {
  // The input (ring, real-spend index, commitment) being signed for
  input: ClsagInput,
  // The mask to use for this input's pseudo-out commitment
  mask: Scalar,
}
|
||||||
|
|
||||||
|
impl ClsagDetails {
|
||||||
|
pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
|
||||||
|
ClsagDetails { input, mask }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Addendum produced during the FROST signing process with relevant data.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
  // This participant's share of the key image (their secret share times H)
  pub(crate) key_image: dfg::EdwardsPoint,
  // DLEq proof that the key image share uses the same discrete logarithm as the participant's
  // verification share
  dleq: DLEqProof<dfg::EdwardsPoint>,
}
|
||||||
|
|
||||||
|
impl WriteAddendum for ClsagAddendum {
  // Serialize as the compressed key image share followed by the DLEq proof.
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
    self.dleq.write(writer)
  }
}
|
||||||
|
|
||||||
|
// Intermediate state produced by sign_share and consumed by verify/verify_share.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
struct Interim {
  // The (p, c) scalars returned by Clsag::sign_core for this signing attempt
  p: Scalar,
  c: Scalar,

  // The incomplete CLSAG (real-spend response still to be filled) and its pseudo-out
  clsag: Clsag,
  pseudo_out: EdwardsPoint,
}
|
||||||
|
|
||||||
|
/// FROST algorithm for producing a CLSAG signature.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct ClsagMultisig {
  transcript: RecommendedTranscript,

  // hash_to_point of the output key being signed for; the generator for key image shares
  pub(crate) H: EdwardsPoint,
  // Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
  // an extra round
  image: EdwardsPoint,

  // Signing details (input + mask), set externally and read during signing
  details: Arc<RwLock<Option<ClsagDetails>>>,

  // The 32-byte message, captured in sign_share for later verification
  msg: Option<[u8; 32]>,
  // State carried from sign_share to verify/verify_share
  interim: Option<Interim>,
}
|
||||||
|
|
||||||
|
impl ClsagMultisig {
|
||||||
|
pub fn new(
|
||||||
|
transcript: RecommendedTranscript,
|
||||||
|
output_key: EdwardsPoint,
|
||||||
|
details: Arc<RwLock<Option<ClsagDetails>>>,
|
||||||
|
) -> ClsagMultisig {
|
||||||
|
ClsagMultisig {
|
||||||
|
transcript,
|
||||||
|
|
||||||
|
H: hash_to_point(&output_key),
|
||||||
|
image: EdwardsPoint::identity(),
|
||||||
|
|
||||||
|
details,
|
||||||
|
|
||||||
|
msg: None,
|
||||||
|
interim: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn input(&self) -> ClsagInput {
|
||||||
|
(*self.details.read().unwrap()).as_ref().unwrap().input.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn mask(&self) -> Scalar {
|
||||||
|
(*self.details.read().unwrap()).as_ref().unwrap().mask
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Accumulate a participant's key image share into the group key image.
//
// On the first share (image still the identity), the image is seeded with the offset's
// contribution (generator * offset), as the offset isn't part of any participant's share.
// Each share is then weighted by its Lagrange coefficient over the included participants.
pub(crate) fn add_key_image_share(
  image: &mut EdwardsPoint,
  generator: EdwardsPoint,
  offset: Scalar,
  included: &[Participant],
  participant: Participant,
  share: EdwardsPoint,
) {
  if image.is_identity().into() {
    *image = generator * offset;
  }
  *image += share * lagrange::<dfg::Scalar>(participant, included).0;
}
|
||||||
|
|
||||||
|
impl Algorithm<Ed25519> for ClsagMultisig {
  type Transcript = RecommendedTranscript;
  type Addendum = ClsagAddendum;
  type Signature = (Clsag, EdwardsPoint);

  // Nonces are committed to over both G and H, as CLSAG requires both A (rG) and AH (rH)
  fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
    vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)]]
  }

  // Produce this participant's key image share alongside a DLEq proof tying it to their
  // verification share
  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    keys: &ThresholdKeys<Ed25519>,
  ) -> ClsagAddendum {
    ClsagAddendum {
      key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
      dleq: DLEqProof::prove(
        rng,
        // Doesn't take in a larger transcript object due to the usage of this
        // Every prover would immediately write their own DLEq proof, when they can only do so in
        // the proper order if they want to reach consensus
        // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
        // try to merge later in some form, when it should instead just merge xH (as it does)
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        keys.secret_share(),
      ),
    }
  }

  fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
    let mut bytes = [0; 32];
    reader.read_exact(&mut bytes)?;
    // dfg ensures the point is torsion free
    let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
      .ok_or_else(|| io::Error::other("invalid key image"))?;
    // Ensure this is a canonical point
    if xH.to_bytes() != bytes {
      Err(io::Error::other("non-canonical key image"))?;
    }

    Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
  }

  // Verify a participant's key image share and fold it into the group key image, transcripting
  // everything relevant along the way
  fn process_addendum(
    &mut self,
    view: &ThresholdView<Ed25519>,
    l: Participant,
    addendum: ClsagAddendum,
  ) -> Result<(), FrostError> {
    // TODO: This check is faulty if two shares are additive inverses of each other
    if self.image.is_identity().into() {
      // First addendum: transcript the input and mask once
      self.transcript.domain_separate(b"CLSAG");
      self.input().transcript(&mut self.transcript);
      self.transcript.append_message(b"mask", self.mask().to_bytes());
    }

    self.transcript.append_message(b"participant", l.to_bytes());

    // The share must use the same discrete log as the participant's verification share
    addendum
      .dleq
      .verify(
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        &[view.original_verification_share(l), addendum.key_image],
      )
      .map_err(|_| FrostError::InvalidPreprocess(l))?;

    self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
    add_key_image_share(
      &mut self.image,
      self.H,
      view.offset().0,
      view.included(),
      l,
      addendum.key_image.0,
    );

    Ok(())
  }

  fn transcript(&mut self) -> &mut Self::Transcript {
    &mut self.transcript
  }

  fn sign_share(
    &mut self,
    view: &ThresholdView<Ed25519>,
    nonce_sums: &[Vec<dfg::EdwardsPoint>],
    nonces: Vec<Zeroizing<dfg::Scalar>>,
    msg: &[u8],
  ) -> dfg::Scalar {
    // Use the transcript to get a seeded random number generator
    // The transcript contains private data, preventing passive adversaries from recreating this
    // process even if they have access to commitments (specifically, the ring index being signed
    // for, along with the mask which should not only require knowing the shared keys yet also the
    // input commitment masks)
    let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

    self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));

    #[allow(non_snake_case)]
    let (clsag, pseudo_out, p, c) = Clsag::sign_core(
      &mut rng,
      &self.image,
      &self.input(),
      self.mask(),
      self.msg.as_ref().unwrap(),
      // The aggregated nonce commitments over G and H respectively
      nonce_sums[0][0].0,
      nonce_sums[0][1].0,
    );
    self.interim = Some(Interim { p, c, clsag, pseudo_out });

    // This participant's share of the real-spend response
    (-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
  }

  #[must_use]
  fn verify(
    &self,
    _: dfg::EdwardsPoint,
    _: &[Vec<dfg::EdwardsPoint>],
    sum: dfg::Scalar,
  ) -> Option<Self::Signature> {
    let interim = self.interim.as_ref().unwrap();
    let mut clsag = interim.clsag.clone();
    // Complete the real spend's response with the summed shares, then check the full signature
    clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
    if clsag
      .verify(
        &self.input().decoys.ring,
        &self.image,
        &interim.pseudo_out,
        self.msg.as_ref().unwrap(),
      )
      .is_ok()
    {
      return Some((clsag, interim.pseudo_out));
    }
    None
  }

  // Statements for FROST's share verification: share * G == nonce - p * verification_share
  fn verify_share(
    &self,
    verification_share: dfg::EdwardsPoint,
    nonces: &[Vec<dfg::EdwardsPoint>],
    share: dfg::Scalar,
  ) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
    let interim = self.interim.as_ref().unwrap();
    Ok(vec![
      (share, dfg::EdwardsPoint::generator()),
      (dfg::Scalar(interim.p), verification_share),
      (-dfg::Scalar::ONE, nonces[0][0]),
    ])
  }
}
|
||||||
8
coins/monero/src/ringct/hash_to_point.rs
Normal file
8
coins/monero/src/ringct/hash_to_point.rs
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
|
pub use monero_generators::{hash_to_point as raw_hash_to_point};
|
||||||
|
|
||||||
|
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
///
/// Hashes the compressed encoding of the provided point.
pub fn hash_to_point(key: &EdwardsPoint) -> EdwardsPoint {
  raw_hash_to_point(key.compress().to_bytes())
}
|
||||||
@@ -1,11 +1,4 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
|
||||||
#![doc = include_str!("../README.md")]
|
|
||||||
#![deny(missing_docs)]
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
#![allow(non_snake_case)]
|
|
||||||
|
|
||||||
use std_shims::{
|
use std_shims::{
|
||||||
vec,
|
|
||||||
vec::Vec,
|
vec::Vec,
|
||||||
io::{self, Read, Write},
|
io::{self, Read, Write},
|
||||||
};
|
};
|
||||||
@@ -14,39 +7,32 @@ use zeroize::Zeroize;
|
|||||||
|
|
||||||
use curve25519_dalek::{traits::IsIdentity, Scalar, EdwardsPoint};
|
use curve25519_dalek::{traits::IsIdentity, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use monero_io::*;
|
use monero_generators::H;
|
||||||
use monero_generators::{H, hash_to_point};
|
|
||||||
use monero_primitives::keccak256_to_scalar;
|
|
||||||
|
|
||||||
/// Errors when working with MLSAGs.
|
use crate::{hash_to_scalar, ringct::hash_to_point, serialize::*};
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, thiserror::Error)]
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
|
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||||
pub enum MlsagError {
|
pub enum MlsagError {
|
||||||
/// Invalid ring (such as too small or too large).
|
#[cfg_attr(feature = "std", error("invalid ring"))]
|
||||||
#[error("invalid ring")]
|
|
||||||
InvalidRing,
|
InvalidRing,
|
||||||
/// Invalid amount of key images.
|
#[cfg_attr(feature = "std", error("invalid amount of key images"))]
|
||||||
#[error("invalid amount of key images")]
|
|
||||||
InvalidAmountOfKeyImages,
|
InvalidAmountOfKeyImages,
|
||||||
/// Invalid ss matrix.
|
#[cfg_attr(feature = "std", error("invalid ss"))]
|
||||||
#[error("invalid ss")]
|
|
||||||
InvalidSs,
|
InvalidSs,
|
||||||
/// Invalid key image.
|
#[cfg_attr(feature = "std", error("key image was identity"))]
|
||||||
#[error("invalid key image")]
|
IdentityKeyImage,
|
||||||
InvalidKeyImage,
|
#[cfg_attr(feature = "std", error("invalid ci"))]
|
||||||
/// Invalid ci vector.
|
|
||||||
#[error("invalid ci")]
|
|
||||||
InvalidCi,
|
InvalidCi,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A vector of rings, forming a matrix, to verify the MLSAG with.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct RingMatrix {
|
pub struct RingMatrix {
|
||||||
matrix: Vec<Vec<EdwardsPoint>>,
|
matrix: Vec<Vec<EdwardsPoint>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RingMatrix {
|
impl RingMatrix {
|
||||||
/// Construct a ring matrix from an already formatted series of points.
|
pub fn new(matrix: Vec<Vec<EdwardsPoint>>) -> Result<Self, MlsagError> {
|
||||||
fn new(matrix: Vec<Vec<EdwardsPoint>>) -> Result<Self, MlsagError> {
|
|
||||||
// Monero requires that there is more than one ring member for MLSAG signatures:
|
// Monero requires that there is more than one ring member for MLSAG signatures:
|
||||||
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
||||||
// src/ringct/rctSigs.cpp#L462
|
// src/ringct/rctSigs.cpp#L462
|
||||||
@@ -74,17 +60,16 @@ impl RingMatrix {
|
|||||||
RingMatrix::new(matrix)
|
RingMatrix::new(matrix)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Iterate over the members of the matrix.
|
pub fn iter(&self) -> impl Iterator<Item = &[EdwardsPoint]> {
|
||||||
fn iter(&self) -> impl Iterator<Item = &[EdwardsPoint]> {
|
|
||||||
self.matrix.iter().map(AsRef::as_ref)
|
self.matrix.iter().map(AsRef::as_ref)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the amount of members in the ring.
|
/// Return the amount of members in the ring.
|
||||||
pub fn members(&self) -> usize {
|
pub fn members(&self) -> usize {
|
||||||
self.matrix.len()
|
self.matrix.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the length of a ring member.
|
/// Returns the length of a ring member.
|
||||||
///
|
///
|
||||||
/// A ring member is a vector of points for which the signer knows all of the discrete logarithms
|
/// A ring member is a vector of points for which the signer knows all of the discrete logarithms
|
||||||
/// of.
|
/// of.
|
||||||
@@ -94,15 +79,13 @@ impl RingMatrix {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The MLSAG linkable ring signature, as used in Monero.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct Mlsag {
|
pub struct Mlsag {
|
||||||
ss: Vec<Vec<Scalar>>,
|
pub ss: Vec<Vec<Scalar>>,
|
||||||
cc: Scalar,
|
pub cc: Scalar,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Mlsag {
|
impl Mlsag {
|
||||||
/// Write a MLSAG.
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
for ss in &self.ss {
|
for ss in &self.ss {
|
||||||
write_raw_vec(write_scalar, ss, w)?;
|
write_raw_vec(write_scalar, ss, w)?;
|
||||||
@@ -110,7 +93,6 @@ impl Mlsag {
|
|||||||
write_scalar(&self.cc, w)
|
write_scalar(&self.cc, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read a MLSAG.
|
|
||||||
pub fn read<R: Read>(mixins: usize, ss_2_elements: usize, r: &mut R) -> io::Result<Mlsag> {
|
pub fn read<R: Read>(mixins: usize, ss_2_elements: usize, r: &mut R) -> io::Result<Mlsag> {
|
||||||
Ok(Mlsag {
|
Ok(Mlsag {
|
||||||
ss: (0 .. mixins)
|
ss: (0 .. mixins)
|
||||||
@@ -120,7 +102,6 @@ impl Mlsag {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Verify a MLSAG.
|
|
||||||
pub fn verify(
|
pub fn verify(
|
||||||
&self,
|
&self,
|
||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
@@ -155,24 +136,23 @@ impl Mlsag {
|
|||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let L = EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, ring_member_entry, s);
|
let L = EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, ring_member_entry, s);
|
||||||
|
|
||||||
let compressed_ring_member_entry = ring_member_entry.compress();
|
buf.extend_from_slice(ring_member_entry.compress().as_bytes());
|
||||||
buf.extend_from_slice(compressed_ring_member_entry.as_bytes());
|
|
||||||
buf.extend_from_slice(L.compress().as_bytes());
|
buf.extend_from_slice(L.compress().as_bytes());
|
||||||
|
|
||||||
// Not all dimensions need to be linkable, e.g. commitments, and only linkable layers need
|
// Not all dimensions need to be linkable, e.g. commitments, and only linkable layers need
|
||||||
// to have key images.
|
// to have key images.
|
||||||
if let Some(ki) = ki {
|
if let Some(ki) = ki {
|
||||||
if ki.is_identity() || (!ki.is_torsion_free()) {
|
if ki.is_identity() {
|
||||||
Err(MlsagError::InvalidKeyImage)?;
|
Err(MlsagError::IdentityKeyImage)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let R = (s * hash_to_point(compressed_ring_member_entry.to_bytes())) + (ci * ki);
|
let R = (s * hash_to_point(ring_member_entry)) + (ci * ki);
|
||||||
buf.extend_from_slice(R.compress().as_bytes());
|
buf.extend_from_slice(R.compress().as_bytes());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
ci = keccak256_to_scalar(&buf);
|
ci = hash_to_scalar(&buf);
|
||||||
// keep the msg in the buffer.
|
// keep the msg in the buffer.
|
||||||
buf.drain(msg.len() ..);
|
buf.drain(msg.len() ..);
|
||||||
}
|
}
|
||||||
@@ -184,9 +164,8 @@ impl Mlsag {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Builder for a RingMatrix when using an aggregate signature.
|
/// An aggregate ring matrix builder, usable to set up the ring matrix to prove/verify an aggregate
|
||||||
///
|
/// MLSAG signature.
|
||||||
/// This handles the formatting as necessary.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct AggregateRingMatrixBuilder {
|
pub struct AggregateRingMatrixBuilder {
|
||||||
key_ring: Vec<Vec<EdwardsPoint>>,
|
key_ring: Vec<Vec<EdwardsPoint>>,
|
||||||
@@ -197,12 +176,12 @@ pub struct AggregateRingMatrixBuilder {
|
|||||||
impl AggregateRingMatrixBuilder {
|
impl AggregateRingMatrixBuilder {
|
||||||
/// Create a new AggregateRingMatrixBuilder.
|
/// Create a new AggregateRingMatrixBuilder.
|
||||||
///
|
///
|
||||||
/// This takes in the transaction's outputs' commitments and fee used.
|
/// Takes in the transaction's outputs; commitments and fee.
|
||||||
pub fn new(commitments: &[EdwardsPoint], fee: u64) -> Self {
|
pub fn new(commitments: &[EdwardsPoint], fee: u64) -> Self {
|
||||||
AggregateRingMatrixBuilder {
|
AggregateRingMatrixBuilder {
|
||||||
key_ring: vec![],
|
key_ring: vec![],
|
||||||
amounts_ring: vec![],
|
amounts_ring: vec![],
|
||||||
sum_out: commitments.iter().sum::<EdwardsPoint>() + (*H * Scalar::from(fee)),
|
sum_out: commitments.iter().sum::<EdwardsPoint>() + (H() * Scalar::from(fee)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -227,7 +206,7 @@ impl AggregateRingMatrixBuilder {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Build and return the [`RingMatrix`].
|
/// Build and return the [`RingMatrix`]
|
||||||
pub fn build(mut self) -> Result<RingMatrix, MlsagError> {
|
pub fn build(mut self) -> Result<RingMatrix, MlsagError> {
|
||||||
for (i, amount_commitment) in self.amounts_ring.drain(..).enumerate() {
|
for (i, amount_commitment) in self.amounts_ring.drain(..).enumerate() {
|
||||||
self.key_ring[i].push(amount_commitment);
|
self.key_ring[i].push(amount_commitment);
|
||||||
400
coins/monero/src/ringct/mod.rs
Normal file
400
coins/monero/src/ringct/mod.rs
Normal file
@@ -0,0 +1,400 @@
|
|||||||
|
use core::ops::Deref;
|
||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, Zeroizing};
|
||||||
|
|
||||||
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
pub(crate) mod hash_to_point;
|
||||||
|
pub use hash_to_point::{raw_hash_to_point, hash_to_point};
|
||||||
|
|
||||||
|
/// MLSAG struct, along with verifying functionality.
|
||||||
|
pub mod mlsag;
|
||||||
|
/// CLSAG struct, along with signing and verifying functionality.
|
||||||
|
pub mod clsag;
|
||||||
|
/// BorromeanRange struct, along with verifying functionality.
|
||||||
|
pub mod borromean;
|
||||||
|
/// Bulletproofs(+) structs, along with proving and verifying functionality.
|
||||||
|
pub mod bulletproofs;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Protocol,
|
||||||
|
serialize::*,
|
||||||
|
ringct::{mlsag::Mlsag, clsag::Clsag, borromean::BorromeanRange, bulletproofs::Bulletproofs},
|
||||||
|
};
|
||||||
|
|
||||||
|
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
  // hash_to_point(xG) yields the per-key generator; multiplying by x yields the key image
  hash_to_point(&(ED25519_BASEPOINT_TABLE * secret.deref())) * secret.deref()
}
|
||||||
|
|
||||||
|
/// An encrypted amount, in one of the two formats used across RingCT's lifetime.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EncryptedAmount {
  /// The original format: a 32-byte mask and a 32-byte amount.
  Original { mask: [u8; 32], amount: [u8; 32] },
  /// The compact format: an 8-byte amount only.
  Compact { amount: [u8; 8] },
}
|
||||||
|
|
||||||
|
impl EncryptedAmount {
|
||||||
|
pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<EncryptedAmount> {
|
||||||
|
Ok(if !compact {
|
||||||
|
EncryptedAmount::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
|
||||||
|
} else {
|
||||||
|
EncryptedAmount::Compact { amount: read_bytes(r)? }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
EncryptedAmount::Original { mask, amount } => {
|
||||||
|
w.write_all(mask)?;
|
||||||
|
w.write_all(amount)
|
||||||
|
}
|
||||||
|
EncryptedAmount::Compact { amount } => w.write_all(amount),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The proof type used by a RingCT transaction.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum RctType {
  /// No RCT proofs.
  Null,
  /// One MLSAG for multiple inputs and Borromean range proofs (RCTTypeFull).
  MlsagAggregate,
  /// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
  MlsagIndividual,
  /// One MLSAG for each input and a Bulletproof (RCTTypeBulletproof).
  Bulletproofs,
  /// One MLSAG for each input and a Bulletproof, yet starting to use EncryptedAmount::Compact
  /// (RCTTypeBulletproof2).
  BulletproofsCompactAmount,
  /// One CLSAG for each input and a Bulletproof (RCTTypeCLSAG).
  Clsag,
  /// One CLSAG for each input and a Bulletproof+ (RCTTypeBulletproofPlus).
  BulletproofsPlus,
}
|
||||||
|
|
||||||
|
impl RctType {
|
||||||
|
pub fn to_byte(self) -> u8 {
|
||||||
|
match self {
|
||||||
|
RctType::Null => 0,
|
||||||
|
RctType::MlsagAggregate => 1,
|
||||||
|
RctType::MlsagIndividual => 2,
|
||||||
|
RctType::Bulletproofs => 3,
|
||||||
|
RctType::BulletproofsCompactAmount => 4,
|
||||||
|
RctType::Clsag => 5,
|
||||||
|
RctType::BulletproofsPlus => 6,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_byte(byte: u8) -> Option<Self> {
|
||||||
|
Some(match byte {
|
||||||
|
0 => RctType::Null,
|
||||||
|
1 => RctType::MlsagAggregate,
|
||||||
|
2 => RctType::MlsagIndividual,
|
||||||
|
3 => RctType::Bulletproofs,
|
||||||
|
4 => RctType::BulletproofsCompactAmount,
|
||||||
|
5 => RctType::Clsag,
|
||||||
|
6 => RctType::BulletproofsPlus,
|
||||||
|
_ => None?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn compact_encrypted_amounts(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
RctType::Null |
|
||||||
|
RctType::MlsagAggregate |
|
||||||
|
RctType::MlsagIndividual |
|
||||||
|
RctType::Bulletproofs => false,
|
||||||
|
RctType::BulletproofsCompactAmount | RctType::Clsag | RctType::BulletproofsPlus => true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct RctBase {
|
||||||
|
pub fee: u64,
|
||||||
|
pub pseudo_outs: Vec<EdwardsPoint>,
|
||||||
|
pub encrypted_amounts: Vec<EncryptedAmount>,
|
||||||
|
pub commitments: Vec<EdwardsPoint>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RctBase {
|
||||||
|
pub(crate) fn fee_weight(outputs: usize, fee: u64) -> usize {
|
||||||
|
// 1 byte for the RCT signature type
|
||||||
|
1 + (outputs * (8 + 32)) + varint_len(fee)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
||||||
|
w.write_all(&[rct_type.to_byte()])?;
|
||||||
|
match rct_type {
|
||||||
|
RctType::Null => Ok(()),
|
||||||
|
_ => {
|
||||||
|
write_varint(&self.fee, w)?;
|
||||||
|
if rct_type == RctType::MlsagIndividual {
|
||||||
|
write_raw_vec(write_point, &self.pseudo_outs, w)?;
|
||||||
|
}
|
||||||
|
for encrypted_amount in &self.encrypted_amounts {
|
||||||
|
encrypted_amount.write(w)?;
|
||||||
|
}
|
||||||
|
write_raw_vec(write_point, &self.commitments, w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(RctBase, RctType)> {
|
||||||
|
let rct_type =
|
||||||
|
RctType::from_byte(read_byte(r)?).ok_or_else(|| io::Error::other("invalid RCT type"))?;
|
||||||
|
|
||||||
|
match rct_type {
|
||||||
|
RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
|
||||||
|
RctType::Bulletproofs |
|
||||||
|
RctType::BulletproofsCompactAmount |
|
||||||
|
RctType::Clsag |
|
||||||
|
RctType::BulletproofsPlus => {
|
||||||
|
if outputs == 0 {
|
||||||
|
// Because the Bulletproofs(+) layout must be canonical, there must be 1 Bulletproof if
|
||||||
|
// Bulletproofs are in use
|
||||||
|
// If there are Bulletproofs, there must be a matching amount of outputs, implicitly
|
||||||
|
// banning 0 outputs
|
||||||
|
// Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
|
||||||
|
Err(io::Error::other("RCT with Bulletproofs(+) had 0 outputs"))?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
if rct_type == RctType::Null {
|
||||||
|
RctBase { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
|
||||||
|
} else {
|
||||||
|
RctBase {
|
||||||
|
fee: read_varint(r)?,
|
||||||
|
pseudo_outs: if rct_type == RctType::MlsagIndividual {
|
||||||
|
read_raw_vec(read_point, inputs, r)?
|
||||||
|
} else {
|
||||||
|
vec![]
|
||||||
|
},
|
||||||
|
encrypted_amounts: (0 .. outputs)
|
||||||
|
.map(|_| EncryptedAmount::read(rct_type.compact_encrypted_amounts(), r))
|
||||||
|
.collect::<Result<_, _>>()?,
|
||||||
|
commitments: read_raw_vec(read_point, outputs, r)?,
|
||||||
|
}
|
||||||
|
},
|
||||||
|
rct_type,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub enum RctPrunable {
|
||||||
|
Null,
|
||||||
|
AggregateMlsagBorromean {
|
||||||
|
borromean: Vec<BorromeanRange>,
|
||||||
|
mlsag: Mlsag,
|
||||||
|
},
|
||||||
|
MlsagBorromean {
|
||||||
|
borromean: Vec<BorromeanRange>,
|
||||||
|
mlsags: Vec<Mlsag>,
|
||||||
|
},
|
||||||
|
MlsagBulletproofs {
|
||||||
|
bulletproofs: Bulletproofs,
|
||||||
|
mlsags: Vec<Mlsag>,
|
||||||
|
pseudo_outs: Vec<EdwardsPoint>,
|
||||||
|
},
|
||||||
|
Clsag {
|
||||||
|
bulletproofs: Bulletproofs,
|
||||||
|
clsags: Vec<Clsag>,
|
||||||
|
pseudo_outs: Vec<EdwardsPoint>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RctPrunable {
|
||||||
|
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
||||||
|
// 1 byte for number of BPs (technically a VarInt, yet there's always just zero or one)
|
||||||
|
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
|
||||||
|
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
RctPrunable::Null => Ok(()),
|
||||||
|
RctPrunable::AggregateMlsagBorromean { borromean, mlsag } => {
|
||||||
|
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
||||||
|
mlsag.write(w)
|
||||||
|
}
|
||||||
|
RctPrunable::MlsagBorromean { borromean, mlsags } => {
|
||||||
|
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
||||||
|
write_raw_vec(Mlsag::write, mlsags, w)
|
||||||
|
}
|
||||||
|
RctPrunable::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
|
||||||
|
if rct_type == RctType::Bulletproofs {
|
||||||
|
w.write_all(&1u32.to_le_bytes())?;
|
||||||
|
} else {
|
||||||
|
w.write_all(&[1])?;
|
||||||
|
}
|
||||||
|
bulletproofs.write(w)?;
|
||||||
|
|
||||||
|
write_raw_vec(Mlsag::write, mlsags, w)?;
|
||||||
|
write_raw_vec(write_point, pseudo_outs, w)
|
||||||
|
}
|
||||||
|
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
||||||
|
w.write_all(&[1])?;
|
||||||
|
bulletproofs.write(w)?;
|
||||||
|
|
||||||
|
write_raw_vec(Clsag::write, clsags, w)?;
|
||||||
|
write_raw_vec(write_point, pseudo_outs, w)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
|
||||||
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized, rct_type).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(
|
||||||
|
rct_type: RctType,
|
||||||
|
ring_length: usize,
|
||||||
|
inputs: usize,
|
||||||
|
outputs: usize,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<RctPrunable> {
|
||||||
|
// While we generally don't bother with misc consensus checks, this affects the safety of
|
||||||
|
// the below defined rct_type function
|
||||||
|
// The exact line preventing zero-input transactions is:
|
||||||
|
// https://github.com/monero-project/monero/blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/
|
||||||
|
// src/ringct/rctSigs.cpp#L609
|
||||||
|
// And then for RctNull, that's only allowed for miner TXs which require one input of
|
||||||
|
// Input::Gen
|
||||||
|
if inputs == 0 {
|
||||||
|
Err(io::Error::other("transaction had no inputs"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(match rct_type {
|
||||||
|
RctType::Null => RctPrunable::Null,
|
||||||
|
RctType::MlsagAggregate => RctPrunable::AggregateMlsagBorromean {
|
||||||
|
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
||||||
|
mlsag: Mlsag::read(ring_length, inputs + 1, r)?,
|
||||||
|
},
|
||||||
|
RctType::MlsagIndividual => RctPrunable::MlsagBorromean {
|
||||||
|
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
||||||
|
mlsags: (0 .. inputs).map(|_| Mlsag::read(ring_length, 2, r)).collect::<Result<_, _>>()?,
|
||||||
|
},
|
||||||
|
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => {
|
||||||
|
RctPrunable::MlsagBulletproofs {
|
||||||
|
bulletproofs: {
|
||||||
|
if (if rct_type == RctType::Bulletproofs {
|
||||||
|
u64::from(read_u32(r)?)
|
||||||
|
} else {
|
||||||
|
read_varint(r)?
|
||||||
|
}) != 1
|
||||||
|
{
|
||||||
|
Err(io::Error::other("n bulletproofs instead of one"))?;
|
||||||
|
}
|
||||||
|
Bulletproofs::read(r)?
|
||||||
|
},
|
||||||
|
mlsags: (0 .. inputs)
|
||||||
|
.map(|_| Mlsag::read(ring_length, 2, r))
|
||||||
|
.collect::<Result<_, _>>()?,
|
||||||
|
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
RctType::Clsag | RctType::BulletproofsPlus => RctPrunable::Clsag {
|
||||||
|
bulletproofs: {
|
||||||
|
if read_varint::<_, u64>(r)? != 1 {
|
||||||
|
Err(io::Error::other("n bulletproofs instead of one"))?;
|
||||||
|
}
|
||||||
|
(if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
|
||||||
|
r,
|
||||||
|
)?
|
||||||
|
},
|
||||||
|
clsags: (0 .. inputs).map(|_| Clsag::read(ring_length, r)).collect::<Result<_, _>>()?,
|
||||||
|
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
|
||||||
|
RctPrunable::AggregateMlsagBorromean { borromean, .. } |
|
||||||
|
RctPrunable::MlsagBorromean { borromean, .. } => {
|
||||||
|
borromean.iter().try_for_each(|rs| rs.write(w))
|
||||||
|
}
|
||||||
|
RctPrunable::MlsagBulletproofs { bulletproofs, .. } |
|
||||||
|
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct RctSignatures {
|
||||||
|
pub base: RctBase,
|
||||||
|
pub prunable: RctPrunable,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RctSignatures {
|
||||||
|
/// RctType for a given RctSignatures struct.
|
||||||
|
pub fn rct_type(&self) -> RctType {
|
||||||
|
match &self.prunable {
|
||||||
|
RctPrunable::Null => RctType::Null,
|
||||||
|
RctPrunable::AggregateMlsagBorromean { .. } => RctType::MlsagAggregate,
|
||||||
|
RctPrunable::MlsagBorromean { .. } => RctType::MlsagIndividual,
|
||||||
|
// RctBase ensures there's at least one output, making the following
|
||||||
|
// inferences guaranteed/expects impossible on any valid RctSignatures
|
||||||
|
RctPrunable::MlsagBulletproofs { .. } => {
|
||||||
|
if matches!(
|
||||||
|
self
|
||||||
|
.base
|
||||||
|
.encrypted_amounts
|
||||||
|
.first()
|
||||||
|
.expect("MLSAG with Bulletproofs didn't have any outputs"),
|
||||||
|
EncryptedAmount::Original { .. }
|
||||||
|
) {
|
||||||
|
RctType::Bulletproofs
|
||||||
|
} else {
|
||||||
|
RctType::BulletproofsCompactAmount
|
||||||
|
}
|
||||||
|
}
|
||||||
|
RctPrunable::Clsag { bulletproofs, .. } => {
|
||||||
|
if matches!(bulletproofs, Bulletproofs::Original { .. }) {
|
||||||
|
RctType::Clsag
|
||||||
|
} else {
|
||||||
|
RctType::BulletproofsPlus
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize, fee: u64) -> usize {
|
||||||
|
RctBase::fee_weight(outputs, fee) + RctPrunable::fee_weight(protocol, inputs, outputs)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
let rct_type = self.rct_type();
|
||||||
|
self.base.write(w, rct_type)?;
|
||||||
|
self.prunable.write(w, rct_type)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut serialized = vec![];
|
||||||
|
self.write(&mut serialized).unwrap();
|
||||||
|
serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: Read>(
|
||||||
|
ring_length: usize,
|
||||||
|
inputs: usize,
|
||||||
|
outputs: usize,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<RctSignatures> {
|
||||||
|
let base = RctBase::read(inputs, outputs, r)?;
|
||||||
|
Ok(RctSignatures {
|
||||||
|
base: base.0,
|
||||||
|
prunable: RctPrunable::read(base.1, ring_length, inputs, outputs, r)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,10 +1,7 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
|
||||||
#![doc = include_str!("../README.md")]
|
|
||||||
#![deny(missing_docs)]
|
|
||||||
|
|
||||||
use core::future::Future;
|
|
||||||
use std::{sync::Arc, io::Read, time::Duration};
|
use std::{sync::Arc, io::Read, time::Duration};
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
use digest_auth::{WwwAuthenticateHeader, AuthContext};
|
use digest_auth::{WwwAuthenticateHeader, AuthContext};
|
||||||
@@ -13,7 +10,7 @@ use simple_request::{
|
|||||||
Response, Client,
|
Response, Client,
|
||||||
};
|
};
|
||||||
|
|
||||||
use monero_rpc::{RpcError, Rpc};
|
use crate::rpc::{RpcError, RpcConnection, Rpc};
|
||||||
|
|
||||||
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(30);
|
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(30);
|
||||||
|
|
||||||
@@ -36,13 +33,13 @@ enum Authentication {
|
|||||||
///
|
///
|
||||||
/// Requires tokio.
|
/// Requires tokio.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct SimpleRequestRpc {
|
pub struct HttpRpc {
|
||||||
authentication: Authentication,
|
authentication: Authentication,
|
||||||
url: String,
|
url: String,
|
||||||
request_timeout: Duration,
|
request_timeout: Duration,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SimpleRequestRpc {
|
impl HttpRpc {
|
||||||
fn digest_auth_challenge(
|
fn digest_auth_challenge(
|
||||||
response: &Response,
|
response: &Response,
|
||||||
) -> Result<Option<(WwwAuthenticateHeader, u64)>, RpcError> {
|
) -> Result<Option<(WwwAuthenticateHeader, u64)>, RpcError> {
|
||||||
@@ -63,7 +60,7 @@ impl SimpleRequestRpc {
|
|||||||
///
|
///
|
||||||
/// A daemon requiring authentication can be used via including the username and password in the
|
/// A daemon requiring authentication can be used via including the username and password in the
|
||||||
/// URL.
|
/// URL.
|
||||||
pub async fn new(url: String) -> Result<SimpleRequestRpc, RpcError> {
|
pub async fn new(url: String) -> Result<Rpc<HttpRpc>, RpcError> {
|
||||||
Self::with_custom_timeout(url, DEFAULT_TIMEOUT).await
|
Self::with_custom_timeout(url, DEFAULT_TIMEOUT).await
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -74,7 +71,7 @@ impl SimpleRequestRpc {
|
|||||||
pub async fn with_custom_timeout(
|
pub async fn with_custom_timeout(
|
||||||
mut url: String,
|
mut url: String,
|
||||||
request_timeout: Duration,
|
request_timeout: Duration,
|
||||||
) -> Result<SimpleRequestRpc, RpcError> {
|
) -> Result<Rpc<HttpRpc>, RpcError> {
|
||||||
let authentication = if url.contains('@') {
|
let authentication = if url.contains('@') {
|
||||||
// Parse out the username and password
|
// Parse out the username and password
|
||||||
let url_clone = url;
|
let url_clone = url;
|
||||||
@@ -122,11 +119,11 @@ impl SimpleRequestRpc {
|
|||||||
Authentication::Unauthenticated(Client::with_connection_pool())
|
Authentication::Unauthenticated(Client::with_connection_pool())
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(SimpleRequestRpc { authentication, url, request_timeout })
|
Ok(Rpc(HttpRpc { authentication, url, request_timeout }))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SimpleRequestRpc {
|
impl HttpRpc {
|
||||||
async fn inner_post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
async fn inner_post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
||||||
let request_fn = |uri| {
|
let request_fn = |uri| {
|
||||||
Request::post(uri)
|
Request::post(uri)
|
||||||
@@ -279,16 +276,11 @@ impl SimpleRequestRpc {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Rpc for SimpleRequestRpc {
|
#[async_trait]
|
||||||
fn post(
|
impl RpcConnection for HttpRpc {
|
||||||
&self,
|
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
||||||
route: &str,
|
|
||||||
body: Vec<u8>,
|
|
||||||
) -> impl Send + Future<Output = Result<Vec<u8>, RpcError>> {
|
|
||||||
async move {
|
|
||||||
tokio::time::timeout(self.request_timeout, self.inner_post(route, body))
|
tokio::time::timeout(self.request_timeout, self.inner_post(route, body))
|
||||||
.await
|
.await
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
|
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
761
coins/monero/src/rpc/mod.rs
Normal file
761
coins/monero/src/rpc/mod.rs
Normal file
@@ -0,0 +1,761 @@
|
|||||||
|
use core::fmt::Debug;
|
||||||
|
#[cfg(not(feature = "std"))]
|
||||||
|
use alloc::boxed::Box;
|
||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io,
|
||||||
|
string::{String, ToString},
|
||||||
|
};
|
||||||
|
|
||||||
|
use async_trait::async_trait;
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
|
use monero_generators::decompress_point;
|
||||||
|
|
||||||
|
use serde::{Serialize, Deserialize, de::DeserializeOwned};
|
||||||
|
use serde_json::{Value, json};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Protocol,
|
||||||
|
serialize::*,
|
||||||
|
transaction::{Input, Timelock, Transaction},
|
||||||
|
block::Block,
|
||||||
|
wallet::{FeePriority, Fee},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(feature = "http-rpc")]
|
||||||
|
mod http;
|
||||||
|
#[cfg(feature = "http-rpc")]
|
||||||
|
pub use http::*;
|
||||||
|
|
||||||
|
// Number of blocks the fee estimate will be valid for
|
||||||
|
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
||||||
|
// src/wallet/wallet2.cpp#L121
|
||||||
|
const GRACE_BLOCKS_FOR_FEE_ESTIMATE: u64 = 10;
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct EmptyResponse {}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct JsonRpcResponse<T> {
|
||||||
|
result: T,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionResponse {
|
||||||
|
tx_hash: String,
|
||||||
|
as_hex: String,
|
||||||
|
pruned_as_hex: String,
|
||||||
|
}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionsResponse {
|
||||||
|
#[serde(default)]
|
||||||
|
missed_tx: Vec<String>,
|
||||||
|
txs: Vec<TransactionResponse>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct OutputResponse {
|
||||||
|
pub height: usize,
|
||||||
|
pub unlocked: bool,
|
||||||
|
key: String,
|
||||||
|
mask: String,
|
||||||
|
txid: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||||
|
pub enum RpcError {
|
||||||
|
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
||||||
|
InternalError(&'static str),
|
||||||
|
#[cfg_attr(feature = "std", error("connection error ({0})"))]
|
||||||
|
ConnectionError(String),
|
||||||
|
#[cfg_attr(feature = "std", error("invalid node ({0})"))]
|
||||||
|
InvalidNode(String),
|
||||||
|
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
|
||||||
|
UnsupportedProtocol(usize),
|
||||||
|
#[cfg_attr(feature = "std", error("transactions not found"))]
|
||||||
|
TransactionsNotFound(Vec<[u8; 32]>),
|
||||||
|
#[cfg_attr(feature = "std", error("invalid point ({0})"))]
|
||||||
|
InvalidPoint(String),
|
||||||
|
#[cfg_attr(feature = "std", error("pruned transaction"))]
|
||||||
|
PrunedTransaction,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
|
||||||
|
InvalidTransaction([u8; 32]),
|
||||||
|
#[cfg_attr(feature = "std", error("unexpected fee response"))]
|
||||||
|
InvalidFee,
|
||||||
|
#[cfg_attr(feature = "std", error("invalid priority"))]
|
||||||
|
InvalidPriority,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
|
||||||
|
hex::decode(value).map_err(|_| RpcError::InvalidNode("expected hex wasn't hex".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
|
||||||
|
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode("hash wasn't 32-bytes".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
|
||||||
|
decompress_point(
|
||||||
|
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
|
||||||
|
)
|
||||||
|
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol
|
||||||
|
fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
|
||||||
|
let vi_start = read_byte(reader)?;
|
||||||
|
let len = match vi_start & 0b11 {
|
||||||
|
0 => 1,
|
||||||
|
1 => 2,
|
||||||
|
2 => 4,
|
||||||
|
3 => 8,
|
||||||
|
_ => unreachable!(),
|
||||||
|
};
|
||||||
|
let mut vi = u64::from(vi_start >> 2);
|
||||||
|
for i in 1 .. len {
|
||||||
|
vi |= u64::from(read_byte(reader)?) << (((i - 1) * 8) + 6);
|
||||||
|
}
|
||||||
|
Ok(vi)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[async_trait]
|
||||||
|
pub trait RpcConnection: Clone + Debug {
|
||||||
|
/// Perform a POST request to the specified route with the specified body.
|
||||||
|
///
|
||||||
|
/// The implementor is left to handle anything such as authentication.
|
||||||
|
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Make this provided methods for RpcConnection?
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Rpc<R: RpcConnection>(R);
|
||||||
|
impl<R: RpcConnection> Rpc<R> {
|
||||||
|
/// Perform a RPC call to the specified route with the provided parameters.
|
||||||
|
///
|
||||||
|
/// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via
|
||||||
|
/// `json_rpc_call`.
|
||||||
|
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
|
||||||
|
&self,
|
||||||
|
route: &str,
|
||||||
|
params: Option<Params>,
|
||||||
|
) -> Result<Response, RpcError> {
|
||||||
|
let res = self
|
||||||
|
.0
|
||||||
|
.post(
|
||||||
|
route,
|
||||||
|
if let Some(params) = params {
|
||||||
|
serde_json::to_string(¶ms).unwrap().into_bytes()
|
||||||
|
} else {
|
||||||
|
vec![]
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let res_str = std_shims::str::from_utf8(&res)
|
||||||
|
.map_err(|_| RpcError::InvalidNode("response wasn't utf-8".to_string()))?;
|
||||||
|
serde_json::from_str(res_str)
|
||||||
|
.map_err(|_| RpcError::InvalidNode(format!("response wasn't json: {res_str}")))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Perform a JSON-RPC call with the specified method with the provided parameters
|
||||||
|
pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
|
||||||
|
&self,
|
||||||
|
method: &str,
|
||||||
|
params: Option<Value>,
|
||||||
|
) -> Result<Response, RpcError> {
|
||||||
|
let mut req = json!({ "method": method });
|
||||||
|
if let Some(params) = params {
|
||||||
|
req.as_object_mut().unwrap().insert("params".into(), params);
|
||||||
|
}
|
||||||
|
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Perform a binary call to the specified route with the provided parameters.
|
||||||
|
pub async fn bin_call(&self, route: &str, params: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
||||||
|
self.0.post(route, params).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the active blockchain protocol version.
|
||||||
|
pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct ProtocolResponse {
|
||||||
|
major_version: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct LastHeaderResponse {
|
||||||
|
block_header: ProtocolResponse,
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
match self
|
||||||
|
.json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
|
||||||
|
.await?
|
||||||
|
.block_header
|
||||||
|
.major_version
|
||||||
|
{
|
||||||
|
13 | 14 => Protocol::v14,
|
||||||
|
15 | 16 => Protocol::v16,
|
||||||
|
protocol => Err(RpcError::UnsupportedProtocol(protocol))?,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_height(&self) -> Result<usize, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct HeightResponse {
|
||||||
|
height: usize,
|
||||||
|
}
|
||||||
|
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
if hashes.is_empty() {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut hashes_hex = hashes.iter().map(hex::encode).collect::<Vec<_>>();
|
||||||
|
let mut all_txs = Vec::with_capacity(hashes.len());
|
||||||
|
while !hashes_hex.is_empty() {
|
||||||
|
// Monero errors if more than 100 is requested unless using a non-restricted RPC
|
||||||
|
const TXS_PER_REQUEST: usize = 100;
|
||||||
|
let this_count = TXS_PER_REQUEST.min(hashes_hex.len());
|
||||||
|
|
||||||
|
let txs: TransactionsResponse = self
|
||||||
|
.rpc_call(
|
||||||
|
"get_transactions",
|
||||||
|
Some(json!({
|
||||||
|
"txs_hashes": hashes_hex.drain(.. this_count).collect::<Vec<_>>(),
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !txs.missed_tx.is_empty() {
|
||||||
|
Err(RpcError::TransactionsNotFound(
|
||||||
|
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
|
||||||
|
))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
all_txs.extend(txs.txs);
|
||||||
|
}
|
||||||
|
|
||||||
|
all_txs
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, res)| {
|
||||||
|
let tx = Transaction::read::<&[u8]>(
|
||||||
|
&mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
|
||||||
|
.as_ref(),
|
||||||
|
)
|
||||||
|
.map_err(|_| match hash_hex(&res.tx_hash) {
|
||||||
|
Ok(hash) => RpcError::InvalidTransaction(hash),
|
||||||
|
Err(err) => err,
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// https://github.com/monero-project/monero/issues/8311
|
||||||
|
if res.as_hex.is_empty() {
|
||||||
|
match tx.prefix.inputs.first() {
|
||||||
|
Some(Input::Gen { .. }) => (),
|
||||||
|
_ => Err(RpcError::PrunedTransaction)?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This does run a few keccak256 hashes, which is pointless if the node is trusted
|
||||||
|
// In exchange, this provides resilience against invalid/malicious nodes
|
||||||
|
if tx.hash() != hashes[i] {
|
||||||
|
Err(RpcError::InvalidNode(
|
||||||
|
"replied with transaction wasn't the requested transaction".to_string(),
|
||||||
|
))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(tx)
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
|
||||||
|
self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the hash of a block from the node by the block's numbers.
|
||||||
|
/// This function does not verify the returned block hash is actually for the number in question.
|
||||||
|
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockHeaderResponse {
|
||||||
|
hash: String,
|
||||||
|
}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockHeaderByHeightResponse {
|
||||||
|
block_header: BlockHeaderResponse,
|
||||||
|
}
|
||||||
|
|
||||||
|
let header: BlockHeaderByHeightResponse =
|
||||||
|
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
|
||||||
|
hash_hex(&header.block_header.hash)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get a block from the node by its hash.
|
||||||
|
/// This function does not verify the returned block actually has the hash in question.
|
||||||
|
pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockResponse {
|
||||||
|
blob: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: BlockResponse =
|
||||||
|
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
|
||||||
|
|
||||||
|
let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
|
||||||
|
.map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?;
|
||||||
|
if block.hash() != hash {
|
||||||
|
Err(RpcError::InvalidNode("different block than requested (hash)".to_string()))?;
|
||||||
|
}
|
||||||
|
Ok(block)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockResponse {
|
||||||
|
blob: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: BlockResponse =
|
||||||
|
self.json_rpc_call("get_block", Some(json!({ "height": number }))).await?;
|
||||||
|
|
||||||
|
let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
|
||||||
|
.map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?;
|
||||||
|
|
||||||
|
// Make sure this is actually the block for this number
|
||||||
|
match block.miner_tx.prefix.inputs.first() {
|
||||||
|
Some(Input::Gen(actual)) => {
|
||||||
|
if usize::try_from(*actual).unwrap() == number {
|
||||||
|
Ok(block)
|
||||||
|
} else {
|
||||||
|
Err(RpcError::InvalidNode("different block than requested (number)".to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => Err(RpcError::InvalidNode(
|
||||||
|
"block's miner_tx didn't have an input of kind Input::Gen".to_string(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
let block = self.get_block(hash).await?;
|
||||||
|
let mut res = vec![block.miner_tx];
|
||||||
|
res.extend(self.get_transactions(&block.txs).await?);
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block_transactions_by_number(
|
||||||
|
&self,
|
||||||
|
number: usize,
|
||||||
|
) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
self.get_block_transactions(self.get_block_hash(number).await?).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the output indexes of the specified transaction.
|
||||||
|
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
|
||||||
|
/*
|
||||||
|
TODO: Use these when a suitable epee serde lib exists
|
||||||
|
|
||||||
|
#[derive(Serialize, Debug)]
|
||||||
|
struct Request {
|
||||||
|
txid: [u8; 32],
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct OIndexes {
|
||||||
|
o_indexes: Vec<u64>,
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
|
// Given the immaturity of Rust epee libraries, this is a homegrown one which is only validated
|
||||||
|
// to work against this specific function
|
||||||
|
|
||||||
|
// Header for EPEE, an 8-byte magic and a version
|
||||||
|
const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01";
|
||||||
|
|
||||||
|
let mut request = EPEE_HEADER.to_vec();
|
||||||
|
// Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata)
|
||||||
|
request.push(1 << 2);
|
||||||
|
// Length of field name
|
||||||
|
request.push(4);
|
||||||
|
// Field name
|
||||||
|
request.extend(b"txid");
|
||||||
|
// Type of field
|
||||||
|
request.push(10);
|
||||||
|
// Length of string, since this byte array is technically a string
|
||||||
|
request.push(32 << 2);
|
||||||
|
// The "string"
|
||||||
|
request.extend(hash);
|
||||||
|
|
||||||
|
let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?;
|
||||||
|
let mut indexes: &[u8] = indexes_buf.as_ref();
|
||||||
|
|
||||||
|
(|| {
|
||||||
|
let mut res = None;
|
||||||
|
let mut is_okay = false;
|
||||||
|
|
||||||
|
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
|
||||||
|
Err(io::Error::other("invalid header"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let read_object = |reader: &mut &[u8]| -> io::Result<Vec<u64>> {
|
||||||
|
let fields = read_byte(reader)? >> 2;
|
||||||
|
|
||||||
|
for _ in 0 .. fields {
|
||||||
|
let name_len = read_byte(reader)?;
|
||||||
|
let name = read_raw_vec(read_byte, name_len.into(), reader)?;
|
||||||
|
|
||||||
|
let type_with_array_flag = read_byte(reader)?;
|
||||||
|
let kind = type_with_array_flag & (!0x80);
|
||||||
|
|
||||||
|
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
|
||||||
|
|
||||||
|
if (&name == b"o_indexes") && (kind != 5) {
|
||||||
|
Err(io::Error::other("o_indexes weren't u64s"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let f = match kind {
|
||||||
|
// i64
|
||||||
|
1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
||||||
|
// i32
|
||||||
|
2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
|
||||||
|
// i16
|
||||||
|
3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
|
||||||
|
// i8
|
||||||
|
4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
||||||
|
// u64
|
||||||
|
5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
||||||
|
// u32
|
||||||
|
6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
|
||||||
|
// u16
|
||||||
|
7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
|
||||||
|
// u8
|
||||||
|
8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
||||||
|
// double
|
||||||
|
9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
||||||
|
// string, or any collection of bytes
|
||||||
|
10 => |reader: &mut &[u8]| {
|
||||||
|
let len = read_epee_vi(reader)?;
|
||||||
|
read_raw_vec(
|
||||||
|
read_byte,
|
||||||
|
len.try_into().map_err(|_| io::Error::other("u64 length exceeded usize"))?,
|
||||||
|
reader,
|
||||||
|
)
|
||||||
|
},
|
||||||
|
// bool
|
||||||
|
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
||||||
|
// object, errors here as it shouldn't be used on this call
|
||||||
|
12 => {
|
||||||
|
|_: &mut &[u8]| Err(io::Error::other("node used object in reply to get_o_indexes"))
|
||||||
|
}
|
||||||
|
// array, so far unused
|
||||||
|
13 => |_: &mut &[u8]| Err(io::Error::other("node used the unused array type")),
|
||||||
|
_ => |_: &mut &[u8]| Err(io::Error::other("node used an invalid type")),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut bytes_res = vec![];
|
||||||
|
for _ in 0 .. iters {
|
||||||
|
bytes_res.push(f(reader)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut actual_res = Vec::with_capacity(bytes_res.len());
|
||||||
|
match name.as_slice() {
|
||||||
|
b"o_indexes" => {
|
||||||
|
for o_index in bytes_res {
|
||||||
|
actual_res.push(u64::from_le_bytes(
|
||||||
|
o_index
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| io::Error::other("node didn't provide 8 bytes for a u64"))?,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
res = Some(actual_res);
|
||||||
|
}
|
||||||
|
b"status" => {
|
||||||
|
if bytes_res
|
||||||
|
.first()
|
||||||
|
.ok_or_else(|| io::Error::other("status wasn't a string"))?
|
||||||
|
.as_slice() !=
|
||||||
|
b"OK"
|
||||||
|
{
|
||||||
|
// TODO: Better handle non-OK responses
|
||||||
|
Err(io::Error::other("response wasn't OK"))?;
|
||||||
|
}
|
||||||
|
is_okay = true;
|
||||||
|
}
|
||||||
|
_ => continue,
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_okay && res.is_some() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Didn't return a response with a status
|
||||||
|
// (if the status wasn't okay, we would've already errored)
|
||||||
|
if !is_okay {
|
||||||
|
Err(io::Error::other("response didn't contain a status"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the Vec was empty, it would've been omitted, hence the unwrap_or
|
||||||
|
// TODO: Test against a 0-output TX, such as the ones found in block 202612
|
||||||
|
Ok(res.unwrap_or(vec![]))
|
||||||
|
};
|
||||||
|
|
||||||
|
read_object(&mut indexes)
|
||||||
|
})()
|
||||||
|
.map_err(|_| RpcError::InvalidNode("invalid binary response".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the output distribution, from the specified height to the specified height (both
|
||||||
|
/// inclusive).
|
||||||
|
pub async fn get_output_distribution(
|
||||||
|
&self,
|
||||||
|
from: usize,
|
||||||
|
to: usize,
|
||||||
|
) -> Result<Vec<u64>, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Distribution {
|
||||||
|
distribution: Vec<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Distributions {
|
||||||
|
distributions: Vec<Distribution>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut distributions: Distributions = self
|
||||||
|
.json_rpc_call(
|
||||||
|
"get_output_distribution",
|
||||||
|
Some(json!({
|
||||||
|
"binary": false,
|
||||||
|
"amounts": [0],
|
||||||
|
"cumulative": true,
|
||||||
|
"from_height": from,
|
||||||
|
"to_height": to,
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(distributions.distributions.swap_remove(0).distribution)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the specified outputs from the RingCT (zero-amount) pool
|
||||||
|
pub async fn get_outs(&self, indexes: &[u64]) -> Result<Vec<OutputResponse>, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct OutsResponse {
|
||||||
|
status: String,
|
||||||
|
outs: Vec<OutputResponse>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: OutsResponse = self
|
||||||
|
.rpc_call(
|
||||||
|
"get_outs",
|
||||||
|
Some(json!({
|
||||||
|
"get_txid": true,
|
||||||
|
"outputs": indexes.iter().map(|o| json!({
|
||||||
|
"amount": 0,
|
||||||
|
"index": o
|
||||||
|
})).collect::<Vec<_>>()
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if res.status != "OK" {
|
||||||
|
Err(RpcError::InvalidNode("bad response to get_outs".to_string()))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(res.outs)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
|
||||||
|
/// timelock has been satisfied.
|
||||||
|
///
|
||||||
|
/// The timelock being satisfied is distinct from being free of the 10-block lock applied to all
|
||||||
|
/// Monero transactions.
|
||||||
|
pub async fn get_unlocked_outputs(
|
||||||
|
&self,
|
||||||
|
indexes: &[u64],
|
||||||
|
height: usize,
|
||||||
|
fingerprintable_canonical: bool,
|
||||||
|
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
|
||||||
|
let outs: Vec<OutputResponse> = self.get_outs(indexes).await?;
|
||||||
|
|
||||||
|
// Only need to fetch txs to do canonical check on timelock
|
||||||
|
let txs = if fingerprintable_canonical {
|
||||||
|
self
|
||||||
|
.get_transactions(
|
||||||
|
&outs.iter().map(|out| hash_hex(&out.txid)).collect::<Result<Vec<_>, _>>()?,
|
||||||
|
)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
Vec::new()
|
||||||
|
};
|
||||||
|
|
||||||
|
// TODO: https://github.com/serai-dex/serai/issues/104
|
||||||
|
outs
|
||||||
|
.iter()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(i, out)| {
|
||||||
|
// Allow keys to be invalid, though if they are, return None to trigger selection of a new
|
||||||
|
// decoy
|
||||||
|
// Only valid keys can be used in CLSAG proofs, hence the need for re-selection, yet
|
||||||
|
// invalid keys may honestly exist on the blockchain
|
||||||
|
// Only a recent hard fork checked output keys were valid points
|
||||||
|
let Some(key) = decompress_point(
|
||||||
|
rpc_hex(&out.key)?
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| RpcError::InvalidNode("non-32-byte point".to_string()))?,
|
||||||
|
) else {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
Ok(Some([key, rpc_point(&out.mask)?]).filter(|_| {
|
||||||
|
if fingerprintable_canonical {
|
||||||
|
Timelock::Block(height) >= txs[i].prefix.timelock
|
||||||
|
} else {
|
||||||
|
out.unlocked
|
||||||
|
}
|
||||||
|
}))
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_fee_v14(&self, priority: FeePriority) -> Result<Fee, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct FeeResponseV14 {
|
||||||
|
status: String,
|
||||||
|
fee: u64,
|
||||||
|
quantization_mask: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
||||||
|
// src/wallet/wallet2.cpp#L7569-L7584
|
||||||
|
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
||||||
|
// src/wallet/wallet2.cpp#L7660-L7661
|
||||||
|
let priority_idx =
|
||||||
|
usize::try_from(if priority.fee_priority() == 0 { 1 } else { priority.fee_priority() - 1 })
|
||||||
|
.map_err(|_| RpcError::InvalidPriority)?;
|
||||||
|
let multipliers = [1, 5, 25, 1000];
|
||||||
|
if priority_idx >= multipliers.len() {
|
||||||
|
// though not an RPC error, it seems sensible to treat as such
|
||||||
|
Err(RpcError::InvalidPriority)?;
|
||||||
|
}
|
||||||
|
let fee_multiplier = multipliers[priority_idx];
|
||||||
|
|
||||||
|
let res: FeeResponseV14 = self
|
||||||
|
.json_rpc_call(
|
||||||
|
"get_fee_estimate",
|
||||||
|
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if res.status != "OK" {
|
||||||
|
Err(RpcError::InvalidFee)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Fee { per_weight: res.fee * fee_multiplier, mask: res.quantization_mask })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the currently estimated fee from the node.
|
||||||
|
///
|
||||||
|
/// This may be manipulated to unsafe levels and MUST be sanity checked.
|
||||||
|
// TODO: Take a sanity check argument
|
||||||
|
pub async fn get_fee(&self, protocol: Protocol, priority: FeePriority) -> Result<Fee, RpcError> {
|
||||||
|
// TODO: Implement wallet2's adjust_priority which by default automatically uses a lower
|
||||||
|
// priority than provided depending on the backlog in the pool
|
||||||
|
if protocol.v16_fee() {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct FeeResponse {
|
||||||
|
status: String,
|
||||||
|
fees: Vec<u64>,
|
||||||
|
quantization_mask: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: FeeResponse = self
|
||||||
|
.json_rpc_call(
|
||||||
|
"get_fee_estimate",
|
||||||
|
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
||||||
|
// src/wallet/wallet2.cpp#L7615-L7620
|
||||||
|
let priority_idx = usize::try_from(if priority.fee_priority() >= 4 {
|
||||||
|
3
|
||||||
|
} else {
|
||||||
|
priority.fee_priority().saturating_sub(1)
|
||||||
|
})
|
||||||
|
.map_err(|_| RpcError::InvalidPriority)?;
|
||||||
|
|
||||||
|
if res.status != "OK" {
|
||||||
|
Err(RpcError::InvalidFee)
|
||||||
|
} else if priority_idx >= res.fees.len() {
|
||||||
|
Err(RpcError::InvalidPriority)
|
||||||
|
} else {
|
||||||
|
Ok(Fee { per_weight: res.fees[priority_idx], mask: res.quantization_mask })
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
self.get_fee_v14(priority).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct SendRawResponse {
|
||||||
|
status: String,
|
||||||
|
double_spend: bool,
|
||||||
|
fee_too_low: bool,
|
||||||
|
invalid_input: bool,
|
||||||
|
invalid_output: bool,
|
||||||
|
low_mixin: bool,
|
||||||
|
not_relayed: bool,
|
||||||
|
overspend: bool,
|
||||||
|
too_big: bool,
|
||||||
|
too_few_outputs: bool,
|
||||||
|
reason: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: SendRawResponse = self
|
||||||
|
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(tx.serialize()) })))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if res.status != "OK" {
|
||||||
|
Err(RpcError::InvalidTransaction(tx.hash()))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Take &Address, not &str?
|
||||||
|
pub async fn generate_blocks(
|
||||||
|
&self,
|
||||||
|
address: &str,
|
||||||
|
block_count: usize,
|
||||||
|
) -> Result<(Vec<[u8; 32]>, usize), RpcError> {
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
struct BlocksResponse {
|
||||||
|
blocks: Vec<String>,
|
||||||
|
height: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
let res = self
|
||||||
|
.json_rpc_call::<BlocksResponse>(
|
||||||
|
"generateblocks",
|
||||||
|
Some(json!({
|
||||||
|
"wallet_address": address,
|
||||||
|
"amount_of_blocks": block_count
|
||||||
|
})),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut blocks = Vec::with_capacity(res.blocks.len());
|
||||||
|
for block in res.blocks {
|
||||||
|
blocks.push(hash_hex(&block)?);
|
||||||
|
}
|
||||||
|
Ok((blocks, res.height))
|
||||||
|
}
|
||||||
|
}
|
||||||
172
coins/monero/src/serialize.rs
Normal file
172
coins/monero/src/serialize.rs
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
use core::fmt::Debug;
|
||||||
|
use std_shims::{
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_generators::decompress_point;
|
||||||
|
|
||||||
|
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
|
||||||
|
mod sealed {
|
||||||
|
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
|
||||||
|
const BITS: usize;
|
||||||
|
}
|
||||||
|
impl VarInt for u8 {
|
||||||
|
const BITS: usize = 8;
|
||||||
|
}
|
||||||
|
impl VarInt for u32 {
|
||||||
|
const BITS: usize = 32;
|
||||||
|
}
|
||||||
|
impl VarInt for u64 {
|
||||||
|
const BITS: usize = 64;
|
||||||
|
}
|
||||||
|
impl VarInt for usize {
|
||||||
|
const BITS: usize = core::mem::size_of::<usize>() * 8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// This will panic if the VarInt exceeds u64::MAX
|
||||||
|
pub(crate) fn varint_len<U: sealed::VarInt>(varint: U) -> usize {
|
||||||
|
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
|
||||||
|
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&[*byte])
|
||||||
|
}
|
||||||
|
|
||||||
|
// This will panic if the VarInt exceeds u64::MAX
|
||||||
|
pub(crate) fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
|
||||||
|
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
|
||||||
|
while {
|
||||||
|
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
||||||
|
varint >>= 7;
|
||||||
|
if varint != 0 {
|
||||||
|
b |= VARINT_CONTINUATION_MASK;
|
||||||
|
}
|
||||||
|
write_byte(&b, w)?;
|
||||||
|
varint != 0
|
||||||
|
} {}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&scalar.to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&point.compress().to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
||||||
|
f: F,
|
||||||
|
values: &[T],
|
||||||
|
w: &mut W,
|
||||||
|
) -> io::Result<()> {
|
||||||
|
for value in values {
|
||||||
|
f(value, w)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
||||||
|
f: F,
|
||||||
|
values: &[T],
|
||||||
|
w: &mut W,
|
||||||
|
) -> io::Result<()> {
|
||||||
|
write_varint(&values.len(), w)?;
|
||||||
|
write_raw_vec(f, values, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
|
||||||
|
let mut res = [0; N];
|
||||||
|
r.read_exact(&mut res)?;
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
|
||||||
|
Ok(read_bytes::<_, 1>(r)?[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
|
||||||
|
read_bytes(r).map(u16::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
|
||||||
|
read_bytes(r).map(u32::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
|
||||||
|
read_bytes(r).map(u64::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
|
||||||
|
let mut bits = 0;
|
||||||
|
let mut res = 0;
|
||||||
|
while {
|
||||||
|
let b = read_byte(r)?;
|
||||||
|
if (bits != 0) && (b == 0) {
|
||||||
|
Err(io::Error::other("non-canonical varint"))?;
|
||||||
|
}
|
||||||
|
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
|
||||||
|
Err(io::Error::other("varint overflow"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
||||||
|
bits += 7;
|
||||||
|
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
||||||
|
} {}
|
||||||
|
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
|
||||||
|
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
|
||||||
|
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
|
||||||
|
// for now. There's also further edge cases as noted by
|
||||||
|
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
|
||||||
|
// reduction applied
|
||||||
|
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
|
||||||
|
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
|
||||||
|
.ok_or_else(|| io::Error::other("unreduced scalar"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
|
let bytes = read_bytes(r)?;
|
||||||
|
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
|
read_point(r)
|
||||||
|
.ok()
|
||||||
|
.filter(EdwardsPoint::is_torsion_free)
|
||||||
|
.ok_or_else(|| io::Error::other("invalid point"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
||||||
|
f: F,
|
||||||
|
len: usize,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<Vec<T>> {
|
||||||
|
let mut res = vec![];
|
||||||
|
for _ in 0 .. len {
|
||||||
|
res.push(f(r)?);
|
||||||
|
}
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
|
||||||
|
f: F,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<[T; N]> {
|
||||||
|
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
||||||
|
f: F,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<Vec<T>> {
|
||||||
|
read_raw_vec(f, read_varint(r)?, r)
|
||||||
|
}
|
||||||
@@ -2,11 +2,14 @@ use hex_literal::hex;
|
|||||||
|
|
||||||
use rand_core::{RngCore, OsRng};
|
use rand_core::{RngCore, OsRng};
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
|
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
|
||||||
|
|
||||||
use monero_io::decompress_point;
|
use monero_generators::decompress_point;
|
||||||
|
|
||||||
use crate::{Network, AddressType, MoneroAddress};
|
use crate::{
|
||||||
|
random_scalar,
|
||||||
|
wallet::address::{Network, AddressType, AddressMeta, MoneroAddress},
|
||||||
|
};
|
||||||
|
|
||||||
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
|
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
|
||||||
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
|
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
|
||||||
@@ -27,49 +30,14 @@ const SUBADDRESS: &str =
|
|||||||
|
|
||||||
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
|
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_encoded_len_for_bytes() {
|
|
||||||
// For an encoding of length `l`, we prune to the amount of bytes which encodes with length `l`
|
|
||||||
// This assumes length `l` -> amount of bytes has a singular answer, which is tested here
|
|
||||||
use crate::base58check::*;
|
|
||||||
let mut set = std::collections::HashSet::new();
|
|
||||||
for i in 0 .. BLOCK_LEN {
|
|
||||||
set.insert(encoded_len_for_bytes(i));
|
|
||||||
}
|
|
||||||
assert_eq!(set.len(), BLOCK_LEN);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn base58check() {
|
|
||||||
use crate::base58check::*;
|
|
||||||
|
|
||||||
assert_eq!(encode(&[]), String::new());
|
|
||||||
assert!(decode("").unwrap().is_empty());
|
|
||||||
|
|
||||||
let full_block = &[1, 2, 3, 4, 5, 6, 7, 8];
|
|
||||||
assert_eq!(&decode(&encode(full_block)).unwrap(), full_block);
|
|
||||||
|
|
||||||
let partial_block = &[1, 2, 3];
|
|
||||||
assert_eq!(&decode(&encode(partial_block)).unwrap(), partial_block);
|
|
||||||
|
|
||||||
let max_encoded_block = &[u8::MAX; 8];
|
|
||||||
assert_eq!(&decode(&encode(max_encoded_block)).unwrap(), max_encoded_block);
|
|
||||||
|
|
||||||
let max_decoded_block = "zzzzzzzzzzz";
|
|
||||||
assert!(decode(max_decoded_block).is_none());
|
|
||||||
|
|
||||||
let full_and_partial_block = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11];
|
|
||||||
assert_eq!(&decode(&encode(full_and_partial_block)).unwrap(), full_and_partial_block);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn standard_address() {
|
fn standard_address() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
|
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
|
||||||
assert_eq!(addr.network(), Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.kind(), &AddressType::Legacy);
|
assert_eq!(addr.meta.kind, AddressType::Standard);
|
||||||
assert!(!addr.is_subaddress());
|
assert!(!addr.meta.kind.is_subaddress());
|
||||||
assert_eq!(addr.payment_id(), None);
|
assert_eq!(addr.meta.kind.payment_id(), None);
|
||||||
assert!(!addr.is_guaranteed());
|
assert!(!addr.meta.kind.is_guaranteed());
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
||||||
assert_eq!(addr.to_string(), STANDARD);
|
assert_eq!(addr.to_string(), STANDARD);
|
||||||
@@ -78,11 +46,11 @@ fn standard_address() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn integrated_address() {
|
fn integrated_address() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
|
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
|
||||||
assert_eq!(addr.network(), Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.kind(), &AddressType::LegacyIntegrated(PAYMENT_ID));
|
assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
|
||||||
assert!(!addr.is_subaddress());
|
assert!(!addr.meta.kind.is_subaddress());
|
||||||
assert_eq!(addr.payment_id(), Some(PAYMENT_ID));
|
assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
|
||||||
assert!(!addr.is_guaranteed());
|
assert!(!addr.meta.kind.is_guaranteed());
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
||||||
assert_eq!(addr.to_string(), INTEGRATED);
|
assert_eq!(addr.to_string(), INTEGRATED);
|
||||||
@@ -91,11 +59,11 @@ fn integrated_address() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn subaddress() {
|
fn subaddress() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
|
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
|
||||||
assert_eq!(addr.network(), Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.kind(), &AddressType::Subaddress);
|
assert_eq!(addr.meta.kind, AddressType::Subaddress);
|
||||||
assert!(addr.is_subaddress());
|
assert!(addr.meta.kind.is_subaddress());
|
||||||
assert_eq!(addr.payment_id(), None);
|
assert_eq!(addr.meta.kind.payment_id(), None);
|
||||||
assert!(!addr.is_guaranteed());
|
assert!(!addr.meta.kind.is_guaranteed());
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
|
||||||
assert_eq!(addr.to_string(), SUBADDRESS);
|
assert_eq!(addr.to_string(), SUBADDRESS);
|
||||||
@@ -107,8 +75,8 @@ fn featured() {
|
|||||||
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
|
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
|
||||||
{
|
{
|
||||||
for _ in 0 .. 100 {
|
for _ in 0 .. 100 {
|
||||||
let spend = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
let spend = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
||||||
let view = &Scalar::random(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
let view = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
||||||
|
|
||||||
for features in 0 .. (1 << 3) {
|
for features in 0 .. (1 << 3) {
|
||||||
const SUBADDRESS_FEATURE_BIT: u8 = 1;
|
const SUBADDRESS_FEATURE_BIT: u8 = 1;
|
||||||
@@ -125,7 +93,8 @@ fn featured() {
|
|||||||
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
|
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
|
||||||
|
|
||||||
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
|
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
|
||||||
let addr = MoneroAddress::new(network, kind, spend, view);
|
let meta = AddressMeta::new(network, kind);
|
||||||
|
let addr = MoneroAddress::new(meta, spend, view);
|
||||||
|
|
||||||
assert_eq!(addr.to_string().chars().next().unwrap(), first);
|
assert_eq!(addr.to_string().chars().next().unwrap(), first);
|
||||||
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
|
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
|
||||||
@@ -189,12 +158,14 @@ fn featured_vectors() {
|
|||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
MoneroAddress::new(
|
MoneroAddress::new(
|
||||||
|
AddressMeta::new(
|
||||||
network,
|
network,
|
||||||
AddressType::Featured {
|
AddressType::Featured {
|
||||||
subaddress: vector.subaddress,
|
subaddress: vector.subaddress,
|
||||||
payment_id: vector.payment_id,
|
payment_id: vector.payment_id,
|
||||||
guaranteed: vector.guaranteed
|
guaranteed: vector.guaranteed
|
||||||
},
|
}
|
||||||
|
),
|
||||||
spend,
|
spend,
|
||||||
view
|
view
|
||||||
)
|
)
|
||||||
95
coins/monero/src/tests/bulletproofs/mod.rs
Normal file
95
coins/monero/src/tests/bulletproofs/mod.rs
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
use hex_literal::hex;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
use monero_generators::decompress_point;
|
||||||
|
use multiexp::BatchVerifier;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Commitment, random_scalar,
|
||||||
|
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
|
||||||
|
};
|
||||||
|
|
||||||
|
mod plus;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bulletproofs_vector() {
|
||||||
|
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
|
||||||
|
let point = |point| decompress_point(point).unwrap();
|
||||||
|
|
||||||
|
// Generated from Monero
|
||||||
|
assert!(Bulletproofs::Original(OriginalStruct {
|
||||||
|
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
|
||||||
|
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
|
||||||
|
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
|
||||||
|
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
|
||||||
|
taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
|
||||||
|
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
|
||||||
|
L: vec![
|
||||||
|
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
|
||||||
|
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
|
||||||
|
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
|
||||||
|
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
|
||||||
|
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
|
||||||
|
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
|
||||||
|
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
|
||||||
|
],
|
||||||
|
R: vec![
|
||||||
|
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
|
||||||
|
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
|
||||||
|
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
|
||||||
|
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
|
||||||
|
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
|
||||||
|
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
|
||||||
|
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
|
||||||
|
],
|
||||||
|
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
|
||||||
|
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
|
||||||
|
t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
|
||||||
|
})
|
||||||
|
.verify(
|
||||||
|
&mut OsRng,
|
||||||
|
&[
|
||||||
|
// For some reason, these vectors are * INV_EIGHT
|
||||||
|
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
|
||||||
|
.mul_by_cofactor(),
|
||||||
|
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
|
||||||
|
.mul_by_cofactor(),
|
||||||
|
]
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! bulletproofs_tests {
|
||||||
|
($name: ident, $max: ident, $plus: literal) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
// Create Bulletproofs for all possible output quantities
|
||||||
|
let mut verifier = BatchVerifier::new(16);
|
||||||
|
for i in 1 ..= 16 {
|
||||||
|
let commitments = (1 ..= i)
|
||||||
|
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();
|
||||||
|
|
||||||
|
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
||||||
|
assert!(bp.verify(&mut OsRng, &commitments));
|
||||||
|
assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
|
||||||
|
}
|
||||||
|
assert!(verifier.verify_vartime());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn $max() {
|
||||||
|
// Check Bulletproofs errors if we try to prove for too many outputs
|
||||||
|
let mut commitments = vec![];
|
||||||
|
for _ in 0 .. 17 {
|
||||||
|
commitments.push(Commitment::new(Scalar::ZERO, 0));
|
||||||
|
}
|
||||||
|
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
|
||||||
|
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user