mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
Compare commits
6 Commits
testnet-2
...
aggressive
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7685cc305f | ||
|
|
3ca76c51e4 | ||
|
|
286e96ccd8 | ||
|
|
f93106af6b | ||
|
|
dd5fb0df47 | ||
|
|
3a626cc51e |
11
.github/actions/bitcoin/action.yml
vendored
11
.github/actions/bitcoin/action.yml
vendored
@@ -12,7 +12,7 @@ runs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Bitcoin Daemon Cache
|
- name: Bitcoin Daemon Cache
|
||||||
id: cache-bitcoind
|
id: cache-bitcoind
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: bitcoin.tar.gz
|
path: bitcoin.tar.gz
|
||||||
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||||
@@ -37,4 +37,11 @@ runs:
|
|||||||
|
|
||||||
- name: Bitcoin Regtest Daemon
|
- name: Bitcoin Regtest Daemon
|
||||||
shell: bash
|
shell: bash
|
||||||
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/bitcoin/run.sh -daemon
|
run: |
|
||||||
|
RPC_USER=serai
|
||||||
|
RPC_PASS=seraidex
|
||||||
|
|
||||||
|
bitcoind -txindex -regtest \
|
||||||
|
-rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
|
||||||
|
-rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
|
||||||
|
-daemon
|
||||||
|
|||||||
68
.github/actions/build-dependencies/action.yml
vendored
68
.github/actions/build-dependencies/action.yml
vendored
@@ -1,49 +1,43 @@
|
|||||||
name: build-dependencies
|
name: build-dependencies
|
||||||
description: Installs build dependencies for Serai
|
description: Installs build dependencies for Serai
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
github-token:
|
||||||
|
description: "GitHub token to install Protobuf with"
|
||||||
|
require: true
|
||||||
|
default:
|
||||||
|
|
||||||
|
rust-toolchain:
|
||||||
|
description: "Rust toolchain to install"
|
||||||
|
required: false
|
||||||
|
default: stable
|
||||||
|
|
||||||
|
rust-components:
|
||||||
|
description: "Rust components to install"
|
||||||
|
required: false
|
||||||
|
default:
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
- name: Remove unused packages
|
- name: Install Protobuf
|
||||||
shell: bash
|
uses: arduino/setup-protoc@v2.0.0
|
||||||
run: |
|
with:
|
||||||
sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
|
repo-token: ${{ inputs.github-token }}
|
||||||
sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
|
|
||||||
sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
|
|
||||||
sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
|
|
||||||
sudo apt autoremove -y
|
|
||||||
sudo apt clean
|
|
||||||
docker system prune -a --volumes
|
|
||||||
if: runner.os == 'Linux'
|
|
||||||
|
|
||||||
- name: Remove unused packages
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
(gem uninstall -aIx) || (exit 0)
|
|
||||||
brew uninstall --force "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
|
|
||||||
brew uninstall --force "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
|
|
||||||
brew uninstall --force "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
|
|
||||||
brew uninstall --force "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
|
|
||||||
brew cleanup
|
|
||||||
if: runner.os == 'macOS'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
if [ "$RUNNER_OS" == "Linux" ]; then
|
|
||||||
sudo apt install -y ca-certificates protobuf-compiler
|
|
||||||
elif [ "$RUNNER_OS" == "Windows" ]; then
|
|
||||||
choco install protoc
|
|
||||||
elif [ "$RUNNER_OS" == "macOS" ]; then
|
|
||||||
brew install protobuf
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Install solc
|
- name: Install solc
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cargo install svm-rs
|
pip3 install solc-select==0.2.1
|
||||||
svm install 0.8.25
|
solc-select install 0.8.16
|
||||||
svm use 0.8.25
|
solc-select use 0.8.16
|
||||||
|
|
||||||
|
- name: Install Rust
|
||||||
|
uses: dtolnay/rust-toolchain@master
|
||||||
|
with:
|
||||||
|
toolchain: ${{ inputs.rust-toolchain }}
|
||||||
|
components: ${{ inputs.rust-components }}
|
||||||
|
targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
|
||||||
|
|
||||||
# - name: Cache Rust
|
# - name: Cache Rust
|
||||||
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
|
# uses: Swatinem/rust-cache@v2
|
||||||
|
|||||||
11
.github/actions/monero-wallet-rpc/action.yml
vendored
11
.github/actions/monero-wallet-rpc/action.yml
vendored
@@ -5,14 +5,14 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.1
|
default: v0.18.2.0
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
- name: Monero Wallet RPC Cache
|
- name: Monero Wallet RPC Cache
|
||||||
id: cache-monero-wallet-rpc
|
id: cache-monero-wallet-rpc
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: monero-wallet-rpc
|
path: monero-wallet-rpc
|
||||||
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||||
@@ -41,9 +41,4 @@ runs:
|
|||||||
|
|
||||||
- name: Monero Wallet RPC
|
- name: Monero Wallet RPC
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach
|
||||||
./monero-wallet-rpc --allow-mismatched-daemon-version \
|
|
||||||
--daemon-address 0.0.0.0:18081 --daemon-login serai:seraidex \
|
|
||||||
--disable-rpc-login --rpc-bind-port 18082 \
|
|
||||||
--wallet-dir ./ \
|
|
||||||
--detach
|
|
||||||
|
|||||||
12
.github/actions/monero/action.yml
vendored
12
.github/actions/monero/action.yml
vendored
@@ -5,16 +5,16 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.1
|
default: v0.18.2.0
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
steps:
|
steps:
|
||||||
- name: Monero Daemon Cache
|
- name: Monero Daemon Cache
|
||||||
id: cache-monerod
|
id: cache-monerod
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: /usr/bin/monerod
|
path: monerod
|
||||||
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||||
|
|
||||||
- name: Download the Monero Daemon
|
- name: Download the Monero Daemon
|
||||||
@@ -37,10 +37,8 @@ runs:
|
|||||||
wget https://downloads.getmonero.org/cli/$FILE
|
wget https://downloads.getmonero.org/cli/$FILE
|
||||||
tar -xvf $FILE
|
tar -xvf $FILE
|
||||||
|
|
||||||
sudo mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod /usr/bin/monerod
|
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
|
||||||
sudo chmod 777 /usr/bin/monerod
|
|
||||||
sudo chmod +x /usr/bin/monerod
|
|
||||||
|
|
||||||
- name: Monero Regtest Daemon
|
- name: Monero Regtest Daemon
|
||||||
shell: bash
|
shell: bash
|
||||||
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/monero/run.sh --detach
|
run: ./monerod --regtest --offline --fixed-difficulty=1 --detach
|
||||||
|
|||||||
14
.github/actions/test-dependencies/action.yml
vendored
14
.github/actions/test-dependencies/action.yml
vendored
@@ -2,10 +2,15 @@ name: test-dependencies
|
|||||||
description: Installs test dependencies for Serai
|
description: Installs test dependencies for Serai
|
||||||
|
|
||||||
inputs:
|
inputs:
|
||||||
|
github-token:
|
||||||
|
description: "GitHub token to install Protobuf with"
|
||||||
|
require: true
|
||||||
|
default:
|
||||||
|
|
||||||
monero-version:
|
monero-version:
|
||||||
description: "Monero version to download and run as a regtest node"
|
description: "Monero version to download and run as a regtest node"
|
||||||
required: false
|
required: false
|
||||||
default: v0.18.3.1
|
default: v0.18.2.0
|
||||||
|
|
||||||
bitcoin-version:
|
bitcoin-version:
|
||||||
description: "Bitcoin version to download and run as a regtest node"
|
description: "Bitcoin version to download and run as a regtest node"
|
||||||
@@ -17,12 +22,13 @@ runs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Install Build Dependencies
|
- name: Install Build Dependencies
|
||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
with:
|
||||||
|
github-token: ${{ inputs.github-token }}
|
||||||
|
|
||||||
- name: Install Foundry
|
- name: Install Foundry
|
||||||
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
|
uses: foundry-rs/foundry-toolchain@v1
|
||||||
with:
|
with:
|
||||||
version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
|
version: nightly
|
||||||
cache: false
|
|
||||||
|
|
||||||
- name: Run a Monero Regtest Node
|
- name: Run a Monero Regtest Node
|
||||||
uses: ./.github/actions/monero
|
uses: ./.github/actions/monero
|
||||||
|
|||||||
2
.github/nightly-version
vendored
2
.github/nightly-version
vendored
@@ -1 +1 @@
|
|||||||
nightly-2024-02-07
|
nightly-2023-07-01
|
||||||
|
|||||||
35
.github/workflows/coins-tests.yml
vendored
35
.github/workflows/coins-tests.yml
vendored
@@ -1,35 +0,0 @@
|
|||||||
name: coins/ Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-coins:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Test Dependencies
|
|
||||||
uses: ./.github/actions/test-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: |
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
|
||||||
-p bitcoin-serai \
|
|
||||||
-p ethereum-serai \
|
|
||||||
-p monero-generators \
|
|
||||||
-p monero-serai
|
|
||||||
31
.github/workflows/common-tests.yml
vendored
31
.github/workflows/common-tests.yml
vendored
@@ -1,31 +0,0 @@
|
|||||||
name: common/ Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-common:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: |
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
|
||||||
-p std-shims \
|
|
||||||
-p zalloc \
|
|
||||||
-p serai-db \
|
|
||||||
-p serai-env
|
|
||||||
40
.github/workflows/coordinator-tests.yml
vendored
40
.github/workflows/coordinator-tests.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
name: Coordinator Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "coordinator/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/coordinator/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "coordinator/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/coordinator/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run coordinator Docker tests
|
|
||||||
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
|
||||||
40
.github/workflows/crypto-tests.yml
vendored
40
.github/workflows/crypto-tests.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
name: crypto/ Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-crypto:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: |
|
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
|
||||||
-p flexible-transcript \
|
|
||||||
-p ff-group-tests \
|
|
||||||
-p dalek-ff-group \
|
|
||||||
-p minimal-ed448 \
|
|
||||||
-p ciphersuite \
|
|
||||||
-p multiexp \
|
|
||||||
-p schnorr-signatures \
|
|
||||||
-p dleq \
|
|
||||||
-p dkg \
|
|
||||||
-p modular-frost \
|
|
||||||
-p frost-schnorrkel
|
|
||||||
7
.github/workflows/daily-deny.yml
vendored
7
.github/workflows/daily-deny.yml
vendored
@@ -9,14 +9,17 @@ jobs:
|
|||||||
name: Run cargo deny
|
name: Run cargo deny
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Advisory Cache
|
- name: Advisory Cache
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: ~/.cargo/advisory-db
|
path: ~/.cargo/advisory-db
|
||||||
key: rust-advisory-db
|
key: rust-advisory-db
|
||||||
|
|
||||||
|
- name: Install cargo
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
|
||||||
- name: Install cargo deny
|
- name: Install cargo deny
|
||||||
run: cargo install --locked cargo-deny
|
run: cargo install --locked cargo-deny
|
||||||
|
|
||||||
|
|||||||
22
.github/workflows/full-stack-tests.yml
vendored
22
.github/workflows/full-stack-tests.yml
vendored
@@ -1,22 +0,0 @@
|
|||||||
name: Full Stack Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Full Stack Docker tests
|
|
||||||
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
|
||||||
83
.github/workflows/lint.yml
vendored
83
.github/workflows/lint.yml
vendored
@@ -1,83 +0,0 @@
|
|||||||
name: Lint
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
clippy:
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
os: [ubuntu-latest, macos-13, macos-14, windows-latest]
|
|
||||||
runs-on: ${{ matrix.os }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Get nightly version to use
|
|
||||||
id: nightly
|
|
||||||
shell: bash
|
|
||||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Install nightly rust
|
|
||||||
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy
|
|
||||||
|
|
||||||
- name: Run Clippy
|
|
||||||
run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
|
|
||||||
|
|
||||||
# Also verify the lockfile isn't dirty
|
|
||||||
# This happens when someone edits a Cargo.toml yet doesn't do anything
|
|
||||||
# which causes the lockfile to be updated
|
|
||||||
# The above clippy run will cause it to be updated, so checking there's
|
|
||||||
# no differences present now performs the desired check
|
|
||||||
- name: Verify lockfile
|
|
||||||
shell: bash
|
|
||||||
run: git diff | wc -l | LC_ALL="en_US.utf8" grep -x -e "^[ ]*0"
|
|
||||||
|
|
||||||
deny:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Advisory Cache
|
|
||||||
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
|
|
||||||
with:
|
|
||||||
path: ~/.cargo/advisory-db
|
|
||||||
key: rust-advisory-db
|
|
||||||
|
|
||||||
- name: Install cargo deny
|
|
||||||
run: cargo install --locked cargo-deny
|
|
||||||
|
|
||||||
- name: Run cargo deny
|
|
||||||
run: cargo deny -L error --all-features check
|
|
||||||
|
|
||||||
fmt:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Get nightly version to use
|
|
||||||
id: nightly
|
|
||||||
shell: bash
|
|
||||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Install nightly rust
|
|
||||||
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt
|
|
||||||
|
|
||||||
- name: Run rustfmt
|
|
||||||
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
|
||||||
|
|
||||||
machete:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
- name: Verify all dependencies are in use
|
|
||||||
run: |
|
|
||||||
cargo install cargo-machete
|
|
||||||
cargo machete
|
|
||||||
36
.github/workflows/message-queue-tests.yml
vendored
36
.github/workflows/message-queue-tests.yml
vendored
@@ -1,36 +0,0 @@
|
|||||||
name: Message Queue Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/message-queue/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/message-queue/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run message-queue Docker tests
|
|
||||||
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
|
||||||
26
.github/workflows/mini-tests.yml
vendored
26
.github/workflows/mini-tests.yml
vendored
@@ -1,26 +0,0 @@
|
|||||||
name: mini/ Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "mini/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "mini/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-common:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai
|
|
||||||
17
.github/workflows/monero-tests.yaml
vendored
17
.github/workflows/monero-tests.yaml
vendored
@@ -13,20 +13,20 @@ on:
|
|||||||
- "coins/monero/**"
|
- "coins/monero/**"
|
||||||
- "processor/**"
|
- "processor/**"
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Only run these once since they will be consistent regardless of any node
|
# Only run these once since they will be consistent regardless of any node
|
||||||
unit-tests:
|
unit-tests:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Test Dependencies
|
- name: Test Dependencies
|
||||||
uses: ./.github/actions/test-dependencies
|
uses: ./.github/actions/test-dependencies
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Run Unit Tests Without Features
|
- name: Run Unit Tests Without Features
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
run: cargo test --package monero-serai --lib
|
||||||
|
|
||||||
# Doesn't run unit tests with features as the tests workflow will
|
# Doesn't run unit tests with features as the tests workflow will
|
||||||
|
|
||||||
@@ -38,19 +38,22 @@ jobs:
|
|||||||
version: [v0.17.3.2, v0.18.2.0]
|
version: [v0.17.3.2, v0.18.2.0]
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Test Dependencies
|
- name: Test Dependencies
|
||||||
uses: ./.github/actions/test-dependencies
|
uses: ./.github/actions/test-dependencies
|
||||||
with:
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
monero-version: ${{ matrix.version }}
|
monero-version: ${{ matrix.version }}
|
||||||
|
|
||||||
- name: Run Integration Tests Without Features
|
- name: Run Integration Tests Without Features
|
||||||
# Runs with the binaries feature so the binaries build
|
# Runs with the binaries feature so the binaries build
|
||||||
# https://github.com/rust-lang/cargo/issues/8396
|
# https://github.com/rust-lang/cargo/issues/8396
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
|
run: cargo test --package monero-serai --features binaries --test '*'
|
||||||
|
|
||||||
- name: Run Integration Tests
|
- name: Run Integration Tests
|
||||||
# Don't run if the the tests workflow also will
|
# Don't run if the the tests workflow also will
|
||||||
if: ${{ matrix.version != 'v0.18.2.0' }}
|
if: ${{ matrix.version != 'v0.18.2.0' }}
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
run: |
|
||||||
|
cargo test --package monero-serai --all-features --test '*'
|
||||||
|
cargo test --package serai-processor --all-features monero
|
||||||
|
|||||||
4
.github/workflows/monthly-nightly-update.yml
vendored
4
.github/workflows/monthly-nightly-update.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
|||||||
name: Update nightly
|
name: Update nightly
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
submodules: "recursive"
|
submodules: "recursive"
|
||||||
|
|
||||||
@@ -28,7 +28,7 @@ jobs:
|
|||||||
git push -u origin $(date +"nightly-%Y-%m")
|
git push -u origin $(date +"nightly-%Y-%m")
|
||||||
|
|
||||||
- name: Pull Request
|
- name: Pull Request
|
||||||
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410
|
uses: actions/github-script@v6
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const { repo, owner } = context.repo;
|
const { repo, owner } = context.repo;
|
||||||
|
|||||||
22
.github/workflows/no-std.yml
vendored
22
.github/workflows/no-std.yml
vendored
@@ -4,32 +4,18 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- develop
|
- develop
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "tests/no-std/**"
|
|
||||||
|
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "tests/no-std/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
- name: Install Build Dependencies
|
||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
with:
|
||||||
- name: Install RISC-V Toolchain
|
github-token: ${{ inputs.github-token }}
|
||||||
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
|
||||||
|
|
||||||
- name: Verify no-std builds
|
- name: Verify no-std builds
|
||||||
run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf
|
run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf
|
||||||
|
|||||||
90
.github/workflows/pages.yml
vendored
90
.github/workflows/pages.yml
vendored
@@ -1,90 +0,0 @@
|
|||||||
# MIT License
|
|
||||||
#
|
|
||||||
# Copyright (c) 2022 just-the-docs
|
|
||||||
#
|
|
||||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
# of this software and associated documentation files (the "Software"), to deal
|
|
||||||
# in the Software without restriction, including without limitation the rights
|
|
||||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
# copies of the Software, and to permit persons to whom the Software is
|
|
||||||
# furnished to do so, subject to the following conditions:
|
|
||||||
#
|
|
||||||
# The above copyright notice and this permission notice shall be included in all
|
|
||||||
# copies or substantial portions of the Software.
|
|
||||||
#
|
|
||||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
# SOFTWARE.
|
|
||||||
|
|
||||||
# This workflow uses actions that are not certified by GitHub.
|
|
||||||
# They are provided by a third-party and are governed by
|
|
||||||
# separate terms of service, privacy policy, and support
|
|
||||||
# documentation.
|
|
||||||
|
|
||||||
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
|
|
||||||
name: Deploy Jekyll site to Pages
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- "develop"
|
|
||||||
paths:
|
|
||||||
- "docs/**"
|
|
||||||
|
|
||||||
# Allows you to run this workflow manually from the Actions tab
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
pages: write
|
|
||||||
id-token: write
|
|
||||||
|
|
||||||
# Allow one concurrent deployment
|
|
||||||
concurrency:
|
|
||||||
group: "pages"
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Build job
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
working-directory: docs
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
- name: Setup Ruby
|
|
||||||
uses: ruby/setup-ruby@v1
|
|
||||||
with:
|
|
||||||
bundler-cache: true
|
|
||||||
cache-version: 0
|
|
||||||
working-directory: "${{ github.workspace }}/docs"
|
|
||||||
- name: Setup Pages
|
|
||||||
id: pages
|
|
||||||
uses: actions/configure-pages@v3
|
|
||||||
- name: Build with Jekyll
|
|
||||||
run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
|
|
||||||
env:
|
|
||||||
JEKYLL_ENV: production
|
|
||||||
- name: Upload artifact
|
|
||||||
uses: actions/upload-pages-artifact@v1
|
|
||||||
with:
|
|
||||||
path: "docs/_site/"
|
|
||||||
|
|
||||||
# Deployment job
|
|
||||||
deploy:
|
|
||||||
environment:
|
|
||||||
name: github-pages
|
|
||||||
url: ${{ steps.deployment.outputs.page_url }}
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: build
|
|
||||||
steps:
|
|
||||||
- name: Deploy to GitHub Pages
|
|
||||||
id: deployment
|
|
||||||
uses: actions/deploy-pages@v2
|
|
||||||
40
.github/workflows/processor-tests.yml
vendored
40
.github/workflows/processor-tests.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
name: Processor Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "processor/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/processor/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "processor/**"
|
|
||||||
- "orchestration/**"
|
|
||||||
- "tests/docker/**"
|
|
||||||
- "tests/processor/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run processor Docker tests
|
|
||||||
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
|
||||||
36
.github/workflows/reproducible-runtime.yml
vendored
36
.github/workflows/reproducible-runtime.yml
vendored
@@ -1,36 +0,0 @@
|
|||||||
name: Reproducible Runtime
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "Cargo.lock"
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "substrate/**"
|
|
||||||
- "orchestration/runtime/**"
|
|
||||||
- "tests/reproducible-runtime/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "Cargo.lock"
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "substrate/**"
|
|
||||||
- "orchestration/runtime/**"
|
|
||||||
- "tests/reproducible-runtime/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Reproducible Runtime tests
|
|
||||||
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
|
||||||
117
.github/workflows/tests.yml
vendored
117
.github/workflows/tests.yml
vendored
@@ -4,77 +4,82 @@ on:
|
|||||||
push:
|
push:
|
||||||
branches:
|
branches:
|
||||||
- develop
|
- develop
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "processor/**"
|
|
||||||
- "coordinator/**"
|
|
||||||
- "substrate/**"
|
|
||||||
|
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
|
||||||
- "common/**"
|
|
||||||
- "crypto/**"
|
|
||||||
- "coins/**"
|
|
||||||
- "message-queue/**"
|
|
||||||
- "processor/**"
|
|
||||||
- "coordinator/**"
|
|
||||||
- "substrate/**"
|
|
||||||
|
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test-infra:
|
clippy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Get nightly version to use
|
||||||
|
id: nightly
|
||||||
|
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Build Dependencies
|
- name: Build Dependencies
|
||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
rust-toolchain: ${{ steps.nightly.outputs.version }}
|
||||||
|
rust-components: clippy
|
||||||
|
|
||||||
- name: Run Tests
|
- name: Run Clippy
|
||||||
|
# Allow dbg_macro when run locally, yet not when pushed
|
||||||
|
run: cargo clippy --all-features --all-targets -- -D clippy::dbg_macro $(grep "\S" ../../clippy-config | grep -v "#")
|
||||||
|
|
||||||
|
deny:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Advisory Cache
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
path: ~/.cargo/advisory-db
|
||||||
|
key: rust-advisory-db
|
||||||
|
|
||||||
|
- name: Install cargo
|
||||||
|
uses: dtolnay/rust-toolchain@stable
|
||||||
|
|
||||||
|
- name: Install cargo deny
|
||||||
|
run: cargo install --locked cargo-deny
|
||||||
|
|
||||||
|
- name: Run cargo deny
|
||||||
|
run: cargo deny -L error --all-features check
|
||||||
|
|
||||||
|
test:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Test Dependencies
|
||||||
|
uses: ./.github/actions/test-dependencies
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Build node
|
||||||
run: |
|
run: |
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
cd substrate/node
|
||||||
-p serai-message-queue \
|
cargo build
|
||||||
-p serai-processor-messages \
|
|
||||||
-p serai-processor \
|
|
||||||
-p tendermint-machine \
|
|
||||||
-p tributary-chain \
|
|
||||||
-p serai-coordinator \
|
|
||||||
-p serai-docker-tests
|
|
||||||
|
|
||||||
test-substrate:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
|
|
||||||
- name: Run Tests
|
- name: Run Tests
|
||||||
run: |
|
run: GITHUB_CI=true cargo test --all-features
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
|
||||||
-p serai-primitives \
|
|
||||||
-p serai-coins-primitives \
|
|
||||||
-p serai-coins-pallet \
|
|
||||||
-p serai-dex-pallet \
|
|
||||||
-p serai-validator-sets-primitives \
|
|
||||||
-p serai-validator-sets-pallet \
|
|
||||||
-p serai-in-instructions-primitives \
|
|
||||||
-p serai-in-instructions-pallet \
|
|
||||||
-p serai-signals-pallet \
|
|
||||||
-p serai-runtime \
|
|
||||||
-p serai-node
|
|
||||||
|
|
||||||
test-serai-client:
|
fmt:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
- name: Build Dependencies
|
- name: Get nightly version to use
|
||||||
uses: ./.github/actions/build-dependencies
|
id: nightly
|
||||||
|
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
- name: Run Tests
|
- name: Install rustfmt
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
|
uses: dtolnay/rust-toolchain@master
|
||||||
|
with:
|
||||||
|
toolchain: ${{ steps.nightly.outputs.version }}
|
||||||
|
components: rustfmt
|
||||||
|
|
||||||
|
- name: Run rustfmt
|
||||||
|
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -1,7 +1,2 @@
|
|||||||
target
|
target
|
||||||
Dockerfile
|
|
||||||
Dockerfile.fast-epoch
|
|
||||||
!orchestration/runtime/Dockerfile
|
|
||||||
.test-logs
|
|
||||||
|
|
||||||
.vscode
|
.vscode
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
edition = "2021"
|
|
||||||
tab_spaces = 2
|
tab_spaces = 2
|
||||||
|
|
||||||
max_width = 100
|
max_width = 100
|
||||||
|
|||||||
7235
Cargo.lock
generated
7235
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
115
Cargo.toml
115
Cargo.toml
@@ -1,24 +1,8 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
resolver = "2"
|
|
||||||
members = [
|
members = [
|
||||||
# Version patches
|
|
||||||
"patches/zstd",
|
|
||||||
"patches/rocksdb",
|
|
||||||
"patches/proc-macro-crate",
|
|
||||||
|
|
||||||
# std patches
|
|
||||||
"patches/matches",
|
|
||||||
"patches/is-terminal",
|
|
||||||
|
|
||||||
# Rewrites/redirects
|
|
||||||
"patches/option-ext",
|
|
||||||
"patches/directories-next",
|
|
||||||
|
|
||||||
"common/std-shims",
|
"common/std-shims",
|
||||||
"common/zalloc",
|
"common/zalloc",
|
||||||
"common/db",
|
"common/db",
|
||||||
"common/env",
|
|
||||||
"common/request",
|
|
||||||
|
|
||||||
"crypto/transcript",
|
"crypto/transcript",
|
||||||
|
|
||||||
@@ -35,7 +19,6 @@ members = [
|
|||||||
"crypto/frost",
|
"crypto/frost",
|
||||||
"crypto/schnorrkel",
|
"crypto/schnorrkel",
|
||||||
|
|
||||||
"coins/bitcoin",
|
|
||||||
"coins/ethereum",
|
"coins/ethereum",
|
||||||
"coins/monero/generators",
|
"coins/monero/generators",
|
||||||
"coins/monero",
|
"coins/monero",
|
||||||
@@ -51,8 +34,8 @@ members = [
|
|||||||
|
|
||||||
"substrate/primitives",
|
"substrate/primitives",
|
||||||
|
|
||||||
"substrate/coins/primitives",
|
"substrate/tokens/primitives",
|
||||||
"substrate/coins/pallet",
|
"substrate/tokens/pallet",
|
||||||
|
|
||||||
"substrate/in-instructions/primitives",
|
"substrate/in-instructions/primitives",
|
||||||
"substrate/in-instructions/pallet",
|
"substrate/in-instructions/pallet",
|
||||||
@@ -60,28 +43,12 @@ members = [
|
|||||||
"substrate/validator-sets/primitives",
|
"substrate/validator-sets/primitives",
|
||||||
"substrate/validator-sets/pallet",
|
"substrate/validator-sets/pallet",
|
||||||
|
|
||||||
"substrate/signals/primitives",
|
|
||||||
"substrate/signals/pallet",
|
|
||||||
|
|
||||||
"substrate/abi",
|
|
||||||
|
|
||||||
"substrate/runtime",
|
"substrate/runtime",
|
||||||
"substrate/node",
|
"substrate/node",
|
||||||
|
|
||||||
"substrate/client",
|
"substrate/client",
|
||||||
|
|
||||||
"orchestration",
|
|
||||||
|
|
||||||
"mini",
|
|
||||||
|
|
||||||
"tests/no-std",
|
"tests/no-std",
|
||||||
|
|
||||||
"tests/docker",
|
|
||||||
"tests/message-queue",
|
|
||||||
"tests/processor",
|
|
||||||
"tests/coordinator",
|
|
||||||
"tests/full-stack",
|
|
||||||
"tests/reproducible-runtime",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Always compile Monero (and a variety of dependencies) with optimizations due
|
# Always compile Monero (and a variety of dependencies) with optimizations due
|
||||||
@@ -103,81 +70,3 @@ monero-serai = { opt-level = 3 }
|
|||||||
|
|
||||||
[profile.release]
|
[profile.release]
|
||||||
panic = "unwind"
|
panic = "unwind"
|
||||||
|
|
||||||
[patch.crates-io]
|
|
||||||
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
|
|
||||||
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
|
||||||
|
|
||||||
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
|
|
||||||
dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
|
|
||||||
|
|
||||||
# wasmtime pulls in an old version for this
|
|
||||||
zstd = { path = "patches/zstd" }
|
|
||||||
# Needed for WAL compression
|
|
||||||
rocksdb = { path = "patches/rocksdb" }
|
|
||||||
# proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3
|
|
||||||
proc-macro-crate = { path = "patches/proc-macro-crate" }
|
|
||||||
|
|
||||||
# is-terminal now has an std-based solution with an equivalent API
|
|
||||||
is-terminal = { path = "patches/is-terminal" }
|
|
||||||
# So does matches
|
|
||||||
matches = { path = "patches/matches" }
|
|
||||||
|
|
||||||
# directories-next was created because directories was unmaintained
|
|
||||||
# directories-next is now unmaintained while directories is maintained
|
|
||||||
# The directories author pulls in ridiculously pointless crates and prefers
|
|
||||||
# copyleft licenses
|
|
||||||
# The following two patches resolve everything
|
|
||||||
option-ext = { path = "patches/option-ext" }
|
|
||||||
directories-next = { path = "patches/directories-next" }
|
|
||||||
|
|
||||||
[workspace.lints.clippy]
|
|
||||||
unwrap_or_default = "allow"
|
|
||||||
borrow_as_ptr = "deny"
|
|
||||||
cast_lossless = "deny"
|
|
||||||
cast_possible_truncation = "deny"
|
|
||||||
cast_possible_wrap = "deny"
|
|
||||||
cast_precision_loss = "deny"
|
|
||||||
cast_ptr_alignment = "deny"
|
|
||||||
cast_sign_loss = "deny"
|
|
||||||
checked_conversions = "deny"
|
|
||||||
cloned_instead_of_copied = "deny"
|
|
||||||
enum_glob_use = "deny"
|
|
||||||
expl_impl_clone_on_copy = "deny"
|
|
||||||
explicit_into_iter_loop = "deny"
|
|
||||||
explicit_iter_loop = "deny"
|
|
||||||
flat_map_option = "deny"
|
|
||||||
float_cmp = "deny"
|
|
||||||
fn_params_excessive_bools = "deny"
|
|
||||||
ignored_unit_patterns = "deny"
|
|
||||||
implicit_clone = "deny"
|
|
||||||
inefficient_to_string = "deny"
|
|
||||||
invalid_upcast_comparisons = "deny"
|
|
||||||
large_stack_arrays = "deny"
|
|
||||||
linkedlist = "deny"
|
|
||||||
macro_use_imports = "deny"
|
|
||||||
manual_instant_elapsed = "deny"
|
|
||||||
manual_let_else = "deny"
|
|
||||||
manual_ok_or = "deny"
|
|
||||||
manual_string_new = "deny"
|
|
||||||
map_unwrap_or = "deny"
|
|
||||||
match_bool = "deny"
|
|
||||||
match_same_arms = "deny"
|
|
||||||
missing_fields_in_debug = "deny"
|
|
||||||
needless_continue = "deny"
|
|
||||||
needless_pass_by_value = "deny"
|
|
||||||
ptr_cast_constness = "deny"
|
|
||||||
range_minus_one = "deny"
|
|
||||||
range_plus_one = "deny"
|
|
||||||
redundant_closure_for_method_calls = "deny"
|
|
||||||
redundant_else = "deny"
|
|
||||||
string_add_assign = "deny"
|
|
||||||
unchecked_duration_subtraction = "deny"
|
|
||||||
uninlined_format_args = "deny"
|
|
||||||
unnecessary_box_returns = "deny"
|
|
||||||
unnecessary_join = "deny"
|
|
||||||
unnecessary_wraps = "deny"
|
|
||||||
unnested_or_patterns = "deny"
|
|
||||||
unused_async = "deny"
|
|
||||||
unused_self = "deny"
|
|
||||||
zero_sized_map_values = "deny"
|
|
||||||
|
|||||||
34
README.md
34
README.md
@@ -5,22 +5,19 @@ Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
|
|||||||
experience. Funds are stored in an economically secured threshold-multisig
|
experience. Funds are stored in an economically secured threshold-multisig
|
||||||
wallet.
|
wallet.
|
||||||
|
|
||||||
[Getting Started](spec/Getting%20Started.md)
|
[Getting Started](docs/Getting%20Started.md)
|
||||||
|
|
||||||
### Layout
|
### Layout
|
||||||
|
|
||||||
- `audits`: Audits for various parts of Serai.
|
- `audits`: Audits for various parts of Serai.
|
||||||
|
|
||||||
- `spec`: The specification of the Serai protocol, both internally and as
|
- `docs`: Documentation on the Serai protocol.
|
||||||
networked.
|
|
||||||
|
|
||||||
- `docs`: User-facing documentation on the Serai protocol.
|
|
||||||
|
|
||||||
- `common`: Crates containing utilities common to a variety of areas under
|
- `common`: Crates containing utilities common to a variety of areas under
|
||||||
Serai, none neatly fitting under another category.
|
Serai, none neatly fitting under another category.
|
||||||
|
|
||||||
- `crypto`: A series of composable cryptographic libraries built around the
|
- `crypto`: A series of composable cryptographic libraries built around the
|
||||||
`ff`/`group` APIs, achieving a variety of tasks. These range from generic
|
`ff`/`group` APIs achieving a variety of tasks. These range from generic
|
||||||
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
||||||
needed for Bitcoin-Monero atomic swaps.
|
needed for Bitcoin-Monero atomic swaps.
|
||||||
|
|
||||||
@@ -28,9 +25,6 @@ wallet.
|
|||||||
wider community. This means they will always support the functionality Serai
|
wider community. This means they will always support the functionality Serai
|
||||||
needs, yet won't disadvantage other use cases when possible.
|
needs, yet won't disadvantage other use cases when possible.
|
||||||
|
|
||||||
- `message-queue`: An ordered message server so services can talk to each other,
|
|
||||||
even when the other is offline.
|
|
||||||
|
|
||||||
- `processor`: A generic chain processor to process data for Serai and process
|
- `processor`: A generic chain processor to process data for Serai and process
|
||||||
events from Serai, executing transactions as expected and needed.
|
events from Serai, executing transactions as expected and needed.
|
||||||
|
|
||||||
@@ -39,28 +33,12 @@ wallet.
|
|||||||
|
|
||||||
- `substrate`: Substrate crates used to instantiate the Serai network.
|
- `substrate`: Substrate crates used to instantiate the Serai network.
|
||||||
|
|
||||||
- `orchestration`: Dockerfiles and scripts to deploy a Serai node/test
|
- `deploy`: Scripts to deploy a Serai node/test environment.
|
||||||
environment.
|
|
||||||
|
|
||||||
- `tests`: Tests for various crates. Generally, `crate/src/tests` is used, or
|
|
||||||
`crate/tests`, yet any tests requiring crates' binaries are placed here.
|
|
||||||
|
|
||||||
### Security
|
|
||||||
|
|
||||||
Serai hosts a bug bounty program via
|
|
||||||
[Immunefi](https://immunefi.com/bounty/serai/). For in-scope critical
|
|
||||||
vulnerabilities, we will reward whitehats with up to $30,000.
|
|
||||||
|
|
||||||
Anything not in-scope should still be submitted through Immunefi, with rewards
|
|
||||||
issued at the discretion of the Immunefi program managers.
|
|
||||||
|
|
||||||
### Links
|
### Links
|
||||||
|
|
||||||
- [Website](https://serai.exchange/): https://serai.exchange/
|
|
||||||
- [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/
|
|
||||||
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
|
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
|
||||||
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
|
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
|
||||||
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
|
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
|
||||||
- [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org
|
- [Matrix](https://matrix.to/#/#serai:matrix.org):
|
||||||
- [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/
|
https://matrix.to/#/#serai:matrix.org
|
||||||
- [Telegram](https://t.me/SeraiDEX): https://t.me/SeraiDEX
|
|
||||||
|
|||||||
Binary file not shown.
@@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2023 Cypher Stack
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
# Cypher Stack /coins/bitcoin Audit, August 2023
|
|
||||||
|
|
||||||
This audit was over the /coins/bitcoin folder. It is encompassing up to commit
|
|
||||||
5121ca75199dff7bd34230880a1fdd793012068c.
|
|
||||||
|
|
||||||
Please see https://github.com/cypherstack/serai-btc-audit for provenance.
|
|
||||||
51
clippy-config
Normal file
51
clippy-config
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# No warnings allowed
|
||||||
|
-D warnings
|
||||||
|
|
||||||
|
# nursery
|
||||||
|
-D clippy::nursery
|
||||||
|
# Erratic and unhelpful
|
||||||
|
-A clippy::missing_const_for_fn
|
||||||
|
# Too many false/irrelevant positives
|
||||||
|
-A clippy::redundant_pub_crate
|
||||||
|
# Flags on any debug_assert using an RNG
|
||||||
|
-A clippy::debug_assert_with_mut_call
|
||||||
|
# Stylistic preference
|
||||||
|
-A clippy::option_if_let_else
|
||||||
|
|
||||||
|
# pedantic
|
||||||
|
-D clippy::unnecessary_wraps
|
||||||
|
-D clippy::unused_async
|
||||||
|
-D clippy::unused_self
|
||||||
|
|
||||||
|
# restrictions
|
||||||
|
|
||||||
|
# Safety
|
||||||
|
-D clippy::as_conversions
|
||||||
|
-D clippy::disallowed_script_idents
|
||||||
|
-D clippy::wildcard_enum_match_arm
|
||||||
|
|
||||||
|
# Clarity
|
||||||
|
-D clippy::assertions_on_result_states
|
||||||
|
-D clippy::deref_by_slicing
|
||||||
|
-D clippy::empty_structs_with_brackets
|
||||||
|
-D clippy::get_unwrap
|
||||||
|
-D clippy::rest_pat_in_fully_bound_structs
|
||||||
|
-D clippy::semicolon_inside_block
|
||||||
|
-D clippy::tests_outside_test_module
|
||||||
|
|
||||||
|
# Quality
|
||||||
|
-D clippy::format_push_string
|
||||||
|
-D clippy::string_to_string
|
||||||
|
|
||||||
|
# These potentially should be enabled in the future
|
||||||
|
# -D clippy::missing_errors_doc
|
||||||
|
# -D clippy::missing_panics_doc
|
||||||
|
# -D clippy::doc_markdown
|
||||||
|
|
||||||
|
# TODO: Enable this
|
||||||
|
# -D clippy::cargo
|
||||||
|
|
||||||
|
# Not in nightly yet
|
||||||
|
# -D clippy::redundant_type_annotations
|
||||||
|
# -D clippy::big_endian_bytes
|
||||||
|
# -D clippy::host_endian_bytes
|
||||||
@@ -1,68 +1,37 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "bitcoin-serai"
|
name = "bitcoin-serai"
|
||||||
version = "0.3.0"
|
version = "0.2.0"
|
||||||
description = "A Bitcoin library for FROST-signing transactions"
|
description = "A Bitcoin library for FROST-signing transactions"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.74"
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
|
||||||
all-features = true
|
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }
|
lazy_static = "1"
|
||||||
|
thiserror = "1"
|
||||||
|
|
||||||
thiserror = { version = "1", default-features = false, optional = true }
|
zeroize = "^1.5"
|
||||||
|
rand_core = "0.6"
|
||||||
|
|
||||||
zeroize = { version = "^1.5", default-features = false }
|
sha2 = "0.10"
|
||||||
rand_core = { version = "0.6", default-features = false }
|
|
||||||
|
|
||||||
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
|
secp256k1 = { version = "0.27", features = ["global-context"] }
|
||||||
|
bitcoin = { version = "0.30", features = ["serde"] }
|
||||||
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits"] }
|
||||||
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"] }
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["secp256k1"] }
|
||||||
|
|
||||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
hex = "0.4"
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }
|
serde = { version = "1", features = ["derive"] }
|
||||||
|
serde_json = "1"
|
||||||
hex = { version = "0.4", default-features = false, optional = true }
|
reqwest = { version = "0.11", features = ["json"] }
|
||||||
serde = { version = "1", default-features = false, features = ["derive"], optional = true }
|
|
||||||
serde_json = { version = "1", default-features = false, optional = true }
|
|
||||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
tokio = { version = "1", features = ["full"] }
|
||||||
|
|
||||||
tokio = { version = "1", features = ["macros"] }
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = [
|
|
||||||
"std-shims/std",
|
|
||||||
|
|
||||||
"thiserror",
|
|
||||||
|
|
||||||
"zeroize/std",
|
|
||||||
"rand_core/std",
|
|
||||||
|
|
||||||
"bitcoin/std",
|
|
||||||
"bitcoin/serde",
|
|
||||||
|
|
||||||
"k256/std",
|
|
||||||
|
|
||||||
"transcript/std",
|
|
||||||
"frost",
|
|
||||||
|
|
||||||
"hex/std",
|
|
||||||
"serde/std",
|
|
||||||
"serde_json/std",
|
|
||||||
"simple-request",
|
|
||||||
]
|
|
||||||
hazmat = []
|
hazmat = []
|
||||||
default = ["std"]
|
|
||||||
|
|||||||
@@ -1,6 +1,26 @@
|
|||||||
|
use core::fmt::Debug;
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
|
use zeroize::Zeroizing;
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
use transcript::Transcript;
|
||||||
|
|
||||||
|
use secp256k1::schnorr::Signature;
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
elliptic_curve::{
|
||||||
ProjectivePoint,
|
ops::Reduce,
|
||||||
|
sec1::{Tag, ToEncodedPoint},
|
||||||
|
},
|
||||||
|
U256, Scalar, ProjectivePoint,
|
||||||
|
};
|
||||||
|
use frost::{
|
||||||
|
curve::{Ciphersuite, Secp256k1},
|
||||||
|
Participant, ThresholdKeys, ThresholdView, FrostError,
|
||||||
|
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
||||||
};
|
};
|
||||||
|
|
||||||
use bitcoin::key::XOnlyPublicKey;
|
use bitcoin::key::XOnlyPublicKey;
|
||||||
@@ -12,15 +32,13 @@ pub fn x(key: &ProjectivePoint) -> [u8; 32] {
|
|||||||
(*encoded.x().expect("point at infinity")).into()
|
(*encoded.x().expect("point at infinity")).into()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert a non-infinity even point to a XOnlyPublicKey. Panics on invalid input.
|
/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
|
||||||
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
|
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
|
||||||
XOnlyPublicKey::from_slice(&x(key)).expect("x_only was passed a point which was infinity or odd")
|
XOnlyPublicKey::from_slice(&x(key)).unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Make a point even by adding the generator until it is even.
|
/// Make a point even by adding the generator until it is even. Returns the even point and the
|
||||||
///
|
/// amount of additions required.
|
||||||
/// Returns the even point and the amount of additions required.
|
|
||||||
#[cfg(any(feature = "std", feature = "hazmat"))]
|
|
||||||
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
||||||
let mut c = 0;
|
let mut c = 0;
|
||||||
while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
|
while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
|
||||||
@@ -30,70 +48,49 @@ pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
|||||||
(key, c)
|
(key, c)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "std")]
|
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
|
||||||
mod frost_crypto {
|
///
|
||||||
use core::fmt::Debug;
|
/// If passed an odd nonce, it will have the generator added until it is even.
|
||||||
use std_shims::{vec::Vec, io};
|
#[derive(Clone, Copy, Debug)]
|
||||||
|
pub struct Hram {}
|
||||||
|
|
||||||
use zeroize::Zeroizing;
|
lazy_static! {
|
||||||
use rand_core::{RngCore, CryptoRng};
|
static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
|
||||||
|
}
|
||||||
|
|
||||||
use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};
|
#[allow(non_snake_case)]
|
||||||
|
impl HramTrait<Secp256k1> for Hram {
|
||||||
use transcript::Transcript;
|
|
||||||
|
|
||||||
use k256::{elliptic_curve::ops::Reduce, U256, Scalar};
|
|
||||||
|
|
||||||
use frost::{
|
|
||||||
curve::{Ciphersuite, Secp256k1},
|
|
||||||
Participant, ThresholdKeys, ThresholdView, FrostError,
|
|
||||||
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
|
||||||
};
|
|
||||||
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
|
|
||||||
///
|
|
||||||
/// If passed an odd nonce, it will have the generator added until it is even.
|
|
||||||
///
|
|
||||||
/// If the key is odd, this will panic.
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
|
||||||
pub struct Hram;
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
impl HramTrait<Secp256k1> for Hram {
|
|
||||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
||||||
// Convert the nonce to be even
|
// Convert the nonce to be even
|
||||||
let (R, _) = make_even(*R);
|
let (R, _) = make_even(*R);
|
||||||
|
|
||||||
const TAG_HASH: Sha256 = Sha256::const_hash(b"BIP0340/challenge");
|
let mut data = Sha256::new();
|
||||||
|
data.update(*TAG_HASH);
|
||||||
|
data.update(*TAG_HASH);
|
||||||
|
data.update(x(&R));
|
||||||
|
data.update(x(A));
|
||||||
|
data.update(m);
|
||||||
|
|
||||||
let mut data = Sha256::engine();
|
Scalar::reduce(U256::from_be_slice(&data.finalize()))
|
||||||
data.input(TAG_HASH.as_ref());
|
|
||||||
data.input(TAG_HASH.as_ref());
|
|
||||||
data.input(&x(&R));
|
|
||||||
data.input(&x(A));
|
|
||||||
data.input(m);
|
|
||||||
|
|
||||||
Scalar::reduce(U256::from_be_slice(Sha256::from_engine(data).as_ref()))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// BIP-340 Schnorr signature algorithm.
|
/// BIP-340 Schnorr signature algorithm.
|
||||||
///
|
///
|
||||||
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
||||||
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
||||||
/// Construct a Schnorr algorithm continuing the specified transcript.
|
/// Construct a Schnorr algorithm continuing the specified transcript.
|
||||||
pub fn new(transcript: T) -> Schnorr<T> {
|
pub fn new(transcript: T) -> Schnorr<T> {
|
||||||
Schnorr(FrostSchnorr::new(transcript))
|
Schnorr(FrostSchnorr::new(transcript))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
||||||
type Transcript = T;
|
type Transcript = T;
|
||||||
type Addendum = ();
|
type Addendum = ();
|
||||||
type Signature = [u8; 64];
|
type Signature = Signature;
|
||||||
|
|
||||||
fn transcript(&mut self) -> &mut Self::Transcript {
|
fn transcript(&mut self) -> &mut Self::Transcript {
|
||||||
self.0.transcript()
|
self.0.transcript()
|
||||||
@@ -147,8 +144,8 @@ mod frost_crypto {
|
|||||||
(sig.R, offset) = make_even(sig.R);
|
(sig.R, offset) = make_even(sig.R);
|
||||||
// s = r + cx. Since we added to the r, add to s
|
// s = r + cx. Since we added to the r, add to s
|
||||||
sig.s += Scalar::from(offset);
|
sig.s += Scalar::from(offset);
|
||||||
// Convert to a Bitcoin signature by dropping the byte for the point's sign bit
|
// Convert to a secp256k1 signature
|
||||||
sig.serialize()[1 ..].try_into().unwrap()
|
Signature::from_slice(&sig.serialize()[1 ..]).unwrap()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -160,7 +157,4 @@ mod frost_crypto {
|
|||||||
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
|
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
|
||||||
self.0.verify_share(verification_share, nonces, share)
|
self.0.verify_share(verification_share, nonces, share)
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
#[cfg(feature = "std")]
|
|
||||||
pub use frost_crypto::*;
|
|
||||||
|
|||||||
@@ -1,9 +1,5 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
#![doc = include_str!("../README.md")]
|
#![doc = include_str!("../README.md")]
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
|
|
||||||
#[cfg(not(feature = "std"))]
|
|
||||||
extern crate alloc;
|
|
||||||
|
|
||||||
/// The bitcoin Rust library.
|
/// The bitcoin Rust library.
|
||||||
pub use bitcoin;
|
pub use bitcoin;
|
||||||
@@ -17,7 +13,6 @@ pub(crate) mod crypto;
|
|||||||
/// Wallet functionality to create transactions.
|
/// Wallet functionality to create transactions.
|
||||||
pub mod wallet;
|
pub mod wallet;
|
||||||
/// A minimal asynchronous Bitcoin RPC client.
|
/// A minimal asynchronous Bitcoin RPC client.
|
||||||
#[cfg(feature = "std")]
|
|
||||||
pub mod rpc;
|
pub mod rpc;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|||||||
@@ -1,13 +1,10 @@
|
|||||||
use core::fmt::Debug;
|
use core::fmt::Debug;
|
||||||
use std::collections::HashSet;
|
|
||||||
|
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
use serde::{Deserialize, de::DeserializeOwned};
|
use serde::{Deserialize, de::DeserializeOwned};
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
|
|
||||||
use simple_request::{hyper, Request, Client};
|
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
hashes::{Hash, hex::FromHex},
|
hashes::{Hash, hex::FromHex},
|
||||||
consensus::encode,
|
consensus::encode,
|
||||||
@@ -29,10 +26,7 @@ enum RpcResponse<T> {
|
|||||||
|
|
||||||
/// A minimal asynchronous Bitcoin RPC client.
|
/// A minimal asynchronous Bitcoin RPC client.
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct Rpc {
|
pub struct Rpc(String);
|
||||||
client: Client,
|
|
||||||
url: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||||
pub enum RpcError {
|
pub enum RpcError {
|
||||||
@@ -40,64 +34,15 @@ pub enum RpcError {
|
|||||||
ConnectionError,
|
ConnectionError,
|
||||||
#[error("request had an error: {0:?}")]
|
#[error("request had an error: {0:?}")]
|
||||||
RequestError(Error),
|
RequestError(Error),
|
||||||
#[error("node replied with invalid JSON")]
|
#[error("node sent an invalid response")]
|
||||||
InvalidJson(serde_json::error::Category),
|
InvalidResponse,
|
||||||
#[error("node sent an invalid response ({0})")]
|
|
||||||
InvalidResponse(&'static str),
|
|
||||||
#[error("node was missing expected methods")]
|
|
||||||
MissingMethods(HashSet<&'static str>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Rpc {
|
impl Rpc {
|
||||||
/// Create a new connection to a Bitcoin RPC.
|
|
||||||
///
|
|
||||||
/// An RPC call is performed to ensure the node is reachable (and that an invalid URL wasn't
|
|
||||||
/// provided).
|
|
||||||
///
|
|
||||||
/// Additionally, a set of expected methods is checked to be offered by the Bitcoin RPC. If these
|
|
||||||
/// methods aren't provided, an error with the missing methods is returned. This ensures all RPC
|
|
||||||
/// routes explicitly provided by this library are at least possible.
|
|
||||||
///
|
|
||||||
/// Each individual RPC route may still fail at time-of-call, regardless of the arguments
|
|
||||||
/// provided to this library, if the RPC has an incompatible argument layout. That is not checked
|
|
||||||
/// at time of RPC creation.
|
|
||||||
pub async fn new(url: String) -> Result<Rpc, RpcError> {
|
pub async fn new(url: String) -> Result<Rpc, RpcError> {
|
||||||
let rpc = Rpc { client: Client::with_connection_pool(), url };
|
let rpc = Rpc(url);
|
||||||
|
|
||||||
// Make an RPC request to verify the node is reachable and sane
|
// Make an RPC request to verify the node is reachable and sane
|
||||||
let res: String = rpc.rpc_call("help", json!([])).await?;
|
rpc.get_latest_block_number().await?;
|
||||||
|
|
||||||
// Verify all methods we expect are present
|
|
||||||
// If we had a more expanded RPC, due to differences in RPC versions, it wouldn't make sense to
|
|
||||||
// error if all methods weren't present
|
|
||||||
// We only provide a very minimal set of methods which have been largely consistent, hence why
|
|
||||||
// this is sane
|
|
||||||
let mut expected_methods = HashSet::from([
|
|
||||||
"help",
|
|
||||||
"getblockcount",
|
|
||||||
"getblockhash",
|
|
||||||
"getblockheader",
|
|
||||||
"getblock",
|
|
||||||
"sendrawtransaction",
|
|
||||||
"getrawtransaction",
|
|
||||||
]);
|
|
||||||
for line in res.split('\n') {
|
|
||||||
// This doesn't check if the arguments are as expected
|
|
||||||
// This is due to Bitcoin supporting a large amount of optional arguments, which
|
|
||||||
// occasionally change, with their own mechanism of text documentation, making matching off
|
|
||||||
// it a quite involved task
|
|
||||||
// Instead, once we've confirmed the methods are present, we assume our arguments are aligned
|
|
||||||
// Else we'll error at time of call
|
|
||||||
if expected_methods.remove(line.split(' ').next().unwrap_or("")) &&
|
|
||||||
expected_methods.is_empty()
|
|
||||||
{
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !expected_methods.is_empty() {
|
|
||||||
Err(RpcError::MissingMethods(expected_methods))?;
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(rpc)
|
Ok(rpc)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -107,28 +52,19 @@ impl Rpc {
|
|||||||
method: &str,
|
method: &str,
|
||||||
params: serde_json::Value,
|
params: serde_json::Value,
|
||||||
) -> Result<Response, RpcError> {
|
) -> Result<Response, RpcError> {
|
||||||
let mut request = Request::from(
|
let client = reqwest::Client::new();
|
||||||
hyper::Request::post(&self.url)
|
let res = client
|
||||||
.header("Content-Type", "application/json")
|
.post(&self.0)
|
||||||
.body(
|
.json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
|
||||||
serde_json::to_vec(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
|
.send()
|
||||||
.unwrap()
|
|
||||||
.into(),
|
|
||||||
)
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
request.with_basic_auth();
|
|
||||||
let mut res = self
|
|
||||||
.client
|
|
||||||
.request(request)
|
|
||||||
.await
|
.await
|
||||||
.map_err(|_| RpcError::ConnectionError)?
|
.map_err(|_| RpcError::ConnectionError)?
|
||||||
.body()
|
.text()
|
||||||
.await
|
.await
|
||||||
.map_err(|_| RpcError::ConnectionError)?;
|
.map_err(|_| RpcError::ConnectionError)?;
|
||||||
|
|
||||||
let res: RpcResponse<Response> =
|
let res: RpcResponse<Response> =
|
||||||
serde_json::from_reader(&mut res).map_err(|e| RpcError::InvalidJson(e.classify()))?;
|
serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
|
||||||
match res {
|
match res {
|
||||||
RpcResponse::Ok { result } => Ok(result),
|
RpcResponse::Ok { result } => Ok(result),
|
||||||
RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
|
RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
|
||||||
@@ -149,11 +85,11 @@ impl Rpc {
|
|||||||
|
|
||||||
/// Get the hash of a block by the block's number.
|
/// Get the hash of a block by the block's number.
|
||||||
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
||||||
let mut hash = self
|
let mut hash = *self
|
||||||
.rpc_call::<BlockHash>("getblockhash", json!([number]))
|
.rpc_call::<BlockHash>("getblockhash", json!([number]))
|
||||||
.await?
|
.await?
|
||||||
.as_raw_hash()
|
.as_raw_hash()
|
||||||
.to_byte_array();
|
.as_byte_array();
|
||||||
// bitcoin stores the inner bytes in reverse order.
|
// bitcoin stores the inner bytes in reverse order.
|
||||||
hash.reverse();
|
hash.reverse();
|
||||||
Ok(hash)
|
Ok(hash)
|
||||||
@@ -171,15 +107,13 @@ impl Rpc {
|
|||||||
/// Get a block by its hash.
|
/// Get a block by its hash.
|
||||||
pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
|
pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
|
||||||
let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
|
let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
|
||||||
let bytes: Vec<u8> = FromHex::from_hex(&hex)
|
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
|
||||||
.map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the block"))?;
|
let block: Block = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
|
||||||
let block: Block = encode::deserialize(&bytes)
|
|
||||||
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized block"))?;
|
|
||||||
|
|
||||||
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
|
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
|
||||||
block_hash.reverse();
|
block_hash.reverse();
|
||||||
if hash != &block_hash {
|
if hash != &block_hash {
|
||||||
Err(RpcError::InvalidResponse("node replied with a different block"))?;
|
Err(RpcError::InvalidResponse)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(block)
|
Ok(block)
|
||||||
@@ -187,22 +121,9 @@ impl Rpc {
|
|||||||
|
|
||||||
/// Publish a transaction.
|
/// Publish a transaction.
|
||||||
pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
|
pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
|
||||||
let txid = match self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await {
|
let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
|
||||||
Ok(txid) => txid,
|
|
||||||
Err(e) => {
|
|
||||||
// A const from Bitcoin's bitcoin/src/rpc/protocol.h
|
|
||||||
const RPC_VERIFY_ALREADY_IN_CHAIN: isize = -27;
|
|
||||||
// If this was already successfully published, consider this having succeeded
|
|
||||||
if let RpcError::RequestError(Error { code, .. }) = e {
|
|
||||||
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
|
|
||||||
return Ok(tx.txid());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(e)?
|
|
||||||
}
|
|
||||||
};
|
|
||||||
if txid != tx.txid() {
|
if txid != tx.txid() {
|
||||||
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
|
Err(RpcError::InvalidResponse)?;
|
||||||
}
|
}
|
||||||
Ok(txid)
|
Ok(txid)
|
||||||
}
|
}
|
||||||
@@ -210,15 +131,13 @@ impl Rpc {
|
|||||||
/// Get a transaction by its hash.
|
/// Get a transaction by its hash.
|
||||||
pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
|
pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
|
||||||
let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
|
let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
|
||||||
let bytes: Vec<u8> = FromHex::from_hex(&hex)
|
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
|
||||||
.map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the transaction"))?;
|
let tx: Transaction = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
|
||||||
let tx: Transaction = encode::deserialize(&bytes)
|
|
||||||
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
|
|
||||||
|
|
||||||
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
|
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
|
||||||
tx_hash.reverse();
|
tx_hash.reverse();
|
||||||
if hash != &tx_hash {
|
if hash != &tx_hash {
|
||||||
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
|
Err(RpcError::InvalidResponse)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(tx)
|
Ok(tx)
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};
|
use sha2::{Digest, Sha256};
|
||||||
|
|
||||||
|
use secp256k1::{SECP256K1, Message};
|
||||||
|
|
||||||
use k256::Scalar;
|
use k256::Scalar;
|
||||||
use transcript::{Transcript, RecommendedTranscript};
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
@@ -20,7 +22,7 @@ fn test_algorithm() {
|
|||||||
let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
|
||||||
for keys in keys.values_mut() {
|
for (_, keys) in keys.iter_mut() {
|
||||||
let (_, offset) = make_even(keys.group_key());
|
let (_, offset) = make_even(keys.group_key());
|
||||||
*keys = keys.offset(Scalar::from(offset));
|
*keys = keys.offset(Scalar::from(offset));
|
||||||
}
|
}
|
||||||
@@ -29,16 +31,15 @@ fn test_algorithm() {
|
|||||||
Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
|
Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
|
||||||
let sig = sign(
|
let sig = sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
&algo,
|
algo.clone(),
|
||||||
keys.clone(),
|
keys.clone(),
|
||||||
algorithm_machines(&mut OsRng, &algo, &keys),
|
algorithm_machines(&mut OsRng, algo, &keys),
|
||||||
Hash::hash(MESSAGE).as_ref(),
|
&Sha256::digest(MESSAGE),
|
||||||
);
|
);
|
||||||
|
|
||||||
BContext::new()
|
SECP256K1
|
||||||
.verify_schnorr(
|
.verify_schnorr(
|
||||||
&Signature::from_slice(&sig)
|
&sig,
|
||||||
.expect("couldn't convert produced signature to secp256k1::Signature"),
|
|
||||||
&Message::from(Hash::hash(MESSAGE)),
|
&Message::from(Hash::hash(MESSAGE)),
|
||||||
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,57 +1,41 @@
|
|||||||
use std_shims::{
|
use std::{
|
||||||
vec::Vec,
|
io::{self, Read, Write},
|
||||||
collections::HashMap,
|
collections::HashMap,
|
||||||
io::{self, Write},
|
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
|
||||||
use std_shims::io::Read;
|
|
||||||
|
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
||||||
Scalar, ProjectivePoint,
|
Scalar, ProjectivePoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[cfg(feature = "std")]
|
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::{Ciphersuite, Secp256k1},
|
curve::{Ciphersuite, Secp256k1},
|
||||||
ThresholdKeys,
|
ThresholdKeys,
|
||||||
};
|
};
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
|
consensus::encode::{Decodable, serialize},
|
||||||
TxOut, Transaction, Block,
|
key::TweakedPublicKey,
|
||||||
|
OutPoint, ScriptBuf, TxOut, Transaction, Block, Network, Address,
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
|
||||||
use bitcoin::consensus::encode::Decodable;
|
|
||||||
|
|
||||||
use crate::crypto::x_only;
|
use crate::crypto::{x_only, make_even};
|
||||||
#[cfg(feature = "std")]
|
|
||||||
use crate::crypto::make_even;
|
|
||||||
|
|
||||||
#[cfg(feature = "std")]
|
|
||||||
mod send;
|
mod send;
|
||||||
#[cfg(feature = "std")]
|
|
||||||
pub use send::*;
|
pub use send::*;
|
||||||
|
|
||||||
/// Tweak keys to ensure they're usable with Bitcoin.
|
/// Tweak keys to ensure they're usable with Bitcoin.
|
||||||
///
|
|
||||||
/// Taproot keys, which these keys are used as, must be even. This offsets the keys until they're
|
|
||||||
/// even.
|
|
||||||
#[cfg(feature = "std")]
|
|
||||||
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
||||||
let (_, offset) = make_even(keys.group_key());
|
let (_, offset) = make_even(keys.group_key());
|
||||||
keys.offset(Scalar::from(offset))
|
keys.offset(Scalar::from(offset))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the Taproot address payload for a public key.
|
/// Return the Taproot address for a public key.
|
||||||
///
|
pub fn address(network: Network, key: ProjectivePoint) -> Option<Address> {
|
||||||
/// If the key is odd, this will return None.
|
|
||||||
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
|
|
||||||
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
|
Some(Address::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)), network))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A spendable output.
|
/// A spendable output.
|
||||||
@@ -71,11 +55,6 @@ impl ReceivedOutput {
|
|||||||
self.offset
|
self.offset
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The Bitcoin output for this output.
|
|
||||||
pub fn output(&self) -> &TxOut {
|
|
||||||
&self.output
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The outpoint for this output.
|
/// The outpoint for this output.
|
||||||
pub fn outpoint(&self) -> &OutPoint {
|
pub fn outpoint(&self) -> &OutPoint {
|
||||||
&self.outpoint
|
&self.outpoint
|
||||||
@@ -83,16 +62,17 @@ impl ReceivedOutput {
|
|||||||
|
|
||||||
/// The value of this output.
|
/// The value of this output.
|
||||||
pub fn value(&self) -> u64 {
|
pub fn value(&self) -> u64 {
|
||||||
self.output.value.to_sat()
|
self.output.value
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Read a ReceivedOutput from a generic satisfying Read.
|
/// Read a ReceivedOutput from a generic satisfying Read.
|
||||||
#[cfg(feature = "std")]
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
||||||
Ok(ReceivedOutput {
|
Ok(ReceivedOutput {
|
||||||
offset: Secp256k1::read_F(r)?,
|
offset: Secp256k1::read_F(r)?,
|
||||||
output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
|
output: TxOut::consensus_decode(r)
|
||||||
outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
|
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
|
||||||
|
outpoint: OutPoint::consensus_decode(r)
|
||||||
|
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -103,9 +83,9 @@ impl ReceivedOutput {
|
|||||||
w.write_all(&serialize(&self.outpoint))
|
w.write_all(&serialize(&self.outpoint))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Serialize a ReceivedOutput to a `Vec<u8>`.
|
/// Serialize a ReceivedOutput to a Vec<u8>.
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
let mut res = Vec::new();
|
let mut res = vec![];
|
||||||
self.write(&mut res).unwrap();
|
self.write(&mut res).unwrap();
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
@@ -124,7 +104,8 @@ impl Scanner {
|
|||||||
/// Returns None if this key can't be scanned for.
|
/// Returns None if this key can't be scanned for.
|
||||||
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
||||||
let mut scripts = HashMap::new();
|
let mut scripts = HashMap::new();
|
||||||
scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
|
// Uses Network::Bitcoin since network is irrelevant here
|
||||||
|
scripts.insert(address(Network::Bitcoin, key)?.script_pubkey(), Scalar::ZERO);
|
||||||
Some(Scanner { key, scripts })
|
Some(Scanner { key, scripts })
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -133,15 +114,9 @@ impl Scanner {
|
|||||||
/// Due to Bitcoin's requirement that points are even, not every offset may be used.
|
/// Due to Bitcoin's requirement that points are even, not every offset may be used.
|
||||||
/// If an offset isn't usable, it will be incremented until it is. If this offset is already
|
/// If an offset isn't usable, it will be incremented until it is. If this offset is already
|
||||||
/// present, None is returned. Else, Some(offset) will be, with the used offset.
|
/// present, None is returned. Else, Some(offset) will be, with the used offset.
|
||||||
///
|
|
||||||
/// This means offsets are surjective, not bijective, and the order offsets are registered in
|
|
||||||
/// may determine the validity of future offsets.
|
|
||||||
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
|
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
|
||||||
// This loop will terminate as soon as an even point is found, with any point having a ~50%
|
|
||||||
// chance of being even
|
|
||||||
// That means this should terminate within a very small amount of iterations
|
|
||||||
loop {
|
loop {
|
||||||
match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
|
match address(Network::Bitcoin, self.key + (ProjectivePoint::GENERATOR * offset)) {
|
||||||
Some(address) => {
|
Some(address) => {
|
||||||
let script = address.script_pubkey();
|
let script = address.script_pubkey();
|
||||||
if self.scripts.contains_key(&script) {
|
if self.scripts.contains_key(&script) {
|
||||||
@@ -157,16 +132,13 @@ impl Scanner {
|
|||||||
|
|
||||||
/// Scan a transaction.
|
/// Scan a transaction.
|
||||||
pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
|
pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
|
||||||
let mut res = Vec::new();
|
let mut res = vec![];
|
||||||
for (vout, output) in tx.output.iter().enumerate() {
|
for (vout, output) in tx.output.iter().enumerate() {
|
||||||
// If the vout index exceeds 2**32, stop scanning outputs
|
|
||||||
let Ok(vout) = u32::try_from(vout) else { break };
|
|
||||||
|
|
||||||
if let Some(offset) = self.scripts.get(&output.script_pubkey) {
|
if let Some(offset) = self.scripts.get(&output.script_pubkey) {
|
||||||
res.push(ReceivedOutput {
|
res.push(ReceivedOutput {
|
||||||
offset: *offset,
|
offset: *offset,
|
||||||
output: output.clone(),
|
output: output.clone(),
|
||||||
outpoint: OutPoint::new(tx.txid(), vout),
|
outpoint: OutPoint::new(tx.txid(), u32::try_from(vout).unwrap()),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -179,7 +151,7 @@ impl Scanner {
|
|||||||
/// must be immediately spendable, a post-processing pass is needed to remove those outputs.
|
/// must be immediately spendable, a post-processing pass is needed to remove those outputs.
|
||||||
/// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
|
/// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
|
||||||
pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
|
pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
|
||||||
let mut res = Vec::new();
|
let mut res = vec![];
|
||||||
for tx in &block.txdata {
|
for tx in &block.txdata {
|
||||||
res.extend(self.scan_transaction(tx));
|
res.extend(self.scan_transaction(tx));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
use std_shims::{
|
use std::{
|
||||||
io::{self, Read},
|
io::{self, Read},
|
||||||
collections::HashMap,
|
collections::HashMap,
|
||||||
};
|
};
|
||||||
@@ -13,25 +13,24 @@ use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
|
|||||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
hashes::Hash,
|
|
||||||
sighash::{TapSighashType, SighashCache, Prevouts},
|
sighash::{TapSighashType, SighashCache, Prevouts},
|
||||||
absolute::LockTime,
|
absolute::LockTime,
|
||||||
script::{PushBytesBuf, ScriptBuf},
|
script::{PushBytesBuf, ScriptBuf},
|
||||||
transaction::{Version, Transaction},
|
OutPoint, Sequence, Witness, TxIn, TxOut, Transaction, Network, Address,
|
||||||
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
crypto::Schnorr,
|
crypto::Schnorr,
|
||||||
wallet::{ReceivedOutput, address_payload},
|
wallet::{address, ReceivedOutput},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.cpp#L26-L63
|
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.h#L27
|
||||||
// As the above notes, a lower amount may not be considered dust if contained in a SegWit output
|
const MAX_STANDARD_TX_WEIGHT: u64 = 400_000;
|
||||||
// This doesn't bother with delineation due to how marginal these values are, and because it isn't
|
|
||||||
// worth the complexity to implement differentation
|
#[rustfmt::skip]
|
||||||
pub const DUST: u64 = 546;
|
//https://github.com/bitcoin/bitcoin/blob/a245429d680eb95cf4c0c78e58e63e3f0f5d979a/src/test/transaction_tests.cpp#L815-L816
|
||||||
|
const DUST: u64 = 674;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||||
pub enum TransactionError {
|
pub enum TransactionError {
|
||||||
@@ -43,8 +42,6 @@ pub enum TransactionError {
|
|||||||
DustPayment,
|
DustPayment,
|
||||||
#[error("too much data was specified")]
|
#[error("too much data was specified")]
|
||||||
TooMuchData,
|
TooMuchData,
|
||||||
#[error("fee was too low to pass the default minimum fee rate")]
|
|
||||||
TooLowFee,
|
|
||||||
#[error("not enough funds for these payments")]
|
#[error("not enough funds for these payments")]
|
||||||
NotEnoughFunds,
|
NotEnoughFunds,
|
||||||
#[error("transaction was too large")]
|
#[error("transaction was too large")]
|
||||||
@@ -64,7 +61,7 @@ impl SignableTransaction {
|
|||||||
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
||||||
// Expand this a full transaction in order to use the bitcoin library's weight function
|
// Expand this a full transaction in order to use the bitcoin library's weight function
|
||||||
let mut tx = Transaction {
|
let mut tx = Transaction {
|
||||||
version: Version(2),
|
version: 2,
|
||||||
lock_time: LockTime::ZERO,
|
lock_time: LockTime::ZERO,
|
||||||
input: vec![
|
input: vec![
|
||||||
TxIn {
|
TxIn {
|
||||||
@@ -84,18 +81,15 @@ impl SignableTransaction {
|
|||||||
.iter()
|
.iter()
|
||||||
// The payment is a fixed size so we don't have to use it here
|
// The payment is a fixed size so we don't have to use it here
|
||||||
// The script pub key is not of a fixed size and does have to be used here
|
// The script pub key is not of a fixed size and does have to be used here
|
||||||
.map(|payment| TxOut {
|
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
|
||||||
value: Amount::from_sat(payment.1),
|
|
||||||
script_pubkey: payment.0.script_pubkey(),
|
|
||||||
})
|
|
||||||
.collect(),
|
.collect(),
|
||||||
};
|
};
|
||||||
if let Some(change) = change {
|
if let Some(change) = change {
|
||||||
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
||||||
// the value is fixed size (so any value could be used here)
|
// the value is fixed size (so any value could be used here)
|
||||||
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
|
tx.output.push(TxOut { value: 0, script_pubkey: change.script_pubkey() });
|
||||||
}
|
}
|
||||||
u64::from(tx.weight())
|
u64::try_from(tx.weight()).unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
|
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
|
||||||
@@ -106,12 +100,6 @@ impl SignableTransaction {
|
|||||||
self.needed_fee
|
self.needed_fee
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the fee this transaction will use.
|
|
||||||
pub fn fee(&self) -> u64 {
|
|
||||||
self.prevouts.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>() -
|
|
||||||
self.tx.output.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new SignableTransaction.
|
/// Create a new SignableTransaction.
|
||||||
///
|
///
|
||||||
/// If a change address is specified, any leftover funds will be sent to it if the leftover funds
|
/// If a change address is specified, any leftover funds will be sent to it if the leftover funds
|
||||||
@@ -122,7 +110,7 @@ impl SignableTransaction {
|
|||||||
pub fn new(
|
pub fn new(
|
||||||
mut inputs: Vec<ReceivedOutput>,
|
mut inputs: Vec<ReceivedOutput>,
|
||||||
payments: &[(Address, u64)],
|
payments: &[(Address, u64)],
|
||||||
change: Option<&Address>,
|
change: Option<Address>,
|
||||||
data: Option<Vec<u8>>,
|
data: Option<Vec<u8>>,
|
||||||
fee_per_weight: u64,
|
fee_per_weight: u64,
|
||||||
) -> Result<SignableTransaction, TransactionError> {
|
) -> Result<SignableTransaction, TransactionError> {
|
||||||
@@ -140,11 +128,11 @@ impl SignableTransaction {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if data.as_ref().map_or(0, Vec::len) > 80 {
|
if data.as_ref().map(|data| data.len()).unwrap_or(0) > 80 {
|
||||||
Err(TransactionError::TooMuchData)?;
|
Err(TransactionError::TooMuchData)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let input_sat = inputs.iter().map(|input| input.output.value.to_sat()).sum::<u64>();
|
let input_sat = inputs.iter().map(|input| input.output.value).sum::<u64>();
|
||||||
let offsets = inputs.iter().map(|input| input.offset).collect();
|
let offsets = inputs.iter().map(|input| input.offset).collect();
|
||||||
let tx_ins = inputs
|
let tx_ins = inputs
|
||||||
.iter()
|
.iter()
|
||||||
@@ -159,18 +147,15 @@ impl SignableTransaction {
|
|||||||
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
||||||
let mut tx_outs = payments
|
let mut tx_outs = payments
|
||||||
.iter()
|
.iter()
|
||||||
.map(|payment| TxOut {
|
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
|
||||||
value: Amount::from_sat(payment.1),
|
|
||||||
script_pubkey: payment.0.script_pubkey(),
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
// Add the OP_RETURN output
|
// Add the OP_RETURN output
|
||||||
if let Some(data) = data {
|
if let Some(data) = data {
|
||||||
tx_outs.push(TxOut {
|
tx_outs.push(TxOut {
|
||||||
value: Amount::ZERO,
|
value: 0,
|
||||||
script_pubkey: ScriptBuf::new_op_return(
|
script_pubkey: ScriptBuf::new_op_return(
|
||||||
PushBytesBuf::try_from(data)
|
&PushBytesBuf::try_from(data)
|
||||||
.expect("data didn't fit into PushBytes depsite being checked"),
|
.expect("data didn't fit into PushBytes depsite being checked"),
|
||||||
),
|
),
|
||||||
})
|
})
|
||||||
@@ -178,47 +163,17 @@ impl SignableTransaction {
|
|||||||
|
|
||||||
let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
|
let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
|
||||||
let mut needed_fee = fee_per_weight * weight;
|
let mut needed_fee = fee_per_weight * weight;
|
||||||
|
|
||||||
// "Virtual transaction size" is weight ceildiv 4 per
|
|
||||||
// https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
|
|
||||||
|
|
||||||
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
|
|
||||||
// src/policy/policy.cpp#L295-L298
|
|
||||||
// implements this as expected
|
|
||||||
|
|
||||||
// Technically, it takes whatever's greater, the weight or the amount of signature operations
|
|
||||||
// multiplied by DEFAULT_BYTES_PER_SIGOP (20)
|
|
||||||
// We only use 1 signature per input, and our inputs have a weight exceeding 20
|
|
||||||
// Accordingly, our inputs' weight will always be greater than the cost of the signature ops
|
|
||||||
let vsize = weight.div_ceil(4);
|
|
||||||
debug_assert_eq!(
|
|
||||||
u64::try_from(bitcoin::policy::get_virtual_tx_size(
|
|
||||||
weight.try_into().unwrap(),
|
|
||||||
tx_ins.len().try_into().unwrap()
|
|
||||||
))
|
|
||||||
.unwrap(),
|
|
||||||
vsize
|
|
||||||
);
|
|
||||||
// Technically, if there isn't change, this TX may still pay enough of a fee to pass the
|
|
||||||
// minimum fee. Such edge cases aren't worth programming when they go against intent, as the
|
|
||||||
// specified fee rate is too low to be valid
|
|
||||||
// bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
|
|
||||||
if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
|
|
||||||
Err(TransactionError::TooLowFee)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if input_sat < (payment_sat + needed_fee) {
|
if input_sat < (payment_sat + needed_fee) {
|
||||||
Err(TransactionError::NotEnoughFunds)?;
|
Err(TransactionError::NotEnoughFunds)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
// If there's a change address, check if there's change to give it
|
// If there's a change address, check if there's change to give it
|
||||||
if let Some(change) = change {
|
if let Some(change) = change.as_ref() {
|
||||||
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
|
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
|
||||||
let fee_with_change = fee_per_weight * weight_with_change;
|
let fee_with_change = fee_per_weight * weight_with_change;
|
||||||
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
||||||
if value >= DUST {
|
if value >= DUST {
|
||||||
tx_outs
|
tx_outs.push(TxOut { value, script_pubkey: change.script_pubkey() });
|
||||||
.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
|
|
||||||
weight = weight_with_change;
|
weight = weight_with_change;
|
||||||
needed_fee = fee_with_change;
|
needed_fee = fee_with_change;
|
||||||
}
|
}
|
||||||
@@ -229,41 +184,24 @@ impl SignableTransaction {
|
|||||||
Err(TransactionError::NoOutputs)?;
|
Err(TransactionError::NoOutputs)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if weight > u64::from(bitcoin::policy::MAX_STANDARD_TX_WEIGHT) {
|
if weight > MAX_STANDARD_TX_WEIGHT {
|
||||||
Err(TransactionError::TooLargeTransaction)?;
|
Err(TransactionError::TooLargeTransaction)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(SignableTransaction {
|
Ok(SignableTransaction {
|
||||||
tx: Transaction {
|
tx: Transaction { version: 2, lock_time: LockTime::ZERO, input: tx_ins, output: tx_outs },
|
||||||
version: Version(2),
|
|
||||||
lock_time: LockTime::ZERO,
|
|
||||||
input: tx_ins,
|
|
||||||
output: tx_outs,
|
|
||||||
},
|
|
||||||
offsets,
|
offsets,
|
||||||
prevouts: inputs.drain(..).map(|input| input.output).collect(),
|
prevouts: inputs.drain(..).map(|input| input.output).collect(),
|
||||||
needed_fee,
|
needed_fee,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the TX ID of the transaction this will create.
|
|
||||||
pub fn txid(&self) -> [u8; 32] {
|
|
||||||
let mut res = self.tx.txid().to_byte_array();
|
|
||||||
res.reverse();
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the outputs this transaction will create.
|
|
||||||
pub fn outputs(&self) -> &[TxOut] {
|
|
||||||
&self.tx.output
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a multisig machine for this transaction.
|
/// Create a multisig machine for this transaction.
|
||||||
///
|
///
|
||||||
/// Returns None if the wrong keys are used.
|
/// Returns None if the wrong keys are used.
|
||||||
pub fn multisig(
|
pub fn multisig(
|
||||||
self,
|
self,
|
||||||
keys: &ThresholdKeys<Secp256k1>,
|
keys: ThresholdKeys<Secp256k1>,
|
||||||
mut transcript: RecommendedTranscript,
|
mut transcript: RecommendedTranscript,
|
||||||
) -> Option<TransactionMachine> {
|
) -> Option<TransactionMachine> {
|
||||||
transcript.domain_separate(b"bitcoin_transaction");
|
transcript.domain_separate(b"bitcoin_transaction");
|
||||||
@@ -277,18 +215,18 @@ impl SignableTransaction {
|
|||||||
}
|
}
|
||||||
for payment in &tx.output {
|
for payment in &tx.output {
|
||||||
transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
|
transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
|
||||||
transcript.append_message(b"output_amount", payment.value.to_sat().to_le_bytes());
|
transcript.append_message(b"output_amount", payment.value.to_le_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut sigs = vec![];
|
let mut sigs = vec![];
|
||||||
for i in 0 .. tx.input.len() {
|
for i in 0 .. tx.input.len() {
|
||||||
let mut transcript = transcript.clone();
|
let mut transcript = transcript.clone();
|
||||||
// This unwrap is safe since any transaction with this many inputs violates the maximum
|
|
||||||
// size allowed under standards, which this lib will error on creation of
|
|
||||||
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
||||||
|
|
||||||
let offset = keys.clone().offset(self.offsets[i]);
|
let offset = keys.clone().offset(self.offsets[i]);
|
||||||
if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
|
if address(Network::Bitcoin, offset.group_key())?.script_pubkey() !=
|
||||||
|
self.prevouts[i].script_pubkey
|
||||||
|
{
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -305,7 +243,7 @@ impl SignableTransaction {
|
|||||||
/// A FROST signing machine to produce a Bitcoin transaction.
|
/// A FROST signing machine to produce a Bitcoin transaction.
|
||||||
///
|
///
|
||||||
/// This does not support caching its preprocess. When sign is called, the message must be empty.
|
/// This does not support caching its preprocess. When sign is called, the message must be empty.
|
||||||
/// This will panic if either `cache` is called or the message isn't empty.
|
/// This will panic if it isn't.
|
||||||
pub struct TransactionMachine {
|
pub struct TransactionMachine {
|
||||||
tx: SignableTransaction,
|
tx: SignableTransaction,
|
||||||
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
||||||
@@ -355,10 +293,10 @@ impl SignMachine<Transaction> for TransactionSignMachine {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn from_cache(
|
fn from_cache(
|
||||||
(): (),
|
_: (),
|
||||||
_: ThresholdKeys<Secp256k1>,
|
_: ThresholdKeys<Secp256k1>,
|
||||||
_: CachedPreprocess,
|
_: CachedPreprocess,
|
||||||
) -> (Self, Self::Preprocess) {
|
) -> Result<Self, FrostError> {
|
||||||
unimplemented!(
|
unimplemented!(
|
||||||
"Bitcoin transactions don't support caching their preprocesses due to {}",
|
"Bitcoin transactions don't support caching their preprocesses due to {}",
|
||||||
"being already bound to a specific transaction"
|
"being already bound to a specific transaction"
|
||||||
@@ -401,9 +339,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
|
|||||||
commitments[i].clone(),
|
commitments[i].clone(),
|
||||||
cache
|
cache
|
||||||
.taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
|
.taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
|
||||||
// This should never happen since the inputs align with the TX the cache was
|
.unwrap()
|
||||||
// constructed with, and because i is always < prevouts.len()
|
|
||||||
.expect("taproot_key_spend_signature_hash failed to return a hash")
|
|
||||||
.as_ref(),
|
.as_ref(),
|
||||||
)?;
|
)?;
|
||||||
shares.push(share);
|
shares.push(share);
|
||||||
@@ -437,7 +373,7 @@ impl SignatureMachine<Transaction> for TransactionSignatureMachine {
|
|||||||
)?;
|
)?;
|
||||||
|
|
||||||
let mut witness = Witness::new();
|
let mut witness = Witness::new();
|
||||||
witness.push(sig);
|
witness.push(sig.as_ref());
|
||||||
input.witness = witness;
|
input.witness = witness;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,18 +1,14 @@
|
|||||||
use std::sync::OnceLock;
|
|
||||||
|
|
||||||
use bitcoin_serai::rpc::Rpc;
|
use bitcoin_serai::rpc::Rpc;
|
||||||
|
|
||||||
use tokio::sync::Mutex;
|
use tokio::sync::Mutex;
|
||||||
|
|
||||||
static SEQUENTIAL_CELL: OnceLock<Mutex<()>> = OnceLock::new();
|
lazy_static::lazy_static! {
|
||||||
#[allow(non_snake_case)]
|
pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
|
||||||
pub fn SEQUENTIAL() -> &'static Mutex<()> {
|
|
||||||
SEQUENTIAL_CELL.get_or_init(|| Mutex::new(()))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
pub(crate) async fn rpc() -> Rpc {
|
pub(crate) async fn rpc() -> Rpc {
|
||||||
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:8332".to_string()).await.unwrap();
|
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await.unwrap();
|
||||||
|
|
||||||
// If this node has already been interacted with, clear its chain
|
// If this node has already been interacted with, clear its chain
|
||||||
if rpc.get_latest_block_number().await.unwrap() > 0 {
|
if rpc.get_latest_block_number().await.unwrap() > 0 {
|
||||||
@@ -34,7 +30,7 @@ macro_rules! async_sequential {
|
|||||||
$(
|
$(
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn $name() {
|
async fn $name() {
|
||||||
let guard = runner::SEQUENTIAL().lock().await;
|
let guard = runner::SEQUENTIAL.lock().await;
|
||||||
let local = tokio::task::LocalSet::new();
|
let local = tokio::task::LocalSet::new();
|
||||||
local.run_until(async move {
|
local.run_until(async move {
|
||||||
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
|
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
|
||||||
|
|||||||
@@ -22,12 +22,9 @@ use bitcoin_serai::{
|
|||||||
hashes::Hash as HashTrait,
|
hashes::Hash as HashTrait,
|
||||||
blockdata::opcodes::all::OP_RETURN,
|
blockdata::opcodes::all::OP_RETURN,
|
||||||
script::{PushBytesBuf, Instruction, Instructions, Script},
|
script::{PushBytesBuf, Instruction, Instructions, Script},
|
||||||
address::NetworkChecked,
|
OutPoint, TxOut, Transaction, Network, Address,
|
||||||
OutPoint, Amount, TxOut, Transaction, Network, Address,
|
|
||||||
},
|
|
||||||
wallet::{
|
|
||||||
tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
|
|
||||||
},
|
},
|
||||||
|
wallet::{tweak_keys, address, ReceivedOutput, Scanner, TransactionError, SignableTransaction},
|
||||||
rpc::Rpc,
|
rpc::Rpc,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -46,10 +43,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
rpc
|
rpc
|
||||||
.rpc_call::<Vec<String>>(
|
.rpc_call::<Vec<String>>(
|
||||||
"generatetoaddress",
|
"generatetoaddress",
|
||||||
serde_json::json!([
|
serde_json::json!([1, address(Network::Regtest, key).unwrap()]),
|
||||||
1,
|
|
||||||
Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
|
|
||||||
]),
|
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@@ -58,7 +52,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
rpc
|
rpc
|
||||||
.rpc_call::<Vec<String>>(
|
.rpc_call::<Vec<String>>(
|
||||||
"generatetoaddress",
|
"generatetoaddress",
|
||||||
serde_json::json!([100, Address::p2sh(Script::new(), Network::Regtest).unwrap()]),
|
serde_json::json!([100, Address::p2sh(Script::empty(), Network::Regtest).unwrap()]),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.unwrap();
|
.unwrap();
|
||||||
@@ -70,7 +64,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
|
|
||||||
assert_eq!(outputs.len(), 1);
|
assert_eq!(outputs.len(), 1);
|
||||||
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
|
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
|
||||||
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
|
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
|
ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
|
||||||
@@ -82,7 +76,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
|
|
||||||
fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
|
fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
|
||||||
let mut keys = key_gen(&mut OsRng);
|
let mut keys = key_gen(&mut OsRng);
|
||||||
for keys in keys.values_mut() {
|
for (_, keys) in keys.iter_mut() {
|
||||||
*keys = tweak_keys(keys);
|
*keys = tweak_keys(keys);
|
||||||
}
|
}
|
||||||
let key = keys.values().next().unwrap().group_key();
|
let key = keys.values().next().unwrap().group_key();
|
||||||
@@ -91,14 +85,14 @@ fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
|
|||||||
|
|
||||||
fn sign(
|
fn sign(
|
||||||
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
||||||
tx: &SignableTransaction,
|
tx: SignableTransaction,
|
||||||
) -> Transaction {
|
) -> Transaction {
|
||||||
let mut machines = HashMap::new();
|
let mut machines = HashMap::new();
|
||||||
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
|
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
|
||||||
machines.insert(
|
machines.insert(
|
||||||
i,
|
i,
|
||||||
tx.clone()
|
tx.clone()
|
||||||
.multisig(&keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
|
.multisig(keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
|
||||||
.unwrap(),
|
.unwrap(),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -193,7 +187,7 @@ async_sequential! {
|
|||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
assert_eq!(output.offset(), Scalar::ZERO);
|
||||||
|
|
||||||
let inputs = vec![output];
|
let inputs = vec![output];
|
||||||
let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
|
let addr = || address(Network::Regtest, key).unwrap();
|
||||||
let payments = vec![(addr(), 1000)];
|
let payments = vec![(addr(), 1000)];
|
||||||
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
||||||
@@ -206,7 +200,7 @@ async_sequential! {
|
|||||||
// No change
|
// No change
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
||||||
// Consolidation TX
|
// Consolidation TX
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
|
||||||
// Data
|
// Data
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
||||||
// No outputs
|
// No outputs
|
||||||
@@ -228,18 +222,13 @@ async_sequential! {
|
|||||||
Err(TransactionError::TooMuchData),
|
Err(TransactionError::TooMuchData),
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
|
|
||||||
Err(TransactionError::TooLowFee),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
|
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
|
||||||
Err(TransactionError::NotEnoughFunds),
|
Err(TransactionError::NotEnoughFunds),
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
|
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, 0),
|
||||||
Err(TransactionError::TooLargeTransaction),
|
Err(TransactionError::TooLargeTransaction),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -261,26 +250,24 @@ async_sequential! {
|
|||||||
|
|
||||||
// Declare payments, change, fee
|
// Declare payments, change, fee
|
||||||
let payments = [
|
let payments = [
|
||||||
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
|
(address(Network::Regtest, key).unwrap(), 1005),
|
||||||
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
|
(address(Network::Regtest, offset_key).unwrap(), 1007)
|
||||||
];
|
];
|
||||||
|
|
||||||
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
||||||
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
||||||
let change_addr =
|
let change_addr = address(Network::Regtest, change_key).unwrap();
|
||||||
Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
|
|
||||||
|
|
||||||
// Create and sign the TX
|
// Create and sign the TX
|
||||||
let tx = SignableTransaction::new(
|
let tx = SignableTransaction::new(
|
||||||
vec![output.clone(), offset_output.clone()],
|
vec![output.clone(), offset_output.clone()],
|
||||||
&payments,
|
&payments,
|
||||||
Some(&change_addr),
|
Some(change_addr.clone()),
|
||||||
None,
|
None,
|
||||||
FEE
|
FEE
|
||||||
).unwrap();
|
).unwrap();
|
||||||
let needed_fee = tx.needed_fee();
|
let needed_fee = tx.needed_fee();
|
||||||
let expected_id = tx.txid();
|
let tx = sign(&keys, tx);
|
||||||
let tx = sign(&keys, &tx);
|
|
||||||
|
|
||||||
assert_eq!(tx.output.len(), 3);
|
assert_eq!(tx.output.len(), 3);
|
||||||
|
|
||||||
@@ -297,24 +284,21 @@ async_sequential! {
|
|||||||
|
|
||||||
// Make sure the payments were properly created
|
// Make sure the payments were properly created
|
||||||
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
||||||
assert_eq!(
|
assert_eq!(output, &TxOut { script_pubkey: payment.0.script_pubkey(), value: payment.1 });
|
||||||
output,
|
|
||||||
&TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
|
|
||||||
);
|
|
||||||
assert_eq!(scanned.value(), payment.1 );
|
assert_eq!(scanned.value(), payment.1 );
|
||||||
}
|
}
|
||||||
|
|
||||||
// Make sure the change is correct
|
// Make sure the change is correct
|
||||||
assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
|
assert_eq!(needed_fee, u64::try_from(tx.weight()).unwrap() * FEE);
|
||||||
let input_value = output.value() + offset_output.value();
|
let input_value = output.value() + offset_output.value();
|
||||||
let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
|
let output_value = tx.output.iter().map(|output| output.value).sum::<u64>();
|
||||||
assert_eq!(input_value - output_value, needed_fee);
|
assert_eq!(input_value - output_value, needed_fee);
|
||||||
|
|
||||||
let change_amount =
|
let change_amount =
|
||||||
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx.output[2],
|
tx.output[2],
|
||||||
TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
|
TxOut { script_pubkey: change_addr.script_pubkey(), value: change_amount },
|
||||||
);
|
);
|
||||||
|
|
||||||
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
||||||
@@ -323,7 +307,6 @@ async_sequential! {
|
|||||||
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
|
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
|
||||||
hash.reverse();
|
hash.reverse();
|
||||||
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
||||||
assert_eq!(expected_id, hash);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn test_data() {
|
async fn test_data() {
|
||||||
@@ -341,10 +324,10 @@ async_sequential! {
|
|||||||
|
|
||||||
let tx = sign(
|
let tx = sign(
|
||||||
&keys,
|
&keys,
|
||||||
&SignableTransaction::new(
|
SignableTransaction::new(
|
||||||
vec![output],
|
vec![output],
|
||||||
&[],
|
&[],
|
||||||
Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
|
address(Network::Regtest, key),
|
||||||
Some(data.clone()),
|
Some(data.clone()),
|
||||||
FEE
|
FEE
|
||||||
).unwrap()
|
).unwrap()
|
||||||
|
|||||||
6
coins/ethereum/.gitignore
vendored
6
coins/ethereum/.gitignore
vendored
@@ -1,7 +1,3 @@
|
|||||||
# Solidity build outputs
|
# solidity build outputs
|
||||||
cache
|
cache
|
||||||
artifacts
|
artifacts
|
||||||
|
|
||||||
# Auto-generated ABI files
|
|
||||||
src/abi/schnorr.rs
|
|
||||||
src/abi/router.rs
|
|
||||||
|
|||||||
@@ -7,39 +7,31 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
|
|||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
publish = false
|
publish = false
|
||||||
rust-version = "1.74"
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
thiserror = { version = "1", default-features = false }
|
thiserror = "1"
|
||||||
eyre = { version = "0.6", default-features = false }
|
rand_core = "0.6"
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false, features = ["std"] }
|
serde_json = "1"
|
||||||
|
serde = "1"
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
sha2 = "0.10"
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
|
sha3 = "0.10"
|
||||||
|
|
||||||
|
group = "0.13"
|
||||||
|
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits", "ecdsa"] }
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
||||||
|
|
||||||
ethers-core = { version = "2", default-features = false }
|
eyre = "0.6"
|
||||||
ethers-providers = { version = "2", default-features = false }
|
|
||||||
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
ethers = { version = "2", default-features = false, features = ["abigen", "ethers-solc"] }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
ethers-solc = "2"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
rand_core = { version = "0.6", default-features = false, features = ["std"] }
|
|
||||||
|
|
||||||
hex = { version = "0.4", default-features = false, features = ["std"] }
|
|
||||||
serde = { version = "1", default-features = false, features = ["std"] }
|
|
||||||
serde_json = { version = "1", default-features = false, features = ["std"] }
|
|
||||||
|
|
||||||
sha2 = { version = "0.10", default-features = false, features = ["std"] }
|
|
||||||
|
|
||||||
tokio = { version = "1", features = ["macros"] }
|
tokio = { version = "1", features = ["macros"] }
|
||||||
|
|||||||
@@ -1,42 +1,16 @@
|
|||||||
use std::process::Command;
|
use ethers_solc::{Project, ProjectPathsConfig};
|
||||||
|
|
||||||
use ethers_contract::Abigen;
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
println!("cargo:rerun-if-changed=contracts/*");
|
println!("cargo:rerun-if-changed=contracts");
|
||||||
println!("cargo:rerun-if-changed=artifacts/*");
|
println!("cargo:rerun-if-changed=artifacts");
|
||||||
|
|
||||||
for line in String::from_utf8(Command::new("solc").args(["--version"]).output().unwrap().stdout)
|
// configure the project with all its paths, solc, cache etc.
|
||||||
.unwrap()
|
let project = Project::builder()
|
||||||
.lines()
|
.paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
|
||||||
{
|
.build()
|
||||||
if let Some(version) = line.strip_prefix("Version: ") {
|
|
||||||
let version = version.split('+').next().unwrap();
|
|
||||||
assert_eq!(version, "0.8.25");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[rustfmt::skip]
|
|
||||||
let args = [
|
|
||||||
"--base-path", ".",
|
|
||||||
"-o", "./artifacts", "--overwrite",
|
|
||||||
"--bin", "--abi",
|
|
||||||
"--optimize",
|
|
||||||
"./contracts/Schnorr.sol", "./contracts/Router.sol",
|
|
||||||
];
|
|
||||||
assert!(Command::new("solc").args(args).status().unwrap().success());
|
|
||||||
|
|
||||||
Abigen::new("Schnorr", "./artifacts/Schnorr.abi")
|
|
||||||
.unwrap()
|
|
||||||
.generate()
|
|
||||||
.unwrap()
|
|
||||||
.write_to_file("./src/abi/schnorr.rs")
|
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
project.compile().unwrap();
|
||||||
|
|
||||||
Abigen::new("Router", "./artifacts/Router.abi")
|
// Tell Cargo that if a source file changes, to rerun this build script.
|
||||||
.unwrap()
|
project.rerun_if_sources_changed();
|
||||||
.generate()
|
|
||||||
.unwrap()
|
|
||||||
.write_to_file("./src/abi/router.rs")
|
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,90 +0,0 @@
|
|||||||
// SPDX-License-Identifier: AGPLv3
|
|
||||||
pragma solidity ^0.8.0;
|
|
||||||
|
|
||||||
import "./Schnorr.sol";
|
|
||||||
|
|
||||||
contract Router is Schnorr {
|
|
||||||
// Contract initializer
|
|
||||||
// TODO: Replace with a MuSig of the genesis validators
|
|
||||||
address public initializer;
|
|
||||||
|
|
||||||
// Nonce is incremented for each batch of transactions executed
|
|
||||||
uint256 public nonce;
|
|
||||||
|
|
||||||
// fixed parity for the public keys used in this contract
|
|
||||||
uint8 constant public KEY_PARITY = 27;
|
|
||||||
|
|
||||||
// current public key's x-coordinate
|
|
||||||
// note: this key must always use the fixed parity defined above
|
|
||||||
bytes32 public seraiKey;
|
|
||||||
|
|
||||||
struct OutInstruction {
|
|
||||||
address to;
|
|
||||||
uint256 value;
|
|
||||||
bytes data;
|
|
||||||
}
|
|
||||||
|
|
||||||
struct Signature {
|
|
||||||
bytes32 c;
|
|
||||||
bytes32 s;
|
|
||||||
}
|
|
||||||
|
|
||||||
// success is a uint256 representing a bitfield of transaction successes
|
|
||||||
event Executed(uint256 nonce, bytes32 batch, uint256 success);
|
|
||||||
|
|
||||||
// error types
|
|
||||||
error NotInitializer();
|
|
||||||
error AlreadyInitialized();
|
|
||||||
error InvalidKey();
|
|
||||||
error TooManyTransactions();
|
|
||||||
|
|
||||||
constructor() {
|
|
||||||
initializer = msg.sender;
|
|
||||||
}
|
|
||||||
|
|
||||||
// initSeraiKey can be called by the contract initializer to set the first
|
|
||||||
// public key, only if the public key has yet to be set.
|
|
||||||
function initSeraiKey(bytes32 _seraiKey) external {
|
|
||||||
if (msg.sender != initializer) revert NotInitializer();
|
|
||||||
if (seraiKey != 0) revert AlreadyInitialized();
|
|
||||||
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
|
||||||
seraiKey = _seraiKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
// updateSeraiKey validates the given Schnorr signature against the current public key,
|
|
||||||
// and if successful, updates the contract's public key to the given one.
|
|
||||||
function updateSeraiKey(
|
|
||||||
bytes32 _seraiKey,
|
|
||||||
Signature memory sig
|
|
||||||
) public {
|
|
||||||
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
|
||||||
bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey));
|
|
||||||
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
|
||||||
seraiKey = _seraiKey;
|
|
||||||
}
|
|
||||||
|
|
||||||
// execute accepts a list of transactions to execute as well as a Schnorr signature.
|
|
||||||
// if signature verification passes, the given transactions are executed.
|
|
||||||
// if signature verification fails, this function will revert.
|
|
||||||
function execute(
|
|
||||||
OutInstruction[] calldata transactions,
|
|
||||||
Signature memory sig
|
|
||||||
) public {
|
|
||||||
if (transactions.length > 256) revert TooManyTransactions();
|
|
||||||
|
|
||||||
bytes32 message = keccak256(abi.encode("execute", nonce, transactions));
|
|
||||||
// This prevents re-entrancy from causing double spends yet does allow
|
|
||||||
// out-of-order execution via re-entrancy
|
|
||||||
nonce++;
|
|
||||||
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
|
||||||
|
|
||||||
uint256 successes;
|
|
||||||
for(uint256 i = 0; i < transactions.length; i++) {
|
|
||||||
(bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data);
|
|
||||||
assembly {
|
|
||||||
successes := or(successes, shl(i, success))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
emit Executed(nonce, message, successes);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
// SPDX-License-Identifier: AGPLv3
|
//SPDX-License-Identifier: AGPLv3
|
||||||
pragma solidity ^0.8.0;
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
// see https://github.com/noot/schnorr-verify for implementation details
|
// see https://github.com/noot/schnorr-verify for implementation details
|
||||||
@@ -7,32 +7,29 @@ contract Schnorr {
|
|||||||
uint256 constant public Q =
|
uint256 constant public Q =
|
||||||
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
||||||
|
|
||||||
error InvalidSOrA();
|
|
||||||
error InvalidSignature();
|
|
||||||
|
|
||||||
// parity := public key y-coord parity (27 or 28)
|
// parity := public key y-coord parity (27 or 28)
|
||||||
// px := public key x-coord
|
// px := public key x-coord
|
||||||
// message := 32-byte hash of the message
|
// message := 32-byte message
|
||||||
// c := schnorr signature challenge
|
|
||||||
// s := schnorr signature
|
// s := schnorr signature
|
||||||
|
// e := schnorr signature challenge
|
||||||
function verify(
|
function verify(
|
||||||
uint8 parity,
|
uint8 parity,
|
||||||
bytes32 px,
|
bytes32 px,
|
||||||
bytes32 message,
|
bytes32 message,
|
||||||
bytes32 c,
|
bytes32 s,
|
||||||
bytes32 s
|
bytes32 e
|
||||||
) public view returns (bool) {
|
) public view returns (bool) {
|
||||||
// ecrecover = (m, v, r, s);
|
// ecrecover = (m, v, r, s);
|
||||||
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
||||||
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
|
bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));
|
||||||
|
|
||||||
if (sa == 0) revert InvalidSOrA();
|
require(sp != 0);
|
||||||
// the ecrecover precompile implementation checks that the `r` and `s`
|
// the ecrecover precompile implementation checks that the `r` and `s`
|
||||||
// inputs are non-zero (in this case, `px` and `ca`), thus we don't need to
|
// inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
|
||||||
// check if they're zero.
|
// check if they're zero.will make me
|
||||||
address R = ecrecover(sa, parity, px, ca);
|
address R = ecrecover(sp, parity, px, ep);
|
||||||
if (R == address(0)) revert InvalidSignature();
|
require(R != address(0), "ecrecover failed");
|
||||||
return c == keccak256(
|
return e == keccak256(
|
||||||
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
|
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +0,0 @@
|
|||||||
#[rustfmt::skip]
|
|
||||||
#[allow(clippy::all)]
|
|
||||||
pub(crate) mod schnorr;
|
|
||||||
#[rustfmt::skip]
|
|
||||||
#[allow(clippy::all)]
|
|
||||||
pub(crate) mod router;
|
|
||||||
52
coins/ethereum/src/contract.rs
Normal file
52
coins/ethereum/src/contract.rs
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
use crate::crypto::ProcessedSignature;
|
||||||
|
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
|
||||||
|
use eyre::{eyre, Result};
|
||||||
|
use std::fs::File;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum EthereumError {
|
||||||
|
#[error("failed to verify Schnorr signature")]
|
||||||
|
VerificationError,
|
||||||
|
}
|
||||||
|
|
||||||
|
abigen!(
|
||||||
|
Schnorr,
|
||||||
|
"./artifacts/Schnorr.sol/Schnorr.json",
|
||||||
|
event_derives(serde::Deserialize, serde::Serialize),
|
||||||
|
);
|
||||||
|
|
||||||
|
pub async fn deploy_schnorr_verifier_contract(
|
||||||
|
client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
|
||||||
|
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
|
||||||
|
let path = "./artifacts/Schnorr.sol/Schnorr.json";
|
||||||
|
let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
|
||||||
|
let abi = artifact.abi.unwrap();
|
||||||
|
let bin = artifact.bytecode.unwrap().object;
|
||||||
|
let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
|
||||||
|
let contract = factory.deploy(())?.send().await?;
|
||||||
|
let contract = Schnorr::new(contract.address(), client);
|
||||||
|
Ok(contract)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn call_verify(
|
||||||
|
contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
|
||||||
|
params: &ProcessedSignature,
|
||||||
|
) -> Result<()> {
|
||||||
|
if contract
|
||||||
|
.verify(
|
||||||
|
params.parity + 27,
|
||||||
|
params.px.to_bytes().into(),
|
||||||
|
params.message,
|
||||||
|
params.s.to_bytes().into(),
|
||||||
|
params.e.to_bytes().into(),
|
||||||
|
)
|
||||||
|
.call()
|
||||||
|
.await?
|
||||||
|
{
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(eyre!(EthereumError::VerificationError))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,54 +1,50 @@
|
|||||||
use sha3::{Digest, Keccak256};
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
use group::Group;
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::{
|
elliptic_curve::{
|
||||||
bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint,
|
bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
|
||||||
},
|
},
|
||||||
ProjectivePoint, Scalar, U256,
|
AffinePoint, ProjectivePoint, Scalar, U256,
|
||||||
};
|
};
|
||||||
|
|
||||||
use frost::{
|
use frost::{algorithm::Hram, curve::Secp256k1};
|
||||||
algorithm::{Hram, SchnorrSignature},
|
|
||||||
curve::Secp256k1,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
|
pub fn keccak256(data: &[u8]) -> [u8; 32] {
|
||||||
Keccak256::digest(data).into()
|
Keccak256::digest(data).try_into().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] {
|
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
|
||||||
let encoded_point = point.to_encoded_point(false);
|
let encoded_point = point.to_encoded_point(false);
|
||||||
// Last 20 bytes of the hash of the concatenated x and y coordinates
|
keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
|
||||||
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
|
|
||||||
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
||||||
pub struct PublicKey {
|
if r.is_zero().into() || s.is_zero().into() {
|
||||||
pub A: ProjectivePoint,
|
return None;
|
||||||
pub px: Scalar,
|
}
|
||||||
pub parity: u8,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PublicKey {
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
|
let R = AffinePoint::decompress(&r.to_bytes(), v.into());
|
||||||
let affine = A.to_affine();
|
#[allow(non_snake_case)]
|
||||||
let parity = u8::from(bool::from(affine.y_is_odd())) + 27;
|
if let Some(R) = Option::<AffinePoint>::from(R) {
|
||||||
if parity != 27 {
|
#[allow(non_snake_case)]
|
||||||
None?;
|
let R = ProjectivePoint::from(R);
|
||||||
|
|
||||||
|
let r = r.invert().unwrap();
|
||||||
|
let u1 = ProjectivePoint::GENERATOR * (-message * r);
|
||||||
|
let u2 = R * (s * r);
|
||||||
|
let key: ProjectivePoint = u1 + u2;
|
||||||
|
if !bool::from(key.is_identity()) {
|
||||||
|
return Some(address(&key));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let x_coord = affine.x();
|
None
|
||||||
let x_coord_scalar = <Scalar as Reduce<U256>>::reduce_bytes(&x_coord);
|
|
||||||
// Return None if a reduction would occur
|
|
||||||
if x_coord_scalar.to_repr() != x_coord {
|
|
||||||
None?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(PublicKey { A, px: x_coord_scalar, parity })
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
@@ -59,33 +55,53 @@ impl Hram<Secp256k1> for EthereumHram {
|
|||||||
let a_encoded_point = A.to_encoded_point(true);
|
let a_encoded_point = A.to_encoded_point(true);
|
||||||
let mut a_encoded = a_encoded_point.as_ref().to_owned();
|
let mut a_encoded = a_encoded_point.as_ref().to_owned();
|
||||||
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
|
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
|
||||||
assert!((a_encoded[0] == 27) || (a_encoded[0] == 28));
|
|
||||||
let mut data = address(R).to_vec();
|
let mut data = address(R).to_vec();
|
||||||
data.append(&mut a_encoded);
|
data.append(&mut a_encoded);
|
||||||
data.extend(m);
|
data.append(&mut m.to_vec());
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
|
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct Signature {
|
pub struct ProcessedSignature {
|
||||||
pub(crate) c: Scalar,
|
pub s: Scalar,
|
||||||
pub(crate) s: Scalar,
|
pub px: Scalar,
|
||||||
|
pub parity: u8,
|
||||||
|
pub message: [u8; 32],
|
||||||
|
pub e: Scalar,
|
||||||
}
|
}
|
||||||
impl Signature {
|
|
||||||
pub fn new(
|
#[allow(non_snake_case)]
|
||||||
public_key: &PublicKey,
|
pub fn preprocess_signature_for_ecrecover(
|
||||||
|
m: [u8; 32],
|
||||||
|
R: &ProjectivePoint,
|
||||||
|
s: Scalar,
|
||||||
|
A: &ProjectivePoint,
|
||||||
chain_id: U256,
|
chain_id: U256,
|
||||||
m: &[u8],
|
) -> (Scalar, Scalar) {
|
||||||
signature: SchnorrSignature<Secp256k1>,
|
let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
|
||||||
) -> Option<Signature> {
|
let sr = processed_sig.s.mul(&processed_sig.px).negate();
|
||||||
let c = EthereumHram::hram(
|
let er = processed_sig.e.mul(&processed_sig.px).negate();
|
||||||
&signature.R,
|
(sr, er)
|
||||||
&public_key.A,
|
}
|
||||||
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
|
||||||
);
|
#[allow(non_snake_case)]
|
||||||
if !signature.verify(public_key.A, c) {
|
pub fn process_signature_for_contract(
|
||||||
None?;
|
m: [u8; 32],
|
||||||
}
|
R: &ProjectivePoint,
|
||||||
Some(Signature { c, s: signature.s })
|
s: Scalar,
|
||||||
|
A: &ProjectivePoint,
|
||||||
|
chain_id: U256,
|
||||||
|
) -> ProcessedSignature {
|
||||||
|
let encoded_pk = A.to_encoded_point(true);
|
||||||
|
let px = &encoded_pk.as_ref()[1 .. 33];
|
||||||
|
let px_scalar = Scalar::reduce(U256::from_be_slice(px));
|
||||||
|
let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
|
||||||
|
ProcessedSignature {
|
||||||
|
s,
|
||||||
|
px: px_scalar,
|
||||||
|
parity: &encoded_pk.as_ref()[0] - 2,
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
message: m,
|
||||||
|
e,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +1,2 @@
|
|||||||
use thiserror::Error;
|
pub mod contract;
|
||||||
|
|
||||||
pub mod crypto;
|
pub mod crypto;
|
||||||
|
|
||||||
pub(crate) mod abi;
|
|
||||||
pub mod schnorr;
|
|
||||||
pub mod router;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests;
|
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
|
||||||
pub enum Error {
|
|
||||||
#[error("failed to verify Schnorr signature")]
|
|
||||||
InvalidSignature,
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,30 +0,0 @@
|
|||||||
pub use crate::abi::router::*;
|
|
||||||
|
|
||||||
/*
|
|
||||||
use crate::crypto::{ProcessedSignature, PublicKey};
|
|
||||||
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
|
|
||||||
use eyre::Result;
|
|
||||||
use std::{convert::From, fs::File, sync::Arc};
|
|
||||||
|
|
||||||
pub async fn router_update_public_key<M: Middleware + 'static>(
|
|
||||||
contract: &Router<M>,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
signature: &ProcessedSignature,
|
|
||||||
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
|
|
||||||
let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into());
|
|
||||||
let pending_tx = tx.send().await?;
|
|
||||||
let receipt = pending_tx.await?;
|
|
||||||
Ok(receipt)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn router_execute<M: Middleware + 'static>(
|
|
||||||
contract: &Router<M>,
|
|
||||||
txs: Vec<Rtransaction>,
|
|
||||||
signature: &ProcessedSignature,
|
|
||||||
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
|
|
||||||
let tx = contract.execute(txs, signature.into()).send();
|
|
||||||
let pending_tx = tx.send().await?;
|
|
||||||
let receipt = pending_tx.await?;
|
|
||||||
Ok(receipt)
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
use eyre::{eyre, Result};
|
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
|
||||||
|
|
||||||
use ethers_providers::{Provider, Http};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Error,
|
|
||||||
crypto::{keccak256, PublicKey, Signature},
|
|
||||||
};
|
|
||||||
pub use crate::abi::schnorr::*;
|
|
||||||
|
|
||||||
pub async fn call_verify(
|
|
||||||
contract: &Schnorr<Provider<Http>>,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
message: &[u8],
|
|
||||||
signature: &Signature,
|
|
||||||
) -> Result<()> {
|
|
||||||
if contract
|
|
||||||
.verify(
|
|
||||||
public_key.parity,
|
|
||||||
public_key.px.to_repr().into(),
|
|
||||||
keccak256(message),
|
|
||||||
signature.c.to_repr().into(),
|
|
||||||
signature.s.to_repr().into(),
|
|
||||||
)
|
|
||||||
.call()
|
|
||||||
.await?
|
|
||||||
{
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(eyre!(Error::InvalidSignature))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,132 +0,0 @@
|
|||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use sha2::Sha256;
|
|
||||||
use sha3::{Digest, Keccak256};
|
|
||||||
|
|
||||||
use group::Group;
|
|
||||||
use k256::{
|
|
||||||
ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey},
|
|
||||||
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint},
|
|
||||||
U256, Scalar, AffinePoint, ProjectivePoint,
|
|
||||||
};
|
|
||||||
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
algorithm::{Hram, IetfSchnorr},
|
|
||||||
tests::{algorithm_machines, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{crypto::*, tests::key_gen};
|
|
||||||
|
|
||||||
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
|
||||||
if r.is_zero().into() || s.is_zero().into() || !((v == 27) || (v == 28)) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into());
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
if let Some(R) = Option::<AffinePoint>::from(R) {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = ProjectivePoint::from(R);
|
|
||||||
|
|
||||||
let r = r.invert().unwrap();
|
|
||||||
let u1 = ProjectivePoint::GENERATOR * (-message * r);
|
|
||||||
let u2 = R * (s * r);
|
|
||||||
let key: ProjectivePoint = u1 + u2;
|
|
||||||
if !bool::from(key.is_identity()) {
|
|
||||||
return Some(address(&key));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ecrecover() {
|
|
||||||
let private = SigningKey::random(&mut OsRng);
|
|
||||||
let public = VerifyingKey::from(&private);
|
|
||||||
|
|
||||||
// Sign the signature
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let (sig, recovery_id) = private
|
|
||||||
.as_nonzero_scalar()
|
|
||||||
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Sanity check the signature verifies
|
|
||||||
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
|
||||||
{
|
|
||||||
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Perform the ecrecover
|
|
||||||
assert_eq!(
|
|
||||||
ecrecover(
|
|
||||||
hash_to_scalar(MESSAGE),
|
|
||||||
u8::from(recovery_id.unwrap().is_y_odd()) + 27,
|
|
||||||
*sig.r(),
|
|
||||||
*sig.s()
|
|
||||||
)
|
|
||||||
.unwrap(),
|
|
||||||
address(&ProjectivePoint::from(public.as_affine()))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run the sign test with the EthereumHram
|
|
||||||
#[test]
|
|
||||||
fn test_signing() {
|
|
||||||
let (keys, _) = key_gen();
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let _sig =
|
|
||||||
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub fn preprocess_signature_for_ecrecover(
|
|
||||||
R: ProjectivePoint,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
chain_id: U256,
|
|
||||||
m: &[u8],
|
|
||||||
s: Scalar,
|
|
||||||
) -> (u8, Scalar, Scalar) {
|
|
||||||
let c = EthereumHram::hram(
|
|
||||||
&R,
|
|
||||||
&public_key.A,
|
|
||||||
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
|
||||||
);
|
|
||||||
let sa = -(s * public_key.px);
|
|
||||||
let ca = -(c * public_key.px);
|
|
||||||
(public_key.parity, sa, ca)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ecrecover_hack() {
|
|
||||||
let (keys, public_key) = key_gen();
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
let chain_id = U256::ONE;
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
&algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
|
|
||||||
let (parity, sa, ca) =
|
|
||||||
preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s);
|
|
||||||
let q = ecrecover(sa, parity, public_key.px, ca).unwrap();
|
|
||||||
assert_eq!(q, address(&sig.R));
|
|
||||||
}
|
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
use std::{sync::Arc, time::Duration, fs::File, collections::HashMap};
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
|
||||||
use k256::{Scalar, ProjectivePoint};
|
|
||||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
|
|
||||||
|
|
||||||
use ethers_core::{
|
|
||||||
types::{H160, Signature as EthersSignature},
|
|
||||||
abi::Abi,
|
|
||||||
};
|
|
||||||
use ethers_contract::ContractFactory;
|
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
|
||||||
|
|
||||||
use crate::crypto::PublicKey;
|
|
||||||
|
|
||||||
mod crypto;
|
|
||||||
mod schnorr;
|
|
||||||
mod router;
|
|
||||||
|
|
||||||
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
|
|
||||||
let mut keys = frost_key_gen::<_, Secp256k1>(&mut OsRng);
|
|
||||||
let mut group_key = keys[&Participant::new(1).unwrap()].group_key();
|
|
||||||
|
|
||||||
let mut offset = Scalar::ZERO;
|
|
||||||
while PublicKey::new(group_key).is_none() {
|
|
||||||
offset += Scalar::ONE;
|
|
||||||
group_key += ProjectivePoint::GENERATOR;
|
|
||||||
}
|
|
||||||
for keys in keys.values_mut() {
|
|
||||||
*keys = keys.offset(offset);
|
|
||||||
}
|
|
||||||
let public_key = PublicKey::new(group_key).unwrap();
|
|
||||||
|
|
||||||
(keys, public_key)
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
|
|
||||||
// to fund the deployer, not create/pass a wallet
|
|
||||||
// TODO: Deterministic deployments across chains
|
|
||||||
pub async fn deploy_contract(
|
|
||||||
chain_id: u32,
|
|
||||||
client: Arc<Provider<Http>>,
|
|
||||||
wallet: &k256::ecdsa::SigningKey,
|
|
||||||
name: &str,
|
|
||||||
) -> eyre::Result<H160> {
|
|
||||||
let abi: Abi =
|
|
||||||
serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap();
|
|
||||||
|
|
||||||
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
|
|
||||||
let hex_bin =
|
|
||||||
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
|
|
||||||
let bin = hex::decode(hex_bin).unwrap();
|
|
||||||
let factory = ContractFactory::new(abi, bin.into(), client.clone());
|
|
||||||
|
|
||||||
let mut deployment_tx = factory.deploy(())?.tx;
|
|
||||||
deployment_tx.set_chain_id(chain_id);
|
|
||||||
deployment_tx.set_gas(1_000_000);
|
|
||||||
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
|
|
||||||
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
|
|
||||||
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
|
|
||||||
|
|
||||||
let sig_hash = deployment_tx.sighash();
|
|
||||||
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
|
|
||||||
|
|
||||||
// EIP-155 v
|
|
||||||
let mut v = u64::from(rid.to_byte());
|
|
||||||
assert!((v == 0) || (v == 1));
|
|
||||||
v += u64::from((chain_id * 2) + 35);
|
|
||||||
|
|
||||||
let r = sig.r().to_repr();
|
|
||||||
let r_ref: &[u8] = r.as_ref();
|
|
||||||
let s = sig.s().to_repr();
|
|
||||||
let s_ref: &[u8] = s.as_ref();
|
|
||||||
let deployment_tx =
|
|
||||||
deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v });
|
|
||||||
|
|
||||||
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
|
|
||||||
|
|
||||||
let mut receipt;
|
|
||||||
while {
|
|
||||||
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
|
|
||||||
receipt.is_none()
|
|
||||||
} {
|
|
||||||
tokio::time::sleep(Duration::from_secs(6)).await;
|
|
||||||
}
|
|
||||||
let receipt = receipt.unwrap();
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
Ok(receipt.contract_address.unwrap())
|
|
||||||
}
|
|
||||||
@@ -1,109 +0,0 @@
|
|||||||
use std::{convert::TryFrom, sync::Arc, collections::HashMap};
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
Participant, ThresholdKeys,
|
|
||||||
algorithm::IetfSchnorr,
|
|
||||||
tests::{algorithm_machines, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
use ethers_core::{
|
|
||||||
types::{H160, U256, Bytes},
|
|
||||||
abi::AbiEncode,
|
|
||||||
utils::{Anvil, AnvilInstance},
|
|
||||||
};
|
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
crypto::{keccak256, PublicKey, EthereumHram, Signature},
|
|
||||||
router::{self, *},
|
|
||||||
tests::{key_gen, deploy_contract},
|
|
||||||
};
|
|
||||||
|
|
||||||
async fn setup_test() -> (
|
|
||||||
u32,
|
|
||||||
AnvilInstance,
|
|
||||||
Router<Provider<Http>>,
|
|
||||||
HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
|
||||||
PublicKey,
|
|
||||||
) {
|
|
||||||
let anvil = Anvil::new().spawn();
|
|
||||||
|
|
||||||
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
|
||||||
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
|
||||||
let wallet = anvil.keys()[0].clone().into();
|
|
||||||
let client = Arc::new(provider);
|
|
||||||
|
|
||||||
let contract_address =
|
|
||||||
deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap();
|
|
||||||
let contract = Router::new(contract_address, client.clone());
|
|
||||||
|
|
||||||
let (keys, public_key) = key_gen();
|
|
||||||
|
|
||||||
// Set the key to the threshold keys
|
|
||||||
let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000);
|
|
||||||
let pending_tx = tx.send().await.unwrap();
|
|
||||||
let receipt = pending_tx.await.unwrap().unwrap();
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
(chain_id, anvil, contract, keys, public_key)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_deploy_contract() {
|
|
||||||
setup_test().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn hash_and_sign(
|
|
||||||
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
chain_id: U256,
|
|
||||||
message: &[u8],
|
|
||||||
) -> Signature {
|
|
||||||
let hashed_message = keccak256(message);
|
|
||||||
|
|
||||||
let mut chain_id_bytes = [0; 32];
|
|
||||||
chain_id.to_big_endian(&mut chain_id_bytes);
|
|
||||||
let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
&algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
|
|
||||||
Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_router_execute() {
|
|
||||||
let (chain_id, _anvil, contract, keys, public_key) = setup_test().await;
|
|
||||||
|
|
||||||
let to = H160([0u8; 20]);
|
|
||||||
let value = U256([0u64; 4]);
|
|
||||||
let data = Bytes::from([0]);
|
|
||||||
let tx = OutInstruction { to, value, data: data.clone() };
|
|
||||||
|
|
||||||
let nonce_call = contract.nonce();
|
|
||||||
let nonce = nonce_call.call().await.unwrap();
|
|
||||||
|
|
||||||
let encoded =
|
|
||||||
("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode();
|
|
||||||
let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded);
|
|
||||||
|
|
||||||
let tx = contract
|
|
||||||
.execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() })
|
|
||||||
.gas(300_000);
|
|
||||||
let pending_tx = tx.send().await.unwrap();
|
|
||||||
let receipt = dbg!(pending_tx.await.unwrap().unwrap());
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
println!("gas used: {:?}", receipt.cumulative_gas_used);
|
|
||||||
println!("logs: {:?}", receipt.logs);
|
|
||||||
}
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
use std::{convert::TryFrom, sync::Arc};
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar};
|
|
||||||
|
|
||||||
use ethers_core::utils::{keccak256, Anvil, AnvilInstance};
|
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
|
||||||
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
algorithm::IetfSchnorr,
|
|
||||||
tests::{algorithm_machines, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
crypto::*,
|
|
||||||
schnorr::*,
|
|
||||||
tests::{key_gen, deploy_contract},
|
|
||||||
};
|
|
||||||
|
|
||||||
async fn setup_test() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
|
|
||||||
let anvil = Anvil::new().spawn();
|
|
||||||
|
|
||||||
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
|
||||||
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
|
||||||
let wallet = anvil.keys()[0].clone().into();
|
|
||||||
let client = Arc::new(provider);
|
|
||||||
|
|
||||||
let contract_address =
|
|
||||||
deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap();
|
|
||||||
let contract = Schnorr::new(contract_address, client.clone());
|
|
||||||
(chain_id, anvil, contract)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_deploy_contract() {
|
|
||||||
setup_test().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_ecrecover_hack() {
|
|
||||||
let (chain_id, _anvil, contract) = setup_test().await;
|
|
||||||
let chain_id = U256::from(chain_id);
|
|
||||||
|
|
||||||
let (keys, public_key) = key_gen();
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
&algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
let sig = Signature::new(&public_key, chain_id, MESSAGE, sig).unwrap();
|
|
||||||
|
|
||||||
call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap();
|
|
||||||
// Test an invalid signature fails
|
|
||||||
let mut sig = sig;
|
|
||||||
sig.s += Scalar::ONE;
|
|
||||||
assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err());
|
|
||||||
}
|
|
||||||
71
coins/ethereum/tests/contract.rs
Normal file
71
coins/ethereum/tests/contract.rs
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
use std::{convert::TryFrom, sync::Arc, time::Duration};
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256};
|
||||||
|
|
||||||
|
use ethers::{
|
||||||
|
prelude::*,
|
||||||
|
utils::{keccak256, Anvil, AnvilInstance},
|
||||||
|
};
|
||||||
|
|
||||||
|
use frost::{
|
||||||
|
curve::Secp256k1,
|
||||||
|
Participant,
|
||||||
|
algorithm::IetfSchnorr,
|
||||||
|
tests::{key_gen, algorithm_machines, sign},
|
||||||
|
};
|
||||||
|
|
||||||
|
use ethereum_serai::{
|
||||||
|
crypto,
|
||||||
|
contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
|
||||||
|
};
|
||||||
|
|
||||||
|
async fn deploy_test_contract(
|
||||||
|
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
|
||||||
|
let anvil = Anvil::new().spawn();
|
||||||
|
|
||||||
|
let wallet: LocalWallet = anvil.keys()[0].clone().into();
|
||||||
|
let provider =
|
||||||
|
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
|
||||||
|
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
||||||
|
let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());
|
||||||
|
|
||||||
|
(chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_deploy_contract() {
|
||||||
|
deploy_test_contract().await;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_ecrecover_hack() {
|
||||||
|
let (chain_id, _anvil, contract) = deploy_test_contract().await;
|
||||||
|
let chain_id = U256::from(chain_id);
|
||||||
|
|
||||||
|
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
|
let group_key = keys[&Participant::new(1).unwrap()].group_key();
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let hashed_message = keccak256(MESSAGE);
|
||||||
|
|
||||||
|
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
|
||||||
|
let sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
algo.clone(),
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, algo, &keys),
|
||||||
|
full_message,
|
||||||
|
);
|
||||||
|
let mut processed_sig =
|
||||||
|
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
|
||||||
|
|
||||||
|
call_verify(&contract, &processed_sig).await.unwrap();
|
||||||
|
|
||||||
|
// test invalid signature fails
|
||||||
|
processed_sig.message[0] = 0;
|
||||||
|
assert!(call_verify(&contract, &processed_sig).await.is_err());
|
||||||
|
}
|
||||||
92
coins/ethereum/tests/crypto.rs
Normal file
92
coins/ethereum/tests/crypto.rs
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
use k256::{
|
||||||
|
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
|
||||||
|
ProjectivePoint, Scalar, U256,
|
||||||
|
};
|
||||||
|
use frost::{curve::Secp256k1, Participant};
|
||||||
|
|
||||||
|
use ethereum_serai::crypto::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ecrecover() {
|
||||||
|
use rand_core::OsRng;
|
||||||
|
use sha2::Sha256;
|
||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};
|
||||||
|
|
||||||
|
let private = SigningKey::random(&mut OsRng);
|
||||||
|
let public = VerifyingKey::from(&private);
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let (sig, recovery_id) = private
|
||||||
|
.as_nonzero_scalar()
|
||||||
|
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
|
||||||
|
.unwrap();
|
||||||
|
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
||||||
|
{
|
||||||
|
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
|
||||||
|
.unwrap(),
|
||||||
|
address(&ProjectivePoint::from(public.as_affine()))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_signing() {
|
||||||
|
use frost::{
|
||||||
|
algorithm::IetfSchnorr,
|
||||||
|
tests::{algorithm_machines, key_gen, sign},
|
||||||
|
};
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
|
let _group_key = keys[&Participant::new(1).unwrap()].group_key();
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let _sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
algo,
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, IetfSchnorr::<Secp256k1, EthereumHram>::ietf(), &keys),
|
||||||
|
MESSAGE,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_ecrecover_hack() {
|
||||||
|
use frost::{
|
||||||
|
algorithm::IetfSchnorr,
|
||||||
|
tests::{algorithm_machines, key_gen, sign},
|
||||||
|
};
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
|
let group_key = keys[&Participant::new(1).unwrap()].group_key();
|
||||||
|
let group_key_encoded = group_key.to_encoded_point(true);
|
||||||
|
let group_key_compressed = group_key_encoded.as_ref();
|
||||||
|
let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));
|
||||||
|
|
||||||
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
|
let hashed_message = keccak256(MESSAGE);
|
||||||
|
let chain_id = U256::ONE;
|
||||||
|
|
||||||
|
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
||||||
|
|
||||||
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
|
let sig = sign(
|
||||||
|
&mut OsRng,
|
||||||
|
algo.clone(),
|
||||||
|
keys.clone(),
|
||||||
|
algorithm_machines(&mut OsRng, algo, &keys),
|
||||||
|
full_message,
|
||||||
|
);
|
||||||
|
|
||||||
|
let (sr, er) =
|
||||||
|
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
|
||||||
|
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
|
||||||
|
assert_eq!(q, address(&sig.R));
|
||||||
|
}
|
||||||
2
coins/ethereum/tests/mod.rs
Normal file
2
coins/ethereum/tests/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
mod contract;
|
||||||
|
mod crypto;
|
||||||
@@ -6,20 +6,16 @@ license = "MIT"
|
|||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.74"
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
|
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }
|
||||||
|
|
||||||
async-trait = { version = "0.1", default-features = false }
|
async-trait = { version = "0.1", default-features = false }
|
||||||
thiserror = { version = "1", default-features = false, optional = true }
|
thiserror = { version = "1", optional = true }
|
||||||
|
|
||||||
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
subtle = { version = "^2.4", default-features = false }
|
subtle = { version = "^2.4", default-features = false }
|
||||||
@@ -31,45 +27,48 @@ rand_chacha = { version = "0.3", default-features = false }
|
|||||||
# Used to select decoys
|
# Used to select decoys
|
||||||
rand_distr = { version = "0.4", default-features = false }
|
rand_distr = { version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
crc = { version = "3", default-features = false }
|
||||||
sha3 = { version = "0.10", default-features = false }
|
sha3 = { version = "0.10", default-features = false }
|
||||||
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
|
|
||||||
|
|
||||||
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
curve25519-dalek = { version = "^3.2", default-features = false }
|
||||||
|
|
||||||
# Used for the hash to curve, along with the more complicated proofs
|
# Used for the hash to curve, along with the more complicated proofs
|
||||||
group = { version = "0.13", default-features = false }
|
group = { version = "0.13", default-features = false }
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
|
||||||
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }
|
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }
|
||||||
|
|
||||||
# Needed for multisig
|
# Needed for multisig
|
||||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
||||||
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
|
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
|
||||||
|
|
||||||
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
monero-generators = { path = "generators", version = "0.3", default-features = false }
|
||||||
|
|
||||||
async-lock = { version = "3", default-features = false, optional = true }
|
futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }
|
||||||
|
|
||||||
hex-literal = "0.4"
|
hex-literal = "0.4"
|
||||||
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
||||||
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
|
serde = { version = "1", default-features = false, features = ["derive"] }
|
||||||
serde_json = { version = "1", default-features = false, features = ["alloc"] }
|
serde_json = { version = "1", default-features = false, features = ["alloc"] }
|
||||||
|
|
||||||
base58-monero = { version = "2", default-features = false, features = ["check"] }
|
base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }
|
||||||
|
|
||||||
# Used for the provided HTTP RPC
|
# Used for the provided RPC
|
||||||
digest_auth = { version = "0.3", default-features = false, optional = true }
|
digest_auth = { version = "0.3", optional = true }
|
||||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
|
reqwest = { version = "0.11", features = ["json"], optional = true }
|
||||||
tokio = { version = "1", default-features = false, optional = true }
|
|
||||||
|
# Used for the binaries
|
||||||
|
tokio = { version = "1", features = ["full"], optional = true }
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
|
||||||
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
monero-generators = { path = "generators", version = "0.3", default-features = false }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tokio = { version = "1", features = ["sync", "macros"] }
|
tokio = { version = "1", features = ["full"] }
|
||||||
|
monero-rpc = "0.3"
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = [
|
std = [
|
||||||
@@ -81,33 +80,28 @@ std = [
|
|||||||
"subtle/std",
|
"subtle/std",
|
||||||
|
|
||||||
"rand_core/std",
|
"rand_core/std",
|
||||||
"rand/std",
|
|
||||||
"rand_chacha/std",
|
"rand_chacha/std",
|
||||||
|
"rand/std",
|
||||||
"rand_distr/std",
|
"rand_distr/std",
|
||||||
|
|
||||||
"sha3/std",
|
"sha3/std",
|
||||||
"pbkdf2/std",
|
|
||||||
|
"curve25519-dalek/std",
|
||||||
|
|
||||||
"multiexp/std",
|
"multiexp/std",
|
||||||
|
|
||||||
"transcript/std",
|
|
||||||
"dleq/std",
|
|
||||||
|
|
||||||
"monero-generators/std",
|
"monero-generators/std",
|
||||||
|
|
||||||
"async-lock?/std",
|
"futures/std",
|
||||||
|
|
||||||
"hex/std",
|
"hex/std",
|
||||||
"serde/std",
|
"serde/std",
|
||||||
"serde_json/std",
|
"serde_json/std",
|
||||||
|
|
||||||
"base58-monero/std",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
cache-distribution = ["async-lock"]
|
http_rpc = ["digest_auth", "reqwest"]
|
||||||
http-rpc = ["digest_auth", "simple-request", "tokio"]
|
|
||||||
multisig = ["transcript", "frost", "dleq", "std"]
|
multisig = ["transcript", "frost", "dleq", "std"]
|
||||||
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
|
binaries = ["tokio"]
|
||||||
experimental = []
|
experimental = []
|
||||||
|
|
||||||
default = ["std", "http-rpc"]
|
default = ["std", "http_rpc"]
|
||||||
|
|||||||
@@ -41,13 +41,13 @@ fn generators(prefix: &'static str, path: &str) {
|
|||||||
.write_all(
|
.write_all(
|
||||||
format!(
|
format!(
|
||||||
"
|
"
|
||||||
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
||||||
pub fn GENERATORS() -> &'static Generators {{
|
pub fn GENERATORS() -> &'static Generators {{
|
||||||
GENERATORS_CELL.get_or_init(|| Generators {{
|
GENERATORS_CELL.get_or_init(|| Generators {{
|
||||||
G: vec![
|
G: [
|
||||||
{G_str}
|
{G_str}
|
||||||
],
|
],
|
||||||
H: vec![
|
H: [
|
||||||
{H_str}
|
{H_str}
|
||||||
],
|
],
|
||||||
}})
|
}})
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "monero-generators"
|
name = "monero-generators"
|
||||||
version = "0.4.0"
|
version = "0.3.0"
|
||||||
description = "Monero's hash_to_point and generators"
|
description = "Monero's hash_to_point and generators"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
||||||
@@ -11,24 +11,18 @@ edition = "2021"
|
|||||||
all-features = true
|
all-features = true
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
[lints]
|
|
||||||
workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
|
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
|
||||||
|
|
||||||
subtle = { version = "^2.4", default-features = false }
|
subtle = { version = "^2.4", default-features = false }
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false }
|
sha3 = { version = "0.10", default-features = false }
|
||||||
|
|
||||||
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
curve25519-dalek = { version = "3", default-features = false }
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
group = { version = "0.13", default-features = false }
|
||||||
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
hex = "0.4"
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
|
std = ["std-shims/std"]
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
|
|||||||
@@ -4,4 +4,4 @@ Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
|||||||
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
||||||
`hash_to_point` here, is included, as needed to generate generators.
|
`hash_to_point` here, is included, as needed to generate generators.
|
||||||
|
|
||||||
This library is usable under no-std when the `std` feature is disabled.
|
This library is usable under no_std when the `alloc` feature is enabled.
|
||||||
|
|||||||
@@ -7,18 +7,10 @@ use dalek_ff_group::FieldElement;
|
|||||||
|
|
||||||
use crate::hash;
|
use crate::hash;
|
||||||
|
|
||||||
/// Decompress canonically encoded ed25519 point
|
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
|
||||||
/// It does not check if the point is in the prime order subgroup
|
#[allow(clippy::many_single_char_names)]
|
||||||
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
|
|
||||||
CompressedEdwardsY(bytes)
|
|
||||||
.decompress()
|
|
||||||
// Ban points which are either unreduced or -0
|
|
||||||
.filter(|point| point.compress().to_bytes() == bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Monero's hash to point function, as named `hash_to_ec`.
|
|
||||||
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case, clippy::unreadable_literal)]
|
||||||
let A = FieldElement::from(486662u64);
|
let A = FieldElement::from(486662u64);
|
||||||
|
|
||||||
let v = FieldElement::from_square(hash(&bytes)).double();
|
let v = FieldElement::from_square(hash(&bytes)).double();
|
||||||
@@ -56,5 +48,5 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
|||||||
let mut bytes = Y.to_repr();
|
let mut bytes = Y.to_repr();
|
||||||
bytes[31] |= sign.unwrap_u8() << 7;
|
bytes[31] |= sign.unwrap_u8() << 7;
|
||||||
|
|
||||||
decompress_point(bytes).unwrap().mul_by_cofactor()
|
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,11 +5,11 @@
|
|||||||
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
use std_shims::{sync::OnceLock, vec::Vec};
|
use std_shims::sync::OnceLock;
|
||||||
|
|
||||||
use sha3::{Digest, Keccak256};
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
|
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};
|
||||||
|
|
||||||
use group::{Group, GroupEncoding};
|
use group::{Group, GroupEncoding};
|
||||||
use dalek_ff_group::EdwardsPoint;
|
use dalek_ff_group::EdwardsPoint;
|
||||||
@@ -18,10 +18,7 @@ mod varint;
|
|||||||
use varint::write_varint;
|
use varint::write_varint;
|
||||||
|
|
||||||
mod hash_to_point;
|
mod hash_to_point;
|
||||||
pub use hash_to_point::{hash_to_point, decompress_point};
|
pub use hash_to_point::hash_to_point;
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests;
|
|
||||||
|
|
||||||
fn hash(data: &[u8]) -> [u8; 32] {
|
fn hash(data: &[u8]) -> [u8; 32] {
|
||||||
Keccak256::digest(data).into()
|
Keccak256::digest(data).into()
|
||||||
@@ -32,7 +29,10 @@ static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
|
|||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub fn H() -> DalekPoint {
|
pub fn H() -> DalekPoint {
|
||||||
*H_CELL.get_or_init(|| {
|
*H_CELL.get_or_init(|| {
|
||||||
decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
|
CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
|
||||||
|
.decompress()
|
||||||
|
.unwrap()
|
||||||
|
.mul_by_cofactor()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -56,13 +56,14 @@ const MAX_MN: usize = MAX_M * N;
|
|||||||
/// Container struct for Bulletproofs(+) generators.
|
/// Container struct for Bulletproofs(+) generators.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub struct Generators {
|
pub struct Generators {
|
||||||
pub G: Vec<EdwardsPoint>,
|
pub G: [EdwardsPoint; MAX_MN],
|
||||||
pub H: Vec<EdwardsPoint>,
|
pub H: [EdwardsPoint; MAX_MN],
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
||||||
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
||||||
let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
|
let mut res =
|
||||||
|
Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
|
||||||
for i in 0 .. MAX_MN {
|
for i in 0 .. MAX_MN {
|
||||||
let i = 2 * i;
|
let i = 2 * i;
|
||||||
|
|
||||||
@@ -72,8 +73,8 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
|||||||
|
|
||||||
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
|
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
|
||||||
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
|
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
|
||||||
res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
|
res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
|
||||||
res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
|
res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
|
||||||
}
|
}
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,38 +0,0 @@
|
|||||||
use crate::{decompress_point, hash_to_point};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn crypto_tests() {
|
|
||||||
// tests.txt file copied from monero repo
|
|
||||||
// https://github.com/monero-project/monero/
|
|
||||||
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
|
|
||||||
let reader = include_str!("./tests.txt");
|
|
||||||
|
|
||||||
for line in reader.lines() {
|
|
||||||
let mut words = line.split_whitespace();
|
|
||||||
let command = words.next().unwrap();
|
|
||||||
|
|
||||||
match command {
|
|
||||||
"check_key" => {
|
|
||||||
let key = words.next().unwrap();
|
|
||||||
let expected = match words.next().unwrap() {
|
|
||||||
"true" => true,
|
|
||||||
"false" => false,
|
|
||||||
_ => unreachable!("invalid result"),
|
|
||||||
};
|
|
||||||
|
|
||||||
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
|
|
||||||
|
|
||||||
assert_eq!(actual.is_some(), expected);
|
|
||||||
}
|
|
||||||
"hash_to_ec" => {
|
|
||||||
let bytes = words.next().unwrap();
|
|
||||||
let expected = words.next().unwrap();
|
|
||||||
|
|
||||||
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
|
|
||||||
|
|
||||||
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
|
|
||||||
}
|
|
||||||
_ => unreachable!("unknown command"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
mod hash_to_point;
|
|
||||||
@@ -1,628 +0,0 @@
|
|||||||
check_key c2cb3cf3840aa9893e00ec77093d3d44dba7da840b51c48462072d58d8efd183 false
|
|
||||||
check_key bd85a61bae0c101d826cbed54b1290f941d26e70607a07fc6f0ad611eb8f70a6 true
|
|
||||||
check_key 328f81cad4eba24ab2bad7c0e56b1e2e7346e625bcb06ae649aef3ffa0b8bef3 false
|
|
||||||
check_key 6016a5463b9e5a58c3410d3f892b76278883473c3f0b69459172d3de49e85abe true
|
|
||||||
check_key 4c71282b2add07cdc6898a2622553f1ca4eb851e5cb121181628be5f3814c5b1 false
|
|
||||||
check_key 69393c25c3b50e177f81f20f852dd604e768eb30052e23108b3cfa1a73f2736e true
|
|
||||||
check_key 3d5a89b676cb84c2be3428d20a660dc6a37cae13912e127888a5132e8bac2163 true
|
|
||||||
check_key 78cd665deb28cebc6208f307734c56fccdf5fa7e2933fadfcdd2b6246e9ae95c false
|
|
||||||
check_key e03b2414e260580f86ee294cd4c636a5b153e617f704e81dad248fbf715b2ee4 true
|
|
||||||
check_key 28c3503ce82d7cdc8e0d96c4553bcf0352bbcfc73925495dbe541e7e1df105fc false
|
|
||||||
check_key 06855c3c3e0d03fec354059bda319b39916bdc10b6581e3f41b335ee7b014fd5 false
|
|
||||||
check_key 556381485df0d7d5a268ab5ecfb2984b060acc63471183fcf538bf273b0c0cb5 true
|
|
||||||
check_key c7f76d82ac64b1e7fdc32761ff00d6f0f7ada4cf223aa5a11187e3a02e1d5319 true
|
|
||||||
check_key cfa85d8bdb6f633fcf031adee3a299ac42eeb6bd707744049f652f6322f5aa47 true
|
|
||||||
check_key 91e9b63ced2b08979fee713365464cc3417c4f238f9bdd3396efbb3c58e195ee true
|
|
||||||
check_key 7b56e76fe94bd30b3b2f2c4ba5fe4c504821753a8965eb1cbcf8896e2d6aba19 true
|
|
||||||
check_key 7338df494bc416cf5edcc02069e067f39cb269ce67bd9faba956021ce3b3de3a false
|
|
||||||
check_key f9a1f27b1618342a558379f4815fa5039a8fe9d98a09f45c1af857ba99231dc1 false
|
|
||||||
check_key b2a1f37718180d4448a7fcb5f788048b1a7132dde1cfd25f0b9b01776a21c687 true
|
|
||||||
check_key 0d3a0f9443a8b24510ad1e76a8117cca03bce416edfe35e3c2a2c2712454f8dc false
|
|
||||||
check_key d8d3d806a76f120c4027dc9c9d741ad32e06861b9cfbc4ce39289c04e251bb3c false
|
|
||||||
check_key 1e9e3ba7bc536cd113606842835d1f05b4b9e65875742f3a35bfb2d63164b5d5 true
|
|
||||||
check_key 5c52d0087997a2cdf1d01ed0560d94b4bfd328cb741cb9a8d46ff50374b35a57 true
|
|
||||||
check_key bb669d4d7ffc4b91a14defedcdbd96b330108b01adc63aa685e2165284c0033b false
|
|
||||||
check_key d2709ae751a0a6fd796c98456fa95a7b64b75a3434f1caa3496eeaf5c14109b4 true
|
|
||||||
check_key e0c238cba781684e655b10a7d4af04ab7ff2e7022182d7ed2279d6adf36b3e7a false
|
|
||||||
check_key 34ebb4bf871572cee5c6935716fab8c8ec28feef4f039763d8f039b84a50bf4c false
|
|
||||||
check_key 4730d4f38ec3f3b83e32e6335d2506df4ee39858848842c5a0184417fcc639e4 true
|
|
||||||
check_key d42cf7fdf5e17e0a8a7f88505a2b7a3d297113bd93d3c20fa87e11509ec905a2 true
|
|
||||||
check_key b757c95059cefabb0080d3a8ebca82e46efecfd29881be3121857f9d915e388c false
|
|
||||||
check_key bbe777aaf04d02b96c0632f4b1c6f35f1c7bcbc5f22af192f92c077709a2b50b false
|
|
||||||
check_key 73518522aabd28566f858c33fccb34b7a4de0e283f6f783f625604ee647afad9 true
|
|
||||||
check_key f230622c4a8f6e516590466bd10f86b64fbef61695f6a054d37604e0b024d5af false
|
|
||||||
check_key bc6b9a8379fd6c369f7c3bd9ddce58db6b78f27a41d798bb865c3920824d0943 false
|
|
||||||
check_key 45a4f87c25898cd6be105fa1602b85c4d862782adaac8b85c996c4a2bcd8af47 true
|
|
||||||
check_key eb4ad3561d21c4311affbd7cc2c7ff5fd509f72f88ba67dc097a75c31fdbd990 false
|
|
||||||
check_key 2f34f4630c09a23b7ecc19f02b4190a26df69e07e13de8069ae5ff80d23762fc true
|
|
||||||
check_key 2ea4e4fb5085eb5c8adee0d5ab7d35c67d74d343bd816cd13924536cffc2527c true
|
|
||||||
check_key 5d35467ee6705a0d35818aa9ae94e4603c3e5500bfc4cf4c4f77a7160a597aa6 true
|
|
||||||
check_key 8ff42bc76796e20c99b6e879369bd4b46a256db1366416291de9166e39d5a093 true
|
|
||||||
check_key 0262ba718850df6c621e8a24cd9e4831c047e38818a89e15c7a06a489a4558e1 false
|
|
||||||
check_key 58b29b2ba238b534b08fb46f05f430e61cb77dc251b0bb50afec1b6061fd9247 false
|
|
||||||
check_key 153170e3dc2b0e1b368fc0d0e31053e872f094cdace9a2846367f0d9245a109b false
|
|
||||||
check_key 40419d309d07522d493bb047ca9b5fb6c401aae226eefae6fd395f5bb9114200 true
|
|
||||||
check_key 713068818d256ef69c78cd6082492013fbd48de3c9e7e076415dd0a692994504 true
|
|
||||||
check_key a7218ee08e50781b0c87312d5e0031467e863c10081668e3792d96cbcee4e474 true
|
|
||||||
check_key 356ce516b00e674ef1729c75b0a68090e7265cef675bbf32bf809495b67e9342 false
|
|
||||||
check_key 52a5c053293675e3efd2c585047002ea6d77931cbf38f541b9070d319dc0d237 false
|
|
||||||
check_key 77c0080bf157e069b18c4c604cc9505c5ec6f0f9930e087592d70507ca1b5534 false
|
|
||||||
check_key e733bc41f880a4cfb1ca6f397916504130807289cacfca10b15f5b8d058ed1bf false
|
|
||||||
check_key c4f1d3c884908a574ecea8be10e02277de35ef84a1d10f105f2be996f285161f true
|
|
||||||
check_key aed677f7f69e146aa0863606ac580fc0bbdc22a88c4b4386abaa4bdfff66bcc9 false
|
|
||||||
check_key 6ad0edf59769599af8caa986f502afc67aecbebb8107aaf5e7d3ae51d5cf8dd8 false
|
|
||||||
check_key 64a0a70e99be1f775c222ee9cd6f1bee6f632cb9417899af398ff9aff70661c6 true
|
|
||||||
check_key c63afaa03bb5c4ed7bc77aac175dbfb73f904440b2e3056a65850ac1bd261332 false
|
|
||||||
check_key a4e89cd2471c26951513b1cfbdcf053a86575e095af52495276aa56ede8ce344 false
|
|
||||||
check_key 2ce935d97f7c3ddb973de685d20f58ee39938fe557216328045ec2b83f3132be true
|
|
||||||
check_key 3e3d38b1fca93c1559ac030d586616354c668aa76245a09e3fa6de55ac730973 true
|
|
||||||
check_key 8b81b9681f76a4254007fd07ed1ded25fc675973ccb23afd06074805194733a4 false
|
|
||||||
check_key 26d1c15dfc371489439e29bcef2afcf7ed01fac24960fdc2e7c20847a8067588 true
|
|
||||||
check_key 85c1199b5a4591fc4cc36d23660648c1b9cfbb0e9c47199fa3eea33299a3dcec false
|
|
||||||
check_key 60830ba5449c1f04ac54675dfc7cac7510106c4b7549852551f8fe65971123e2 false
|
|
||||||
check_key 3e43c28c024597b3b836e4bc16905047cbf6e841b80e0b8cd6a325049070c2a5 false
|
|
||||||
check_key 474792c16a0032343a6f28f4cb564747c3b1ea0b6a6b9a42f7c71d7cc3dd3b44 true
|
|
||||||
check_key c8ec5e67cb5786673085191881950a3ca20dde88f46851b01dd91c695cfbad16 true
|
|
||||||
check_key 861c4b24b24a87b8559e0bb665f84dcc506c147a909f335ae4573b92299f042f false
|
|
||||||
check_key 2c9e0fe3e4983d79f86c8c36928528f1bc90d94352ce427032cdef6906d84d0b true
|
|
||||||
check_key 9293742822c2dff63fdc1bf6645c864fd527cea2ddba6d4f3048d202fc340c9a true
|
|
||||||
check_key 3956422ad380ef19cb9fe360ef09cc7aaec7163eea4114392a7a0b2e2671914e true
|
|
||||||
check_key 5ae8e72cadda85e525922fec11bd53a261cf26ee230fe85a1187f831b1b2c258 false
|
|
||||||
check_key 973feca43a0baf450c30ace5dc19015e19400f0898316e28d9f3c631da31f99a true
|
|
||||||
check_key dd946c91a2077f45c5c16939e53859d9beabaf065e7b1b993d5e5cd385f8716e true
|
|
||||||
check_key b3928f2d67e47f6bd6da81f72e64908d8ff391af5689f0202c4c6fec7666ffe8 true
|
|
||||||
check_key 313382e82083697d7f9d256c3b3800b099b56c3ef33cacdccbd40a65622e25fc false
|
|
||||||
check_key 7d65380c12144802d39ed9306eed79fe165854273700437c0b4b50559800c058 true
|
|
||||||
check_key 4db5c20a49422fd27739c9ca80e2271a8a125dfcead22cb8f035d0e1b7b163be true
|
|
||||||
check_key dd76a9f565ef0e44d1531349ec4c5f7c3c387c2f5823e693b4952f4b0b70808c true
|
|
||||||
check_key 66430bf628eae23918c3ed17b42138db1f98c24819e55fc4a07452d0c85603eb true
|
|
||||||
check_key 9f0b677830c3f089c27daf724bb10be848537f8285de83ab0292d35afb617f77 false
|
|
||||||
check_key cbf98287391fb00b1e68ad64e9fb10198025864c099b8b9334d840457e673874 true
|
|
||||||
check_key a42552e9446e49a83aed9e3370506671216b2d1471392293b8fc2b81c81a73ee false
|
|
||||||
check_key fb3de55ac81a923d506a514602d65d004ec9d13e8b47e82d73af06da73006673 false
|
|
||||||
check_key e17abb78e58a4b72ff4ad7387b290f2811be880b394b8bcaae7748ac09930169 false
|
|
||||||
check_key 9ffbda7ace69753761cdb5eb01f75433efa5cdb6a4f1b664874182c6a95adcba true
|
|
||||||
check_key 507123c979179ea0a3f7f67fb485f71c8636ec4ec70aa47b92f3c707e7541a54 false
|
|
||||||
check_key f1d0b156571994ef578c61cb6545d34f834eb30e4357539a5633c862d4dffa91 false
|
|
||||||
check_key 3de62311ec14f9ee95828c190b2dc3f03059d6119e8dfccb7323efc640e07c75 false
|
|
||||||
check_key 5e50bb48bc9f6dd11d52c1f0d10d8ae5674d7a4af89cbbce178dafc8a562e5fe false
|
|
||||||
check_key 20b2c16497be101995391ceefb979814b0ea76f1ed5b6987985bcdcd17b36a81 false
|
|
||||||
check_key d63bff73b914ce791c840e99bfae0d47afdb99c2375e33c8f149d0df03d97873 false
|
|
||||||
check_key 3f24b3d94b5ddd244e4c4e67a6d9f533f0396ca30454aa0ca799f21328b81d47 true
|
|
||||||
check_key 6a44c016f09225a6d2e830290719d33eb29b53b553eea7737ed3a6e297b2e7d2 true
|
|
||||||
check_key ff0f34df0c76c207b8340be2009db72f730c69c2bbfeea2013105eaccf1d1f8e true
|
|
||||||
check_key 4baf559869fe4e915e219c3c8d9a2330fc91e542a5a2a7311d4d59fee996f807 true
|
|
||||||
check_key 1632207dfef26e97d13b0d0035ea9468fc5a8a89b0990fce77bb143c9d7f3b67 true
|
|
||||||
check_key fcb3dee3993d1a47630f29410903dd03706bd5e81c5802e6f1b9095cbdb404d3 true
|
|
||||||
check_key fb527092b9809e3d27d7588c7ef89915a769b99c1e03e7f72bbead9ed837daae false
|
|
||||||
check_key 902b118d27d40ab9cbd55edd375801ce302cdb59e09c8659a3ea1401918d8bba false
|
|
||||||
check_key 4d6fbf25ca51e263a700f1abf84f758dde3d11b632e908b3093d64fe2e70ea0a true
|
|
||||||
check_key f4c3211ec70affc1c9a94a6589460ee8360dad5f8c679152f16994038532e3fc true
|
|
||||||
check_key c2b3d73ac14956d7fdf12fa92235af1bb09e1566a6a6ffd0025682c750abdd69 false
|
|
||||||
check_key b7e68c12207d2e2104fb2ca224829b6fccc1c0e2154e8a931e3c837a945f4430 false
|
|
||||||
check_key 56ca0ca227708f1099bda1463db9559541c8c11ffad7b3d95c717471f25a01bf true
|
|
||||||
check_key 3eef3a46833e4d851671182a682e344e36bea7211a001f3b8af1093a9c83f1b2 true
|
|
||||||
check_key bd1f4a4f26cab7c1cbc0e17049b90854d6d28d2d55181e1b5f7a8045fcdfa06e true
|
|
||||||
check_key 8537b01c87e7c184d9555e8d93363dcd9b60a8acc94cd3e41eb7525fd3e1d35a false
|
|
||||||
check_key 68ace49179d549bad391d98ab2cc8afee65f98ce14955c3c1b16e850fabec231 true
|
|
||||||
check_key f9922f8a660e7c3e4f3735a817d18b72f59166a0be2d99795f953cf233a27e24 true
|
|
||||||
check_key 036b6be3da26e80508d5a5a6a5999a1fe0db1ac4e9ade8f1ea2eaf2ea9b1a70e true
|
|
||||||
check_key 5e595e886ce16b5ea31f53bcb619f16c8437276618c595739fece6339731feb0 false
|
|
||||||
check_key 4ee2cebae3476ed2eeb7efef9d20958538b3642f938403302682a04115c0f8ed false
|
|
||||||
check_key 519eedbd0da8676063ce7d5a605b3fc27afeecded857afa24b894ad248c87b5d false
|
|
||||||
check_key ce2b627c0accf4a3105796680c37792b30c6337d2d4fea11678282455ff82ff7 false
|
|
||||||
check_key aa26ed99071a8416215e8e7ded784aa7c2b303aab67e66f7539905d7e922eb4d false
|
|
||||||
check_key 435ae49c9ca26758aa103bdcca8d51393b1906fe27a61c5245361e554f335ec2 true
|
|
||||||
check_key 42568af395bd30024f6ccc95205c0e11a6ad1a7ee100f0ec46fcdf0af88e91fb false
|
|
||||||
check_key 0b4a78d1fde56181445f04ca4780f0725daa9c375b496fab6c037d6b2c2275db true
|
|
||||||
check_key 2f82d2a3c8ce801e1ad334f9e074a4fbf76ffac4080a7331dc1359c2b4f674a4 false
|
|
||||||
check_key 24297d8832d733ed052dd102d4c40e813f702006f325644ccf0cb2c31f77953f false
|
|
||||||
check_key 5231a53f6bea7c75b273bde4a9f673044ed87796f20e0909978f29d98fc8d4f0 true
|
|
||||||
check_key 94b5affcf78be5cf62765c32a0794bc06b4900e8a47ddba0e166ec20cec05935 true
|
|
||||||
check_key c14b4d846ea52ffbbb36aa62f059453af3cfae306280dada185d2d385ef8f317 true
|
|
||||||
check_key cceb34fddf01a6182deb79c6000a998742d4800d23d1d8472e3f43cd61f94508 true
|
|
||||||
check_key 1faffa33407fba1634d4136cf9447896776c16293b033c6794f06774b514744c true
|
|
||||||
check_key faaac98f644a2b77fb09ba0ebf5fcddf3ff55f6604c0e9e77f0278063e25113a true
|
|
||||||
check_key 09e8525b00bea395978279ca979247a76f38f86dce4465eb76c140a7f904c109 true
|
|
||||||
check_key 2d797fc725e7fb6d3b412694e7386040effe4823cdf01f6ec7edea4bc0e77e20 false
|
|
||||||
check_key bbb74dabee651a65f46bca472df6a8a749cc4ba5ca35078df5f6d27a772f922a false
|
|
||||||
check_key 77513ca00f3866607c3eff5c2c011beffa775c0022c5a4e7de1120a27e6687fd true
|
|
||||||
check_key 10064c14ace2a998fc2843eeeb62884fe3f7ab331ca70613d6a978f44d9868eb false
|
|
||||||
check_key 026ae84beb5e54c62629a7b63702e85044e38cadfc9a1fcabee6099ba185005c false
|
|
||||||
check_key aef91536292b7ba34a3e787fb019523c2fa7a0d56fca069cc82ccb6b02a45b14 false
|
|
||||||
check_key 147bb1a82c623c722540feaad82b7adf4b85c6ec0cbcef3ca52906f3e85617ac true
|
|
||||||
check_key fc9fb281a0847d58dc9340ef35ef02f7d20671142f12bdd1bfb324ab61d03911 false
|
|
||||||
check_key b739801b9455ac617ca4a7190e2806669f638d4b2f9288171afb55e1542c8d71 false
|
|
||||||
check_key 494cc1e2ee997eb1eb051f83c4c89968116714ddf74e460d4fa1c6e7c72e3eb3 true
|
|
||||||
check_key ed2fbdf2b727ed9284db90ec900a942224787a880bc41d95c4bc4cf136260fd7 true
|
|
||||||
check_key 02843d3e6fc6835ad03983670a592361a26948eb3e31648d572416a944d4909e true
|
|
||||||
check_key c14fea556a7e1b6b6c3d4e2e38a4e7e95d834220ff0140d3f7f561a34e460801 true
|
|
||||||
check_key 5f8f82a35452d0b0d09ffb40a1154641916c31e161ad1a6ab8cfddc2004efdf6 false
|
|
||||||
check_key 7b93d72429fab07b49956007eba335bb8c5629fbf9e7a601eaa030f196934a56 true
|
|
||||||
check_key 6a63ed96d2e46c2874beaf82344065d94b1e5c04406997f94caf4ccd97cfbab9 false
|
|
||||||
check_key c915f409e1e0f776d1f440aa6969cfec97559ef864b07d8c0d7c1163871b4603 true
|
|
||||||
check_key d06bc33630fc94303c2c369481308f805f5ce53c40141160aa4a1f072967617e false
|
|
||||||
check_key 1aafb14ca15043c2589bcd32c7c5f29479216a1980e127e9536729faf1c40266 true
|
|
||||||
check_key 58c115624a20f4b0c152ccd048c54a28a938556863ab8521b154d3165d3649cd false
|
|
||||||
check_key 9001ba086e8aa8a67e128f36d700cc641071556306db7ec9b8ac12a6256b27b7 false
|
|
||||||
check_key 898c468541634fb0def11f82c781341fce0def7b15695af4e642e397218c730c true
|
|
||||||
check_key 47ea6539e65b7b611b0e1ae9ee170adf7c31581ca9f78796d8ebbcc5cd74b712 false
|
|
||||||
check_key 0c60952a64eeac446652f5d3c136fd36966cf66310c15ee6ab2ecbf981461257 false
|
|
||||||
check_key 682264c4686dc7736b6e46bdc8ab231239bc5dac3f5cb9681a1e97a527945e8e true
|
|
||||||
check_key 276006845ca0ea4238b231434e20ad8b8b2a36876effbe1d1e3ffb1f14973397 true
|
|
||||||
check_key eecd3a49e55e32446f86c045dce123ef6fe2e5c57db1d850644b3c56ec689fce true
|
|
||||||
check_key a4dced63589118db3d5aebf6b5670e71250f07485ca4bb6dddf9cce3e4c227a1 false
|
|
||||||
check_key b8ade608ba43d55db7ab481da88b74a9be513fca651c03e04d30cc79f50e0276 false
|
|
||||||
check_key 0d91de88d007a03fe782f904808b036ff63dec6b73ce080c55231afd4ed261c3 true
|
|
||||||
check_key 87c59becb52dd16501edadbb0e06b0406d69541c4d46115351e79951a8dd9c28 true
|
|
||||||
check_key 9aee723be2265171fe10a86d1d3e9cf5a4e46178e859db83f86d1c6db104a247 false
|
|
||||||
check_key 509d34ae5bf56db011845b8cdf0cc7729ed602fce765e9564cb433b4d4421a43 false
|
|
||||||
check_key 06e766d9a6640558767c2aab29f73199130bfdc07fd858a73e6ae8e7b7ba23ba false
|
|
||||||
check_key 801c4fe5ab3e7cf13f7aa2ca3bc57cc8eba587d21f8bc4cd40b1e98db7aec8d9 false
|
|
||||||
check_key d85ad63aeb7d2faa22e5c9b87cd27f45b01e6d0fdc4c3ddf105584ac0a021465 false
|
|
||||||
check_key a7ca13051eb2baeb5befa5e236e482e0bb71803ad06a6eae3ae48742393329d2 true
|
|
||||||
check_key 5a9ba3ec20f116173d933bf5cf35c320ed3751432f3ab453e4a6c51c1d243257 false
|
|
||||||
check_key a4091add8a6710c03285a422d6e67863a48b818f61c62e989b1e9b2ace240a87 false
|
|
||||||
check_key bdee0c6442e6808f25bb18e21b19032cf93a55a5f5c6426fba2227a41c748684 true
|
|
||||||
check_key d4aeb6cdad9667ec3b65c7fbc5bfd1b82bba1939c6bb448a86e40aec42be5f25 false
|
|
||||||
check_key 73525b30a77f1212f7e339ec11f48c453e476f3669e6e70bebabc2fe9e37c160 true
|
|
||||||
check_key 45501f2dc4d0a3131f9e0fe37a51c14869ab610abd8bf0158111617924953629 false
|
|
||||||
check_key 07d0e4c592aa3676adf81cca31a95d50c8c269d995a78cde27b2a9a7a93083a6 false
|
|
||||||
check_key a1797d6178c18add443d22fdbf45ca5e49ead2f78b70bdf1500f570ee90adca5 true
|
|
||||||
check_key 0961e82e6e7855d7b7bf96777e14ae729f91c5bbd20f805bd7daac5ccbec4bab false
|
|
||||||
check_key 57f5ba0ad36e997a4fb585cd2fc81b9cc5418db702c4d1e366639bb432d37c73 true
|
|
||||||
check_key 82b005be61580856841e042ee8be74ae4ca66bb6733478e81ca1e56213de5c05 false
|
|
||||||
check_key d7733dcae1874c93e9a2bd46385f720801f913744d60479930dad7d56c767cdc false
|
|
||||||
check_key b8b8b698609ac3f1bd8f4965151b43b362e6c5e3d1c1feae312c1d43976d59ab true
|
|
||||||
check_key 4bba7815a9a1b86a5b80b17ac0b514e2faa7a24024f269b330e5b7032ae8c04e true
|
|
||||||
check_key 0f70da8f8266b58acda259935ef1a947c923f8698622c5503520ff31162e877b false
|
|
||||||
check_key 233eaa3db80f314c6c895d1328a658a9175158fa2483ed216670c288a04b27bc false
|
|
||||||
check_key a889f124fabfd7a1e2d176f485be0cbd8b3eeaafeee4f40e99e2a56befb665be true
|
|
||||||
check_key 2b7b8abc198b11cf7efa21bc63ec436f790fe1f9b8c044440f183ab291af61d6 true
|
|
||||||
check_key 2491804714f7938cf501fb2adf07597b4899b919cabbaab49518b8f8767fdc6a true
|
|
||||||
check_key 52744a54fcb00dc930a5d7c2bc866cbfc1e75dd38b38021fd792bb0ca9f43164 true
|
|
||||||
check_key e42cbf70b81ba318419104dffbb0cdc3b7e7d4698e422206b753a4e2e6fc69bb false
|
|
||||||
check_key 2faff73e4fed62965f3dbf2e6446b5fea0364666cc8c9450b6ed63bbb6f5f0e7 true
|
|
||||||
check_key 8b963928d75be661c3c18ddd4f4d1f37ebc095ce1edc13fe8b23784c8f416dfd false
|
|
||||||
check_key b1162f952808434e4d2562ffda98bd311613d655d8cf85dc86e0a6c59f7158bc true
|
|
||||||
check_key 5a69adcd9e4f5b0020467e968d85877cb3aa04fa86088d4499b57ca65a665836 true
|
|
||||||
check_key 61ab47da432c829d0bc9d4fdb59520b135428eec665ad509678188b81c7adf49 false
|
|
||||||
check_key 154bb547f22f65a87c0c3f56294f5791d04a3c14c8125d256aeed8ec54c4a06e true
|
|
||||||
check_key 0a78197861c30fd3547b5f2eabd96d3ac22ac0632f03b7afd9d5d2bfc2db352f true
|
|
||||||
check_key 8bdeadcca1f1f8a4a67b01ed2f10ef31aba7b034e8d1df3a69fe9aebf32454e0 false
|
|
||||||
check_key f4b17dfca559be7d5cea500ac01e834624fed9befae3af746b39073d5f63190d true
|
|
||||||
check_key 622c52821e16ddc63b58f3ec2b959fe8c6ea6b1a596d9a58fd81178963f41c01 true
|
|
||||||
check_key 07bedd5d55c937ef5e23a56c6e58f31adb91224d985285d7fef39ede3a9efb17 false
|
|
||||||
check_key 5179bf3b7458648e57dc20f003c6bbfd55e8cd7c0a6e90df6ef8e8183b46f99d true
|
|
||||||
check_key 683c80c3f304f10fdd53a84813b5c25b1627ebd14eb29b258b41cd14396ef41f true
|
|
||||||
check_key c266244ed597c438170875fe7874f81258a830105ca1108131e6b8fea95eb8ba true
|
|
||||||
check_key 0c1cdc693df29c2d1e66b2ce3747e34a30287d5eb6c302495634ec856593fe8e true
|
|
||||||
check_key 28950f508f6a0d4c20ab5e4d55b80565a6a539092e72b7eb0ed9fa5017ecef88 false
|
|
||||||
check_key 8328a2a5fcfc4433b1c283539a8943e6eb8cc16c59f29dedc3af2c77cfd56f25 true
|
|
||||||
check_key 5d0f82319676d4d3636ff5dc2a38ea5ec8aeaac4835fdcab983ab35d76b7967b false
|
|
||||||
check_key cafcc75e94a014115f25c23aaae86e67352f928f468d4312b92240ff0f3a4481 false
|
|
||||||
check_key 3e5fdd8072574218f389d018e959669e8ca4ef20b114ea7dce7bfb32339f9f42 true
|
|
||||||
check_key 591763e3390a78ccb529ceea3d3a97165878b179ad2edaa166fd3c78ec69d391 true
|
|
||||||
check_key 7a0a196935bf79dc2b1c3050e8f2bf0665f7773fc07511b828ec1c4b1451d317 false
|
|
||||||
check_key 9cf0c034162131fbaa94a608f58546d0acbcc2e67b62a0b2be2ce75fc8c25b9a false
|
|
||||||
check_key e3840846e3d32644d45654b96def09a5d6968caca9048c13fcaab7ae8851c316 false
|
|
||||||
check_key a4e330253739af588d70fbda23543f6df7d76d894a486d169e5fedf7ed32d2e2 false
|
|
||||||
check_key cfb41db7091223865f7ecbdda92b9a6fb08887827831451de5bcb3165395d95d true
|
|
||||||
check_key 3d10bd023cef8ae30229fdbfa7446a3c218423d00f330857ff6adde080749015 false
|
|
||||||
check_key 4403b53b8d4112bb1727bb8b5fd63d1f79f107705ffe17867704e70a61875328 false
|
|
||||||
check_key 121ef0813a9f76b7a9c045058557c5072de6a102f06a9b103ead6af079420c29 true
|
|
||||||
check_key 386204cf473caf3854351dda55844a41162eb9ce4740e1e31cfef037b41bc56e false
|
|
||||||
check_key eb5872300dc658161df469364283e4658f37f6a1349976f8973bd6b5d1d57a39 true
|
|
||||||
check_key b8f32188f0fc62eeb38a561ff7b7f3c94440e6d366a05ef7636958bc97834d02 false
|
|
||||||
check_key a817f129a8292df79eef8531736fdebb2e985304653e7ef286574d0703b40fb4 false
|
|
||||||
check_key 2c06595bc103447b9c20a71cd358c704cb43b0b34c23fb768e6730ac9494f39e true
|
|
||||||
check_key dd84bc4c366ced4f65c50c26beb8a9bc26c88b7d4a77effbb0f7af1b28e25734 false
|
|
||||||
check_key 76b4d33810eed637f90d49a530ac5415df97cafdac6f17eda1ba7eb9a14e5886 true
|
|
||||||
check_key 926ce5161c4c92d90ec4efc58e5f449a2c385766c42d2e60af16b7362097aef5 false
|
|
||||||
check_key 20c661f1e95e94a745eb9ec7a4fa719eff2f64052968e448d4734f90952aefee false
|
|
||||||
check_key 671b50abbd119c756010416e15fcdcc9a8e92eed0f67cbca240c3a9154db55c0 false
|
|
||||||
check_key df7aeee8458433e5c68253b8ef006a1c74ce3aef8951056f1fa918a8eb855213 false
|
|
||||||
check_key 70c81a38b92849cf547e3d5a6570d78e5228d4eaf9c8fdd15959edc9eb750daf false
|
|
||||||
check_key 55a512100b72d4ae0cfc16c75566fcaa3a7bb9116840db1559c71fd0e961cc36 false
|
|
||||||
check_key dbfbec4d0d2433a794ad40dc0aea965b6582875805c9a7351b47377403296acd true
|
|
||||||
check_key 0a7fe09eb9342214f98b38964f72ae3c787c19e5d7e256af9216f108f88b00a3 true
|
|
||||||
check_key a82e54681475f53ced9730ee9e3a607e341014d9403f5a42f3dbdbe8fc52e842 true
|
|
||||||
check_key 4d1f90059f7895a3f89abf16162e8d69b399c417f515ccb43b83144bbe8105f6 true
|
|
||||||
check_key 94e5c5b8486b1f2ff4e98ddf3b9295787eb252ba9b408ca4d7724595861da834 false
|
|
||||||
check_key d16e3e8dfa6d33d1d2db21c651006ccddbf4ce2e556594de5a22ae433e774ae6 false
|
|
||||||
check_key a1b203ec5e36098a3af08d6077068fec57eab3a754cbb5f8192983f37191c2df false
|
|
||||||
check_key 5378bb3ec8b4e49849bd7477356ed86f40757dd1ea3cee1e5183c7e7be4c3406 false
|
|
||||||
check_key 541a4162edeb57130295441dc1cb604072d7323b6c7dffa02ea5e4fed1d2ee9e true
|
|
||||||
check_key d8e86e189edcc4b5c262c26004691edd7bd909090997f886b00ed4b6af64d547 false
|
|
||||||
check_key 18a8731d1983d1df2ce2703b4c85e7357b6356634ac1412e6c2ac33ad35f8364 false
|
|
||||||
check_key b21212eac1eb11e811022514c5041233c4a07083a5b20acd7d632a938dc627de true
|
|
||||||
check_key 50efcfac1a55e9829d89334513d6d921abeb237594174015d154512054e4f9d1 true
|
|
||||||
check_key 9c44e8bcba31ddb4e67808422e42062540742ebd73439da0ba7837bf26649ec4 true
|
|
||||||
check_key b068a4f90d5bd78fd350daa129de35e5297b0ad6be9c85c7a6f129e3760a1482 false
|
|
||||||
check_key e9df93932f0096fcf2055564457c6dc685051673a4a6cd87779924be5c4abead true
|
|
||||||
check_key eddab2fc52dac8ed12914d1eb5b0da9978662c4d35b388d64ddf8f065606acaf true
|
|
||||||
check_key 54d3e6b3f2143d9083b4c98e4c22d98f99d274228050b2dc11695bf86631e89f true
|
|
||||||
check_key 6da1d5ef1827de8bbf886623561b058032e196d17f983cbc52199b31b2acc75b true
|
|
||||||
check_key e2a2df18e2235ebd743c9714e334f415d4ca4baf7ad1b335fb45021353d5117f true
|
|
||||||
check_key f34cb7d6e861c8bfe6e15ac19de68e74ccc9b345a7b751a10a5c7f85a99dfeb6 false
|
|
||||||
check_key f36e2f5967eb56244f9e4981a831f4d19c805e31983662641fe384e68176604a true
|
|
||||||
check_key c7e2dc9e8aa6f9c23d379e0f5e3057a69b931b886bbb74ded9f660c06d457463 true
|
|
||||||
check_key b97324364941e06f2ab4f5153a368f9b07c524a89e246720099042ad9e8c1c5b false
|
|
||||||
check_key eff75c70d425f5bba0eef426e116a4697e54feefac870660d9cf24c685078d75 false
|
|
||||||
check_key 161f3cd1a5873788755437e399136bcbf51ff5534700b3a8064f822995a15d24 false
|
|
||||||
check_key 63d6d3d2c21e88b06c9ff856809572024d86c85d85d6d62a52105c0672d92e66 false
|
|
||||||
check_key 1dc19b610b293de602f43dca6c204ce304702e6dc15d2a9337da55961bd26834 false
|
|
||||||
check_key 28a16d02405f509e1cfef5236c0c5f73c3bcadcd23c8eff377253941f82769db true
|
|
||||||
check_key 682d9cc3b65d149b8c2e54d6e20101e12b7cf96be90c9458e7a69699ec0c8ed7 false
|
|
||||||
check_key 0000000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0000000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0100000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0100000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0200000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0200000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0300000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0300000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0400000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0400000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0500000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0500000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0600000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0600000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0700000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0700000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0800000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0800000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0900000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0900000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0a00000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0a00000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0b00000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0b00000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0c00000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0c00000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0d00000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 0d00000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 0e00000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0e00000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 0f00000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 0f00000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 1000000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 1000000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 1100000000000000000000000000000000000000000000000000000000000000 false
|
|
||||||
check_key 1100000000000000000000000000000000000000000000000000000000000080 false
|
|
||||||
check_key 1200000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 1200000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key 1300000000000000000000000000000000000000000000000000000000000000 true
|
|
||||||
check_key 1300000000000000000000000000000000000000000000000000000000000080 true
|
|
||||||
check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
|
|
||||||
check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
|
|
||||||
check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
|
|
||||||
check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
|
|
||||||
hash_to_ec da66e9ba613919dec28ef367a125bb310d6d83fb9052e71034164b6dc4f392d0 52b3f38753b4e13b74624862e253072cf12f745d43fcfafbe8c217701a6e5875
|
|
||||||
hash_to_ec a7fbdeeccb597c2d5fdaf2ea2e10cbfcd26b5740903e7f6d46bcbf9a90384fc6 f055ba2d0d9828ce2e203d9896bfda494d7830e7e3a27fa27d5eaa825a79a19c
|
|
||||||
hash_to_ec ed6e6579368caba2cc4851672972e949c0ee586fee4d6d6a9476d4a908f64070 da3ceda9a2ef6316bf9272566e6dffd785ac71f57855c0202f422bbb86af4ec0
|
|
||||||
hash_to_ec 9ae78e5620f1c4e6b29d03da006869465b3b16dae87ab0a51f4e1b74bc8aa48b 72d8720da66f797f55fbb7fa538af0b4a4f5930c8289c991472c37dc5ec16853
|
|
||||||
hash_to_ec ab49eb4834d24db7f479753217b763f70604ecb79ed37e6c788528720f424e5b 45914ba926a1a22c8146459c7f050a51ef5f560f5b74bae436b93a379866e6b8
|
|
||||||
hash_to_ec 5b79158ef2341180b8327b976efddbf364620b7e88d2e0707fa56f3b902c34b3 eac991dcbba39cb3bd166906ab48e2c3c3f4cd289a05e1c188486d348ede7c2e
|
|
||||||
hash_to_ec f21daa7896c81d3a7a2e9df721035d3c3902fe546c9d739d0c334ed894fb1d21 a6bedc5ffcc867d0c13a88a03360c8c83a9e4ddf339851bd3768c53a124378ec
|
|
||||||
hash_to_ec 3dae79aaca1abe6aecea7b0d38646c6b013d40053c7cdde2bed094497d925d2b 1a442546a35860a4ab697a36b158ded8e001bbfe20aef1c63e2840e87485c613
|
|
||||||
hash_to_ec 3d219463a55c24ac6f55706a6e46ade3fcd1edc87bade7b967129372036aca63 b252922ab64e32968735b8ade861445aa8dc02b763bd249bff121d10829f7c52
|
|
||||||
hash_to_ec bc5db69aced2b3197398eaf7cf60fd782379874b5ca27cb21bd23692c3c885cc ae072a43f78a0f29dc9822ae5e70865bbd151236a6d7fe4ae3e8f8961e19b0e5
|
|
||||||
hash_to_ec 98a6ed760b225976f8ada0579540e35da643089656695b5d0b8c7265a37e2342 6a99dbfa8ead6228910498cc3ff3fb18cb8627c5735e4b8657da846c16d2dcad
|
|
||||||
hash_to_ec e9cdc9fd9425a4a2389a5d60f76a2d839f0afbf66330f079a88fe23d73eae930 8aa518d091928668f3ca40e71e14b2698f6cae097b8120d7f6ae9afba8fd3d60
|
|
||||||
hash_to_ec a50c026c0af2f9f9884c2e9b8464724ac83bef546fec2c86b7de0880980d24fb b07433f8df39da2453a1e13fd413123a158feae602d822b724d42ef6c8e443bf
|
|
||||||
hash_to_ec bf180e20d160fa23ccfa6993febe22b920160efc5a9614245f1a3a360076e87a 9d6454ff69779ce978ea5fb3be88576dc8feaedf151e93b70065f92505f2e800
|
|
||||||
hash_to_ec b2b64dfeb1d58c6afbf5a56d8c0c42012175ebb4b7df30f26a67b66be8c34614 0523b22e7f220c939b604a15780abc5816709b91b81d9ee1541d44bd2586bbd8
|
|
||||||
hash_to_ec 463fc877f4279740020d10652c950f088ebdebeae34aa7a366c92c9c8773f63a daa5fa72e70c4d3af407b8f2f3364708029b2d4863bbdde54bd67bd08db0fcad
|
|
||||||
hash_to_ec 721842f3809982e7b96a806ae1f162d98ae6911d476307ad1e4f24522fd26f55 4397c300a8cfcb42e7cc310bc975dc975ec2d191eaa7e0462998eb2830c34126
|
|
||||||
hash_to_ec 384da8d9b83972af8cbefc2da5efc744037c8ef40efa4b3bacc3238a6232963d 3c80f107e6868f73ef600ab9229a3f4bbe24f4adce52e6ab3a66d5d510e0670d
|
|
||||||
hash_to_ec e26f8adef5b6fe5bb01466bff0455ca23fda07e200133697b3b6430ca3332bde e262a58bcc1f8baf1980e00d5d40ba00803690174d14fb4c0f608429ce3df773
|
|
||||||
hash_to_ec 6e275b4ea4f085a5d3151aa08cf16a8c60b078e70be7ce5dac75b5d7b0eebe7c cb21b5a7744b4fcdc92ead4be0b04bcb9145e7bb4b06eff3bb2f0fe429b85108
|
|
||||||
hash_to_ec a0dde4561ad9daa796d9cd8a3c34fd41687cee76d128bf2e2252466e3ef3b068 79a2eb06bb7647f5d0aae5da7cf2e2b2d2ce890f25f2b1f81bfc5fef8c87a7d3
|
|
||||||
hash_to_ec dbaf63830e037b4c329969d1d85e58cb6c4f56014fd08eb38219bd20031ae27c 079c93ae27cd98075a487fd3f7457ad2fb57cdf12ec8651fedd944d765d07549
|
|
||||||
hash_to_ec 1e87ba8a9acf96948bc199ae55c83ab3277be152c6d0b1d68a07955768d81171 5c6339f834116791f9ea22fcc3970346aaeddacf13fbd0a7d4005fbd469492ca
|
|
||||||
hash_to_ec 5a544088e63ddf5b9f444ed75a75bc9315c4c50439522f06b4823ecaf5e8a08d e95ca0730d57c6469be3a0f3c94382f8490257e2e546de86c650bdbc6482eaee
|
|
||||||
hash_to_ec e4e06d92ebb036a5e4bb547dbaa43fd70db3929eef2702649455c86d7e59aa46 e26210ff8ee28e24ef2613df40aa8a874b5e3c1d07ae14acc59220615aa334dc
|
|
||||||
hash_to_ec 5793b8b32dcc0f204501647f2976493c4f8f1fa5132315226f99f29a5a6fdfce 656e390086906d99852c9696e831f62cb56fc8f85f9a5c936c327f23c7faf4fe
|
|
||||||
hash_to_ec 84f56fa4d7f12e0efd48b1f7c81c15d6e3843ebb419f4a27ec97028d4f9da19e 0cbd4f0cd288e1e071cce800877de6aef97b63fff867424a4f2b2bab25602608
|
|
||||||
hash_to_ec 242683ddf0a9fc55f6585de3aa64ea17c9c544896ff7677cd82c98f833bdf2ca 38c36d52314549213df7c7201ab7749a4724cbea92812f583bb48cabc20816ad
|
|
||||||
hash_to_ec a93ee320dc030aa382168c2eb6d75fce6e5a63a81f15632d514c6de8a7cfa5ee bd0a2facaa95bc95215a94be21996e46f789ee8beb38e75a1173b75fc686c505
|
|
||||||
hash_to_ec e36136601d84475d25c3f14efe030363d646658937a8a8a19a812d5e6deb5944 2fb93d78fae299c9f6b22346acfb829796ee7a47ec71db5456d8201bec6c35a3
|
|
||||||
hash_to_ec ba4b67d3d387c66baa4a32ec8b1db7681087e85076e71bab10036388c3aeb011 cc01329ce56f963bf444a124751c45b2c779ccb6dea16ca05251baca246b5401
|
|
||||||
hash_to_ec 3fbc91896a2585154d6f7094c5ab9c487e29a27951c226eec1235f618e44946b 7d983acbb901bf5497d0708392e5e742ec8c8036cbb0d03403e9929da8cc85a7
|
|
||||||
hash_to_ec a2da289fed650e9901f69a5f33535eb47c6bd07798633cbf6c00ce3172df76ac dca8a4d30ec2d657fefd0dba9c1c5fd45a79f665048b3cf72ac2c3b7363da1ac
|
|
||||||
hash_to_ec 99025d2d493f768e273ed66cacd3a5b392761e6bd158ca09c8fba84631ea1534 7ef5af79ab155ab7e1770a47fcd7f194aca43d79ec6e303c7ce18c6a20279b04
|
|
||||||
hash_to_ec 3cf1d01d0b70fb31f2a2f979c1bae812381430f474247d0b018167f2a2cd9a9f 7c53d799ec938a21bb305a6b5ca0a7a355fa9a68b01d289c4f22b36ce3738f95
|
|
||||||
hash_to_ec 639c421b49636b2a1f8416c5d6e64425fe51e3b52584c265502379189895668e 0b47216ae5e6e03667143a6cf8894d9d73e3152c64fb455631d81a424410e871
|
|
||||||
hash_to_ec 4ccf2c973348b7cc4b14f846f9bfcdcb959b7429accf6dede96248946841d990 7fd41f5b97ba42ed03947dd953f8e69770c92cc34b16236edad7ab3c78cbbb2e
|
|
||||||
hash_to_ec f76ae09fff537f8919fd1a43ff9b8922b6a77e9e30791c82cf2c4b8acb51363e 8e2c6bf86461ad2c230c496ee3896da33c11cc020fd4c70faa3645b329049234
|
|
||||||
hash_to_ec 98932da7450f15db6c1eef78359904915c31c2aa7572366ec8855180edb81e3a 86180adddfac0b4d1fb41d58e98445dde1da605b380d392e9386bd445f1d821c
|
|
||||||
hash_to_ec ab26a1660988ec7aba91fc01f7aa9a157bbc12927f5b197062b922a5c0c7f8dd 2c44a43eda0d0aad055f18333e761f2f2ec11c585ec7339081c19266af918e4f
|
|
||||||
hash_to_ec 4465d0c1b4930cc718252efd87d11d04162d2a321b9b850c4a19a6acdfca24f4 b03806287d804188a4d679a0ecee66f399d7bdc3bd1494f9b2b0772bbb5a034f
|
|
||||||
hash_to_ec 0f2a7867864ed00e5c40082df0a0b031c89fa5f978d9beb2fde75153f51cfb75 5c471e1b118ef9d76c93aec70e0578f46e8db1d55affd447c1f64c0ad9a5caa5
|
|
||||||
hash_to_ec 5c2808c07d8175f332cae050ce13bec4254870d76abff68faf34b0b8d3ad5000 eeff1d9a5aa428b7aecc575e63dde17294072eb246568493e1ed88ce5c95b779
|
|
||||||
hash_to_ec 36300a21601fad00d00da45e27b36c11923b857f97e50303bd01f21998eaef95 b33b077871e6f5dad8ff6bc621c1b6dedcf700777d996c8c02d73f7297108b7e
|
|
||||||
hash_to_ec 9e1afb76d6c480816d2cedd7f2ab08a36c309efaa3764dcdb51bad6049683805 4cd96ba7b543b1a224b8670bf20b3733e3910711d32456d3e58e920215788adf
|
|
||||||
hash_to_ec 685f152704664495459b76c81567a4b571e8b307dd0e3c9b08ee95651a006047 80dd6b637580cb3be76025867f1525852b65a7a66066993fda3af7eb187dc1a5
|
|
||||||
hash_to_ec 0b216444391a1163c14f7b27f9135e9747978c0e426dce1fa65c657f3e9146be 021259695a6854a4a03e8c74d09ab9630a401bfca06172a733fe122f01af90b4
|
|
||||||
hash_to_ec cfcb35e98f71226c3558eaa9cf620db5ae207ece081ab13ddea4b1f122850a5a 46763d2742e2cdffe80bb3d056f4d3a1565aa83f19aab0a1f89e54ad81ae0814
|
|
||||||
hash_to_ec 07e7292da8cdcdb58ee30c3fa16f1d609e9b3b1110dd6fa9b2cc18f4103a1c12 fe949ca251ac66f13a8925ae624a09cdbf6696d3c110442338d37700536e8ec7
|
|
||||||
hash_to_ec 813bc7e3749e658190cf2a4e358bc07a6671f262e2c4eef9f44c66066a72e6a7 6b92fbda984bd0e6f4af7a5e04c2b66b6f0f9d197a9694362a8556e5b7439f8a
|
|
||||||
hash_to_ec 89c50a1e5497156e0fae20d99f5e33e330362b962c9ca00eaf084fe91aaec71d ef36cb75eb95fb761a8fa8c376e9c4447bcd61421250f7a711bd289e6ed78a9b
|
|
||||||
hash_to_ec d9bd9ff2dd807eb25de7c5de865dbc43cce2466389cedbc92b90aab0eb014f81 30104771ff961cd1861cd053689feab888c57b8a4a2e3989646ea7dea40f3c04
|
|
||||||
hash_to_ec b8c837501b6ca3e118db9848717c847c062bf0ebeca5a7c211726c1426878af5 19a1e204b4a32ce9cccf5d96a541eb76a78789dceaf4fe69964e58ff96c29b63
|
|
||||||
hash_to_ec 84376c5350a42c07ac9f96e8d5c35a8c7f62c639a1834b09e4331b5962ecace8 ba1e4437d5048bd1294eadc502092eafc470b99fde82649e84a52225e68e88f2
|
|
||||||
hash_to_ec a3345e4a4cfc369bf0e7d11f49aed0d2a6ded00e3ff8c7605db9a919cf730640 0d318705c16e943c0fdcde134aaf6e4ccce9f3d9161d001861656fc7ea77a0b1
|
|
||||||
hash_to_ec 3c994dfb9c71e4f401e65fd552dc9f49885f88b8b3588e24e1d2e9b8870ffab1 984157de5d7c2c4b43b2bffea171809165d7bb442baea88e83b27f839ebdb939
|
|
||||||
hash_to_ec 153674c1c1b18a646f564af77c5bd7de452dc3f3e1e2326bfe9c57745b69ec5c e9a4a1e225ae472d1b3168c99f8ba1943ad2ed84ef29598f3f96314f22db9ef2
|
|
||||||
hash_to_ec 2d46a705d4fe5d8b5a1f4e9ef46d9e06467450eb357b6d39faa000995314e871 b9d1aec540bf6a9c0e1b325ab87d4fbe66b1df48986dde3cb62e66e136eba107
|
|
||||||
hash_to_ec 6764c3767f16ec8faecc62f9f76735f76b11d7556aeb61066aeaeaad4fc9042f 3a5c68fb94b023488fb5940e07d1005e7c18328e7a84f673ccd536c07560a57b
|
|
||||||
hash_to_ec c99c6ee5804d4b13a445bc03eaa07a6ef5bcb2fff0f71678dd3bd66b822f8be8 a9e1ce91deed4136e6e53e143d1c0af106abde9d77c066c78ebbf5d227f9dde0
|
|
||||||
hash_to_ec 3009182e1efac085c7eba24a7d9ef28ace98ebafa72211e73a41c935c37e6768 e55431a4c89d38bd95f8092cdf6e44d164ad5855677aba17ec262abc8c217c86
|
|
||||||
hash_to_ec e7153acd114a7636a207be0b67fa86fee56dd318f2808a81e35dd13d4251b2d0 ff2b98d257e4d4ff7379e8871441ca7d26e73f78f3f5afcf421d78c9799ba677
|
|
||||||
hash_to_ec 6378586744b721c5003976e3e18351c49cd28154c821bc45338892e5efedd197 3d765fb7bb4e165a3fa6ea00b5b5e22250f3861f0db0099626d9a9020443dda2
|
|
||||||
hash_to_ec 5be49aba389b7e3ad6def3ba3c7dbec0a11a3c36fc9d441130ef370b8a8d29c2 2d61faf38062dc98ae1aaafec05e90a925c9769df5b8b8f7090d9e91b2a11151
|
|
||||||
hash_to_ec f7bc382178d38e1b9a1a995bd8347c1283d8a2e8d150379faa53fd125e903d2b 544c815da65c3c5994b0ac7d6455578d03a2bc7cf558b788bcdb3430e231635a
|
|
||||||
hash_to_ec c28b5c4b6662eebb3ec358600644849ebeb59d827ed589c161d900ca18715fa8 a2d64db3c0e0353c257aadf9abc12ac779654d364f348b9f8e429aa7571203db
|
|
||||||
hash_to_ec 3a4792e5df9b2416a785739b9cf4e0d68aef600fa756a399cc949dd1fff5033a 4b54591bd79c30640b700dfb7f20158f692f467b6af70bd8a4e739c14a66c86a
|
|
||||||
hash_to_ec 002e70f25e1ceaf35cc14b2c6975a4c777b284a695550541e6f5424b962c19f5 73987e9342e338eb57a7a9e03bd33144db37c1091e952a10bd243c5bb295c18a
|
|
||||||
hash_to_ec 7eb671319f212c9cae0975571b6af109124724ba182937a9066546c92bdeff0c 49b46da3be0df1d141d2a323d5af82202afa2947a95b9f3df47722337f0d5798
|
|
||||||
hash_to_ec ca093712559c8edd5c51689e2ddcb8641c2960e5d9c8b03a44926bb798a0c8dc b9ef9cf0f8e4a3d123db565afafb1102338bfb75498444ac0a25c5ed70d615da
|
|
||||||
hash_to_ec cfea0a08a72777ff3aa7be0d8934587fa4127cd49a1a938232815dc3fd8b23ac b4de604b3d712f1ef578195fb0e53c865d41e2dfe425202c6cfe6f10e4404eb5
|
|
||||||
hash_to_ec aa0122ae258d6db21a26a31c0c92d8a0e3fdb46594aed41d561e069687dedcd6 5247eaec346de1c6cddf0ab04c12cd1d85cdb6d3a2fba2a5f9a5fe461abef5eb
|
|
||||||
hash_to_ec b3941734f4d3ba34ccaf03c4c737ac5a1e036eb74309300ce44d73aca24fef08 535938985c936e3780c61fe29a4121d6cb89a05080b6c2147031ea0c2b5b9829
|
|
||||||
hash_to_ec 8c2ee1041a2743b30dcbf413cc9232099b9268f82a5a21a09b63e7aff750882f 6ad0d4b3a65b522dfad0e9ac814b1fb939bc4910bd780943c72f57f362754cca
|
|
||||||
hash_to_ec 4b6829a2a2d46c8f0d0c23db0f735fcf976524bf39ccb623b919dd3b28ad5193 2e0097d7f92993bc45ba06baf4ca63d64899d86760adc4eb5eeefb4a78561050
|
|
||||||
hash_to_ec 9c1407cb6bba11e7b4c1d274d772f074f410d6fe9a1ee7a22cddf379257877d9 692261c7d6a9a7031c67d033f6d82a68ef3c27bd51a5666e55972238769821cd
|
|
||||||
hash_to_ec 638c42e4997abf8a4a9bffd040e31bd695d590cde8afbd7efd16ffdbae63bf66 793024c8ce196a2419f761dde8734734af6bd9eb772b30cc78f2cb89598dce97
|
|
||||||
hash_to_ec 1fb60d79600de151a1cf8a2334deb5828632cbd91cb5b3d45ae06e08187ae23d ff2542cde5bc2562e69471a31cfc3d0c26e2f6ccc1891a633b07a3968e42521c
|
|
||||||
hash_to_ec d2fdbbae4e38a1b734151c3df52540feb2d3ff74edfef2f740e49a5c363406ee 344c83ba6ff4e38b257077623d298d2f2b52002645021241bc9389f81b29ad12
|
|
||||||
hash_to_ec 836c27a6ddfe1a24aba3d6022dff6dfe970f142d8b4ac6afb8efcba5a051942f b8af481d33726b3f875268282d621e4c63f891a09f920b8f2f49080f3a507387
|
|
||||||
hash_to_ec 46281153ddcdf2e79d459693b6fe318c1969538dd59a750b790bfff6e9481abf 8eaf534919ab6573ba4e0fbde0e370ae01eae0763335177aa429f61c4295e9d4
|
|
||||||
hash_to_ec d57b789e050bf3db462b79a997dac76aa048d4be05f133c66edee56afd3dbe66 0c5a294cb2cbb6d9d1c0a1d57d938278f674867f612ed89dcbe4533449f1a131
|
|
||||||
hash_to_ec 548d524d03ac22da18ff4201ce8dbee83ad9af54ee4e26791d26ed2ab8f9bfc7 c6609d9e7d9fd982dec8a166ff4fb6f7d195b413aad2df85f73d555349134f3b
|
|
||||||
hash_to_ec cc920690422e307357f573b87a6e0e65f432c6ec12a604eb718b66ba18897a56 6f11c466d1c72fccd81e51d9bda03b6e8d6a395e1d931b2a84e392dc9a3efa18
|
|
||||||
hash_to_ec c7fb8a51f5fcd8824fc0875d4eb57ab4917cb97090a6e2288f852f2bb449edd9 45543fea6eed461016e48598b521f18ff70178afea18032b188deea3e56052fc
|
|
||||||
hash_to_ec c681bb1b829e24b1c52cb890036b89f0029d261c6a15e5b2c684ee7dfe91e746 263006fe2c6b08f1ab29cdf442472c298e2faf225bbf5c32399d3745cd3904bd
|
|
||||||
hash_to_ec e06411c542312fdd305e17e46be14c63bab5836dc8751da06164b1ae22d4e20f 901871be7a7ff5aecade2acff869846f3c50de69307ac155f2aa3a74d5472ef2
|
|
||||||
hash_to_ec 9c725a2acb80fa712f9781da510e5163b1b30f4e1c064c26b5185e537f0614ea 02420d49257846eb39fddd196d3171679f6be21d9adac667786b65a6e90f57b1
|
|
||||||
hash_to_ec 22792772820feafa85c5cb3fa8f876105251bef08617d389619697f47dff54f2 a3ad444e7811693687f3925e7c315ae55d08d9f4b0a29876bc2a891ab941c1c3
|
|
||||||
hash_to_ec 0587b790121395d0f4f39093d10b4817f58a1e80621a24eea22b3c127d6ac5a2 86c417c695c64c7becaad0d59ddbb2bca4cb2b409a21253d680aac1a08617095
|
|
||||||
hash_to_ec fa0b5f28399bef0cd87bfe6b8a2b69e9c5506fb4bacd22deba8049615a5db526 ede0ea240036ff75d075258a053f3ce5d6f77925d358dbe33c06509fc9b12111
|
|
||||||
hash_to_ec 62a3274fc0bed109d5057b865c2ba6b6a5a417cb90a3425674102fcd457ede2d ff7e46751bb4dcd1e800a8feab7cf6771f42dc0cfed7084c23b8a5d255a6f34e
|
|
||||||
hash_to_ec a6fcd4aecaaaf281563b9b7cd6fbc7b1829654f644f4165942669a2ef632b2bf 28f136be0eb957a5b36f8ec294399c9f73ad3a3c9bb953ad191758ced554a233
|
|
||||||
hash_to_ec 01baa4c06d6676c9b286cda76ed949fd80a408b3309500ba84a5bb7e3dce58e2 a943d1afa2efce284740e7db21ea02db70b124808be2ff80cbf9b9cb96c7b73e
|
|
||||||
hash_to_ec dd9aff9c006ba514cef8fae665657bc9813fe2715467cf479643ea4c4e365d6d 68de2f7d49de4004286ce0989a06a686b15d0f463a02ffd448a18914e1ddf713
|
|
||||||
hash_to_ec 3df3513d5e539161761ce7992ab9935f649bc934bed0da3c5e1095344b733bb9 e9c2dd747d7b2482474325943cd850102b8093164678362c7621993a790e2a8a
|
|
||||||
hash_to_ec 7680cfb244dc8ef37c671fff176be1a3dad00e5d283f93145d0cbee74cca2df4 a0fd8c3cca16a130eaa5864cbe8152b7adfbf09e8cf72244b2fc8364c3b20bf4
|
|
||||||
hash_to_ec 8a547c38bd6b219ea0d612d4a155eba9c56034a1405dcf4b608de787f37e0fd8 76bf0dc40fd0a5508c5e091d8bb7eccfa28b331e72c6a0d4ac0e05a3d651850b
|
|
||||||
hash_to_ec dd93901621f58465e9791012afa76908f1e80ad80e52b809dc7fc32bb004f0a8 09a0b7ecfe8058b1e9ee01c9b523826867ca97a32efad29ac8ceebca67a4ea00
|
|
||||||
hash_to_ec b643010220f1f4ee6c7565f6e1b3dc84c18274ede363ac36b6af3707e69a1542 233c9ff8de59e5f96c2f91892a71d9d93fa7316319f30d1615f10ac1e01f9285
|
|
||||||
hash_to_ec c2637b2299dfc1fd7e953e39a582bafd19e6e7fff3642978eb092b900dbfea80 339587ba1c05e2cba44196a4be1fd218b772199e2c61c3c0ff21dcd54b570c43
|
|
||||||
hash_to_ec 1f36d3a7e7c468eb000937de138809e381ad2e23414cbbaac49b7f33533ed486 7e5b0a96051c77237a027a79764c2763487af88121c7774645e97827fb744888
|
|
||||||
hash_to_ec 8c142a55f60b2edbe03335b7f90aa2bd63e567048a65d61c70cb28779c5200af d3d6d5563b3d81c8c91cf9806bb13b2850fb7c162c610fd2f5b83c464add8182
|
|
||||||
hash_to_ec 99e7b98293c9de1f81aff1376485a990014b8b176521b2a68cdbde6300190398 119cbc01a1d9b9fb4759031d3a70685aebea0f01bc5ee082ce824265fd21b3b4
|
|
||||||
hash_to_ec 9753bd38be072b51490290be6207ca4545e3541bdf194e0850ae0a9f9e64b8ba 1ad3aa759863153606fa6570f0e1290baded4c8c1f2ba0f67c1911bfc8ccd7a0
|
|
||||||
hash_to_ec 322703864ceee19b7f17cec2a822f310f0c4da3ff98b0be61a6fd30ac4db649c 89d9e7a5947e1cde874e4030de278070aae363063cd3592ce5411821474f0816
|
|
||||||
hash_to_ec c1acd01e1e535fad273a8b757d981470f43dd7d95af732901fbba16b6e245761 57e80445248111150da5e63c706b4abbf3eef2cc508bd0347ff6b81e8c59f5bc
|
|
||||||
hash_to_ec 492473559f181bbe78f60215bc6d3a5168435ea2fc0a508372d6f5ca126e9767 df3965f137cf6f60c56ebd7c8f246281fd6dc92ce23a37e9f846f8452c884e01
|
|
||||||
hash_to_ec afa9d6e0e2fb972ee806beb450c2c0165e58234b0676a4ec0ca19b6e710d7c35 669a57e69dd2845a5e50ed8e5d8423ac9ae792a43c7738554d6c5e765a7b088a
|
|
||||||
hash_to_ec 094de050bdadef3b7dbaeeca29381c667e63e71220970149d97b95db8f4db61b 0cf5d03530c5e97850d0964c6a394de9cde1e8e498f8c0e173c518242c07f99a
|
|
||||||
hash_to_ec 2ce583724bc699ad800b33176a1d983512fe3cb3afa65d99224b23dae223efb7 e1548fd563c75ae5b5366dbab4cb73c54e7d5e087c9e5453125ff8fbe6c83a5c
|
|
||||||
hash_to_ec 8064974b976ff5ef6adaade6196ab69cda6970cd74f7f5899181805f691ad970 98ae63c47331a4ac433cb2f17230c525982d89d21e2838515a36ec5744ec2d15
|
|
||||||
hash_to_ec 384911047de609c6ae8438c745897357989363885cef2381a8a00a090cf04a58 4692ec3a0a03263620841c108538d584322fdd24d221a74bf1e1f407f83828af
|
|
||||||
hash_to_ec 0e1b1ced5ae997ef9c10b72cfc6d8c36d7433c01fc04f4083447f87243282528 6ee443ab0637702b7340bd4a908b9e2e63df0cc423c409fb320eb3f383118b80
|
|
||||||
hash_to_ec 5a7aea70c85c040af6ff3384bcaa63ec45c015b55b44fffa37ab982a00dc57c5 2df2e20137cefd166c767646ecd2e386d28f405aebe43d739aa55beba04ed407
|
|
||||||
hash_to_ec 3e878a3567487f20f7c98ea0488a40b87f1ba99e50bbfe9f00a423f927cbd898 697c7e60e4bf8c429ba7ac22b11a4b248d7465fc6abe597ec6d1e1c973330688
|
|
||||||
hash_to_ec c0bb08350d8a4bb6bf8745f6440e9bd254653102a81c79d6528da2810da758e4 396a872ac9147a69b27223bf4ec4198345b26576b3690f233b832395f2598235
|
|
||||||
hash_to_ec 6c3026a9284053a4ddb754818f9ae306ffa96eb7003bd03826eeccc9a0cf656e bef73da51d3ba9972a33d1afb7d263094b66ab6dbe3988161b08c17f8c69c2d5
|
|
||||||
hash_to_ec f80b7d8f5a80d321af3a42130db199d9edcb8f5a82507d8bfca6d002d65458b6 aa59c167ea60ee024421bfbd00adbb3cbfc20e16bd3c9b172a6bef4d47ca7f57
|
|
||||||
hash_to_ec bc0ffc24615aa02fafef447f17e7b776489cd2cc909f71e8344e01cad9f1610d 5c4195cc8dc3518143f06a9c228ae59ec9a6425a8fab89bfc638ad997cf35220
|
|
||||||
hash_to_ec b15fad558737229f8816fcba8fbef805bd420c03e392d118c69bdf01890c4924 f5810477e37554728837f097e1b170d1d8c95351c7fff8abbbfc624e1a50c1b9
|
|
||||||
hash_to_ec ec8c1f10d8e9da9cf0d57c4a1f2c402771bed7970109f3cf21ad32111f1f198f a697e0a3f09827b0cf3a4ffb6386388feda80d30ffffcbd54443dafcba162b28
|
|
||||||
hash_to_ec a989647bf0d70fdb7533b8c303a2a07f5e42e26a45ffc4e48cff5ba88643a201 450fd73e636f94d0d232600dd39031386b0e2ecde4105124fc451341da9803db
|
|
||||||
hash_to_ec 7159971b03c365480d91d625a0fadc8e3a632c518acf0dbec87dd659da70e168 377bc43c038ac46cf6565aa0a6d6bf39968c0c1142755dba3141eeebf0acdf5d
|
|
||||||
hash_to_ec e39089a64fedac4b2c25e36312b33f79d02bf75a883f450f910915b8560a3b06 77efa7db1be020e77596f550de45626824a8268095d56a0991696b211cb329cc
|
|
||||||
hash_to_ec 2056b3c6347611bb0929dad00ec932a4d9bec0f06b2d57f17e01ffa1528a719e b6072c2be2ce928e8cbbb87e8eb7e06975c0f93b309dd3b6a29edaad2b56f99b
|
|
||||||
hash_to_ec 2c026793146e81b889fc741d62e06c341ce263560d57cd46d0376f5b29174489 8f1f64b67762aa784969e954c196a2c6610addc3604aa3291eb0b80304dfe9ef
|
|
||||||
hash_to_ec be6026d6704379c489fa7749832b58bdb1a9685a5ffb68c438537f2f76e0011f 0072569a4090a9ad383a205bb092196c9de871c22506e3bb63d6b9d1b2357c96
|
|
||||||
hash_to_ec f4db802d5c6b7d7b53663b03d988b4cd0c7cad6c26612c5307754a93ebdc9710 f21bc9be4cb28761f6fe1d0a555ad5e9748375a2e9faea25a1df75cc8d273e18
|
|
||||||
hash_to_ec c27d79a564c56b00956a55090481e85fbc837fd5fb5e8311ecb436e300c07e3a 1b1891e6abec74621501450cd68bb1eeaa5b2fffff4ec441a55d1235ff3a0842
|
|
||||||
hash_to_ec a1e2f93c717cad32af386efa624198973df5a710963dd19d4c3ac40032a3a286 69c60571e3f9f63d2bfb359386ae3b8cd9e49a2e9127753002866e85c0443573
|
|
||||||
hash_to_ec 76920d7b1763474bc94a16433c3c28241a9acdee3ff2b2cb0e6757ba415310aa c1b409169f102b696fc7fa1aa9c48631e58e08b5132b6aadf43407627bb1b499
|
|
||||||
hash_to_ec 57ac654b29fa227c181fff2121491fcb283af6cbe932c8199c946862c0e90cb2 a204e8d327ea93b0b1bd74a78ffc370b20cea6455e209f2bc258114baa16d728
|
|
||||||
hash_to_ec 88e66cfaef6432b759c50efce885097d1752252b479dac5ed822fa6c85d56427 6fb84790d3749a5c1088209ee3823848d9c19bf1524215c44031143dd8080d70
|
|
||||||
hash_to_ec c1e55da929c4f8f793696fc77ff4e1c317c34852d98403bfd15dd388ee7df0df 2f41e76f15c5b480665bd84067e3b543b85ce6de02be9da7a550b5e1ead94d34
|
|
||||||
hash_to_ec 29e9ace5aa3c5a572b13f4b62b738a764d90c8c293ccb062ad798acbab7c5ef4 bce791aba1edc2a66079628fd838799489ab16b0a475ce7fe62e24cc56fe131c
|
|
||||||
hash_to_ec f25b2340689dadacaa9a0ef08aee8447d80b982e8a1ea42cf0500a1b9d85b37d f7f53aa117e6772a9abc452b3931b0a99405ac45147e7c550ac9fcf7ffe377b5
|
|
||||||
hash_to_ec 0cb6c47fc8478063b33f5aed615a05bcc84d782c497b6cc8e76ec1fa11edbfdb 7a0b58b03147e7c9be1d98de49ead2ce738d0071b0af8ca03cc92ceb26fc2246
|
|
||||||
hash_to_ec 7bd7287d7c4b596fe46fe57a6982c959653487bea843a77dd47d40986200d576 343084618c58284c64a5ff076f891be64885dc2ac73fa1567f7b39fde6b91542
|
|
||||||
hash_to_ec e4984bf330708152254fb18ecef12d546afd24898a3cf00fba866957b6ee1b82 c70e88b061656181fbd6ff12aca578fb66de5553c756ea4698a248b177185bc6
|
|
||||||
hash_to_ec cefd6c3cb9754ea632d6aea140af017de5ea12e5184f868936b74d9aa349d603 4b476502a8a483aadd50667f262f95351901628dd3a2aac1a5a41c4ea03f1647
|
|
||||||
hash_to_ec da5d0f33344ee7f3345204badf183491b9452b84bccc907602c7bad43e5cf43e 9561b9e61241625e028361494d4fa5cd78df4c7219fa64c8fede6d8421b8904a
|
|
||||||
hash_to_ec d6f0a4f8c770a1274a76fd7ae4e5faf7779249263e1aaecc6f815cf376f5c302 cd5c55820be10f0d38feb81363ede3716a9168601a0dd1ce3109aab81367d698
|
|
||||||
hash_to_ec b6bf32491d12a41c275d8518fc534d9a0d17aade509e7e8b8409a95c86167307 4aae534abbd67a9a8f2974154606c0e9be8932e920c7a5e931b46a92859acf82
|
|
||||||
hash_to_ec 0f930beaad041f9cefd867bc194027dd651fb3c9bda5944ececdba8a7136b6d3 521708f8149891b418d0920369569a9d578029c78f8e41c68a0bb68d3ad5df60
|
|
||||||
hash_to_ec 49b1fe0f97be74b81e0b047027b3e9f726fa5e90a67dafa877309397291c06c5 0852e59dfae5ec32cce606c119376597bce5cd4d04879d329f74e3ec66414cd3
|
|
||||||
hash_to_ec 4d57647d03f2cfbd4782fcc933e0683b52d35fc8d37283e6c7de522ddfa7e698 cbeb9ebfbbc49ec81fac3b7b063fecac1bb40ea686d3ffb08f82b291715cd87f
|
|
||||||
hash_to_ec 4ea3238c06fc9346c7421ff85bc0244b893860b94bc437378472814d09b2e99f a1fbae941adc344031bbdf53385dfdc012311490a4eb5e9a2749a21b27ce917a
|
|
||||||
hash_to_ec 0cd3609f5c78b318cb853d189b73b1ee2d00edd4e5fce2812027daa3fcb1fed1 0c7a7241b16e3c47d41f5abbf205797bd4b63fc425a7120cb2a4bf324e08ae74
|
|
||||||
hash_to_ec d74ab71428e36943c9868f70d3243469babd27988a1666a06f499a5741a52e3e 65b7c259f3b4547c082b2a7669b2b363668c4d87ac14e80471317b03b34e5216
|
|
||||||
hash_to_ec f6b151998365e7d69bcbce383dd2e8b5bf93b8b72f029ff942588208c1619591 6ce840ce5dfbca238665c1e6eddb8b045aa85c69b5976fc55ab57e66d3d0a791
|
|
||||||
hash_to_ec 207751de234b2bd7ec20bdd8326210c23aa68f04875c94ad7e256a96520f25d6 fc8f79ab3af317c38bfb88f40fb84422995a0479cfa6b03fa6df7f4e5f2813fb
|
|
||||||
hash_to_ec 62291e2873f38c0a234b77d1964205f3f91905c261d3c06f81051a9b0cb787cb 076d1d767457518e6777cb3bd4df22c8a19eb617e4bbccd1b0bd37522d6597a5
|
|
||||||
hash_to_ec 4b060df2d2854036751d00190ee821cb0066d256d4172539fdfa6fbd1cdfe1f9 59866e927c69e7de5df00dc46c0d2a1ddf799d901128ff040cebb8fd61b95da4
|
|
||||||
hash_to_ec ac8daf73f9c609bb36bce4fdeec1e50be5f22de38c3904fabcf758f0fc180bc7 7d8dc4e956363b652468a5fecafd7c08d48a2297e93b8edcb38e595fdd5a1fde
|
|
||||||
hash_to_ec fef7b6563fd27f3aab1d659806b26b8f2ec38bc8feefad50288383c001d1c20f e6e42547f12df431439d45103d2c5a583248f44554a98a3a433cf8c38b11805d
|
|
||||||
hash_to_ec 40a3d6871c76ecc6bb7b28324478733e196cc11d062dd4c9265cf31be5cf5a97 8c55a3811c241a020b1be202a58d5defbc4c8945d73b132570b47dd7c019ccf0
|
|
||||||
hash_to_ec 0cd71e7e562b2b47f4bc8640caf20e69d3a62f10231b4c7a372c9691cff9ac3c fb8e4e3de479b3bf1f4f13b4ed5507df1e80bd9250567b9d021b03339d6e7197
|
|
||||||
hash_to_ec 40a4e62800a99b7a26e0b507ffb29592e5bdba25284dc473048f24b27d25b40a 90ae131d29ee4a71cd764ab26f1ca4e6d09a40db98f8692b345c3a0e130dc860
|
|
||||||
hash_to_ec 1ddf35193cf52860bfe3e41060a7f44281241c6ae49cd541d24c1aca679b7501 3b4f50013895c522776ced456329c4e727de03575f6b99ae7d238a9f70862121
|
|
||||||
hash_to_ec 014e0fa8ce9d5df262b9a1765725fde354a855de8aef3fc23684e05dd1ba8d34 3857f57776a3cb68721bcb7f1533a5f9fb416a1dc8824d719399b63a142d24de
|
|
||||||
hash_to_ec 09987979b0e98d1d5355df8a8698b8f54d3a037d12745c0a4317fe519c3df9cc 32a181e2b754aeced214c73ac459c97d99e63317be3eb923344c64a396173bca
|
|
||||||
hash_to_ec 51e9e8ec4413e92dbaaba067824c32b018487a8d16412ed310507b4741e18eed 0356b209156b4993fd5d5630308298429a1b0021c19bedecb7719ac607cfa644
|
|
||||||
hash_to_ec 14d91313dfe46e353310e6a4a23ee15d7a4e1f431700a444be8520e6043d08d9 6f345f4018b5d178d9f61894d9f46ac09ff639483727b0d113943507cee88cfd
|
|
||||||
hash_to_ec 0d5af9ace87382acfffb9ab1a34b6e921881aa015d4f6d9c73171b2b0a97600d a8dbf36c85bebe6a7b3733e70cd3cd9ed0eb282ca470f344e5fcf9fe959f2e6e
|
|
||||||
hash_to_ec 996690caac7328b19d20ed28eb0003d675b1a9ff79055ab530e3bf170eb22a94 14340d7d935cffce74b8b2f325c9d92ce0238b51807ef2c1512935bb843194ce
|
|
||||||
hash_to_ec ad839c4b4c278c8ebe16ff137a558255a1f74646aa87c6cd99e994c7bb97ce8a d4f2da327ffded913b50577be0e583db2b237b5ca74da648e9b985c247073b76
|
|
||||||
hash_to_ec 26fc2eeeee983e1300d72362fdff42edf08038e4eee277a6e2dbd1bd8c9d6560 3468b8269728c2c0bfc2e53b1575415124798bc0f59b60ea2f14967fc0ca19ce
|
|
||||||
hash_to_ec db33cecaf4ee6f0ceba338cc5fabfb7462cd952a9c9007357ff3f0ca8336f8bc 0bab38f58686d0ff770f770a297971510bc83e2ff2dfead34823d1c4d67f11af
|
|
||||||
hash_to_ec a0ee84b3c646526fb8787d26dcd9b7fe9dc713c8a6c1a4ea640465a9f36a64df 4d7a638f6759d3ec45339cd1300e1239cca5f0f658ca3cd29bc9bdb32f44faf0
|
|
||||||
hash_to_ec 6a702e7899fcf3988e2b6b55654c22e54f43d3fa29de19177bdff5b2295fe27f 145d5748d6054fb586568e276f6925aef593a5b9c8249ad3dbef510af99b4307
|
|
||||||
hash_to_ec 30ce0fd4f1fac8b62d613b8ee4a66deef6eb7094bd8466531050b837460f6971 f3aa850d593ba7cef01389f7e1916e57617f1d75cd42f64ce8f5f272384b148c
|
|
||||||
hash_to_ec 3aa31d4ad7046ad13d83eb11c9a6e90eb8483a374a77a9a7b2a7cc0978fefa76 2fe0827dc080d9c1e7ec475a78aa7ae3c86d1a35f4c3f25f4a1f7299cacf018a
|
|
||||||
hash_to_ec 8562a5a91e763b98014523ebb6e49120979098f89c31df1fde9eb3a49a15b20f ae223bf85e2009a9daf5fd8a14685e2e1e625fc88818b2fd437dd7e109a48f59
|
|
||||||
hash_to_ec ccf9c313a47b8dbf7ce42c94b785818bc24134d95b6d22acc53c1ec2be29cf27 3e79fce6fe5aa14251b6560df4b76e811d7739eec097f27052c4403a283be71d
|
|
||||||
hash_to_ec d1e33cd6f8918618d5fb6d67ad8de939db8beaec4f115551eac64479b739b773 613fffcbe1bf48bb2d7bfd64fd97790a06025f8f2429edddb9ac145707847ecf
|
|
||||||
hash_to_ec 81eaeced34dd44e448d5dafa5715225e4956c90911c964a96ff7aa5b86b969bc 8f81177495d120a1357380164d677509b167f2958eb8b962b616c3951d426d8c
|
|
||||||
hash_to_ec 2bc001a29f8eab1c7377de69957ba365fb5bdaf9c2c220889709af920dfe27d3 9bcb3010038f366fa4c280eed6e914a23bfc402594d0b83d0e66730a465a565b
|
|
||||||
hash_to_ec 6feeb703c05e86c58d9fc5623f1af8657ecd1e75a14d18c4eedb642a8a393d16 6544628ba67ed0e14854961739c4d467fcf49d6361e39d32ea73dabeae51e6c3
|
|
||||||
hash_to_ec e8ff145a7c26897f2c1639edd333a5412f87752f110079f581ccdc87fcce208c d4b5a6e06069c7e012e32119f8eda08ff04a8dfa784e1cf1bced455a4d41d905
|
|
||||||
hash_to_ec 80488131dcb2018527908dbf8cdf4b823ef0806dc1d360f4da671004ef7ff74d 9984a79d9fd4f317768b442161116eef84e2ca49e938642b268fd64312d59a27
|
|
||||||
hash_to_ec d8c4ca60446849a784d1462aa26a3b93073ff6841cb2da3ef52ab9785b00b1fd da5ec1562e7de2382d35728312f4eea3608d4dba775c1c108de510e1ce97d059
|
|
||||||
hash_to_ec 68645728dfc6b9358dfb426493238ba38f24a2f46a3e89edb47d212549939cb7 d3253aa7235113dcc1b577d3bb80be34f528398815a653dbdbacbcbdfd5887a1
|
|
||||||
hash_to_ec 4e8eb97ba2d1046e1b42e67530a61441e31c84e5e5e448d8e8dbe75d104eaccb de94f73e83222aa0e39b559d4fef70387b0815b9b2f6beff5da67262d8f0eb3e
|
|
||||||
hash_to_ec 104ff03122ffdf59b22b8c0fe3d8f2ef67d02328e4d5181916d3d2a92f9a0bb7 1517ccf69c0328327e1cf581f16944ff66bc91c37e1cd68a99525415e00b7c9f
|
|
||||||
hash_to_ec 80f23aae7356ae9a2f9f7504495a731214d26f870fb7df68fdc00b233494156f 7aef046b0a70f84e8d239aa95e192b5a3fffa0fae5090c91273e8996beca9e38
|
|
||||||
hash_to_ec 2424b33235955a737ebddbf1c6c59cd8778af74da3bd3e658447666a2ab2f557 d19e2be8d482950fbdae429618da7a9daedb8c5944dea19cd1b6b274e792231b
|
|
||||||
hash_to_ec 0adc839d2b8f099e4341a4763b074c06318d6bcbd1ec558d20a9820c4a426463 cea5da12a84e5c20011726d9224a9930bec30f9571762dd7ca857b86bd37d056
|
|
||||||
hash_to_ec 46c84d53951f1ba23c46a23d5d96bf019c559aa5d2d79e4535cfcdb36f38ce25 2a913a01a6f7dd78a43cdd5354d1160d9a5f0d824c489a892c80eba798a77567
|
|
||||||
hash_to_ec 99bdaaf68555ccdc93d97c3a0fb4c126a1aa8b1202194a1a753401a6cae21055 1f645efe173577a092f2d847cc966e28ba3b36397fe84c96dfa4724ed4fcfdf9
|
|
||||||
hash_to_ec c540ff78f1e063ad26ffa69febb8818c9f2a325072c566091ad816e40fe39af4 de7a762262c91ab4beccc0713233cb91163aec43e34de0dbcfad0c431e8a9722
|
|
||||||
hash_to_ec de8b1ff8978cd5e02681521542b7b6c3c2f8f4602065059f83594809d04e3dda 290601e75207085bff3e016746e55a80310a76dea9ef566c24181079c76da11c
|
|
||||||
hash_to_ec d555994c8a022e52602d2a8bdd01fc1bfa6b9ab6734ff72a1bd5f937de4627f8 5f6794e874f48c4b362d0a24207374c2d274e28de86351afc6ddb95d8cc2fd62
|
|
||||||
hash_to_ec 19db72f703fe6f1b73f21b6ba133ae6b111ae8cc496d3aa32e02411e34c0d8d7 42f159f43d2d62b8cf8a47d5f1340c5cf070e9860fc60de647c55d50fe9f5607
|
|
||||||
hash_to_ec 23a87a258c2a5d1353aa2d5946f9e5749b92f85e3c58e1d177c3b6c3dcac809c e5685016f79d5e87d1fecb3e2a0fe64e4875f7accd2f6649d7f6b16317549cb1
|
|
||||||
hash_to_ec 43e1738d7d1b5b565f5fc78e81480f7edf9a4dc18f104fc4be95135b98931b17 650f5b682e45f2d0c5d5e8bcfd9e0cda7d9071b55ecbfaf5e3b59941cd7479f2
|
|
||||||
hash_to_ec a9d644de0804edf62dee613efa2547e510990a9b7a987ebe55ec74c23873a878 52ad329f88499a4f110e6a6cba1f820012d8db6ccb8f6495ab1e3eb5a24786e1
|
|
||||||
hash_to_ec 11f2b5d89a0350d7c8727becf0f4dd19bd90f8c94ff207132ab13282dd9b94e6 b798a47bb98dc2a8f99deaf64d27638e33a0d504c5d2fbee477a2bc9b89e2838
|
|
||||||
hash_to_ec 5e206e3190b3b715d125f1a11fff424fb33e36e534c99ddde2a3517068b7dcc4 2738e9571c96b2ddf93cb5f4a72b1ea78d3731d9555b830494513c0683c950ca
|
|
||||||
hash_to_ec efc3d65a43d4f10795c7265a76671348f80173e0f507c812f7ae76793b99c529 cf4434d18ce8167b51f117fe930860143c46e1739a8db1fba73b6b0de830d707
|
|
||||||
hash_to_ec 81f00469788aad6631cf75b585ae06d43ec81c20479925a2009afac9687dff60 c335b5889b36ba4b4175bb0d986807e8eedb6f6b7329b70b922e2ab729c4202a
|
|
||||||
hash_to_ec 9ef5ff329b525ee8f5c3ac38e1dba7cb19985617341d356707c67ff273aed02d bef9f9e051ba0e24d1fdf72099cf43ecdd250d047fb329855b5372d5c422db9e
|
|
||||||
hash_to_ec 3fa1401bd63132cf8b385c0fa65f0715ba1fe6161e41d59f8033ae2b22f63fa1 8289a1cb3c2dae48879bb8913fafe2d196cc2fdab5f2a77607910efd33eae6df
|
|
||||||
hash_to_ec 6559836fd0081fa38a3f8d8408b564e5698b9797cf5e15f7f12a7d2c84511989 28d405a6687d2ecc90c1c66bf0454d58f3fa38835743075e1db58c658e15a104
|
|
||||||
hash_to_ec 8e0882d45f0e4c2fb2839d3be86ff699d4b2242f5b25ac5a3c2f65297c7d2032 2771fdcf9135a62007adb5f0004d8222f0e42f819c81710aa4dc3ab2042bebf3
|
|
||||||
hash_to_ec 1d91dc4dd9bd82646029d13aca1af96830c1d8a0400ddebeb14b00c93501c039 7792c62e897f32cbc9c4229f0d28f7882ceeae120329a1cd35f76a75ac704e93
|
|
||||||
hash_to_ec 09527f9052acbbdd7676cbbd9534780865f04a27aaadad2b7d4f1dac68883cf0 b934220cde1327f2dc6af67bcb4124bf424d5084ef4da945e4daad1717cd0bb8
|
|
||||||
hash_to_ec 2362e1abe73e64cdd2ca7f6c5ea9f467213747dd3f2b7c6e5df9cb21e03307d7 676b7122b96564358bbaaf77e3a5a4db1767e4f9a50f6ddd1c69df4566755af9
|
|
||||||
hash_to_ec 26c2dd2356e9b6c68a415b25f91d18614dc8500c66f346d28489da543ee75a94 0f4fd7086acd68eb7c9fa2410e2ecf18e34654eb44e979bc03ce436e992d5feb
|
|
||||||
hash_to_ec 422dc0a09d6a45a8e0b563eeb6a5ee84b08abd3a8cb34ff93f77ba3b163f4042 631f1b412ff5a0fccbe53a02b4a3deaa93a0418ed9874df401eb698ef75d7441
|
|
||||||
hash_to_ec ceecdf46f57ef3f36ff30a1a3579b609340282d1b26ab5ddef2f53514e91bab1 9bc6f981fe98d14a2fc5b01a8134b6d35e123ec9ab8a3f303e0a5abb28150e2e
|
|
||||||
hash_to_ec 024a9e6e0d73f28aa6207fb1e02ce86d444d2d46f8211e8aaab54f459db91a5a 5fb0c1d2c3b30f399102104ea1874099fa83110b3d9c1fcfffb2981c98bf8cdf
|
|
||||||
hash_to_ec 5b8e45e269c9ccac4c68e532a72b29346d218f4606f37a14064826a62050e3a8 c7be46a871b77fc05ce891d24bd6bd54d9775b7ef573c6bc2d92b67f3604c1d1
|
|
||||||
hash_to_ec 9a6593a385c266389eef14237874b97bdcd1823c3199311667d4853c2d12aa81 9f55ee9d94102d2b9c5670f30586cf9823bf205b4d4fe088c323e87c4e10f26f
|
|
||||||
hash_to_ec 27377e2811598c3569b92990865d39b72c7a5533e1be30f77330863187c11875 abd82bc726f2710a8b87e4c1cf5a069f0ae800de614468d3ff35639983020197
|
|
||||||
hash_to_ec 7cacfaa135fb7d568b8dce8ea9136498b1b28c6d1020af45d376288d78d411f0 229fccd49744c0692508af329224553d21561ee6062b2b8a21f080f73da5bd97
|
|
||||||
hash_to_ec 52abd90a5542d6496b8dec9567b020f30058e29458d64f2d4f3ad6f3bfc1a5a0 874e82ced7cf77577b3374087fb08a2300b7f403de628310c26bdb3be869d309
|
|
||||||
hash_to_ec 5c8eebe9d12309187afa8d0d5191de3fdb84e5a05485d7cd62e8804ce7fdc0bc 12b7537643488aa8b9dcc4bae040cd491f8b466163b7988157b0502fb6c9177f
|
|
||||||
hash_to_ec 6ca3dd5c7a21a6bf65d6eefbe20a66e9b1d6b64196344be0c075f47aea48e3aa 5e1d0705ee24675238293b73ab1d98359119d4b328275be2460cc6ee4d19cc88
|
|
||||||
hash_to_ec d7e6cd0d39b4308c2a5ee547c4569c8bb3887e49cedece62d218d7c3c5277797 793dc4397112dfd9a8f4e061f457eb6d6fbb1d7a58c40bad5f16002c64914186
|
|
||||||
hash_to_ec 9cb6de8ba967cca0f0f861c6e20546f8958446595c01c28dae7ba6cfa09d6b14 ba1a2f7502b58fee3499c20e35fa01bb932e7a7c4a925dc04fbf5d90f33cfb5e
|
|
||||||
hash_to_ec 8ef9c7366733a1edcd116238cdbd177d61222d5c3e05b30ef6b85014cbcb6b79 8fc89664722947164ac9b77086aed319897612068f56ecd57f47029f14671603
|
|
||||||
hash_to_ec 7f317a34e4fb7de9f69cb107ffc0e57fd9f5c85b85ccb5319d05cebfc169924a 4b71c42339c73db7d710cd63f374d478a6c13bdc352cff40e967282268965ba7
|
|
||||||
hash_to_ec 15beef8d9687b92918a903b01d594859db4e7128263c8db0cae9d423ff962c1e cd75e6323952f6ac88f138f391b69f38c46d70b7eda61f9e431725b6f1d514a5
|
|
||||||
hash_to_ec 7a1c04c9af8fc6649833fe81e96f0199fcfe94959256cbe1490075fc5be0904e 0368270cd979439ae0a9552a5d6c9f959e4247fcf920d9e071464582e79c04b1
|
|
||||||
hash_to_ec c854c583d338615f85f69061e0fa9c9d7c5bbbfe562e8774fef3be556fe8bb63 061620171d7320f64bee98414ff7200a1f481521d202fb281cab06be73b80402
|
|
||||||
hash_to_ec 0fb8af5aba05ad2503edf1cfad5a451da088e7e974772057cd991a4e0601a3eb d3cbc20384a4420143fcce2cb763b0c15bec4f3267d1bdad3c34c1ee6b790f5e
|
|
||||||
hash_to_ec 9a251cf59e84a9da5630642f9671c732440caa8fcf4c92446a7e5f5ef99da46c 9b9679086a433f2077f40bcd4c7545fb5cc87e7dbb8bba468d53cb04a74361a0
|
|
||||||
hash_to_ec 8c632e357cef00e0911eb566f8cc809136b3f5ac1e82d183e4d645cef89fa155 5e06b0f4f278fa1ccb5431866e0b35171cdb814e2e82b9189ce01d8d8a1b2408
|
|
||||||
hash_to_ec 4aa4c31463475086a5d96b3ff550340567ab3b4a86fa3f01cfe9be18bc4dcb54 76a2916cfc093f27992e1f07b50f431d61d58e255507e208cd29ea4d3bc56623
|
|
||||||
hash_to_ec 1d33d9aadb949346e3c78d065a0f5262374524f4cb97a7390c8cdaede7ca6578 9ad2f757f499359903031adea6126c577469c4e834a2959e3ac08ee74b13783c
|
|
||||||
hash_to_ec d9217b9a070df20c4d2f0db42ff0bb36bfba9f51b0b6df8fdfe150405dce4934 65a843c522b4b8ec081a696a0d2dd8dfdfea45db201de7a5889a1446c6dff8c7
|
|
||||||
hash_to_ec b665b2ca8a285e44ba84e785533b56496a5319730dbb95bc14d3bdfece7544dc 8a804cd13457497b0a29eeca2cecfaa858766ec1d270a0e0c6785b43fd49b824
|
|
||||||
hash_to_ec 43b5cbcc21b3404bca97fa9a661940fe64d40f3ca569310e50b1bb0173c4d5ee 6c12fffb540d536060bb8b96cf635c1b2cbaa4d875a8d2fb0bf79a690363df19
|
|
||||||
hash_to_ec 11c58f20562c00dec5bb4456be07cd98186837e9af38d50d45f5e7b6f0f9000d cee76b567586f66dadd38c01213bfc1a17d38e96a495efb4c26063dc498ba209
|
|
||||||
hash_to_ec b069a980b51d8e030262db0b30069e660f4a3f6f8075d1790c153ba12b879f8b 262391b00bdee71d1d827b2cfe50b46c29e265934dc91959bd369aca0cc6444e
|
|
||||||
hash_to_ec 75274bfd79bf33eb2f9ab046d34528af9a71811e7e3d55c20eb049c81ac692d8 cb93c850e36896fe6626e97c53652af6736ec3ba0641c7765d0cca2bad2352de
|
|
||||||
hash_to_ec 5cdb6a24d9736a00f197d9707949fedc5405f367744fe8c83b7cff650302b589 8b4ac03123fab9275dcf340345a1b11fba48ef106d410ba2e0e6f6457037a419
|
|
||||||
hash_to_ec 07fdc85f809f95a07b59b084402bf91c512ebbe05c7657d6ba27a9e7e121e3e2 61182b3def063630e11de648a278032bcb75949f3a24ef5a133da87830ae5c4e
|
|
||||||
hash_to_ec a4188ca634cbb796f9927822e343d7b267e0a609c1a0ffa4dcf3726b9ffcc8a2 a911e4899fda28fd6337d708d34553ac5e810ee4938f6f7d9d6e521cab069edb
|
|
||||||
hash_to_ec 3c128ec5c955ea189a5789df2c892e94193a534a9d5801b8f75df870bc492a69 59eef5ee9df0f681df5b5c67ead1f06b059a8a843837b67f20cce15779608170
|
|
||||||
hash_to_ec 51a4cc7ec4a14a98c0731e9de7f3ce0779123222d95455e940f2014a23729ec8 105863ccda076af7290d1bf9ec828651dc5811159839044d23f1c3e31a11c5e2
|
|
||||||
hash_to_ec 1b901a31acbb7807c3309facdc7d04bc3b5a4aa714e6e346bd1c6ad4634e6534 01b3c0000b6c6b471c67c6ab3f9c7a500beaea5edb5c8f2b34df91b69ff67f21
|
|
||||||
hash_to_ec d2f2c8d79cfa2e7cb2db80568ba62ca0576741acfbe5e2baa0d9b3c424a7c84d 7df9d9088022bd1ce6814d6f8051eef27a650ee38e789b184da2691efd27139d
|
|
||||||
hash_to_ec 04dcb7644fdfc12d8e34d6e57d7769db939b4a149ed2b81aa51a74ee90babe19 6cff0ab2dd3b32ba1bd1a78e3661722f3f10003a01ce83e430970557decedb2c
|
|
||||||
hash_to_ec 222798c6841eeaa07e7b7e29686942d7c7f9afc38d09360c8e1f52f2b7debd12 133e3a04ec82aa9b8dbbec18cadbafff446d1270bf7c6f3f97ddd3906dae2468
|
|
||||||
hash_to_ec 4f7277c3ef247a0689b486ad965f969c433fc63e95d7310e789c4708418ccabc 7e0f2c984dd3cffb35458938c95fe92acf2e697aed060b0e3377c7a07e53c494
|
|
||||||
hash_to_ec 359b4d6709413243ae2c5409ea02714a9f8961bbbb64a91e81daf01e18c981bf eab69af2cb7f113ad6a27035c0399853d10bd0b99291fad37794d100f7530431
|
|
||||||
hash_to_ec 6cea3c6a9eb38f60329537170aa4db8dbb869af2040061e53b10c267daf6568c da9a97f4fa96bd05dade5e2704a6a633ba4dbe5080a1e831cda888e9d4f86615
|
|
||||||
hash_to_ec 3dddecb954ef0209bcf61fd5b46b6c94f2384ef281c48a20ffee74f90788172d af9899c31f944617af54712f93d1a2b4944e48867f480d0d1aec61f3b713e32d
|
|
||||||
hash_to_ec 9605247462f50bdf7ff57fe966abbefe8b6efa0b65b5116252f0ec723717013f fc8f10904d42a74e09310ccf63db31a90f1dab88b278f15e3364a2356810f7e9
|
|
||||||
hash_to_ec a005143c4d299933f866db41d0a0b8c67264f5d4ea840dd243cb10c3526bc077 928df1fe9404ffa9c1f4a1c8b2d43ab9b81c5615c8330d2dc2074ac66d4d5200
|
|
||||||
hash_to_ec f45ce88065c34a163f8e77b6fb583502ed0eb1f490f63f76065a9d97e214e3a9 41bd6784270af4154f2f24f118617e2d7f5b7771a409f08b0f2b7bbcb5e3d666
|
|
||||||
hash_to_ec 7b40ac30ed02b12ff592a5479c80cf5a7673abfdd4dd38810e40e63275bc2eed 6c6bf5961d83851c9728801093d9af04e5a693bc6cbad237b9ac4b0ed580a771
|
|
||||||
hash_to_ec 9f985005794d3052a63361413a9820d2ce903198d6d5195b3f20a68f146c6d5c 88bcac53ba5b1c5b44730a24b4cc2cd782298fc70dc9d777b577a2b33b256449
|
|
||||||
hash_to_ec 31b8e37d01fd5669de4ebf78889d749bc44ffe997186ace56f1fb3e60b8742d2 776366b44170efb130a5045597db5675c6c0b56f3def84863c6b6358aa8dcf40
|
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
use std_shims::io::{self, Write};
|
use std_shims::io::{self, Write};
|
||||||
|
|
||||||
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
|
||||||
|
#[allow(clippy::trivially_copy_pass_by_ref)] // &u64 is needed for API consistency
|
||||||
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
||||||
let mut varint = *varint;
|
let mut varint = *varint;
|
||||||
while {
|
while {
|
||||||
|
|||||||
@@ -1,57 +1,31 @@
|
|||||||
#[cfg(feature = "binaries")]
|
use std::sync::Arc;
|
||||||
mod binaries {
|
|
||||||
pub(crate) use std::sync::Arc;
|
|
||||||
|
|
||||||
pub(crate) use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
use serde::Deserialize;
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
pub(crate) use multiexp::BatchVerifier;
|
use monero_serai::{
|
||||||
|
transaction::Transaction,
|
||||||
pub(crate) use serde::Deserialize;
|
|
||||||
pub(crate) use serde_json::json;
|
|
||||||
|
|
||||||
pub(crate) use monero_serai::{
|
|
||||||
Commitment,
|
|
||||||
ringct::RctPrunable,
|
|
||||||
transaction::{Input, Transaction},
|
|
||||||
block::Block,
|
block::Block,
|
||||||
rpc::{RpcError, Rpc, HttpRpc},
|
rpc::{Rpc, HttpRpc},
|
||||||
};
|
};
|
||||||
|
|
||||||
pub(crate) use monero_generators::decompress_point;
|
use tokio::task::JoinHandle;
|
||||||
|
|
||||||
pub(crate) use tokio::task::JoinHandle;
|
async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
|
||||||
|
let hash = rpc.get_block_hash(block_i).await.expect("couldn't get block {block_i}'s hash");
|
||||||
pub(crate) async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
|
|
||||||
let hash = loop {
|
|
||||||
match rpc.get_block_hash(block_i).await {
|
|
||||||
Ok(hash) => break hash,
|
|
||||||
Err(RpcError::ConnectionError(e)) => {
|
|
||||||
println!("get_block_hash ConnectionError: {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: Grab the JSON to also check it was deserialized correctly
|
// TODO: Grab the JSON to also check it was deserialized correctly
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct BlockResponse {
|
struct BlockResponse {
|
||||||
blob: String,
|
blob: String,
|
||||||
}
|
}
|
||||||
let res: BlockResponse = loop {
|
let res: BlockResponse = rpc
|
||||||
match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
|
.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) })))
|
||||||
Ok(res) => break res,
|
.await
|
||||||
Err(RpcError::ConnectionError(e)) => {
|
.expect("couldn't get block {block} via block.hash()");
|
||||||
println!("get_block ConnectionError: {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let blob = hex::decode(res.blob).expect("node returned non-hex block");
|
let blob = hex::decode(res.blob).expect("node returned non-hex block");
|
||||||
let block = Block::read(&mut blob.as_slice())
|
let block = Block::read(&mut blob.as_slice()).expect("couldn't deserialize block {block_i}");
|
||||||
.unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
|
|
||||||
assert_eq!(block.hash(), hash, "hash differs");
|
assert_eq!(block.hash(), hash, "hash differs");
|
||||||
assert_eq!(block.serialize(), blob, "serialization differs");
|
assert_eq!(block.serialize(), blob, "serialization differs");
|
||||||
|
|
||||||
@@ -73,8 +47,7 @@ mod binaries {
|
|||||||
let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
|
let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
|
||||||
let mut all_txs = vec![];
|
let mut all_txs = vec![];
|
||||||
while !hashes_hex.is_empty() {
|
while !hashes_hex.is_empty() {
|
||||||
let txs: TransactionsResponse = loop {
|
let txs: TransactionsResponse = rpc
|
||||||
match rpc
|
|
||||||
.rpc_call(
|
.rpc_call(
|
||||||
"get_transactions",
|
"get_transactions",
|
||||||
Some(json!({
|
Some(json!({
|
||||||
@@ -82,21 +55,12 @@ mod binaries {
|
|||||||
})),
|
})),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
.expect("couldn't call get_transactions");
|
||||||
Ok(txs) => break txs,
|
|
||||||
Err(RpcError::ConnectionError(e)) => {
|
|
||||||
println!("get_transactions ConnectionError: {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Err(e) => panic!("couldn't call get_transactions: {e:?}"),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
assert!(txs.missed_tx.is_empty());
|
assert!(txs.missed_tx.is_empty());
|
||||||
all_txs.extend(txs.txs);
|
all_txs.extend(txs.txs);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut batch = BatchVerifier::new(block.txs.len());
|
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs.into_iter()) {
|
||||||
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx_res.tx_hash,
|
tx_res.tx_hash,
|
||||||
hex::encode(tx_hash),
|
hex::encode(tx_hash),
|
||||||
@@ -114,138 +78,14 @@ mod binaries {
|
|||||||
"Transaction serialization was different"
|
"Transaction serialization was different"
|
||||||
);
|
);
|
||||||
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
|
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
|
||||||
|
}
|
||||||
if matches!(tx.rct_signatures.prunable, RctPrunable::Null) {
|
|
||||||
assert_eq!(tx.prefix.version, 1);
|
|
||||||
assert!(!tx.signatures.is_empty());
|
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let sig_hash = tx.signature_hash();
|
println!("Deserialized, hashed, and reserialized {block_i} with {} TXs", txs_len);
|
||||||
// Verify all proofs we support proving for
|
|
||||||
// This is due to having debug_asserts calling verify within their proving, and CLSAG
|
|
||||||
// multisig explicitly calling verify as part of its signing process
|
|
||||||
// Accordingly, making sure our signature_hash algorithm is correct is great, and further
|
|
||||||
// making sure the verification functions are valid is appreciated
|
|
||||||
match tx.rct_signatures.prunable {
|
|
||||||
RctPrunable::Null |
|
|
||||||
RctPrunable::AggregateMlsagBorromean { .. } |
|
|
||||||
RctPrunable::MlsagBorromean { .. } => {}
|
|
||||||
RctPrunable::MlsagBulletproofs { bulletproofs, .. } => {
|
|
||||||
assert!(bulletproofs.batch_verify(
|
|
||||||
&mut rand_core::OsRng,
|
|
||||||
&mut batch,
|
|
||||||
(),
|
|
||||||
&tx.rct_signatures.base.commitments
|
|
||||||
));
|
|
||||||
}
|
|
||||||
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
|
||||||
assert!(bulletproofs.batch_verify(
|
|
||||||
&mut rand_core::OsRng,
|
|
||||||
&mut batch,
|
|
||||||
(),
|
|
||||||
&tx.rct_signatures.base.commitments
|
|
||||||
));
|
|
||||||
|
|
||||||
for (i, clsag) in clsags.into_iter().enumerate() {
|
|
||||||
let (amount, key_offsets, image) = match &tx.prefix.inputs[i] {
|
|
||||||
Input::Gen(_) => panic!("Input::Gen"),
|
|
||||||
Input::ToKey { amount, key_offsets, key_image } => (amount, key_offsets, key_image),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut running_sum = 0;
|
|
||||||
let mut actual_indexes = vec![];
|
|
||||||
for offset in key_offsets {
|
|
||||||
running_sum += offset;
|
|
||||||
actual_indexes.push(running_sum);
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn get_outs(
|
|
||||||
rpc: &Rpc<HttpRpc>,
|
|
||||||
amount: u64,
|
|
||||||
indexes: &[u64],
|
|
||||||
) -> Vec<[EdwardsPoint; 2]> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Out {
|
|
||||||
key: String,
|
|
||||||
mask: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Outs {
|
|
||||||
outs: Vec<Out>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let outs: Outs = loop {
|
|
||||||
match rpc
|
|
||||||
.rpc_call(
|
|
||||||
"get_outs",
|
|
||||||
Some(json!({
|
|
||||||
"get_txid": true,
|
|
||||||
"outputs": indexes.iter().map(|o| json!({
|
|
||||||
"amount": amount,
|
|
||||||
"index": o
|
|
||||||
})).collect::<Vec<_>>()
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
{
|
|
||||||
Ok(outs) => break outs,
|
|
||||||
Err(RpcError::ConnectionError(e)) => {
|
|
||||||
println!("get_outs ConnectionError: {e}");
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let rpc_point = |point: &str| {
|
|
||||||
decompress_point(
|
|
||||||
hex::decode(point)
|
|
||||||
.expect("invalid hex for ring member")
|
|
||||||
.try_into()
|
|
||||||
.expect("invalid point len for ring member"),
|
|
||||||
)
|
|
||||||
.expect("invalid point for ring member")
|
|
||||||
};
|
|
||||||
|
|
||||||
outs
|
|
||||||
.outs
|
|
||||||
.iter()
|
|
||||||
.map(|out| {
|
|
||||||
let mask = rpc_point(&out.mask);
|
|
||||||
if amount != 0 {
|
|
||||||
assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
|
|
||||||
}
|
|
||||||
[rpc_point(&out.key), mask]
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
clsag
|
|
||||||
.verify(
|
|
||||||
&get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
|
|
||||||
image,
|
|
||||||
&pseudo_outs[i],
|
|
||||||
&sig_hash,
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
assert!(batch.verify_vartime());
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "binaries")]
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
async fn main() {
|
async fn main() {
|
||||||
use binaries::*;
|
|
||||||
|
|
||||||
let args = std::env::args().collect::<Vec<String>>();
|
let args = std::env::args().collect::<Vec<String>>();
|
||||||
|
|
||||||
// Read start block as the first arg
|
// Read start block as the first arg
|
||||||
@@ -271,15 +111,14 @@ async fn main() {
|
|||||||
}
|
}
|
||||||
let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };
|
let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };
|
||||||
|
|
||||||
let rpc = |url: String| async move {
|
let rpc = |url: String| {
|
||||||
HttpRpc::new(url.clone())
|
HttpRpc::new(url.clone())
|
||||||
.await
|
|
||||||
.unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
|
.unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
|
||||||
};
|
};
|
||||||
let main_rpc = rpc(nodes[0].clone()).await;
|
let main_rpc = rpc(nodes[0].clone());
|
||||||
let mut rpcs = vec![];
|
let mut rpcs = vec![];
|
||||||
for i in 0 .. async_parallelism {
|
for i in 0 .. async_parallelism {
|
||||||
rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone()).await));
|
rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone())));
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut rpc_i = 0;
|
let mut rpc_i = 0;
|
||||||
@@ -314,8 +153,3 @@ async fn main() {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(not(feature = "binaries"))]
|
|
||||||
fn main() {
|
|
||||||
panic!("To run binaries, please build with `--feature binaries`.");
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -17,8 +17,8 @@ const EXISTING_BLOCK_HASH_202612: [u8; 32] =
|
|||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
pub struct BlockHeader {
|
pub struct BlockHeader {
|
||||||
pub major_version: u8,
|
pub major_version: u64,
|
||||||
pub minor_version: u8,
|
pub minor_version: u64,
|
||||||
pub timestamp: u64,
|
pub timestamp: u64,
|
||||||
pub previous: [u8; 32],
|
pub previous: [u8; 32],
|
||||||
pub nonce: u32,
|
pub nonce: u32,
|
||||||
@@ -39,8 +39,8 @@ impl BlockHeader {
|
|||||||
serialized
|
serialized
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
Ok(BlockHeader {
|
Ok(Self {
|
||||||
major_version: read_varint(r)?,
|
major_version: read_varint(r)?,
|
||||||
minor_version: read_varint(r)?,
|
minor_version: read_varint(r)?,
|
||||||
timestamp: read_varint(r)?,
|
timestamp: read_varint(r)?,
|
||||||
@@ -58,17 +58,17 @@ pub struct Block {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Block {
|
impl Block {
|
||||||
pub fn number(&self) -> Option<u64> {
|
pub fn number(&self) -> usize {
|
||||||
match self.miner_tx.prefix.inputs.first() {
|
match self.miner_tx.prefix.inputs.get(0) {
|
||||||
Some(Input::Gen(number)) => Some(*number),
|
Some(Input::Gen(number)) => (*number).try_into().unwrap(),
|
||||||
_ => None,
|
_ => panic!("invalid block, miner TX didn't have a Input::Gen"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.header.write(w)?;
|
self.header.write(w)?;
|
||||||
self.miner_tx.write(w)?;
|
self.miner_tx.write(w)?;
|
||||||
write_varint(&self.txs.len(), w)?;
|
write_varint(&self.txs.len().try_into().unwrap(), w)?;
|
||||||
for tx in &self.txs {
|
for tx in &self.txs {
|
||||||
w.write_all(tx)?;
|
w.write_all(tx)?;
|
||||||
}
|
}
|
||||||
@@ -79,27 +79,20 @@ impl Block {
|
|||||||
merkle_root(self.miner_tx.hash(), &self.txs)
|
merkle_root(self.miner_tx.hash(), &self.txs)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Serialize the block as required for the proof of work hash.
|
fn serialize_hashable(&self) -> Vec<u8> {
|
||||||
///
|
|
||||||
/// This is distinct from the serialization required for the block hash. To get the block hash,
|
|
||||||
/// use the [`Block::hash`] function.
|
|
||||||
pub fn serialize_hashable(&self) -> Vec<u8> {
|
|
||||||
let mut blob = self.header.serialize();
|
let mut blob = self.header.serialize();
|
||||||
blob.extend_from_slice(&self.tx_merkle_root());
|
blob.extend_from_slice(&self.tx_merkle_root());
|
||||||
write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
|
write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
|
||||||
|
|
||||||
blob
|
let mut out = Vec::with_capacity(8 + blob.len());
|
||||||
|
write_varint(&u64::try_from(blob.len()).unwrap(), &mut out).unwrap();
|
||||||
|
out.append(&mut blob);
|
||||||
|
|
||||||
|
out
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hash(&self) -> [u8; 32] {
|
pub fn hash(&self) -> [u8; 32] {
|
||||||
let mut hashable = self.serialize_hashable();
|
let hash = hash(&self.serialize_hashable());
|
||||||
// Monero pre-appends a VarInt of the block hashing blobs length before getting the block hash
|
|
||||||
// but doesn't do this when getting the proof of work hash :)
|
|
||||||
let mut hashing_blob = Vec::with_capacity(8 + hashable.len());
|
|
||||||
write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
|
|
||||||
hashing_blob.append(&mut hashable);
|
|
||||||
|
|
||||||
let hash = hash(&hashing_blob);
|
|
||||||
if hash == CORRECT_BLOCK_HASH_202612 {
|
if hash == CORRECT_BLOCK_HASH_202612 {
|
||||||
return EXISTING_BLOCK_HASH_202612;
|
return EXISTING_BLOCK_HASH_202612;
|
||||||
};
|
};
|
||||||
@@ -113,18 +106,11 @@ impl Block {
|
|||||||
serialized
|
serialized
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
let header = BlockHeader::read(r)?;
|
Ok(Self {
|
||||||
|
header: BlockHeader::read(r)?,
|
||||||
let miner_tx = Transaction::read(r)?;
|
miner_tx: Transaction::read(r)?,
|
||||||
if !matches!(miner_tx.prefix.inputs.as_slice(), &[Input::Gen(_)]) {
|
txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
|
||||||
Err(io::Error::other("Miner transaction has incorrect input type."))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Block {
|
|
||||||
header,
|
|
||||||
miner_tx,
|
|
||||||
txs: (0_usize .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,19 +16,13 @@ use sha3::{Digest, Keccak256};
|
|||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
pub use monero_generators::{H, decompress_point};
|
pub use monero_generators::H;
|
||||||
|
|
||||||
mod merkle;
|
mod merkle;
|
||||||
|
|
||||||
mod serialize;
|
mod serialize;
|
||||||
use serialize::{read_byte, read_u16};
|
use serialize::{read_byte, read_u16};
|
||||||
|
|
||||||
/// UnreducedScalar struct with functionality for recovering incorrectly reduced scalars.
|
|
||||||
mod unreduced_scalar;
|
|
||||||
|
|
||||||
/// Ring Signature structs and functionality.
|
|
||||||
pub mod ring_signatures;
|
|
||||||
|
|
||||||
/// RingCT structs and functionality.
|
/// RingCT structs and functionality.
|
||||||
pub mod ringct;
|
pub mod ringct;
|
||||||
use ringct::RctType;
|
use ringct::RctType;
|
||||||
@@ -46,10 +40,6 @@ pub mod wallet;
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|
||||||
pub const DEFAULT_LOCK_WINDOW: usize = 10;
|
|
||||||
pub const COINBASE_LOCK_WINDOW: usize = 60;
|
|
||||||
pub const BLOCK_TIME: usize = 120;
|
|
||||||
|
|
||||||
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
|
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub(crate) fn INV_EIGHT() -> Scalar {
|
pub(crate) fn INV_EIGHT() -> Scalar {
|
||||||
@@ -65,144 +55,103 @@ pub(crate) fn INV_EIGHT() -> Scalar {
|
|||||||
pub enum Protocol {
|
pub enum Protocol {
|
||||||
v14,
|
v14,
|
||||||
v16,
|
v16,
|
||||||
Custom {
|
Custom { ring_len: usize, bp_plus: bool, optimal_rct_type: RctType },
|
||||||
ring_len: usize,
|
|
||||||
bp_plus: bool,
|
|
||||||
optimal_rct_type: RctType,
|
|
||||||
view_tags: bool,
|
|
||||||
v16_fee: bool,
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Protocol {
|
impl Protocol {
|
||||||
/// Amount of ring members under this protocol version.
|
/// Amount of ring members under this protocol version.
|
||||||
pub fn ring_len(&self) -> usize {
|
pub const fn ring_len(&self) -> usize {
|
||||||
match self {
|
match self {
|
||||||
Protocol::v14 => 11,
|
Self::v14 => 11,
|
||||||
Protocol::v16 => 16,
|
Self::v16 => 16,
|
||||||
Protocol::Custom { ring_len, .. } => *ring_len,
|
Self::Custom { ring_len, .. } => *ring_len,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
|
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
|
||||||
///
|
///
|
||||||
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
|
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
|
||||||
pub fn bp_plus(&self) -> bool {
|
pub const fn bp_plus(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
Protocol::v14 => false,
|
Self::v14 => false,
|
||||||
Protocol::v16 => true,
|
Self::v16 => true,
|
||||||
Protocol::Custom { bp_plus, .. } => *bp_plus,
|
Self::Custom { bp_plus, .. } => *bp_plus,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Make this an Option when we support pre-RCT protocols
|
// TODO: Make this an Option when we support pre-RCT protocols
|
||||||
pub fn optimal_rct_type(&self) -> RctType {
|
pub const fn optimal_rct_type(&self) -> RctType {
|
||||||
match self {
|
match self {
|
||||||
Protocol::v14 => RctType::Clsag,
|
Self::v14 => RctType::Clsag,
|
||||||
Protocol::v16 => RctType::BulletproofsPlus,
|
Self::v16 => RctType::BulletproofsPlus,
|
||||||
Protocol::Custom { optimal_rct_type, .. } => *optimal_rct_type,
|
Self::Custom { optimal_rct_type, .. } => *optimal_rct_type,
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether or not the specified version uses view tags.
|
|
||||||
pub fn view_tags(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Protocol::v14 => false,
|
|
||||||
Protocol::v16 => true,
|
|
||||||
Protocol::Custom { view_tags, .. } => *view_tags,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether or not the specified version uses the fee algorithm from Monero
|
|
||||||
/// hard fork version 16 (released in v18 binaries).
|
|
||||||
pub fn v16_fee(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Protocol::v14 => false,
|
|
||||||
Protocol::v16 => true,
|
|
||||||
Protocol::Custom { v16_fee, .. } => *v16_fee,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Protocol::v14 => w.write_all(&[0, 14]),
|
Self::v14 => w.write_all(&[0, 14]),
|
||||||
Protocol::v16 => w.write_all(&[0, 16]),
|
Self::v16 => w.write_all(&[0, 16]),
|
||||||
Protocol::Custom { ring_len, bp_plus, optimal_rct_type, view_tags, v16_fee } => {
|
Self::Custom { ring_len, bp_plus, optimal_rct_type } => {
|
||||||
// Custom, version 0
|
// Custom, version 0
|
||||||
w.write_all(&[1, 0])?;
|
w.write_all(&[1, 0])?;
|
||||||
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
|
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
|
||||||
w.write_all(&[u8::from(*bp_plus)])?;
|
w.write_all(&[u8::from(*bp_plus)])?;
|
||||||
w.write_all(&[optimal_rct_type.to_byte()])?;
|
w.write_all(&[optimal_rct_type.to_byte()])
|
||||||
w.write_all(&[u8::from(*view_tags)])?;
|
|
||||||
w.write_all(&[u8::from(*v16_fee)])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Protocol> {
|
pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
|
||||||
Ok(match read_byte(r)? {
|
Ok(match read_byte(r)? {
|
||||||
// Monero protocol
|
// Monero protocol
|
||||||
0 => match read_byte(r)? {
|
0 => match read_byte(r)? {
|
||||||
14 => Protocol::v14,
|
14 => Self::v14,
|
||||||
16 => Protocol::v16,
|
16 => Self::v16,
|
||||||
_ => Err(io::Error::other("unrecognized monero protocol"))?,
|
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized monero protocol"))?,
|
||||||
},
|
},
|
||||||
// Custom
|
// Custom
|
||||||
1 => match read_byte(r)? {
|
1 => match read_byte(r)? {
|
||||||
0 => Protocol::Custom {
|
0 => Self::Custom {
|
||||||
ring_len: read_u16(r)?.into(),
|
ring_len: read_u16(r)?.into(),
|
||||||
bp_plus: match read_byte(r)? {
|
bp_plus: match read_byte(r)? {
|
||||||
0 => false,
|
0 => false,
|
||||||
1 => true,
|
1 => true,
|
||||||
_ => Err(io::Error::other("invalid bool serialization"))?,
|
_ => Err(io::Error::new(io::ErrorKind::Other, "invalid bool serialization"))?,
|
||||||
},
|
},
|
||||||
optimal_rct_type: RctType::from_byte(read_byte(r)?)
|
optimal_rct_type: RctType::from_byte(read_byte(r)?)
|
||||||
.ok_or_else(|| io::Error::other("invalid RctType serialization"))?,
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RctType serialization"))?,
|
||||||
view_tags: match read_byte(r)? {
|
|
||||||
0 => false,
|
|
||||||
1 => true,
|
|
||||||
_ => Err(io::Error::other("invalid bool serialization"))?,
|
|
||||||
},
|
},
|
||||||
v16_fee: match read_byte(r)? {
|
_ => {
|
||||||
0 => false,
|
Err(io::Error::new(io::ErrorKind::Other, "unrecognized custom protocol serialization"))?
|
||||||
1 => true,
|
}
|
||||||
_ => Err(io::Error::other("invalid bool serialization"))?,
|
|
||||||
},
|
},
|
||||||
},
|
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized protocol serialization"))?,
|
||||||
_ => Err(io::Error::other("unrecognized custom protocol serialization"))?,
|
|
||||||
},
|
|
||||||
_ => Err(io::Error::other("unrecognized protocol serialization"))?,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Transparent structure representing a Pedersen commitment's contents.
|
/// Transparent structure representing a Pedersen commitment's contents.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub struct Commitment {
|
pub struct Commitment {
|
||||||
pub mask: Scalar,
|
pub mask: Scalar,
|
||||||
pub amount: u64,
|
pub amount: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl core::fmt::Debug for Commitment {
|
|
||||||
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
|
||||||
fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Commitment {
|
impl Commitment {
|
||||||
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
|
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
|
||||||
pub fn zero() -> Commitment {
|
pub fn zero() -> Self {
|
||||||
Commitment { mask: Scalar::ONE, amount: 0 }
|
Self { mask: Scalar::one(), amount: 0 }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(mask: Scalar, amount: u64) -> Commitment {
|
pub fn new(mask: Scalar, amount: u64) -> Self {
|
||||||
Commitment { mask, amount }
|
Self { mask, amount }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
|
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
|
||||||
pub fn calculate(&self) -> EdwardsPoint {
|
pub fn calculate(&self) -> EdwardsPoint {
|
||||||
(&self.mask * ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
|
(&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -224,6 +173,6 @@ pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
|||||||
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
||||||
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
||||||
// not generate/verify a proof we believe to be valid when it isn't
|
// not generate/verify a proof we believe to be valid when it isn't
|
||||||
assert!(scalar != Scalar::ZERO, "ZERO HASH: {data:?}");
|
assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
|
||||||
scalar
|
scalar
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,72 +0,0 @@
|
|||||||
use std_shims::{
|
|
||||||
io::{self, *},
|
|
||||||
vec::Vec,
|
|
||||||
};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
|
||||||
|
|
||||||
use curve25519_dalek::{EdwardsPoint, Scalar};
|
|
||||||
|
|
||||||
use monero_generators::hash_to_point;
|
|
||||||
|
|
||||||
use crate::{serialize::*, hash_to_scalar};
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct Signature {
|
|
||||||
c: Scalar,
|
|
||||||
r: Scalar,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Signature {
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
write_scalar(&self.c, w)?;
|
|
||||||
write_scalar(&self.r, w)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Signature> {
|
|
||||||
Ok(Signature { c: read_scalar(r)?, r: read_scalar(r)? })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct RingSignature {
|
|
||||||
sigs: Vec<Signature>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RingSignature {
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
for sig in &self.sigs {
|
|
||||||
sig.write(w)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(members: usize, r: &mut R) -> io::Result<RingSignature> {
|
|
||||||
Ok(RingSignature { sigs: read_raw_vec(Signature::read, members, r)? })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn verify(&self, msg: &[u8; 32], ring: &[EdwardsPoint], key_image: &EdwardsPoint) -> bool {
|
|
||||||
if ring.len() != self.sigs.len() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut buf = Vec::with_capacity(32 + (32 * 2 * ring.len()));
|
|
||||||
buf.extend_from_slice(msg);
|
|
||||||
|
|
||||||
let mut sum = Scalar::ZERO;
|
|
||||||
|
|
||||||
for (ring_member, sig) in ring.iter().zip(&self.sigs) {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let Li = EdwardsPoint::vartime_double_scalar_mul_basepoint(&sig.c, ring_member, &sig.r);
|
|
||||||
buf.extend_from_slice(Li.compress().as_bytes());
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let Ri = (sig.r * hash_to_point(ring_member.compress().to_bytes())) + (sig.c * key_image);
|
|
||||||
buf.extend_from_slice(Ri.compress().as_bytes());
|
|
||||||
|
|
||||||
sum += sig.c;
|
|
||||||
}
|
|
||||||
|
|
||||||
sum == hash_to_scalar(&buf)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,63 +1,69 @@
|
|||||||
use core::fmt::Debug;
|
use core::fmt::Debug;
|
||||||
use std_shims::io::{self, Read, Write};
|
use std_shims::io::{self, Read, Write};
|
||||||
|
|
||||||
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
use curve25519_dalek::{traits::Identity, scalar::Scalar};
|
||||||
|
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
use monero_generators::H_pow_2;
|
use monero_generators::H_pow_2;
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
use crate::{hash_to_scalar, unreduced_scalar::UnreducedScalar, serialize::*};
|
use crate::hash_to_scalar;
|
||||||
|
use crate::serialize::*;
|
||||||
|
|
||||||
/// 64 Borromean ring signatures.
|
/// 64 Borromean ring signatures.
|
||||||
///
|
///
|
||||||
/// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
|
/// This type keeps the data as raw bytes as Monero has some transactions with unreduced scalars in
|
||||||
/// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
|
/// this field. While we could use `from_bytes_mod_order`, we'd then not be able to encode this
|
||||||
/// algorithm which was in use.
|
/// back into it's original form.
|
||||||
|
///
|
||||||
|
/// Those scalars also have a custom reduction algorithm...
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
pub struct BorromeanSignatures {
|
pub struct BorromeanSignatures {
|
||||||
pub s0: [UnreducedScalar; 64],
|
pub s0: [[u8; 32]; 64],
|
||||||
pub s1: [UnreducedScalar; 64],
|
pub s1: [[u8; 32]; 64],
|
||||||
pub ee: Scalar,
|
pub ee: [u8; 32],
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanSignatures {
|
impl BorromeanSignatures {
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
Ok(BorromeanSignatures {
|
Ok(Self { s0: read_array(read_bytes, r)?, s1: read_array(read_bytes, r)?, ee: read_bytes(r)? })
|
||||||
s0: read_array(UnreducedScalar::read, r)?,
|
|
||||||
s1: read_array(UnreducedScalar::read, r)?,
|
|
||||||
ee: read_scalar(r)?,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
for s0 in &self.s0 {
|
for s0 in &self.s0 {
|
||||||
s0.write(w)?;
|
w.write_all(s0)?;
|
||||||
}
|
}
|
||||||
for s1 in &self.s1 {
|
for s1 in &self.s1 {
|
||||||
s1.write(w)?;
|
w.write_all(s1)?;
|
||||||
}
|
}
|
||||||
write_scalar(&self.ee, w)
|
w.write_all(&self.ee)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
|
fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
|
||||||
let mut transcript = [0; 2048];
|
let mut transcript = [0; 2048];
|
||||||
|
|
||||||
for i in 0 .. 64 {
|
for i in 0 .. 64 {
|
||||||
|
// TODO: These aren't the correct reduction
|
||||||
|
// TODO: Can either of these be tightened?
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||||
&self.ee,
|
&Scalar::from_bytes_mod_order(self.ee),
|
||||||
&keys_a[i],
|
&keys_a[i],
|
||||||
&self.s0[i].recover_monero_slide_scalar(),
|
&Scalar::from_bytes_mod_order(self.s0[i]),
|
||||||
);
|
);
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||||
&hash_to_scalar(LL.compress().as_bytes()),
|
&hash_to_scalar(LL.compress().as_bytes()),
|
||||||
&keys_b[i],
|
&keys_b[i],
|
||||||
&self.s1[i].recover_monero_slide_scalar(),
|
&Scalar::from_bytes_mod_order(self.s1[i]),
|
||||||
);
|
);
|
||||||
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
transcript[i .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
hash_to_scalar(&transcript) == self.ee
|
// TODO: This isn't the correct reduction
|
||||||
|
// TODO: Can this be tightened to from_canonical_bytes?
|
||||||
|
hash_to_scalar(&transcript) == Scalar::from_bytes_mod_order(self.ee)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -69,17 +75,16 @@ pub struct BorromeanRange {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanRange {
|
impl BorromeanRange {
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
Ok(BorromeanRange {
|
Ok(Self { sigs: BorromeanSignatures::read(r)?, bit_commitments: read_array(read_point, r)? })
|
||||||
sigs: BorromeanSignatures::read(r)?,
|
|
||||||
bit_commitments: read_array(read_point, r)?,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.sigs.write(w)?;
|
self.sigs.write(w)?;
|
||||||
write_raw_vec(write_point, &self.bit_commitments, w)
|
write_raw_vec(write_point, &self.bit_commitments, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
#[must_use]
|
||||||
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
||||||
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
||||||
return false;
|
return false;
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ use std_shims::{
|
|||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, Zeroizing};
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
use multiexp::BatchVerifier;
|
use multiexp::BatchVerifier;
|
||||||
@@ -19,10 +19,12 @@ pub(crate) mod core;
|
|||||||
use self::core::LOG_N;
|
use self::core::LOG_N;
|
||||||
|
|
||||||
pub(crate) mod original;
|
pub(crate) mod original;
|
||||||
use self::original::OriginalStruct;
|
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
|
||||||
|
|
||||||
pub(crate) mod plus;
|
pub(crate) mod plus;
|
||||||
use self::plus::*;
|
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;
|
||||||
|
|
||||||
|
pub(crate) use self::original::OriginalStruct;
|
||||||
|
pub(crate) use self::plus::PlusStruct;
|
||||||
|
|
||||||
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
|
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
|
||||||
|
|
||||||
@@ -31,45 +33,28 @@ pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
|
|||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
pub enum Bulletproofs {
|
pub enum Bulletproofs {
|
||||||
Original(OriginalStruct),
|
Original(OriginalStruct),
|
||||||
Plus(AggregateRangeProof),
|
Plus(PlusStruct),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Bulletproofs {
|
impl Bulletproofs {
|
||||||
fn bp_fields(plus: bool) -> usize {
|
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
||||||
if plus {
|
let fields = if plus { 6 } else { 9 };
|
||||||
6
|
|
||||||
} else {
|
|
||||||
9
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
// TODO: Shouldn't this use u32/u64?
|
||||||
// src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
|
|
||||||
pub(crate) fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let mut LR_len = 0;
|
let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
||||||
let mut n_padded_outputs = 1;
|
let padded_outputs = 1 << LR_len;
|
||||||
while n_padded_outputs < n_outputs {
|
|
||||||
LR_len += 1;
|
|
||||||
n_padded_outputs = 1 << LR_len;
|
|
||||||
}
|
|
||||||
LR_len += LOG_N;
|
LR_len += LOG_N;
|
||||||
|
|
||||||
let mut bp_clawback = 0;
|
let len = (fields + (2 * LR_len)) * 32;
|
||||||
if n_padded_outputs > 2 {
|
len +
|
||||||
let fields = Bulletproofs::bp_fields(plus);
|
if padded_outputs <= 2 {
|
||||||
|
0
|
||||||
|
} else {
|
||||||
let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
|
let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
|
||||||
let size = (fields + (2 * LR_len)) * 32;
|
let size = (fields + (2 * LR_len)) * 32;
|
||||||
bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
|
((base * padded_outputs) - size) * 4 / 5
|
||||||
}
|
}
|
||||||
|
|
||||||
(bp_clawback, LR_len)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let (bp_clawback, LR_len) = Bulletproofs::calculate_bp_clawback(plus, outputs);
|
|
||||||
32 * (Bulletproofs::bp_fields(plus) + (2 * LR_len)) + 2 + bp_clawback
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Prove the list of commitments are within [0 .. 2^64).
|
/// Prove the list of commitments are within [0 .. 2^64).
|
||||||
@@ -77,23 +62,14 @@ impl Bulletproofs {
|
|||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
outputs: &[Commitment],
|
outputs: &[Commitment],
|
||||||
plus: bool,
|
plus: bool,
|
||||||
) -> Result<Bulletproofs, TransactionError> {
|
) -> Result<Self, TransactionError> {
|
||||||
if outputs.is_empty() {
|
|
||||||
Err(TransactionError::NoOutputs)?;
|
|
||||||
}
|
|
||||||
if outputs.len() > MAX_OUTPUTS {
|
if outputs.len() > MAX_OUTPUTS {
|
||||||
Err(TransactionError::TooManyOutputs)?;
|
return Err(TransactionError::TooManyOutputs)?;
|
||||||
}
|
}
|
||||||
Ok(if !plus {
|
Ok(if !plus {
|
||||||
Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
|
Self::Plus(PlusStruct::prove(rng, outputs))
|
||||||
} else {
|
} else {
|
||||||
use dalek_ff_group::EdwardsPoint as DfgPoint;
|
Self::Original(OriginalStruct::prove(rng, outputs))
|
||||||
Bulletproofs::Plus(
|
|
||||||
AggregateRangeStatement::new(outputs.iter().map(|com| DfgPoint(com.calculate())).collect())
|
|
||||||
.unwrap()
|
|
||||||
.prove(rng, &Zeroizing::new(AggregateRangeWitness::new(outputs).unwrap()))
|
|
||||||
.unwrap(),
|
|
||||||
)
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -101,23 +77,8 @@ impl Bulletproofs {
|
|||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
||||||
match self {
|
match self {
|
||||||
Bulletproofs::Original(bp) => bp.verify(rng, commitments),
|
Self::Original(bp) => bp.verify(rng, commitments),
|
||||||
Bulletproofs::Plus(bp) => {
|
Self::Plus(bp) => bp.verify(rng, commitments),
|
||||||
let mut verifier = BatchVerifier::new(1);
|
|
||||||
// If this commitment is torsioned (which is allowed), this won't be a well-formed
|
|
||||||
// dfg::EdwardsPoint (expected to be of prime-order)
|
|
||||||
// The actual BP+ impl will perform a torsion clear though, making this safe
|
|
||||||
// TODO: Have AggregateRangeStatement take in dalek EdwardsPoint for clarity on this
|
|
||||||
let Some(statement) = AggregateRangeStatement::new(
|
|
||||||
commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
|
|
||||||
) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
if !statement.verify(rng, &mut verifier, (), bp.clone()) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
verifier.verify_vartime()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -133,15 +94,8 @@ impl Bulletproofs {
|
|||||||
commitments: &[EdwardsPoint],
|
commitments: &[EdwardsPoint],
|
||||||
) -> bool {
|
) -> bool {
|
||||||
match self {
|
match self {
|
||||||
Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
Self::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
||||||
Bulletproofs::Plus(bp) => {
|
Self::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
||||||
let Some(statement) = AggregateRangeStatement::new(
|
|
||||||
commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
|
|
||||||
) else {
|
|
||||||
return false;
|
|
||||||
};
|
|
||||||
statement.verify(rng, verifier, id, bp.clone())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -151,7 +105,7 @@ impl Bulletproofs {
|
|||||||
specific_write_vec: F,
|
specific_write_vec: F,
|
||||||
) -> io::Result<()> {
|
) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Bulletproofs::Original(bp) => {
|
Self::Original(bp) => {
|
||||||
write_point(&bp.A, w)?;
|
write_point(&bp.A, w)?;
|
||||||
write_point(&bp.S, w)?;
|
write_point(&bp.S, w)?;
|
||||||
write_point(&bp.T1, w)?;
|
write_point(&bp.T1, w)?;
|
||||||
@@ -165,15 +119,15 @@ impl Bulletproofs {
|
|||||||
write_scalar(&bp.t, w)
|
write_scalar(&bp.t, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
Bulletproofs::Plus(bp) => {
|
Self::Plus(bp) => {
|
||||||
write_point(&bp.A.0, w)?;
|
write_point(&bp.A, w)?;
|
||||||
write_point(&bp.wip.A.0, w)?;
|
write_point(&bp.A1, w)?;
|
||||||
write_point(&bp.wip.B.0, w)?;
|
write_point(&bp.B, w)?;
|
||||||
write_scalar(&bp.wip.r_answer.0, w)?;
|
write_scalar(&bp.r1, w)?;
|
||||||
write_scalar(&bp.wip.s_answer.0, w)?;
|
write_scalar(&bp.s1, w)?;
|
||||||
write_scalar(&bp.wip.delta_answer.0, w)?;
|
write_scalar(&bp.d1, w)?;
|
||||||
specific_write_vec(&bp.wip.L.iter().copied().map(|L| L.0).collect::<Vec<_>>(), w)?;
|
specific_write_vec(&bp.L, w)?;
|
||||||
specific_write_vec(&bp.wip.R.iter().copied().map(|R| R.0).collect::<Vec<_>>(), w)
|
specific_write_vec(&bp.R, w)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -193,8 +147,8 @@ impl Bulletproofs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Read Bulletproofs.
|
/// Read Bulletproofs.
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
Ok(Bulletproofs::Original(OriginalStruct {
|
Ok(Self::Original(OriginalStruct {
|
||||||
A: read_point(r)?,
|
A: read_point(r)?,
|
||||||
S: read_point(r)?,
|
S: read_point(r)?,
|
||||||
T1: read_point(r)?,
|
T1: read_point(r)?,
|
||||||
@@ -210,20 +164,16 @@ impl Bulletproofs {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Read Bulletproofs+.
|
/// Read Bulletproofs+.
|
||||||
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
|
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Self> {
|
||||||
use dalek_ff_group::{Scalar as DfgScalar, EdwardsPoint as DfgPoint};
|
Ok(Self::Plus(PlusStruct {
|
||||||
|
A: read_point(r)?,
|
||||||
Ok(Bulletproofs::Plus(AggregateRangeProof {
|
A1: read_point(r)?,
|
||||||
A: DfgPoint(read_point(r)?),
|
B: read_point(r)?,
|
||||||
wip: WipProof {
|
r1: read_scalar(r)?,
|
||||||
A: DfgPoint(read_point(r)?),
|
s1: read_scalar(r)?,
|
||||||
B: DfgPoint(read_point(r)?),
|
d1: read_scalar(r)?,
|
||||||
r_answer: DfgScalar(read_scalar(r)?),
|
L: read_vec(read_point, r)?,
|
||||||
s_answer: DfgScalar(read_scalar(r)?),
|
R: read_vec(read_point, r)?,
|
||||||
delta_answer: DfgScalar(read_scalar(r)?),
|
|
||||||
L: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
|
|
||||||
R: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
|
|
||||||
},
|
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as D
|
|||||||
use group::{ff::Field, Group};
|
use group::{ff::Field, Group};
|
||||||
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use multiexp::{BatchVerifier, multiexp};
|
use multiexp::BatchVerifier;
|
||||||
|
|
||||||
use crate::{Commitment, ringct::bulletproofs::core::*};
|
use crate::{Commitment, ringct::bulletproofs::core::*};
|
||||||
|
|
||||||
@@ -17,20 +17,7 @@ include!(concat!(env!("OUT_DIR"), "/generators.rs"));
|
|||||||
|
|
||||||
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
|
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
|
||||||
pub(crate) fn IP12() -> Scalar {
|
pub(crate) fn IP12() -> Scalar {
|
||||||
*IP12_CELL.get_or_init(|| ScalarVector(vec![Scalar::ONE; N]).inner_product(TWO_N()))
|
*IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn hadamard_fold(
|
|
||||||
l: &[EdwardsPoint],
|
|
||||||
r: &[EdwardsPoint],
|
|
||||||
a: Scalar,
|
|
||||||
b: Scalar,
|
|
||||||
) -> Vec<EdwardsPoint> {
|
|
||||||
let mut res = Vec::with_capacity(l.len() / 2);
|
|
||||||
for i in 0 .. l.len() {
|
|
||||||
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
@@ -49,10 +36,8 @@ pub struct OriginalStruct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl OriginalStruct {
|
impl OriginalStruct {
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
#[allow(clippy::many_single_char_names)]
|
||||||
rng: &mut R,
|
pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
|
||||||
commitments: &[Commitment],
|
|
||||||
) -> OriginalStruct {
|
|
||||||
let (logMN, M, MN) = MN(commitments.len());
|
let (logMN, M, MN) = MN(commitments.len());
|
||||||
|
|
||||||
let (aL, aR) = bit_decompose(commitments);
|
let (aL, aR) = bit_decompose(commitments);
|
||||||
@@ -70,7 +55,7 @@ impl OriginalStruct {
|
|||||||
let mut cache = hash_to_scalar(&y.to_bytes());
|
let mut cache = hash_to_scalar(&y.to_bytes());
|
||||||
let z = cache;
|
let z = cache;
|
||||||
|
|
||||||
let l0 = aL - z;
|
let l0 = &aL - z;
|
||||||
let l1 = sL;
|
let l1 = sL;
|
||||||
|
|
||||||
let mut zero_twos = Vec::with_capacity(MN);
|
let mut zero_twos = Vec::with_capacity(MN);
|
||||||
@@ -82,12 +67,12 @@ impl OriginalStruct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let yMN = ScalarVector::powers(y, MN);
|
let yMN = ScalarVector::powers(y, MN);
|
||||||
let r0 = ((aR + z) * &yMN) + &ScalarVector(zero_twos);
|
let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
|
||||||
let r1 = yMN * &sR;
|
let r1 = yMN * sR;
|
||||||
|
|
||||||
let (T1, T2, x, mut taux) = {
|
let (T1, T2, x, mut taux) = {
|
||||||
let t1 = l0.clone().inner_product(&r1) + r0.clone().inner_product(&l1);
|
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
|
||||||
let t2 = l1.clone().inner_product(&r1);
|
let t2 = inner_product(&l1, &r1);
|
||||||
|
|
||||||
let mut tau1 = Scalar::random(&mut *rng);
|
let mut tau1 = Scalar::random(&mut *rng);
|
||||||
let mut tau2 = Scalar::random(&mut *rng);
|
let mut tau2 = Scalar::random(&mut *rng);
|
||||||
@@ -113,10 +98,10 @@ impl OriginalStruct {
|
|||||||
taux += zpow[i + 2] * gamma;
|
taux += zpow[i + 2] * gamma;
|
||||||
}
|
}
|
||||||
|
|
||||||
let l = l0 + &(l1 * x);
|
let l = &l0 + &(l1 * x);
|
||||||
let r = r0 + &(r1 * x);
|
let r = &r0 + &(r1 * x);
|
||||||
|
|
||||||
let t = l.clone().inner_product(&r);
|
let t = inner_product(&l, &r);
|
||||||
|
|
||||||
let x_ip =
|
let x_ip =
|
||||||
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
|
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
|
||||||
@@ -139,22 +124,22 @@ impl OriginalStruct {
|
|||||||
let (aL, aR) = a.split();
|
let (aL, aR) = a.split();
|
||||||
let (bL, bR) = b.split();
|
let (bL, bR) = b.split();
|
||||||
|
|
||||||
let cL = aL.clone().inner_product(&bR);
|
let cL = inner_product(&aL, &bR);
|
||||||
let cR = aR.clone().inner_product(&bL);
|
let cR = inner_product(&aR, &bL);
|
||||||
|
|
||||||
let (G_L, G_R) = G_proof.split_at(aL.len());
|
let (G_L, G_R) = G_proof.split_at(aL.len());
|
||||||
let (H_L, H_R) = H_proof.split_at(aL.len());
|
let (H_L, H_R) = H_proof.split_at(aL.len());
|
||||||
|
|
||||||
let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
|
let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
|
||||||
let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
|
let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
|
||||||
L.push(L_i);
|
L.push(*L_i);
|
||||||
R.push(R_i);
|
R.push(*R_i);
|
||||||
|
|
||||||
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
|
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
|
||||||
let winv = w.invert().unwrap();
|
let winv = w.invert().unwrap();
|
||||||
|
|
||||||
a = (aL * w) + &(aR * winv);
|
a = (aL * w) + (aR * winv);
|
||||||
b = (bL * winv) + &(bR * w);
|
b = (bL * winv) + (bR * w);
|
||||||
|
|
||||||
if a.len() != 1 {
|
if a.len() != 1 {
|
||||||
G_proof = hadamard_fold(G_L, G_R, winv, w);
|
G_proof = hadamard_fold(G_L, G_R, winv, w);
|
||||||
@@ -162,15 +147,15 @@ impl OriginalStruct {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let res = OriginalStruct {
|
let res = Self {
|
||||||
A: *A,
|
A: *A,
|
||||||
S: *S,
|
S: *S,
|
||||||
T1: *T1,
|
T1: *T1,
|
||||||
T2: *T2,
|
T2: *T2,
|
||||||
taux: *taux,
|
taux: *taux,
|
||||||
mu: *mu,
|
mu: *mu,
|
||||||
L: L.drain(..).map(|L| *L).collect(),
|
L,
|
||||||
R: R.drain(..).map(|R| *R).collect(),
|
R,
|
||||||
a: *a[0],
|
a: *a[0],
|
||||||
b: *b[0],
|
b: *b[0],
|
||||||
t: *t,
|
t: *t,
|
||||||
@@ -179,6 +164,7 @@ impl OriginalStruct {
|
|||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::many_single_char_names)]
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||||
&self,
|
&self,
|
||||||
|
|||||||
300
coins/monero/src/ringct/bulletproofs/plus.rs
Normal file
300
coins/monero/src/ringct/bulletproofs/plus.rs
Normal file
@@ -0,0 +1,300 @@
|
|||||||
|
use std_shims::{vec::Vec, sync::OnceLock};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
|
||||||
|
|
||||||
|
use group::ff::Field;
|
||||||
|
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use multiexp::BatchVerifier;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Commitment, hash,
|
||||||
|
ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
|
||||||
|
};
|
||||||
|
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
||||||
|
|
||||||
|
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
|
||||||
|
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
|
||||||
|
*TRANSCRIPT_CELL.get_or_init(|| {
|
||||||
|
EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
|
||||||
|
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
|
||||||
|
let (cache, commitments) = hash_commitments(commitments);
|
||||||
|
(hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
|
||||||
|
}
|
||||||
|
|
||||||
|
// d[j*N+i] = z**(2*(j+1)) * 2**i
|
||||||
|
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
|
||||||
|
let zpow = ScalarVector::even_powers(z, 2 * M);
|
||||||
|
let mut d = vec![Scalar::ZERO; MN];
|
||||||
|
for j in 0 .. M {
|
||||||
|
for i in 0 .. N {
|
||||||
|
d[(j * N) + i] = zpow[j] * TWO_N()[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
(zpow, ScalarVector(d))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct PlusStruct {
|
||||||
|
pub(crate) A: DalekPoint,
|
||||||
|
pub(crate) A1: DalekPoint,
|
||||||
|
pub(crate) B: DalekPoint,
|
||||||
|
pub(crate) r1: DalekScalar,
|
||||||
|
pub(crate) s1: DalekScalar,
|
||||||
|
pub(crate) d1: DalekScalar,
|
||||||
|
pub(crate) L: Vec<DalekPoint>,
|
||||||
|
pub(crate) R: Vec<DalekPoint>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PlusStruct {
  // Prove the amounts in `commitments` are within range, producing an aggregate BP+.
  //
  // The proof's points are emitted * INV_EIGHT, per Monero convention; the mask of
  // each commitment is folded into the proof's alpha term so the relation holds.
  #[allow(clippy::many_single_char_names)]
  pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
    let generators = GENERATORS();
    // logMN/M/MN: proof size parameters derived from the amount of commitments.
    let (logMN, M, MN) = MN(commitments.len());

    // Bit-decompose the amounts into the aL/aR vectors.
    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // Seed the transcript with the commitments.
    let (mut cache, _) = hash_plus(commitments_points.clone());
    let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);

    // Challenge y; z is derived by hashing y, and the transcript continues from z.
    let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let (zpow, d) = d(z, M, MN);

    let aL1 = aL - z;

    let ypow = ScalarVector::powers(y, MN + 2);
    // y^MN .. y^1 (reversed powers), used to weight d.
    let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
    y_for_d.0.reverse();
    let aR1 = (aR + z) + (y_for_d * d);

    // Fold each commitment's mask into alpha.
    for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      alpha1 += zpow[j] * ypow[MN + 1] * gamma;
    }

    let mut a = aL1;
    let mut b = aR1;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = generators.G[.. a.len()].to_vec();
    let mut H_proof = generators.H[.. a.len()].to_vec();

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    // Recursive halving: each round commits to L/R, derives a challenge w, and folds
    // the vectors/generators accordingly.
    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = weighted_inner_product(&aL, &bR, y);
      let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);

      // Masks for this round's L/R points.
      let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
      L_i.push((dL, G));
      let L_i = prove_multiexp(&L_i);
      L.push(*L_i);

      let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
      R_i.push((dR, G));
      let R_i = prove_multiexp(&R_i);
      R.push(*R_i);

      // Round challenge w, absorbed from L/R.
      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
      H_proof = hadamard_fold(H_L, H_R, w, winv);

      a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
      b = (bL * winv) + (bR * w);

      alpha1 += (dL * (w * w)) + (dR * (winv * winv));

      dL.zeroize();
      dR.zeroize();
    }

    // Final Schnorr-esque stage over the now length-1 vectors.
    let mut r = Scalar::random(&mut *rng);
    let mut s = Scalar::random(&mut *rng);
    let mut d = Scalar::random(&mut *rng);
    let mut eta = Scalar::random(&mut *rng);

    let A1 = prove_multiexp(&[
      (r, G_proof[0]),
      (s, H_proof[0]),
      (d, G),
      ((r * y * b[0]) + (s * y * a[0]), H()),
    ]);
    let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
    let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);

    let r1 = (a[0] * e) + r;
    r.zeroize();
    let s1 = (b[0] * e) + s;
    s.zeroize();
    let d1 = ((d * e) + eta) + (alpha1 * (e * e));
    d.zeroize();
    eta.zeroize();
    alpha1.zeroize();

    let res = Self { A: *A, A1: *A1, B: *B, r1: *r1, s1: *s1, d1: *d1, L, R };
    // Sanity check the freshly created proof verifies (debug builds only).
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  // Shared verification path: validates the proof's shape, rebuilds the challenges,
  // and queues one batched multiexp statement into `verifier`.
  //
  // Returns false on malformed input; the queued statement still needs the
  // verifier's own check to pass for the proof to be valid.
  #[allow(clippy::many_single_char_names)]
  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_plus(commitments.iter().copied());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
    let yinv = y.invert().unwrap();
    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let A = normalize(&self.A);
    let A1 = normalize(&self.A1);
    let B = normalize(&self.B);

    // Verify it
    let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));

    // yMN = y^(MN) via repeated squaring (MN is a power of 2).
    let mut yMN = y;
    for _ in 0 .. logMN {
      yMN *= yMN;
    }
    let yMNy = yMN * y;

    let (zpow, d) = d(z, M, MN);
    let zsq = zpow[0];

    let esq = e * e;
    let minus_esq = -esq;
    let commitment_weight = minus_esq * yMNy;
    for (i, commitment) in commitments.iter().map(EdwardsPoint::mul_by_cofactor).enumerate() {
      proof.push((commitment_weight * zpow[i], commitment));
    }

    // Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
    // to a single addition under vartime for the first BP verified in the batch, which is expected
    // to be much more significant
    proof.push((Scalar::ONE, -B));
    proof.push((-e, A1));
    proof.push((minus_esq, A));
    proof.push((Scalar(self.d1), G));

    let d_sum = zpow.sum() * Scalar::from(u64::MAX);
    let y_sum = weighted_powers(y, MN).sum();
    proof.push((
      Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
      H(),
    ));

    // Products of the round challenges, weighting each original generator.
    let w_cache = challenge_products(&w, &winv);

    let mut e_r1_y = e * Scalar(self.r1);
    let e_s1 = e * Scalar(self.s1);
    let esq_z = esq * z;
    let minus_esq_z = -esq_z;
    let mut minus_esq_y = minus_esq * yMN;

    let generators = GENERATORS();
    for i in 0 .. MN {
      proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
      proof.push((
        (e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
        generators.H[i],
      ));

      // Divide the running weights by y each iteration.
      e_r1_y *= yinv;
      minus_esq_y *= yinv;
    }

    for i in 0 .. logMN {
      proof.push((minus_esq * w[i] * w[i], L[i]));
      proof.push((minus_esq * winv[i] * winv[i], R[i]));
    }

    verifier.queue(rng, id, proof);
    true
  }

  // Verify this proof on its own, via a single-statement batch verifier.
  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  // Queue this proof into an existing batch verifier; the batch's verification
  // still has to be run by the caller.
  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
|
||||||
@@ -1,257 +0,0 @@
|
|||||||
use std_shims::vec::Vec;
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
|
||||||
|
|
||||||
use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
|
|
||||||
use group::{
|
|
||||||
ff::{Field, PrimeField},
|
|
||||||
Group, GroupEncoding,
|
|
||||||
};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Commitment,
|
|
||||||
ringct::{
|
|
||||||
bulletproofs::core::{MAX_M, N},
|
|
||||||
bulletproofs::plus::{
|
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators,
|
|
||||||
transcript::*,
|
|
||||||
weighted_inner_product::{WipStatement, WipWitness, WipProof},
|
|
||||||
padded_pow_of_2, u64_decompose,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
// Figure 3
//
// Statement for an aggregate range proof: the Pedersen commitments V whose amounts
// are claimed to be in range. The generators are the full static set, reduced to
// the needed size at prove/verify time.
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement {
  generators: Generators,
  V: Vec<EdwardsPoint>,
}
|
|
||||||
|
|
||||||
impl Zeroize for AggregateRangeStatement {
  // Only the commitments are cleared; the generators are public parameters
  // (static slices) with nothing sensitive to zeroize.
  fn zeroize(&mut self) {
    self.V.zeroize();
  }
}
|
|
||||||
|
|
||||||
// Witness for an aggregate range proof: the openings (amounts and masks) of the
// statement's commitments, in matching order.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AggregateRangeWitness {
  values: Vec<u64>,
  gammas: Vec<Scalar>,
}
|
|
||||||
|
|
||||||
impl AggregateRangeWitness {
|
|
||||||
pub(crate) fn new(commitments: &[Commitment]) -> Option<Self> {
|
|
||||||
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut values = Vec::with_capacity(commitments.len());
|
|
||||||
let mut gammas = Vec::with_capacity(commitments.len());
|
|
||||||
for commitment in commitments {
|
|
||||||
values.push(commitment.amount);
|
|
||||||
gammas.push(Scalar(commitment.mask));
|
|
||||||
}
|
|
||||||
Some(AggregateRangeWitness { values, gammas })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// An aggregate range proof: the commitment A to the bit decomposition, and the
// weighted-inner-product proof over the derived statement.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
  pub(crate) A: EdwardsPoint,
  pub(crate) wip: WipProof,
}
|
|
||||||
|
|
||||||
impl AggregateRangeStatement {
  // Create a statement over the given commitments, rejecting empty or oversized sets.
  pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
    if V.is_empty() || (V.len() > MAX_M) {
      return None;
    }

    Some(Self { generators: Generators::new(), V })
  }

  // Absorb A into the transcript, deriving the challenges y and z (z also becomes
  // the new transcript state).
  fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
    let y = hash_to_scalar(&[transcript.to_repr().as_ref(), A.to_bytes().as_ref()].concat());
    let z = hash_to_scalar(y.to_bytes().as_ref());
    *transcript = z;
    (y, z)
  }

  // d_j (1-indexed): a length m*N vector which is zero except for the powers of 2
  // (1, 2, 4, ...) placed in the j-th N-sized slot.
  fn d_j(j: usize, m: usize) -> ScalarVector {
    let mut d_j = Vec::with_capacity(m * N);
    for _ in 0 .. (j - 1) * N {
      d_j.push(Scalar::ZERO);
    }
    d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), N).0);
    for _ in 0 .. (m - j) * N {
      d_j.push(Scalar::ZERO);
    }
    ScalarVector(d_j)
  }

  // Shared prover/verifier derivation of A_hat (the WIP statement's commitment)
  // and the scalars needed alongside it.
  fn compute_A_hat(
    mut V: PointVector,
    generators: &Generators,
    transcript: &mut Scalar,
    mut A: EdwardsPoint,
  ) -> (Scalar, ScalarVector, Scalar, Scalar, ScalarVector, EdwardsPoint) {
    let (y, z) = Self::transcript_A(transcript, A);
    A = A.mul_by_cofactor();

    // Pad V with identity elements to a power of 2.
    while V.len() < padded_pow_of_2(V.len()) {
      V.0.push(EdwardsPoint::identity());
    }
    let mn = V.len() * N;

    // z_pow[j - 1] = z^(2j)
    let mut z_pow = Vec::with_capacity(V.len());

    // d = sum_j z^(2j) * d_j
    let mut d = ScalarVector::new(mn);
    for j in 1 ..= V.len() {
      z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
      d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
    }

    // ascending_y[i] = y^(i + 1)
    let mut ascending_y = ScalarVector(vec![y]);
    for i in 1 .. d.len() {
      ascending_y.0.push(ascending_y[i - 1] * y);
    }
    let y_pows = ascending_y.clone().sum();

    let mut descending_y = ascending_y.clone();
    descending_y.0.reverse();

    let d_descending_y = d.clone() * &descending_y;
    let d_descending_y_plus_z = d_descending_y + z;

    // y^(mn + 1)
    let y_mn_plus_one = descending_y[0] * y;

    // sum_j z^(2j) * V_j
    let mut commitment_accum = EdwardsPoint::identity();
    for (j, commitment) in V.0.iter().enumerate() {
      commitment_accum += *commitment * z_pow[j];
    }

    let neg_z = -z;
    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
    for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
      A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
      A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((y_mn_plus_one, commitment_accum));
    A_terms.push((
      ((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * z.square())),
      Generators::g(),
    ));

    (
      y,
      d_descending_y_plus_z,
      y_mn_plus_one,
      z,
      ScalarVector(z_pow),
      // Vartime is fine here as all terms are public.
      A + multiexp_vartime(&A_terms),
    )
  }

  // Prove the statement's commitments open to in-range amounts, consuming self.
  //
  // Returns None if the witness doesn't actually open the statement's commitments.
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    witness: &AggregateRangeWitness,
  ) -> Option<AggregateRangeProof> {
    // Check for consistency with the witness
    if self.V.len() != witness.values.len() {
      return None;
    }
    for (commitment, (value, gamma)) in
      self.V.iter().zip(witness.values.iter().zip(witness.gammas.iter()))
    {
      if Commitment::new(**gamma, *value).calculate() != **commitment {
        return None;
      }
    }

    let Self { generators, V } = self;
    // Monero expects all of these points to be torsion-free
    // Generally, for Bulletproofs, it sends points * INV_EIGHT and then performs a torsion clear
    // by multiplying by 8
    // This also restores the original value due to the preprocessing
    // Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
    // clearing its cofactor without mutating the value
    // For some reason, these values are transcripted * INV_EIGHT, not as transmitted
    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    // Pad V
    while V.len() < padded_pow_of_2(V.len()) {
      V.push(EdwardsPoint::identity());
    }

    let generators = generators.reduce(V.len() * N);

    // a_l is the concatenated bit decomposition of the amounts (0 for padding).
    let mut d_js = Vec::with_capacity(V.len());
    let mut a_l = ScalarVector(Vec::with_capacity(V.len() * N));
    for j in 1 ..= V.len() {
      d_js.push(Self::d_j(j, V.len()));
      a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
    }

    let a_r = a_l.clone() - Scalar::ONE;

    let alpha = Scalar::random(&mut *rng);

    // A commits to a_l/a_r under the generator vectors, masked by alpha.
    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
    for (i, a_l) in a_l.0.iter().enumerate() {
      A_terms.push((*a_l, generators.generator(GeneratorsList::GBold1, i)));
    }
    for (i, a_r) in a_r.0.iter().enumerate() {
      A_terms.push((*a_r, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((alpha, Generators::h()));
    let mut A = multiexp(&A_terms);
    A_terms.zeroize();

    // Multiply by INV_EIGHT per earlier commentary
    A.0 *= crate::INV_EIGHT();

    let (y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);

    // Shift the vectors and fold the masks into alpha, per the reduction to WIP.
    let a_l = a_l - z;
    let a_r = a_r + &d_descending_y_plus_z;
    let mut alpha = alpha;
    for j in 1 ..= witness.gammas.len() {
      alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;
    }

    Some(AggregateRangeProof {
      A,
      wip: WipStatement::new(generators, A_hat, y)
        .prove(rng, transcript, &Zeroizing::new(WipWitness::new(a_l, a_r, alpha).unwrap()))
        .unwrap(),
    })
  }

  // Queue verification of the given proof against this statement into `verifier`.
  pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    verifier: &mut BatchVerifier<Id, EdwardsPoint>,
    id: Id,
    proof: AggregateRangeProof,
  ) -> bool {
    let Self { generators, V } = self;

    // Same INV_EIGHT/cofactor handling as in prove.
    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    let generators = generators.reduce(V.len() * N);

    let (y, _, _, _, _, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
    WipStatement::new(generators, A_hat, y).verify(rng, verifier, id, transcript, proof.wip)
  }
}
|
|
||||||
@@ -1,85 +0,0 @@
|
|||||||
#![allow(non_snake_case)]
|
|
||||||
|
|
||||||
use group::Group;
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
pub(crate) use crate::ringct::bulletproofs::scalar_vector::ScalarVector;
|
|
||||||
mod point_vector;
|
|
||||||
pub(crate) use point_vector::PointVector;
|
|
||||||
|
|
||||||
pub(crate) mod transcript;
|
|
||||||
pub(crate) mod weighted_inner_product;
|
|
||||||
pub(crate) use weighted_inner_product::*;
|
|
||||||
pub(crate) mod aggregate_range_proof;
|
|
||||||
pub(crate) use aggregate_range_proof::*;
|
|
||||||
|
|
||||||
/// Returns the smallest power of 2 greater than or equal to `i` (1 for `i == 0`).
///
/// Used to pad vector/generator lengths to a power of 2, as required by the
/// proof's recursive halving.
pub(crate) fn padded_pow_of_2(i: usize) -> usize {
  // Identical to the hand-rolled shift loop, including returning 1 for 0 and 1;
  // the standard library form also avoids the loop's shift-overflow hazard for
  // inputs above the largest representable power of 2.
  i.next_power_of_two()
}
|
|
||||||
|
|
||||||
// Which of the two generator vectors to index (the "bold" g/h vectors of the proof).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub(crate) enum GeneratorsList {
  GBold1,
  HBold1,
}
|
|
||||||
|
|
||||||
// TODO: Table these
// The generator vectors used by the proof. These borrow from a static,
// build-script-generated set (see the generators module), so cloning is cheap.
#[derive(Clone, Debug)]
pub(crate) struct Generators {
  g_bold1: &'static [EdwardsPoint],
  h_bold1: &'static [EdwardsPoint],
}
|
|
||||||
|
|
||||||
// The static BP+ generator set, generated by the build script into OUT_DIR.
mod generators {
  use std_shims::sync::OnceLock;
  use monero_generators::Generators;
  include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
}
|
|
||||||
|
|
||||||
impl Generators {
  // Borrow the full, statically-generated generator vectors.
  #[allow(clippy::new_without_default)]
  pub(crate) fn new() -> Self {
    let gens = generators::GENERATORS();
    Generators { g_bold1: &gens.G, h_bold1: &gens.H }
  }

  // Amount of generators in each vector.
  pub(crate) fn len(&self) -> usize {
    self.g_bold1.len()
  }

  // Generator for the inner-product term.
  // NOTE(review): this returns Monero's H while h() returns the basepoint — the
  // g/h naming appears to follow the BP+ paper rather than Monero; confirm.
  pub(crate) fn g() -> EdwardsPoint {
    dalek_ff_group::EdwardsPoint(crate::H())
  }

  // Generator for the blinding/mask term.
  pub(crate) fn h() -> EdwardsPoint {
    EdwardsPoint::generator()
  }

  // The i-th generator of the specified vector. Panics if out of bounds.
  pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
    match list {
      GeneratorsList::GBold1 => self.g_bold1[i],
      GeneratorsList::HBold1 => self.h_bold1[i],
    }
  }

  // Reduce to the first `generators` generators (rounded up to a power of 2).
  // Panics if more generators are requested than statically exist.
  pub(crate) fn reduce(&self, generators: usize) -> Self {
    // Round to the nearest power of 2
    let generators = padded_pow_of_2(generators);
    assert!(generators <= self.g_bold1.len());

    Generators { g_bold1: &self.g_bold1[.. generators], h_bold1: &self.h_bold1[.. generators] }
  }
}
|
|
||||||
|
|
||||||
// Returns the little-endian decomposition.
|
|
||||||
fn u64_decompose(value: u64) -> ScalarVector {
|
|
||||||
let mut bits = ScalarVector::new(64);
|
|
||||||
for bit in 0 .. 64 {
|
|
||||||
bits[bit] = Scalar::from((value >> bit) & 1);
|
|
||||||
}
|
|
||||||
bits
|
|
||||||
}
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
use core::ops::{Index, IndexMut};
|
|
||||||
use std_shims::vec::Vec;
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
|
||||||
|
|
||||||
use dalek_ff_group::EdwardsPoint;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
use multiexp::multiexp;
|
|
||||||
#[cfg(test)]
|
|
||||||
use crate::ringct::bulletproofs::plus::ScalarVector;
|
|
||||||
|
|
||||||
// A vector of Edwards points, used for the proof's generator vectors.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);
|
|
||||||
|
|
||||||
impl Index<usize> for PointVector {
  type Output = EdwardsPoint;
  // Panics if out of bounds, as with any Vec index.
  fn index(&self, index: usize) -> &EdwardsPoint {
    &self.0[index]
  }
}
|
|
||||||
|
|
||||||
impl IndexMut<usize> for PointVector {
  // Panics if out of bounds, as with any Vec index.
  fn index_mut(&mut self, index: usize) -> &mut EdwardsPoint {
    &mut self.0[index]
  }
}
|
|
||||||
|
|
||||||
impl PointVector {
|
|
||||||
#[cfg(test)]
|
|
||||||
pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
|
|
||||||
debug_assert_eq!(self.len(), vector.len());
|
|
||||||
let mut res = Vec::with_capacity(self.len());
|
|
||||||
for (point, scalar) in self.0.iter().copied().zip(vector.0.iter().copied()) {
|
|
||||||
res.push((scalar, point));
|
|
||||||
}
|
|
||||||
multiexp(&res)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn len(&self) -> usize {
|
|
||||||
self.0.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn split(mut self) -> (Self, Self) {
|
|
||||||
debug_assert!(self.len() > 1);
|
|
||||||
let r = self.0.split_off(self.0.len() / 2);
|
|
||||||
debug_assert_eq!(self.len(), r.len());
|
|
||||||
(self, PointVector(r))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
use std_shims::{sync::OnceLock, vec::Vec};
|
|
||||||
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use monero_generators::{hash_to_point as raw_hash_to_point};
|
|
||||||
use crate::{hash, hash_to_scalar as dalek_hash};
|
|
||||||
|
|
||||||
// Monero starts BP+ transcripts with the following constant.
// Cached in a OnceLock so the hash-to-point is only computed once.
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
  // Why this uses a hash_to_point is completely unknown.
  *TRANSCRIPT_CELL
    .get_or_init(|| raw_hash_to_point(hash(b"bulletproof_plus_transcript")).compress().to_bytes())
}
|
|
||||||
|
|
||||||
// Hash the given data to a scalar, wrapping the crate-level hash into the
// dalek-ff-group Scalar type used throughout this module.
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}
|
|
||||||
|
|
||||||
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
|
|
||||||
let commitments_hash =
|
|
||||||
hash_to_scalar(&commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
|
|
||||||
hash_to_scalar(&[TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
|
|
||||||
}
|
|
||||||
@@ -1,444 +0,0 @@
|
|||||||
use std_shims::vec::Vec;
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
|
||||||
|
|
||||||
use multiexp::{BatchVerifier, multiexp, multiexp_vartime};
|
|
||||||
use group::{
|
|
||||||
ff::{Field, PrimeField},
|
|
||||||
GroupEncoding,
|
|
||||||
};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::ringct::bulletproofs::plus::{
|
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, transcript::*,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Figure 1
//
// Statement for the weighted inner product relation: the commitment P to the
// vectors a/b (and mask) under the generators, with the powers of the weight y
// (populated by new()).
#[derive(Clone, Debug)]
pub(crate) struct WipStatement {
  generators: Generators,
  P: EdwardsPoint,
  y: ScalarVector,
}
|
|
||||||
|
|
||||||
impl Zeroize for WipStatement {
  // The generators are public, static parameters and aren't zeroized.
  fn zeroize(&mut self) {
    self.P.zeroize();
    self.y.zeroize();
  }
}
|
|
||||||
|
|
||||||
// Witness for the weighted inner product relation: the vectors a and b, and the
// mask alpha of the commitment P.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct WipWitness {
  a: ScalarVector,
  b: ScalarVector,
  alpha: Scalar,
}
|
|
||||||
|
|
||||||
impl WipWitness {
|
|
||||||
pub(crate) fn new(mut a: ScalarVector, mut b: ScalarVector, alpha: Scalar) -> Option<Self> {
|
|
||||||
if a.0.is_empty() || (a.len() != b.len()) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Pad to the nearest power of 2
|
|
||||||
let missing = padded_pow_of_2(a.len()) - a.len();
|
|
||||||
a.0.reserve(missing);
|
|
||||||
b.0.reserve(missing);
|
|
||||||
for _ in 0 .. missing {
|
|
||||||
a.0.push(Scalar::ZERO);
|
|
||||||
b.0.push(Scalar::ZERO);
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(Self { a, b, alpha })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// A weighted-inner-product proof: per-round L/R points, the final A/B points, and
// the final response scalars.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct WipProof {
  pub(crate) L: Vec<EdwardsPoint>,
  pub(crate) R: Vec<EdwardsPoint>,
  pub(crate) A: EdwardsPoint,
  pub(crate) B: EdwardsPoint,
  pub(crate) r_answer: Scalar,
  pub(crate) s_answer: Scalar,
  pub(crate) delta_answer: Scalar,
}
|
|
||||||
|
|
||||||
impl WipStatement {
|
|
||||||
  // Create a WIP statement. The amount of generators must already be a power of 2
  // (callers reduce() first, which pads to one).
  pub(crate) fn new(generators: Generators, P: EdwardsPoint, y: Scalar) -> Self {
    debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));

    // y ** n
    // y_vec[i] = y^(i + 1), one power per generator.
    let mut y_vec = ScalarVector::new(generators.len());
    y_vec[0] = y;
    for i in 1 .. y_vec.len() {
      y_vec[i] = y_vec[i - 1] * y;
    }

    Self { generators, P, y: y_vec }
  }
|
|
||||||
|
|
||||||
fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
|
|
||||||
let e = hash_to_scalar(
|
|
||||||
&[transcript.to_repr().as_ref(), L.to_bytes().as_ref(), R.to_bytes().as_ref()].concat(),
|
|
||||||
);
|
|
||||||
*transcript = e;
|
|
||||||
e
|
|
||||||
}
|
|
||||||
|
|
||||||
fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
|
|
||||||
let e = hash_to_scalar(
|
|
||||||
&[transcript.to_repr().as_ref(), A.to_bytes().as_ref(), B.to_bytes().as_ref()].concat(),
|
|
||||||
);
|
|
||||||
*transcript = e;
|
|
||||||
e
|
|
||||||
}
|
|
||||||
|
|
||||||
  // Prover's variant of the shared code block to calculate G/H/P when n > 1
  // Returns each permutation of G/H since the prover needs to do operation on each permutation
  // P is dropped as it's unused in the prover's path
  // TODO: It'd still probably be faster to keep in terms of the original generators, both between
  // the reduced amount of group operations and the potential tabling of the generators under
  // multiexp
  #[allow(clippy::too_many_arguments)]
  fn next_G_H(
    transcript: &mut Scalar,
    mut g_bold1: PointVector,
    mut g_bold2: PointVector,
    mut h_bold1: PointVector,
    mut h_bold2: PointVector,
    L: EdwardsPoint,
    R: EdwardsPoint,
    y_inv_n_hat: Scalar,
  ) -> (Scalar, Scalar, Scalar, Scalar, PointVector, PointVector) {
    // The halves of each generator vector must line up.
    debug_assert_eq!(g_bold1.len(), g_bold2.len());
    debug_assert_eq!(h_bold1.len(), h_bold2.len());
    debug_assert_eq!(g_bold1.len(), h_bold1.len());

    // Round challenge e, derived by absorbing L/R into the transcript.
    let e = Self::transcript_L_R(transcript, L, R);
    let inv_e = e.invert().unwrap();

    // This vartime is safe as all of these arguments are public
    // Fold G: g' = (e^-1 * g1) + (e * y^-n_hat * g2).
    let mut new_g_bold = Vec::with_capacity(g_bold1.len());
    let e_y_inv = e * y_inv_n_hat;
    for g_bold in g_bold1.0.drain(..).zip(g_bold2.0.drain(..)) {
      new_g_bold.push(multiexp_vartime(&[(inv_e, g_bold.0), (e_y_inv, g_bold.1)]));
    }

    // Fold H: h' = (e * h1) + (e^-1 * h2).
    let mut new_h_bold = Vec::with_capacity(h_bold1.len());
    for h_bold in h_bold1.0.drain(..).zip(h_bold2.0.drain(..)) {
      new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
    }

    let e_square = e.square();
    let inv_e_square = inv_e.square();

    (e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
  }
|
|
||||||
|
|
||||||
  /*
    This has room for optimization worth investigating further. It currently takes
    an iterative approach. It can be optimized further via divide and conquer.

    Assume there are 4 challenges.

    Iterative approach (current):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across that result and column 2.
      3. Do the optimal multiplications across that result and column 3.

    Divide and conquer (worth investigating further):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across challenge column 2 and 3.
      3. Multiply both results together.

    When there are 4 challenges (n=16), the iterative approach does 28 multiplications
    versus divide and conquer's 24.
  */
  // Given the (e, e^-1) pair of each round, builds the 2^rounds products used to
  // weight the original generators during verification.
  fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
    let mut products = vec![Scalar::ONE; 1 << challenges.len()];

    if !challenges.is_empty() {
      products[0] = challenges[0].1;
      products[1] = challenges[0].0;

      for (j, challenge) in challenges.iter().enumerate().skip(1) {
        let mut slots = (1 << (j + 1)) - 1;
        while slots > 0 {
          // Fills high-to-low so reading products[slots / 2] (the prior,
          // half-sized table) before it's overwritten is safe.
          products[slots] = products[slots / 2] * challenge.0;
          products[slots - 1] = products[slots / 2] * challenge.1;

          slots = slots.saturating_sub(2);
        }
      }

      // Sanity check since if the above failed to populate, it'd be critical
      for product in &products {
        debug_assert!(!bool::from(product.is_zero()));
      }
    }

    products
  }
|
|
||||||
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
|
||||||
self,
|
|
||||||
rng: &mut R,
|
|
||||||
mut transcript: Scalar,
|
|
||||||
witness: &WipWitness,
|
|
||||||
) -> Option<WipProof> {
|
|
||||||
let WipStatement { generators, P, mut y } = self;
|
|
||||||
#[cfg(not(debug_assertions))]
|
|
||||||
let _ = P;
|
|
||||||
|
|
||||||
if generators.len() != witness.a.len() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
let (g, h) = (Generators::g(), Generators::h());
|
|
||||||
let mut g_bold = vec![];
|
|
||||||
let mut h_bold = vec![];
|
|
||||||
for i in 0 .. generators.len() {
|
|
||||||
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
|
|
||||||
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
|
|
||||||
}
|
|
||||||
let mut g_bold = PointVector(g_bold);
|
|
||||||
let mut h_bold = PointVector(h_bold);
|
|
||||||
|
|
||||||
// Check P has the expected relationship
|
|
||||||
#[cfg(debug_assertions)]
|
|
||||||
{
|
|
||||||
let mut P_terms = witness
|
|
||||||
.a
|
|
||||||
.0
|
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
.zip(g_bold.0.iter().copied())
|
|
||||||
.chain(witness.b.0.iter().copied().zip(h_bold.0.iter().copied()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
P_terms.push((witness.a.clone().weighted_inner_product(&witness.b, &y), g));
|
|
||||||
P_terms.push((witness.alpha, h));
|
|
||||||
debug_assert_eq!(multiexp(&P_terms), P);
|
|
||||||
P_terms.zeroize();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut a = witness.a.clone();
|
|
||||||
let mut b = witness.b.clone();
|
|
||||||
let mut alpha = witness.alpha;
|
|
||||||
|
|
||||||
// From here on, g_bold.len() is used as n
|
|
||||||
debug_assert_eq!(g_bold.len(), a.len());
|
|
||||||
|
|
||||||
let mut L_vec = vec![];
|
|
||||||
let mut R_vec = vec![];
|
|
||||||
|
|
||||||
// else n > 1 case from figure 1
|
|
||||||
while g_bold.len() > 1 {
|
|
||||||
let (a1, a2) = a.clone().split();
|
|
||||||
let (b1, b2) = b.clone().split();
|
|
||||||
let (g_bold1, g_bold2) = g_bold.split();
|
|
||||||
let (h_bold1, h_bold2) = h_bold.split();
|
|
||||||
|
|
||||||
let n_hat = g_bold1.len();
|
|
||||||
debug_assert_eq!(a1.len(), n_hat);
|
|
||||||
debug_assert_eq!(a2.len(), n_hat);
|
|
||||||
debug_assert_eq!(b1.len(), n_hat);
|
|
||||||
debug_assert_eq!(b2.len(), n_hat);
|
|
||||||
debug_assert_eq!(g_bold1.len(), n_hat);
|
|
||||||
debug_assert_eq!(g_bold2.len(), n_hat);
|
|
||||||
debug_assert_eq!(h_bold1.len(), n_hat);
|
|
||||||
debug_assert_eq!(h_bold2.len(), n_hat);
|
|
||||||
|
|
||||||
let y_n_hat = y[n_hat - 1];
|
|
||||||
y.0.truncate(n_hat);
|
|
||||||
|
|
||||||
let d_l = Scalar::random(&mut *rng);
|
|
||||||
let d_r = Scalar::random(&mut *rng);
|
|
||||||
|
|
||||||
let c_l = a1.clone().weighted_inner_product(&b2, &y);
|
|
||||||
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
|
|
||||||
|
|
||||||
// TODO: Calculate these with a batch inversion
|
|
||||||
let y_inv_n_hat = y_n_hat.invert().unwrap();
|
|
||||||
|
|
||||||
let mut L_terms = (a1.clone() * y_inv_n_hat)
|
|
||||||
.0
|
|
||||||
.drain(..)
|
|
||||||
.zip(g_bold2.0.iter().copied())
|
|
||||||
.chain(b2.0.iter().copied().zip(h_bold1.0.iter().copied()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
L_terms.push((c_l, g));
|
|
||||||
L_terms.push((d_l, h));
|
|
||||||
let L = multiexp(&L_terms) * Scalar(crate::INV_EIGHT());
|
|
||||||
L_vec.push(L);
|
|
||||||
L_terms.zeroize();
|
|
||||||
|
|
||||||
let mut R_terms = (a2.clone() * y_n_hat)
|
|
||||||
.0
|
|
||||||
.drain(..)
|
|
||||||
.zip(g_bold1.0.iter().copied())
|
|
||||||
.chain(b1.0.iter().copied().zip(h_bold2.0.iter().copied()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
R_terms.push((c_r, g));
|
|
||||||
R_terms.push((d_r, h));
|
|
||||||
let R = multiexp(&R_terms) * Scalar(crate::INV_EIGHT());
|
|
||||||
R_vec.push(R);
|
|
||||||
R_terms.zeroize();
|
|
||||||
|
|
||||||
let (e, inv_e, e_square, inv_e_square);
|
|
||||||
(e, inv_e, e_square, inv_e_square, g_bold, h_bold) =
|
|
||||||
Self::next_G_H(&mut transcript, g_bold1, g_bold2, h_bold1, h_bold2, L, R, y_inv_n_hat);
|
|
||||||
|
|
||||||
a = (a1 * e) + &(a2 * (y_n_hat * inv_e));
|
|
||||||
b = (b1 * inv_e) + &(b2 * e);
|
|
||||||
alpha += (d_l * e_square) + (d_r * inv_e_square);
|
|
||||||
|
|
||||||
debug_assert_eq!(g_bold.len(), a.len());
|
|
||||||
debug_assert_eq!(g_bold.len(), h_bold.len());
|
|
||||||
debug_assert_eq!(g_bold.len(), b.len());
|
|
||||||
}
|
|
||||||
|
|
||||||
// n == 1 case from figure 1
|
|
||||||
debug_assert_eq!(g_bold.len(), 1);
|
|
||||||
debug_assert_eq!(h_bold.len(), 1);
|
|
||||||
|
|
||||||
debug_assert_eq!(a.len(), 1);
|
|
||||||
debug_assert_eq!(b.len(), 1);
|
|
||||||
|
|
||||||
let r = Scalar::random(&mut *rng);
|
|
||||||
let s = Scalar::random(&mut *rng);
|
|
||||||
let delta = Scalar::random(&mut *rng);
|
|
||||||
let eta = Scalar::random(&mut *rng);
|
|
||||||
|
|
||||||
let ry = r * y[0];
|
|
||||||
|
|
||||||
let mut A_terms =
|
|
||||||
vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
|
||||||
let A = multiexp(&A_terms) * Scalar(crate::INV_EIGHT());
|
|
||||||
A_terms.zeroize();
|
|
||||||
|
|
||||||
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
|
||||||
let B = multiexp(&B_terms) * Scalar(crate::INV_EIGHT());
|
|
||||||
B_terms.zeroize();
|
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, A, B);
|
|
||||||
|
|
||||||
let r_answer = r + (a[0] * e);
|
|
||||||
let s_answer = s + (b[0] * e);
|
|
||||||
let delta_answer = eta + (delta * e) + (alpha * e.square());
|
|
||||||
|
|
||||||
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<Id, EdwardsPoint>,
|
|
||||||
id: Id,
|
|
||||||
mut transcript: Scalar,
|
|
||||||
mut proof: WipProof,
|
|
||||||
) -> bool {
|
|
||||||
let WipStatement { generators, P, y } = self;
|
|
||||||
|
|
||||||
let (g, h) = (Generators::g(), Generators::h());
|
|
||||||
|
|
||||||
// Verify the L/R lengths
|
|
||||||
{
|
|
||||||
let mut lr_len = 0;
|
|
||||||
while (1 << lr_len) < generators.len() {
|
|
||||||
lr_len += 1;
|
|
||||||
}
|
|
||||||
if (proof.L.len() != lr_len) ||
|
|
||||||
(proof.R.len() != lr_len) ||
|
|
||||||
(generators.len() != (1 << lr_len))
|
|
||||||
{
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let inv_y = {
|
|
||||||
let inv_y = y[0].invert().unwrap();
|
|
||||||
let mut res = Vec::with_capacity(y.len());
|
|
||||||
res.push(inv_y);
|
|
||||||
while res.len() < y.len() {
|
|
||||||
res.push(inv_y * res.last().unwrap());
|
|
||||||
}
|
|
||||||
res
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut P_terms = vec![(Scalar::ONE, P)];
|
|
||||||
P_terms.reserve(6 + (2 * generators.len()) + proof.L.len());
|
|
||||||
|
|
||||||
let mut challenges = Vec::with_capacity(proof.L.len());
|
|
||||||
let product_cache = {
|
|
||||||
let mut es = Vec::with_capacity(proof.L.len());
|
|
||||||
for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
|
|
||||||
es.push(Self::transcript_L_R(&mut transcript, *L, *R));
|
|
||||||
*L = L.mul_by_cofactor();
|
|
||||||
*R = R.mul_by_cofactor();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut inv_es = es.clone();
|
|
||||||
let mut scratch = vec![Scalar::ZERO; es.len()];
|
|
||||||
group::ff::BatchInverter::invert_with_external_scratch(&mut inv_es, &mut scratch);
|
|
||||||
drop(scratch);
|
|
||||||
|
|
||||||
debug_assert_eq!(es.len(), inv_es.len());
|
|
||||||
debug_assert_eq!(es.len(), proof.L.len());
|
|
||||||
debug_assert_eq!(es.len(), proof.R.len());
|
|
||||||
for ((e, inv_e), (L, R)) in
|
|
||||||
es.drain(..).zip(inv_es.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
|
|
||||||
{
|
|
||||||
debug_assert_eq!(e.invert().unwrap(), inv_e);
|
|
||||||
|
|
||||||
challenges.push((e, inv_e));
|
|
||||||
|
|
||||||
let e_square = e.square();
|
|
||||||
let inv_e_square = inv_e.square();
|
|
||||||
P_terms.push((e_square, *L));
|
|
||||||
P_terms.push((inv_e_square, *R));
|
|
||||||
}
|
|
||||||
|
|
||||||
Self::challenge_products(&challenges)
|
|
||||||
};
|
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
|
|
||||||
proof.A = proof.A.mul_by_cofactor();
|
|
||||||
proof.B = proof.B.mul_by_cofactor();
|
|
||||||
let neg_e_square = -e.square();
|
|
||||||
|
|
||||||
let mut multiexp = P_terms;
|
|
||||||
multiexp.reserve(4 + (2 * generators.len()));
|
|
||||||
for (scalar, _) in &mut multiexp {
|
|
||||||
*scalar *= neg_e_square;
|
|
||||||
}
|
|
||||||
|
|
||||||
let re = proof.r_answer * e;
|
|
||||||
for i in 0 .. generators.len() {
|
|
||||||
let mut scalar = product_cache[i] * re;
|
|
||||||
if i > 0 {
|
|
||||||
scalar *= inv_y[i - 1];
|
|
||||||
}
|
|
||||||
multiexp.push((scalar, generators.generator(GeneratorsList::GBold1, i)));
|
|
||||||
}
|
|
||||||
|
|
||||||
let se = proof.s_answer * e;
|
|
||||||
for i in 0 .. generators.len() {
|
|
||||||
multiexp.push((
|
|
||||||
se * product_cache[product_cache.len() - 1 - i],
|
|
||||||
generators.generator(GeneratorsList::HBold1, i),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
multiexp.push((-e, proof.A));
|
|
||||||
multiexp.push((proof.r_answer * y[0] * proof.s_answer, g));
|
|
||||||
multiexp.push((proof.delta_answer, h));
|
|
||||||
multiexp.push((-Scalar::ONE, proof.B));
|
|
||||||
|
|
||||||
verifier.queue(rng, id, multiexp);
|
|
||||||
|
|
||||||
true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,103 +1,55 @@
|
|||||||
use core::{
|
use core::ops::{Add, Sub, Mul, Index};
|
||||||
borrow::Borrow,
|
|
||||||
ops::{Index, IndexMut, Add, Sub, Mul},
|
|
||||||
};
|
|
||||||
use std_shims::vec::Vec;
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use group::ff::Field;
|
use group::ff::Field;
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
use multiexp::multiexp;
|
use multiexp::multiexp;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
||||||
|
macro_rules! math_op {
|
||||||
|
($Op: ident, $op: ident, $f: expr) => {
|
||||||
|
impl $Op<Scalar> for ScalarVector {
|
||||||
|
type Output = Self;
|
||||||
|
fn $op(self, b: Scalar) -> Self {
|
||||||
|
Self(self.0.iter().map(|a| $f((a, &b))).collect())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Index<usize> for ScalarVector {
|
impl $Op<Scalar> for &ScalarVector {
|
||||||
type Output = Scalar;
|
type Output = ScalarVector;
|
||||||
fn index(&self, index: usize) -> &Scalar {
|
fn $op(self, b: Scalar) -> ScalarVector {
|
||||||
&self.0[index]
|
ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
|
||||||
}
|
}
|
||||||
}
|
|
||||||
impl IndexMut<usize> for ScalarVector {
|
|
||||||
fn index_mut(&mut self, index: usize) -> &mut Scalar {
|
|
||||||
&mut self.0[index]
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl<S: Borrow<Scalar>> Add<S> for ScalarVector {
|
impl $Op<ScalarVector> for ScalarVector {
|
||||||
type Output = ScalarVector;
|
type Output = Self;
|
||||||
fn add(mut self, scalar: S) -> ScalarVector {
|
fn $op(self, b: Self) -> Self {
|
||||||
for s in &mut self.0 {
|
|
||||||
*s += scalar.borrow();
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl<S: Borrow<Scalar>> Sub<S> for ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn sub(mut self, scalar: S) -> ScalarVector {
|
|
||||||
for s in &mut self.0 {
|
|
||||||
*s -= scalar.borrow();
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl<S: Borrow<Scalar>> Mul<S> for ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn mul(mut self, scalar: S) -> ScalarVector {
|
|
||||||
for s in &mut self.0 {
|
|
||||||
*s *= scalar.borrow();
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Add<&ScalarVector> for ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn add(mut self, other: &ScalarVector) -> ScalarVector {
|
|
||||||
debug_assert_eq!(self.len(), other.len());
|
|
||||||
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
|
|
||||||
*s += o;
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Sub<&ScalarVector> for ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn sub(mut self, other: &ScalarVector) -> ScalarVector {
|
|
||||||
debug_assert_eq!(self.len(), other.len());
|
|
||||||
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
|
|
||||||
*s -= o;
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
impl Mul<&ScalarVector> for ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn mul(mut self, other: &ScalarVector) -> ScalarVector {
|
|
||||||
debug_assert_eq!(self.len(), other.len());
|
|
||||||
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
|
|
||||||
*s *= o;
|
|
||||||
}
|
|
||||||
self
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Mul<&[EdwardsPoint]> for &ScalarVector {
|
|
||||||
type Output = EdwardsPoint;
|
|
||||||
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
|
|
||||||
debug_assert_eq!(self.len(), b.len());
|
debug_assert_eq!(self.len(), b.len());
|
||||||
let mut multiexp_args = self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>();
|
Self(self.0.iter().zip(b.0.iter()).map($f).collect())
|
||||||
let res = multiexp(&multiexp_args);
|
|
||||||
multiexp_args.zeroize();
|
|
||||||
res
|
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl $Op<Self> for &ScalarVector {
|
||||||
|
type Output = ScalarVector;
|
||||||
|
fn $op(self, b: Self) -> ScalarVector {
|
||||||
|
debug_assert_eq!(self.len(), b.len());
|
||||||
|
ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
}
|
}
|
||||||
|
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
|
||||||
|
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
|
||||||
|
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);
|
||||||
|
|
||||||
impl ScalarVector {
|
impl ScalarVector {
|
||||||
pub(crate) fn new(len: usize) -> Self {
|
pub(crate) fn new(len: usize) -> Self {
|
||||||
ScalarVector(vec![Scalar::ZERO; len])
|
Self(vec![Scalar::ZERO; len])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
|
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
|
||||||
@@ -105,34 +57,81 @@ impl ScalarVector {
|
|||||||
|
|
||||||
let mut res = Vec::with_capacity(len);
|
let mut res = Vec::with_capacity(len);
|
||||||
res.push(Scalar::ONE);
|
res.push(Scalar::ONE);
|
||||||
res.push(x);
|
for i in 1 .. len {
|
||||||
for i in 2 .. len {
|
|
||||||
res.push(res[i - 1] * x);
|
res.push(res[i - 1] * x);
|
||||||
}
|
}
|
||||||
res.truncate(len);
|
Self(res)
|
||||||
ScalarVector(res)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn len(&self) -> usize {
|
pub(crate) fn even_powers(x: Scalar, pow: usize) -> Self {
|
||||||
self.0.len()
|
debug_assert!(pow != 0);
|
||||||
|
// Verify pow is a power of two
|
||||||
|
debug_assert_eq!(((pow - 1) & pow), 0);
|
||||||
|
|
||||||
|
let xsq = x * x;
|
||||||
|
let mut res = Self(Vec::with_capacity(pow / 2));
|
||||||
|
res.0.push(xsq);
|
||||||
|
|
||||||
|
let mut prev = 2;
|
||||||
|
while prev < pow {
|
||||||
|
res.0.push(res[res.len() - 1] * xsq);
|
||||||
|
prev += 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn sum(mut self) -> Scalar {
|
pub(crate) fn sum(mut self) -> Scalar {
|
||||||
self.0.drain(..).sum()
|
self.0.drain(..).sum()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn inner_product(self, vector: &Self) -> Scalar {
|
pub(crate) fn len(&self) -> usize {
|
||||||
(self * vector).sum()
|
self.0.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn weighted_inner_product(self, vector: &Self, y: &Self) -> Scalar {
|
pub(crate) fn split(self) -> (Self, Self) {
|
||||||
(self * vector * y).sum()
|
let (l, r) = self.0.split_at(self.0.len() / 2);
|
||||||
}
|
(Self(l.to_vec()), Self(r.to_vec()))
|
||||||
|
|
||||||
pub(crate) fn split(mut self) -> (Self, Self) {
|
|
||||||
debug_assert!(self.len() > 1);
|
|
||||||
let r = self.0.split_off(self.0.len() / 2);
|
|
||||||
debug_assert_eq!(self.len(), r.len());
|
|
||||||
(self, ScalarVector(r))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Index<usize> for ScalarVector {
|
||||||
|
type Output = Scalar;
|
||||||
|
fn index(&self, index: usize) -> &Scalar {
|
||||||
|
&self.0[index]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
|
||||||
|
(a * b).sum()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
|
||||||
|
ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
|
||||||
|
// y ** 0 is not used as a power
|
||||||
|
(a * b * weighted_powers(y, a.len())).sum()
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Mul<&[EdwardsPoint]> for &ScalarVector {
|
||||||
|
type Output = EdwardsPoint;
|
||||||
|
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
|
||||||
|
debug_assert_eq!(self.len(), b.len());
|
||||||
|
multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn hadamard_fold(
|
||||||
|
l: &[EdwardsPoint],
|
||||||
|
r: &[EdwardsPoint],
|
||||||
|
a: Scalar,
|
||||||
|
b: Scalar,
|
||||||
|
) -> Vec<EdwardsPoint> {
|
||||||
|
let mut res = Vec::with_capacity(l.len() / 2);
|
||||||
|
for i in 0 .. l.len() {
|
||||||
|
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|||||||
@@ -6,10 +6,9 @@ use std_shims::{
|
|||||||
io::{self, Read, Write},
|
io::{self, Read, Write},
|
||||||
};
|
};
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
use subtle::{ConstantTimeEq, ConditionallySelectable};
|
use subtle::{ConstantTimeEq, Choice, CtOption};
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use curve25519_dalek::{
|
use curve25519_dalek::{
|
||||||
constants::ED25519_BASEPOINT_TABLE,
|
constants::ED25519_BASEPOINT_TABLE,
|
||||||
@@ -62,7 +61,7 @@ pub struct ClsagInput {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl ClsagInput {
|
impl ClsagInput {
|
||||||
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
|
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<Self, ClsagError> {
|
||||||
let n = decoys.len();
|
let n = decoys.len();
|
||||||
if n > u8::MAX.into() {
|
if n > u8::MAX.into() {
|
||||||
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
|
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
|
||||||
@@ -77,7 +76,7 @@ impl ClsagInput {
|
|||||||
Err(ClsagError::InvalidCommitment)?;
|
Err(ClsagError::InvalidCommitment)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(ClsagInput { commitment, decoys })
|
Ok(Self { commitment, decoys })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -96,7 +95,7 @@ fn core(
|
|||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
D: &EdwardsPoint,
|
D: &EdwardsPoint,
|
||||||
s: &[Scalar],
|
s: &[Scalar],
|
||||||
A_c1: &Mode,
|
A_c1: Mode,
|
||||||
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
|
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
|
||||||
let n = ring.len();
|
let n = ring.len();
|
||||||
|
|
||||||
@@ -164,18 +163,23 @@ fn core(
|
|||||||
Mode::Verify(c1) => {
|
Mode::Verify(c1) => {
|
||||||
start = 0;
|
start = 0;
|
||||||
end = n;
|
end = n;
|
||||||
c = *c1;
|
c = c1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Perform the core loop
|
// Perform the core loop
|
||||||
let mut c1 = c;
|
let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
|
||||||
for i in (start .. end).map(|i| i % n) {
|
for i in (start .. end).map(|i| i % n) {
|
||||||
|
// This will only execute once and shouldn't need to be constant time. Making it constant time
|
||||||
|
// removes the risk of branch prediction creating timing differences depending on ring index
|
||||||
|
// however
|
||||||
|
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
|
||||||
|
|
||||||
let c_p = mu_P * c;
|
let c_p = mu_P * c;
|
||||||
let c_c = mu_C * c;
|
let c_c = mu_C * c;
|
||||||
|
|
||||||
let L = (&s[i] * ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
|
let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
|
||||||
let PH = hash_to_point(&P[i]);
|
let PH = hash_to_point(P[i]);
|
||||||
// Shouldn't be an issue as all of the variables in this vartime statement are public
|
// Shouldn't be an issue as all of the variables in this vartime statement are public
|
||||||
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);
|
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);
|
||||||
|
|
||||||
@@ -183,15 +187,10 @@ fn core(
|
|||||||
to_hash.extend(L.compress().to_bytes());
|
to_hash.extend(L.compress().to_bytes());
|
||||||
to_hash.extend(R.compress().to_bytes());
|
to_hash.extend(R.compress().to_bytes());
|
||||||
c = hash_to_scalar(&to_hash);
|
c = hash_to_scalar(&to_hash);
|
||||||
|
|
||||||
// This will only execute once and shouldn't need to be constant time. Making it constant time
|
|
||||||
// removes the risk of branch prediction creating timing differences depending on ring index
|
|
||||||
// however
|
|
||||||
c1.conditional_assign(&c, i.ct_eq(&(n - 1)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// This first tuple is needed to continue signing, the latter is the c to be tested/worked with
|
// This first tuple is needed to continue signing, the latter is the c to be tested/worked with
|
||||||
((D, c * mu_P, c * mu_C), c1)
|
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// CLSAG signature, as used in Monero.
|
/// CLSAG signature, as used in Monero.
|
||||||
@@ -205,6 +204,7 @@ pub struct Clsag {
|
|||||||
impl Clsag {
|
impl Clsag {
|
||||||
// Sign core is the extension of core as needed for signing, yet is shared between single signer
|
// Sign core is the extension of core as needed for signing, yet is shared between single signer
|
||||||
// and multisig, hence why it's still core
|
// and multisig, hence why it's still core
|
||||||
|
#[allow(clippy::many_single_char_names)]
|
||||||
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
|
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
I: &EdwardsPoint,
|
I: &EdwardsPoint,
|
||||||
@@ -213,22 +213,22 @@ impl Clsag {
|
|||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
A: EdwardsPoint,
|
A: EdwardsPoint,
|
||||||
AH: EdwardsPoint,
|
AH: EdwardsPoint,
|
||||||
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
|
) -> (Self, EdwardsPoint, Scalar, Scalar) {
|
||||||
let r: usize = input.decoys.i.into();
|
let r: usize = input.decoys.i.into();
|
||||||
|
|
||||||
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
|
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
|
||||||
let z = input.commitment.mask - mask;
|
let z = input.commitment.mask - mask;
|
||||||
|
|
||||||
let H = hash_to_point(&input.decoys.ring[r][0]);
|
let H = hash_to_point(input.decoys.ring[r][0]);
|
||||||
let D = H * z;
|
let D = H * z;
|
||||||
let mut s = Vec::with_capacity(input.decoys.ring.len());
|
let mut s = Vec::with_capacity(input.decoys.ring.len());
|
||||||
for _ in 0 .. input.decoys.ring.len() {
|
for _ in 0 .. input.decoys.ring.len() {
|
||||||
s.push(random_scalar(rng));
|
s.push(random_scalar(rng));
|
||||||
}
|
}
|
||||||
let ((D, p, c), c1) =
|
let ((D, p, c), c1) =
|
||||||
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH));
|
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
|
||||||
|
|
||||||
(Clsag { D, s, c1 }, pseudo_out, p, c * z)
|
(Self { D, s, c1 }, pseudo_out, p, c * z)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate CLSAG signatures for the given inputs.
|
/// Generate CLSAG signatures for the given inputs.
|
||||||
@@ -239,27 +239,28 @@ impl Clsag {
|
|||||||
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
|
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
|
||||||
sum_outputs: Scalar,
|
sum_outputs: Scalar,
|
||||||
msg: [u8; 32],
|
msg: [u8; 32],
|
||||||
) -> Vec<(Clsag, EdwardsPoint)> {
|
) -> Vec<(Self, EdwardsPoint)> {
|
||||||
let mut res = Vec::with_capacity(inputs.len());
|
let mut res = Vec::with_capacity(inputs.len());
|
||||||
let mut sum_pseudo_outs = Scalar::ZERO;
|
let mut sum_pseudo_outs = Scalar::zero();
|
||||||
for i in 0 .. inputs.len() {
|
for i in 0 .. inputs.len() {
|
||||||
let mut mask = random_scalar(rng);
|
let mask = if i == (inputs.len() - 1) {
|
||||||
if i == (inputs.len() - 1) {
|
sum_outputs - sum_pseudo_outs
|
||||||
mask = sum_outputs - sum_pseudo_outs;
|
|
||||||
} else {
|
} else {
|
||||||
|
let mask = random_scalar(rng);
|
||||||
sum_pseudo_outs += mask;
|
sum_pseudo_outs += mask;
|
||||||
}
|
mask
|
||||||
|
};
|
||||||
|
|
||||||
let mut nonce = Zeroizing::new(random_scalar(rng));
|
let mut nonce = Zeroizing::new(random_scalar(rng));
|
||||||
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
|
let (mut clsag, pseudo_out, p, c) = Self::sign_core(
|
||||||
rng,
|
rng,
|
||||||
&inputs[i].1,
|
&inputs[i].1,
|
||||||
&inputs[i].2,
|
&inputs[i].2,
|
||||||
mask,
|
mask,
|
||||||
&msg,
|
&msg,
|
||||||
nonce.deref() * ED25519_BASEPOINT_TABLE,
|
nonce.deref() * &ED25519_BASEPOINT_TABLE,
|
||||||
nonce.deref() *
|
nonce.deref() *
|
||||||
hash_to_point(&inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
|
hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
|
||||||
);
|
);
|
||||||
clsag.s[usize::from(inputs[i].2.decoys.i)] =
|
clsag.s[usize::from(inputs[i].2.decoys.i)] =
|
||||||
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
|
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
|
||||||
@@ -301,7 +302,7 @@ impl Clsag {
|
|||||||
Err(ClsagError::InvalidD)?;
|
Err(ClsagError::InvalidD)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1));
|
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
|
||||||
if c1 != self.c1 {
|
if c1 != self.c1 {
|
||||||
Err(ClsagError::InvalidC1)?;
|
Err(ClsagError::InvalidC1)?;
|
||||||
}
|
}
|
||||||
@@ -318,7 +319,7 @@ impl Clsag {
|
|||||||
write_point(&self.D, w)
|
write_point(&self.D, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
|
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Self> {
|
||||||
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
|
Ok(Self { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,13 +1,20 @@
|
|||||||
use core::{ops::Deref, fmt::Debug};
|
use core::{ops::Deref, fmt::Debug};
|
||||||
use std_shims::io::{self, Read, Write};
|
use std_shims::{
|
||||||
use std::sync::{Arc, RwLock};
|
sync::Arc,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
use std::sync::RwLock;
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
||||||
use rand_chacha::ChaCha20Rng;
|
use rand_chacha::ChaCha20Rng;
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
use curve25519_dalek::{
|
||||||
|
traits::{Identity, IsIdentity},
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::EdwardsPoint,
|
||||||
|
};
|
||||||
|
|
||||||
use group::{ff::Field, Group, GroupEncoding};
|
use group::{ff::Field, Group, GroupEncoding};
|
||||||
|
|
||||||
@@ -44,7 +51,7 @@ impl ClsagInput {
|
|||||||
// if in use
|
// if in use
|
||||||
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
|
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
|
||||||
transcript.append_message(b"key", pair[0].compress().to_bytes());
|
transcript.append_message(b"key", pair[0].compress().to_bytes());
|
||||||
transcript.append_message(b"commitment", pair[1].compress().to_bytes())
|
transcript.append_message(b"commitment", pair[1].compress().to_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Doesn't include the commitment's parts as the above ring + index includes the commitment
|
// Doesn't include the commitment's parts as the above ring + index includes the commitment
|
||||||
@@ -61,8 +68,8 @@ pub struct ClsagDetails {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl ClsagDetails {
|
impl ClsagDetails {
|
||||||
pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
|
pub fn new(input: ClsagInput, mask: Scalar) -> Self {
|
||||||
ClsagDetails { input, mask }
|
Self { input, mask }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -112,11 +119,11 @@ impl ClsagMultisig {
|
|||||||
transcript: RecommendedTranscript,
|
transcript: RecommendedTranscript,
|
||||||
output_key: EdwardsPoint,
|
output_key: EdwardsPoint,
|
||||||
details: Arc<RwLock<Option<ClsagDetails>>>,
|
details: Arc<RwLock<Option<ClsagDetails>>>,
|
||||||
) -> ClsagMultisig {
|
) -> Self {
|
||||||
ClsagMultisig {
|
Self {
|
||||||
transcript,
|
transcript,
|
||||||
|
|
||||||
H: hash_to_point(&output_key),
|
H: hash_to_point(output_key),
|
||||||
image: EdwardsPoint::identity(),
|
image: EdwardsPoint::identity(),
|
||||||
|
|
||||||
details,
|
details,
|
||||||
@@ -143,7 +150,7 @@ pub(crate) fn add_key_image_share(
|
|||||||
participant: Participant,
|
participant: Participant,
|
||||||
share: EdwardsPoint,
|
share: EdwardsPoint,
|
||||||
) {
|
) {
|
||||||
if image.is_identity().into() {
|
if image.is_identity() {
|
||||||
*image = generator * offset;
|
*image = generator * offset;
|
||||||
}
|
}
|
||||||
*image += share * lagrange::<dfg::Scalar>(participant, included).0;
|
*image += share * lagrange::<dfg::Scalar>(participant, included).0;
|
||||||
@@ -184,10 +191,10 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||||||
reader.read_exact(&mut bytes)?;
|
reader.read_exact(&mut bytes)?;
|
||||||
// dfg ensures the point is torsion free
|
// dfg ensures the point is torsion free
|
||||||
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
|
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
|
||||||
.ok_or_else(|| io::Error::other("invalid key image"))?;
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
|
||||||
// Ensure this is a canonical point
|
// Ensure this is a canonical point
|
||||||
if xH.to_bytes() != bytes {
|
if xH.to_bytes() != bytes {
|
||||||
Err(io::Error::other("non-canonical key image"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
|
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
|
||||||
@@ -199,8 +206,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||||||
l: Participant,
|
l: Participant,
|
||||||
addendum: ClsagAddendum,
|
addendum: ClsagAddendum,
|
||||||
) -> Result<(), FrostError> {
|
) -> Result<(), FrostError> {
|
||||||
// TODO: This check is faulty if two shares are additive inverses of each other
|
if self.image.is_identity() {
|
||||||
if self.image.is_identity().into() {
|
|
||||||
self.transcript.domain_separate(b"CLSAG");
|
self.transcript.domain_separate(b"CLSAG");
|
||||||
self.input().transcript(&mut self.transcript);
|
self.input().transcript(&mut self.transcript);
|
||||||
self.transcript.append_message(b"mask", self.mask().to_bytes());
|
self.transcript.append_message(b"mask", self.mask().to_bytes());
|
||||||
|
|||||||
@@ -3,6 +3,6 @@ use curve25519_dalek::edwards::EdwardsPoint;
|
|||||||
pub use monero_generators::{hash_to_point as raw_hash_to_point};
|
pub use monero_generators::{hash_to_point as raw_hash_to_point};
|
||||||
|
|
||||||
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
|
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
|
||||||
pub fn hash_to_point(key: &EdwardsPoint) -> EdwardsPoint {
|
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
|
||||||
raw_hash_to_point(key.compress().to_bytes())
|
raw_hash_to_point(key.compress().to_bytes())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,85 +3,17 @@ use std_shims::{
|
|||||||
io::{self, Read, Write},
|
io::{self, Read, Write},
|
||||||
};
|
};
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
use curve25519_dalek::{traits::IsIdentity, Scalar, EdwardsPoint};
|
use crate::serialize::*;
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
use crate::{hash_to_scalar, ringct::hash_to_point};
|
||||||
|
|
||||||
use monero_generators::H;
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
|
||||||
use crate::{hash_to_scalar, ringct::hash_to_point, serialize::*};
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
|
||||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
|
||||||
pub enum MlsagError {
|
|
||||||
#[cfg_attr(feature = "std", error("invalid ring"))]
|
|
||||||
InvalidRing,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid amount of key images"))]
|
|
||||||
InvalidAmountOfKeyImages,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid ss"))]
|
|
||||||
InvalidSs,
|
|
||||||
#[cfg_attr(feature = "std", error("key image was identity"))]
|
|
||||||
IdentityKeyImage,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid ci"))]
|
|
||||||
InvalidCi,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct RingMatrix {
|
|
||||||
matrix: Vec<Vec<EdwardsPoint>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RingMatrix {
|
|
||||||
pub fn new(matrix: Vec<Vec<EdwardsPoint>>) -> Result<Self, MlsagError> {
|
|
||||||
// Monero requires that there is more than one ring member for MLSAG signatures:
|
|
||||||
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
|
||||||
// src/ringct/rctSigs.cpp#L462
|
|
||||||
if matrix.len() < 2 {
|
|
||||||
Err(MlsagError::InvalidRing)?;
|
|
||||||
}
|
|
||||||
for member in &matrix {
|
|
||||||
if member.is_empty() || (member.len() != matrix[0].len()) {
|
|
||||||
Err(MlsagError::InvalidRing)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(RingMatrix { matrix })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Construct a ring matrix for an individual output.
|
|
||||||
pub fn individual(
|
|
||||||
ring: &[[EdwardsPoint; 2]],
|
|
||||||
pseudo_out: EdwardsPoint,
|
|
||||||
) -> Result<Self, MlsagError> {
|
|
||||||
let mut matrix = Vec::with_capacity(ring.len());
|
|
||||||
for ring_member in ring {
|
|
||||||
matrix.push(vec![ring_member[0], ring_member[1] - pseudo_out]);
|
|
||||||
}
|
|
||||||
RingMatrix::new(matrix)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn iter(&self) -> impl Iterator<Item = &[EdwardsPoint]> {
|
|
||||||
self.matrix.iter().map(AsRef::as_ref)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the amount of members in the ring.
|
|
||||||
pub fn members(&self) -> usize {
|
|
||||||
self.matrix.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the length of a ring member.
|
|
||||||
///
|
|
||||||
/// A ring member is a vector of points for which the signer knows all of the discrete logarithms
|
|
||||||
/// of.
|
|
||||||
pub fn member_len(&self) -> usize {
|
|
||||||
// this is safe to do as the constructors don't allow empty rings
|
|
||||||
self.matrix[0].len()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct Mlsag {
|
pub struct Mlsag {
|
||||||
pub ss: Vec<Vec<Scalar>>,
|
pub ss: Vec<[Scalar; 2]>,
|
||||||
pub cc: Scalar,
|
pub cc: Scalar,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -93,124 +25,48 @@ impl Mlsag {
|
|||||||
write_scalar(&self.cc, w)
|
write_scalar(&self.cc, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(mixins: usize, ss_2_elements: usize, r: &mut R) -> io::Result<Mlsag> {
|
pub fn read<R: Read>(mixins: usize, r: &mut R) -> io::Result<Self> {
|
||||||
Ok(Mlsag {
|
Ok(Self {
|
||||||
ss: (0 .. mixins)
|
ss: (0 .. mixins).map(|_| read_array(read_scalar, r)).collect::<Result<_, _>>()?,
|
||||||
.map(|_| read_raw_vec(read_scalar, ss_2_elements, r))
|
|
||||||
.collect::<Result<_, _>>()?,
|
|
||||||
cc: read_scalar(r)?,
|
cc: read_scalar(r)?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
#[must_use]
|
||||||
pub fn verify(
|
pub fn verify(
|
||||||
&self,
|
&self,
|
||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
ring: &RingMatrix,
|
ring: &[[EdwardsPoint; 2]],
|
||||||
key_images: &[EdwardsPoint],
|
key_image: &EdwardsPoint,
|
||||||
) -> Result<(), MlsagError> {
|
) -> bool {
|
||||||
// Mlsag allows for layers to not need linkability, hence they don't need key images
|
if ring.is_empty() {
|
||||||
// Monero requires that there is always only 1 non-linkable layer - the amount commitments.
|
return false;
|
||||||
if ring.member_len() != (key_images.len() + 1) {
|
|
||||||
Err(MlsagError::InvalidAmountOfKeyImages)?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut buf = Vec::with_capacity(6 * 32);
|
let mut buf = Vec::with_capacity(6 * 32);
|
||||||
|
let mut ci = self.cc;
|
||||||
|
for (i, ring_member) in ring.iter().enumerate() {
|
||||||
buf.extend_from_slice(msg);
|
buf.extend_from_slice(msg);
|
||||||
|
|
||||||
let mut ci = self.cc;
|
|
||||||
|
|
||||||
// This is an iterator over the key images as options with an added entry of `None` at the
|
|
||||||
// end for the non-linkable layer
|
|
||||||
let key_images_iter = key_images.iter().map(|ki| Some(*ki)).chain(core::iter::once(None));
|
|
||||||
|
|
||||||
if ring.matrix.len() != self.ss.len() {
|
|
||||||
Err(MlsagError::InvalidSs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (ring_member, ss) in ring.iter().zip(&self.ss) {
|
|
||||||
if ring_member.len() != ss.len() {
|
|
||||||
Err(MlsagError::InvalidSs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
for ((ring_member_entry, s), ki) in ring_member.iter().zip(ss).zip(key_images_iter.clone()) {
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let L = EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, ring_member_entry, s);
|
let L =
|
||||||
|
|r| EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, &ring_member[r], &self.ss[i][r]);
|
||||||
|
|
||||||
buf.extend_from_slice(ring_member_entry.compress().as_bytes());
|
buf.extend_from_slice(ring_member[0].compress().as_bytes());
|
||||||
buf.extend_from_slice(L.compress().as_bytes());
|
buf.extend_from_slice(L(0).compress().as_bytes());
|
||||||
|
|
||||||
// Not all dimensions need to be linkable, e.g. commitments, and only linkable layers need
|
|
||||||
// to have key images.
|
|
||||||
if let Some(ki) = ki {
|
|
||||||
if ki.is_identity() {
|
|
||||||
Err(MlsagError::IdentityKeyImage)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let R = (s * hash_to_point(ring_member_entry)) + (ci * ki);
|
let R = (self.ss[i][0] * hash_to_point(ring_member[0])) + (ci * key_image);
|
||||||
buf.extend_from_slice(R.compress().as_bytes());
|
buf.extend_from_slice(R.compress().as_bytes());
|
||||||
}
|
|
||||||
}
|
buf.extend_from_slice(ring_member[1].compress().as_bytes());
|
||||||
|
buf.extend_from_slice(L(1).compress().as_bytes());
|
||||||
|
|
||||||
ci = hash_to_scalar(&buf);
|
ci = hash_to_scalar(&buf);
|
||||||
// keep the msg in the buffer.
|
buf.clear();
|
||||||
buf.drain(msg.len() ..);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if ci != self.cc {
|
ci == self.cc
|
||||||
Err(MlsagError::InvalidCi)?
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An aggregate ring matrix builder, usable to set up the ring matrix to prove/verify an aggregate
|
|
||||||
/// MLSAG signature.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct AggregateRingMatrixBuilder {
|
|
||||||
key_ring: Vec<Vec<EdwardsPoint>>,
|
|
||||||
amounts_ring: Vec<EdwardsPoint>,
|
|
||||||
sum_out: EdwardsPoint,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AggregateRingMatrixBuilder {
|
|
||||||
/// Create a new AggregateRingMatrixBuilder.
|
|
||||||
///
|
|
||||||
/// Takes in the transaction's outputs; commitments and fee.
|
|
||||||
pub fn new(commitments: &[EdwardsPoint], fee: u64) -> Self {
|
|
||||||
AggregateRingMatrixBuilder {
|
|
||||||
key_ring: vec![],
|
|
||||||
amounts_ring: vec![],
|
|
||||||
sum_out: commitments.iter().sum::<EdwardsPoint>() + (H() * Scalar::from(fee)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Push a ring of [output key, commitment] to the matrix.
|
|
||||||
pub fn push_ring(&mut self, ring: &[[EdwardsPoint; 2]]) -> Result<(), MlsagError> {
|
|
||||||
if self.key_ring.is_empty() {
|
|
||||||
self.key_ring = vec![vec![]; ring.len()];
|
|
||||||
// Now that we know the length of the ring, fill the `amounts_ring`.
|
|
||||||
self.amounts_ring = vec![-self.sum_out; ring.len()];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (self.amounts_ring.len() != ring.len()) || ring.is_empty() {
|
|
||||||
// All the rings in an aggregate matrix must be the same length.
|
|
||||||
return Err(MlsagError::InvalidRing);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (i, ring_member) in ring.iter().enumerate() {
|
|
||||||
self.key_ring[i].push(ring_member[0]);
|
|
||||||
self.amounts_ring[i] += ring_member[1]
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Build and return the [`RingMatrix`]
|
|
||||||
pub fn build(mut self) -> Result<RingMatrix, MlsagError> {
|
|
||||||
for (i, amount_commitment) in self.amounts_ring.drain(..).enumerate() {
|
|
||||||
self.key_ring[i].push(amount_commitment);
|
|
||||||
}
|
|
||||||
RingMatrix::new(self.key_ring)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ use crate::{
|
|||||||
|
|
||||||
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
|
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
|
||||||
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
|
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
|
||||||
hash_to_point(&(ED25519_BASEPOINT_TABLE * secret.deref())) * secret.deref()
|
hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
@@ -38,21 +38,21 @@ pub enum EncryptedAmount {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl EncryptedAmount {
|
impl EncryptedAmount {
|
||||||
pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<EncryptedAmount> {
|
pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<Self> {
|
||||||
Ok(if !compact {
|
Ok(if compact {
|
||||||
EncryptedAmount::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
|
Self::Compact { amount: read_bytes(r)? }
|
||||||
} else {
|
} else {
|
||||||
EncryptedAmount::Compact { amount: read_bytes(r)? }
|
Self::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
EncryptedAmount::Original { mask, amount } => {
|
Self::Original { mask, amount } => {
|
||||||
w.write_all(mask)?;
|
w.write_all(mask)?;
|
||||||
w.write_all(amount)
|
w.write_all(amount)
|
||||||
}
|
}
|
||||||
EncryptedAmount::Compact { amount } => w.write_all(amount),
|
Self::Compact { amount } => w.write_all(amount),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -61,7 +61,7 @@ impl EncryptedAmount {
|
|||||||
pub enum RctType {
|
pub enum RctType {
|
||||||
/// No RCT proofs.
|
/// No RCT proofs.
|
||||||
Null,
|
Null,
|
||||||
/// One MLSAG for multiple inputs and Borromean range proofs (RCTTypeFull).
|
/// One MLSAG for a single input and a Borromean range proof (RCTTypeFull).
|
||||||
MlsagAggregate,
|
MlsagAggregate,
|
||||||
// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
|
// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
|
||||||
MlsagIndividual,
|
MlsagIndividual,
|
||||||
@@ -79,36 +79,33 @@ pub enum RctType {
|
|||||||
impl RctType {
|
impl RctType {
|
||||||
pub fn to_byte(self) -> u8 {
|
pub fn to_byte(self) -> u8 {
|
||||||
match self {
|
match self {
|
||||||
RctType::Null => 0,
|
Self::Null => 0,
|
||||||
RctType::MlsagAggregate => 1,
|
Self::MlsagAggregate => 1,
|
||||||
RctType::MlsagIndividual => 2,
|
Self::MlsagIndividual => 2,
|
||||||
RctType::Bulletproofs => 3,
|
Self::Bulletproofs => 3,
|
||||||
RctType::BulletproofsCompactAmount => 4,
|
Self::BulletproofsCompactAmount => 4,
|
||||||
RctType::Clsag => 5,
|
Self::Clsag => 5,
|
||||||
RctType::BulletproofsPlus => 6,
|
Self::BulletproofsPlus => 6,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_byte(byte: u8) -> Option<Self> {
|
pub fn from_byte(byte: u8) -> Option<Self> {
|
||||||
Some(match byte {
|
Some(match byte {
|
||||||
0 => RctType::Null,
|
0 => Self::Null,
|
||||||
1 => RctType::MlsagAggregate,
|
1 => Self::MlsagAggregate,
|
||||||
2 => RctType::MlsagIndividual,
|
2 => Self::MlsagIndividual,
|
||||||
3 => RctType::Bulletproofs,
|
3 => Self::Bulletproofs,
|
||||||
4 => RctType::BulletproofsCompactAmount,
|
4 => Self::BulletproofsCompactAmount,
|
||||||
5 => RctType::Clsag,
|
5 => Self::Clsag,
|
||||||
6 => RctType::BulletproofsPlus,
|
6 => Self::BulletproofsPlus,
|
||||||
_ => None?,
|
_ => None?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compact_encrypted_amounts(&self) -> bool {
|
pub fn compact_encrypted_amounts(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
RctType::Null |
|
Self::Null | Self::MlsagAggregate | Self::MlsagIndividual | Self::Bulletproofs => false,
|
||||||
RctType::MlsagAggregate |
|
Self::BulletproofsCompactAmount | Self::Clsag | Self::BulletproofsPlus => true,
|
||||||
RctType::MlsagIndividual |
|
|
||||||
RctType::Bulletproofs => false,
|
|
||||||
RctType::BulletproofsCompactAmount | RctType::Clsag | RctType::BulletproofsPlus => true,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -122,16 +119,20 @@ pub struct RctBase {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl RctBase {
|
impl RctBase {
|
||||||
pub(crate) fn fee_weight(outputs: usize, fee: u64) -> usize {
|
pub(crate) fn fee_weight(outputs: usize) -> usize {
|
||||||
// 1 byte for the RCT signature type
|
1 + 8 + (outputs * (8 + 32))
|
||||||
1 + (outputs * (8 + 32)) + varint_len(fee)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
||||||
w.write_all(&[rct_type.to_byte()])?;
|
w.write_all(&[rct_type.to_byte()])?;
|
||||||
match rct_type {
|
match rct_type {
|
||||||
RctType::Null => Ok(()),
|
RctType::Null => Ok(()),
|
||||||
_ => {
|
RctType::MlsagAggregate |
|
||||||
|
RctType::MlsagIndividual |
|
||||||
|
RctType::Bulletproofs |
|
||||||
|
RctType::BulletproofsCompactAmount |
|
||||||
|
RctType::Clsag |
|
||||||
|
RctType::BulletproofsPlus => {
|
||||||
write_varint(&self.fee, w)?;
|
write_varint(&self.fee, w)?;
|
||||||
if rct_type == RctType::MlsagIndividual {
|
if rct_type == RctType::MlsagIndividual {
|
||||||
write_raw_vec(write_point, &self.pseudo_outs, w)?;
|
write_raw_vec(write_point, &self.pseudo_outs, w)?;
|
||||||
@@ -144,9 +145,9 @@ impl RctBase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(RctBase, RctType)> {
|
pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(Self, RctType)> {
|
||||||
let rct_type =
|
let rct_type = RctType::from_byte(read_byte(r)?)
|
||||||
RctType::from_byte(read_byte(r)?).ok_or_else(|| io::Error::other("invalid RCT type"))?;
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RCT type"))?;
|
||||||
|
|
||||||
match rct_type {
|
match rct_type {
|
||||||
RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
|
RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
|
||||||
@@ -160,16 +161,16 @@ impl RctBase {
|
|||||||
// If there are Bulletproofs, there must be a matching amount of outputs, implicitly
|
// If there are Bulletproofs, there must be a matching amount of outputs, implicitly
|
||||||
// banning 0 outputs
|
// banning 0 outputs
|
||||||
// Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
|
// Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
|
||||||
Err(io::Error::other("RCT with Bulletproofs(+) had 0 outputs"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "RCT with Bulletproofs(+) had 0 outputs"))?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
if rct_type == RctType::Null {
|
if rct_type == RctType::Null {
|
||||||
RctBase { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
|
Self { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
|
||||||
} else {
|
} else {
|
||||||
RctBase {
|
Self {
|
||||||
fee: read_varint(r)?,
|
fee: read_varint(r)?,
|
||||||
pseudo_outs: if rct_type == RctType::MlsagIndividual {
|
pseudo_outs: if rct_type == RctType::MlsagIndividual {
|
||||||
read_raw_vec(read_point, inputs, r)?
|
read_raw_vec(read_point, inputs, r)?
|
||||||
@@ -190,10 +191,6 @@ impl RctBase {
|
|||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
pub enum RctPrunable {
|
pub enum RctPrunable {
|
||||||
Null,
|
Null,
|
||||||
AggregateMlsagBorromean {
|
|
||||||
borromean: Vec<BorromeanRange>,
|
|
||||||
mlsag: Mlsag,
|
|
||||||
},
|
|
||||||
MlsagBorromean {
|
MlsagBorromean {
|
||||||
borromean: Vec<BorromeanRange>,
|
borromean: Vec<BorromeanRange>,
|
||||||
mlsags: Vec<Mlsag>,
|
mlsags: Vec<Mlsag>,
|
||||||
@@ -212,23 +209,18 @@ pub enum RctPrunable {
|
|||||||
|
|
||||||
impl RctPrunable {
|
impl RctPrunable {
|
||||||
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
||||||
// 1 byte for number of BPs (technically a VarInt, yet there's always just zero or one)
|
|
||||||
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
|
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
|
||||||
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
|
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
RctPrunable::Null => Ok(()),
|
Self::Null => Ok(()),
|
||||||
RctPrunable::AggregateMlsagBorromean { borromean, mlsag } => {
|
Self::MlsagBorromean { borromean, mlsags } => {
|
||||||
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
|
||||||
mlsag.write(w)
|
|
||||||
}
|
|
||||||
RctPrunable::MlsagBorromean { borromean, mlsags } => {
|
|
||||||
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
||||||
write_raw_vec(Mlsag::write, mlsags, w)
|
write_raw_vec(Mlsag::write, mlsags, w)
|
||||||
}
|
}
|
||||||
RctPrunable::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
|
Self::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
|
||||||
if rct_type == RctType::Bulletproofs {
|
if rct_type == RctType::Bulletproofs {
|
||||||
w.write_all(&1u32.to_le_bytes())?;
|
w.write_all(&1u32.to_le_bytes())?;
|
||||||
} else {
|
} else {
|
||||||
@@ -239,7 +231,7 @@ impl RctPrunable {
|
|||||||
write_raw_vec(Mlsag::write, mlsags, w)?;
|
write_raw_vec(Mlsag::write, mlsags, w)?;
|
||||||
write_raw_vec(write_point, pseudo_outs, w)
|
write_raw_vec(write_point, pseudo_outs, w)
|
||||||
}
|
}
|
||||||
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
Self::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
||||||
w.write_all(&[1])?;
|
w.write_all(&[1])?;
|
||||||
bulletproofs.write(w)?;
|
bulletproofs.write(w)?;
|
||||||
|
|
||||||
@@ -257,34 +249,17 @@ impl RctPrunable {
|
|||||||
|
|
||||||
pub fn read<R: Read>(
|
pub fn read<R: Read>(
|
||||||
rct_type: RctType,
|
rct_type: RctType,
|
||||||
ring_length: usize,
|
decoys: &[usize],
|
||||||
inputs: usize,
|
|
||||||
outputs: usize,
|
outputs: usize,
|
||||||
r: &mut R,
|
r: &mut R,
|
||||||
) -> io::Result<RctPrunable> {
|
) -> io::Result<Self> {
|
||||||
// While we generally don't bother with misc consensus checks, this affects the safety of
|
|
||||||
// the below defined rct_type function
|
|
||||||
// The exact line preventing zero-input transactions is:
|
|
||||||
// https://github.com/monero-project/monero/blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/
|
|
||||||
// src/ringct/rctSigs.cpp#L609
|
|
||||||
// And then for RctNull, that's only allowed for miner TXs which require one input of
|
|
||||||
// Input::Gen
|
|
||||||
if inputs == 0 {
|
|
||||||
Err(io::Error::other("transaction had no inputs"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(match rct_type {
|
Ok(match rct_type {
|
||||||
RctType::Null => RctPrunable::Null,
|
RctType::Null => Self::Null,
|
||||||
RctType::MlsagAggregate => RctPrunable::AggregateMlsagBorromean {
|
RctType::MlsagAggregate | RctType::MlsagIndividual => Self::MlsagBorromean {
|
||||||
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
||||||
mlsag: Mlsag::read(ring_length, inputs + 1, r)?,
|
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
|
||||||
},
|
},
|
||||||
RctType::MlsagIndividual => RctPrunable::MlsagBorromean {
|
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => Self::MlsagBulletproofs {
|
||||||
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
|
||||||
mlsags: (0 .. inputs).map(|_| Mlsag::read(ring_length, 2, r)).collect::<Result<_, _>>()?,
|
|
||||||
},
|
|
||||||
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => {
|
|
||||||
RctPrunable::MlsagBulletproofs {
|
|
||||||
bulletproofs: {
|
bulletproofs: {
|
||||||
if (if rct_type == RctType::Bulletproofs {
|
if (if rct_type == RctType::Bulletproofs {
|
||||||
u64::from(read_u32(r)?)
|
u64::from(read_u32(r)?)
|
||||||
@@ -292,40 +267,34 @@ impl RctPrunable {
|
|||||||
read_varint(r)?
|
read_varint(r)?
|
||||||
}) != 1
|
}) != 1
|
||||||
{
|
{
|
||||||
Err(io::Error::other("n bulletproofs instead of one"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
|
||||||
}
|
}
|
||||||
Bulletproofs::read(r)?
|
Bulletproofs::read(r)?
|
||||||
},
|
},
|
||||||
mlsags: (0 .. inputs)
|
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
|
||||||
.map(|_| Mlsag::read(ring_length, 2, r))
|
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
|
||||||
.collect::<Result<_, _>>()?,
|
},
|
||||||
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
|
RctType::Clsag | RctType::BulletproofsPlus => Self::Clsag {
|
||||||
}
|
|
||||||
}
|
|
||||||
RctType::Clsag | RctType::BulletproofsPlus => RctPrunable::Clsag {
|
|
||||||
bulletproofs: {
|
bulletproofs: {
|
||||||
if read_varint::<_, u64>(r)? != 1 {
|
if read_varint(r)? != 1 {
|
||||||
Err(io::Error::other("n bulletproofs instead of one"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
|
||||||
}
|
}
|
||||||
(if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
|
(if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
|
||||||
r,
|
r,
|
||||||
)?
|
)?
|
||||||
},
|
},
|
||||||
clsags: (0 .. inputs).map(|_| Clsag::read(ring_length, r)).collect::<Result<_, _>>()?,
|
clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
|
||||||
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
|
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
|
Self::Null => panic!("Serializing RctPrunable::Null for a signature"),
|
||||||
RctPrunable::AggregateMlsagBorromean { borromean, .. } |
|
Self::MlsagBorromean { borromean, .. } => borromean.iter().try_for_each(|rs| rs.write(w)),
|
||||||
RctPrunable::MlsagBorromean { borromean, .. } => {
|
Self::MlsagBulletproofs { bulletproofs, .. } => bulletproofs.signature_write(w),
|
||||||
borromean.iter().try_for_each(|rs| rs.write(w))
|
Self::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
|
||||||
}
|
|
||||||
RctPrunable::MlsagBulletproofs { bulletproofs, .. } |
|
|
||||||
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -341,8 +310,30 @@ impl RctSignatures {
|
|||||||
pub fn rct_type(&self) -> RctType {
|
pub fn rct_type(&self) -> RctType {
|
||||||
match &self.prunable {
|
match &self.prunable {
|
||||||
RctPrunable::Null => RctType::Null,
|
RctPrunable::Null => RctType::Null,
|
||||||
RctPrunable::AggregateMlsagBorromean { .. } => RctType::MlsagAggregate,
|
RctPrunable::MlsagBorromean { .. } => {
|
||||||
RctPrunable::MlsagBorromean { .. } => RctType::MlsagIndividual,
|
/*
|
||||||
|
This type of RctPrunable may have no outputs, yet pseudo_outs are per input
|
||||||
|
This will only be a valid RctSignatures if it's for a TX with inputs
|
||||||
|
That makes this valid for any valid RctSignatures
|
||||||
|
|
||||||
|
While it will be invalid for any invalid RctSignatures, potentially letting an invalid
|
||||||
|
MlsagAggregate be interpreted as a valid MlsagIndividual (or vice versa), they have
|
||||||
|
incompatible deserializations
|
||||||
|
|
||||||
|
This means it's impossible to receive a MlsagAggregate over the wire and interpret it
|
||||||
|
as a MlsagIndividual (or vice versa)
|
||||||
|
|
||||||
|
That only makes manual manipulation unsafe, which will always be true since these fields
|
||||||
|
are all pub
|
||||||
|
|
||||||
|
TODO: Consider making them private with read-only accessors?
|
||||||
|
*/
|
||||||
|
if self.base.pseudo_outs.is_empty() {
|
||||||
|
RctType::MlsagAggregate
|
||||||
|
} else {
|
||||||
|
RctType::MlsagIndividual
|
||||||
|
}
|
||||||
|
}
|
||||||
// RctBase ensures there's at least one output, making the following
|
// RctBase ensures there's at least one output, making the following
|
||||||
// inferences guaranteed/expects impossible on any valid RctSignatures
|
// inferences guaranteed/expects impossible on any valid RctSignatures
|
||||||
RctPrunable::MlsagBulletproofs { .. } => {
|
RctPrunable::MlsagBulletproofs { .. } => {
|
||||||
@@ -350,7 +341,7 @@ impl RctSignatures {
|
|||||||
self
|
self
|
||||||
.base
|
.base
|
||||||
.encrypted_amounts
|
.encrypted_amounts
|
||||||
.first()
|
.get(0)
|
||||||
.expect("MLSAG with Bulletproofs didn't have any outputs"),
|
.expect("MLSAG with Bulletproofs didn't have any outputs"),
|
||||||
EncryptedAmount::Original { .. }
|
EncryptedAmount::Original { .. }
|
||||||
) {
|
) {
|
||||||
@@ -369,8 +360,8 @@ impl RctSignatures {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize, fee: u64) -> usize {
|
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
||||||
RctBase::fee_weight(outputs, fee) + RctPrunable::fee_weight(protocol, inputs, outputs)
|
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
@@ -385,16 +376,8 @@ impl RctSignatures {
|
|||||||
serialized
|
serialized
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(
|
pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<Self> {
|
||||||
ring_length: usize,
|
let base = RctBase::read(decoys.len(), outputs, r)?;
|
||||||
inputs: usize,
|
Ok(Self { base: base.0, prunable: RctPrunable::read(base.1, &decoys, outputs, r)? })
|
||||||
outputs: usize,
|
|
||||||
r: &mut R,
|
|
||||||
) -> io::Result<RctSignatures> {
|
|
||||||
let base = RctBase::read(inputs, outputs, r)?;
|
|
||||||
Ok(RctSignatures {
|
|
||||||
base: base.0,
|
|
||||||
prunable: RctPrunable::read(base.1, ring_length, inputs, outputs, r)?,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,83 +1,29 @@
|
|||||||
use std::{sync::Arc, io::Read, time::Duration};
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
|
|
||||||
use tokio::sync::Mutex;
|
use digest_auth::AuthContext;
|
||||||
|
use reqwest::Client;
|
||||||
use digest_auth::{WwwAuthenticateHeader, AuthContext};
|
|
||||||
use simple_request::{
|
|
||||||
hyper::{StatusCode, header::HeaderValue, Request},
|
|
||||||
Response, Client,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::rpc::{RpcError, RpcConnection, Rpc};
|
use crate::rpc::{RpcError, RpcConnection, Rpc};
|
||||||
|
|
||||||
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(30);
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
enum Authentication {
|
|
||||||
// If unauthenticated, use a single client
|
|
||||||
Unauthenticated(Client),
|
|
||||||
// If authenticated, use a single client which supports being locked and tracks its nonce
|
|
||||||
// This ensures that if a nonce is requested, another caller doesn't make a request invalidating
|
|
||||||
// it
|
|
||||||
Authenticated {
|
|
||||||
username: String,
|
|
||||||
password: String,
|
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
connection: Arc<Mutex<(Option<(WwwAuthenticateHeader, u64)>, Client)>>,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/// An HTTP(S) transport for the RPC.
|
|
||||||
///
|
|
||||||
/// Requires tokio.
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct HttpRpc {
|
pub struct HttpRpc {
|
||||||
authentication: Authentication,
|
client: Client,
|
||||||
|
userpass: Option<(String, String)>,
|
||||||
url: String,
|
url: String,
|
||||||
request_timeout: Duration,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HttpRpc {
|
impl HttpRpc {
|
||||||
fn digest_auth_challenge(
|
|
||||||
response: &Response,
|
|
||||||
) -> Result<Option<(WwwAuthenticateHeader, u64)>, RpcError> {
|
|
||||||
Ok(if let Some(header) = response.headers().get("www-authenticate") {
|
|
||||||
Some((
|
|
||||||
digest_auth::parse(header.to_str().map_err(|_| {
|
|
||||||
RpcError::InvalidNode("www-authenticate header wasn't a string".to_string())
|
|
||||||
})?)
|
|
||||||
.map_err(|_| RpcError::InvalidNode("invalid digest-auth response".to_string()))?,
|
|
||||||
0,
|
|
||||||
))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new HTTP(S) RPC connection.
|
/// Create a new HTTP(S) RPC connection.
|
||||||
///
|
///
|
||||||
/// A daemon requiring authentication can be used via including the username and password in the
|
/// A daemon requiring authentication can be used via including the username and password in the
|
||||||
/// URL.
|
/// URL.
|
||||||
pub async fn new(url: String) -> Result<Rpc<HttpRpc>, RpcError> {
|
pub fn new(mut url: String) -> Result<Rpc<Self>, RpcError> {
|
||||||
Self::with_custom_timeout(url, DEFAULT_TIMEOUT).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new HTTP(S) RPC connection with a custom timeout.
|
|
||||||
///
|
|
||||||
/// A daemon requiring authentication can be used via including the username and password in the
|
|
||||||
/// URL.
|
|
||||||
pub async fn with_custom_timeout(
|
|
||||||
mut url: String,
|
|
||||||
request_timeout: Duration,
|
|
||||||
) -> Result<Rpc<HttpRpc>, RpcError> {
|
|
||||||
let authentication = if url.contains('@') {
|
|
||||||
// Parse out the username and password
|
// Parse out the username and password
|
||||||
|
let userpass = if url.contains('@') {
|
||||||
let url_clone = url;
|
let url_clone = url;
|
||||||
let split_url = url_clone.split('@').collect::<Vec<_>>();
|
let split_url = url_clone.split('@').collect::<Vec<_>>();
|
||||||
if split_url.len() != 2 {
|
if split_url.len() != 2 {
|
||||||
Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?;
|
Err(RpcError::InvalidNode)?;
|
||||||
}
|
}
|
||||||
let mut userpass = split_url[0];
|
let mut userpass = split_url[0];
|
||||||
url = split_url[1].to_string();
|
url = split_url[1].to_string();
|
||||||
@@ -86,201 +32,60 @@ impl HttpRpc {
|
|||||||
if userpass.contains("://") {
|
if userpass.contains("://") {
|
||||||
let split_userpass = userpass.split("://").collect::<Vec<_>>();
|
let split_userpass = userpass.split("://").collect::<Vec<_>>();
|
||||||
if split_userpass.len() != 2 {
|
if split_userpass.len() != 2 {
|
||||||
Err(RpcError::ConnectionError("invalid amount of protocol specifications".to_string()))?;
|
Err(RpcError::InvalidNode)?;
|
||||||
}
|
}
|
||||||
url = split_userpass[0].to_string() + "://" + &url;
|
url = split_userpass[0].to_string() + "://" + &url;
|
||||||
userpass = split_userpass[1];
|
userpass = split_userpass[1];
|
||||||
}
|
}
|
||||||
|
|
||||||
let split_userpass = userpass.split(':').collect::<Vec<_>>();
|
let split_userpass = userpass.split(':').collect::<Vec<_>>();
|
||||||
if split_userpass.len() > 2 {
|
if split_userpass.len() != 2 {
|
||||||
Err(RpcError::ConnectionError("invalid amount of passwords".to_string()))?;
|
Err(RpcError::InvalidNode)?;
|
||||||
}
|
|
||||||
|
|
||||||
let client = Client::without_connection_pool(&url)
|
|
||||||
.map_err(|_| RpcError::ConnectionError("invalid URL".to_string()))?;
|
|
||||||
// Obtain the initial challenge, which also somewhat validates this connection
|
|
||||||
let challenge = Self::digest_auth_challenge(
|
|
||||||
&client
|
|
||||||
.request(
|
|
||||||
Request::post(url.clone())
|
|
||||||
.body(vec![].into())
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}")))?,
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
|
|
||||||
)?;
|
|
||||||
Authentication::Authenticated {
|
|
||||||
username: split_userpass[0].to_string(),
|
|
||||||
password: (*split_userpass.get(1).unwrap_or(&"")).to_string(),
|
|
||||||
connection: Arc::new(Mutex::new((challenge, client))),
|
|
||||||
}
|
}
|
||||||
|
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
|
||||||
} else {
|
} else {
|
||||||
Authentication::Unauthenticated(Client::with_connection_pool())
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
Ok(Rpc(HttpRpc { authentication, url, request_timeout }))
|
Ok(Rpc(Self { client: Client::new(), userpass, url }))
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HttpRpc {
|
|
||||||
async fn inner_post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
|
||||||
let request_fn = |uri| {
|
|
||||||
Request::post(uri)
|
|
||||||
.body(body.clone().into())
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}")))
|
|
||||||
};
|
|
||||||
|
|
||||||
async fn body_from_response(response: Response<'_>) -> Result<Vec<u8>, RpcError> {
|
|
||||||
/*
|
|
||||||
let length = usize::try_from(
|
|
||||||
response
|
|
||||||
.headers()
|
|
||||||
.get("content-length")
|
|
||||||
.ok_or(RpcError::InvalidNode("no content-length header"))?
|
|
||||||
.to_str()
|
|
||||||
.map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))?
|
|
||||||
.parse::<u32>()
|
|
||||||
.map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?,
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
// Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually
|
|
||||||
// has to send 1 GB of data to cause a 1 GB allocation
|
|
||||||
let mut res = Vec::with_capacity(length.max(1024 * 1024));
|
|
||||||
let mut body = response.into_body();
|
|
||||||
while res.len() < length {
|
|
||||||
let Some(data) = body.data().await else { break };
|
|
||||||
res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref());
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
let mut res = Vec::with_capacity(128);
|
|
||||||
response
|
|
||||||
.body()
|
|
||||||
.await
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
|
|
||||||
.read_to_end(&mut res)
|
|
||||||
.unwrap();
|
|
||||||
Ok(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
for attempt in 0 .. 2 {
|
|
||||||
return Ok(match &self.authentication {
|
|
||||||
Authentication::Unauthenticated(client) => {
|
|
||||||
body_from_response(
|
|
||||||
client
|
|
||||||
.request(request_fn(self.url.clone() + "/" + route)?)
|
|
||||||
.await
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
}
|
|
||||||
Authentication::Authenticated { username, password, connection } => {
|
|
||||||
let mut connection_lock = connection.lock().await;
|
|
||||||
|
|
||||||
let mut request = request_fn("/".to_string() + route)?;
|
|
||||||
|
|
||||||
// If we don't have an auth challenge, obtain one
|
|
||||||
if connection_lock.0.is_none() {
|
|
||||||
connection_lock.0 = Self::digest_auth_challenge(
|
|
||||||
&connection_lock
|
|
||||||
.1
|
|
||||||
.request(request)
|
|
||||||
.await
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
|
|
||||||
)?;
|
|
||||||
request = request_fn("/".to_string() + route)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Insert the challenge response, if we have a challenge
|
|
||||||
if let Some((challenge, cnonce)) = connection_lock.0.as_mut() {
|
|
||||||
// Update the cnonce
|
|
||||||
// Overflow isn't a concern as this is a u64
|
|
||||||
*cnonce += 1;
|
|
||||||
|
|
||||||
let mut context = AuthContext::new_post::<_, _, _, &[u8]>(
|
|
||||||
username,
|
|
||||||
password,
|
|
||||||
"/".to_string() + route,
|
|
||||||
None,
|
|
||||||
);
|
|
||||||
context.set_custom_cnonce(hex::encode(cnonce.to_le_bytes()));
|
|
||||||
|
|
||||||
request.headers_mut().insert(
|
|
||||||
"Authorization",
|
|
||||||
HeaderValue::from_str(
|
|
||||||
&challenge
|
|
||||||
.respond(&context)
|
|
||||||
.map_err(|_| {
|
|
||||||
RpcError::InvalidNode("couldn't respond to digest-auth challenge".to_string())
|
|
||||||
})?
|
|
||||||
.to_header_string(),
|
|
||||||
)
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
let response = connection_lock
|
|
||||||
.1
|
|
||||||
.request(request)
|
|
||||||
.await
|
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")));
|
|
||||||
|
|
||||||
let (error, is_stale) = match &response {
|
|
||||||
Err(e) => (Some(e.clone()), false),
|
|
||||||
Ok(response) => (
|
|
||||||
None,
|
|
||||||
if response.status() == StatusCode::UNAUTHORIZED {
|
|
||||||
if let Some(header) = response.headers().get("www-authenticate") {
|
|
||||||
header
|
|
||||||
.to_str()
|
|
||||||
.map_err(|_| {
|
|
||||||
RpcError::InvalidNode("www-authenticate header wasn't a string".to_string())
|
|
||||||
})?
|
|
||||||
.contains("stale")
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
},
|
|
||||||
),
|
|
||||||
};
|
|
||||||
|
|
||||||
// If the connection entered an error state, drop the cached challenge as challenges are
|
|
||||||
// per-connection
|
|
||||||
// We don't need to create a new connection as simple-request will for us
|
|
||||||
if error.is_some() || is_stale {
|
|
||||||
connection_lock.0 = None;
|
|
||||||
// If we're not already on our second attempt, move to the next loop iteration
|
|
||||||
// (retrying all of this once)
|
|
||||||
if attempt == 0 {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if let Some(e) = error {
|
|
||||||
Err(e)?
|
|
||||||
} else {
|
|
||||||
debug_assert!(is_stale);
|
|
||||||
Err(RpcError::InvalidNode(
|
|
||||||
"node claimed fresh connection had stale authentication".to_string(),
|
|
||||||
))?
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
body_from_response(response.unwrap()).await?
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
unreachable!()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
impl RpcConnection for HttpRpc {
|
impl RpcConnection for HttpRpc {
|
||||||
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
||||||
tokio::time::timeout(self.request_timeout, self.inner_post(route, body))
|
let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
|
||||||
|
|
||||||
|
if let Some((user, pass)) = &self.userpass {
|
||||||
|
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
|
||||||
|
// Only provide authentication if this daemon actually expects it
|
||||||
|
if let Some(header) = req.headers().get("www-authenticate") {
|
||||||
|
builder = builder.header(
|
||||||
|
"Authorization",
|
||||||
|
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
|
||||||
|
.map_err(|_| RpcError::InvalidNode)?
|
||||||
|
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
|
||||||
|
user,
|
||||||
|
pass,
|
||||||
|
"/".to_string() + route,
|
||||||
|
None,
|
||||||
|
))
|
||||||
|
.map_err(|_| RpcError::InvalidNode)?
|
||||||
|
.to_header_string(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
builder
|
||||||
|
.send()
|
||||||
.await
|
.await
|
||||||
.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
|
.map_err(|_| RpcError::ConnectionError)?
|
||||||
|
.bytes()
|
||||||
|
.await
|
||||||
|
.map_err(|_| RpcError::ConnectionError)?
|
||||||
|
.slice(..)
|
||||||
|
.to_vec(),
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,9 +9,7 @@ use std_shims::{
|
|||||||
|
|
||||||
use async_trait::async_trait;
|
use async_trait::async_trait;
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
||||||
|
|
||||||
use monero_generators::decompress_point;
|
|
||||||
|
|
||||||
use serde::{Serialize, Deserialize, de::DeserializeOwned};
|
use serde::{Serialize, Deserialize, de::DeserializeOwned};
|
||||||
use serde_json::{Value, json};
|
use serde_json::{Value, json};
|
||||||
@@ -21,21 +19,16 @@ use crate::{
|
|||||||
serialize::*,
|
serialize::*,
|
||||||
transaction::{Input, Timelock, Transaction},
|
transaction::{Input, Timelock, Transaction},
|
||||||
block::Block,
|
block::Block,
|
||||||
wallet::{FeePriority, Fee},
|
wallet::Fee,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[cfg(feature = "http-rpc")]
|
#[cfg(feature = "http_rpc")]
|
||||||
mod http;
|
mod http;
|
||||||
#[cfg(feature = "http-rpc")]
|
#[cfg(feature = "http_rpc")]
|
||||||
pub use http::*;
|
pub use http::*;
|
||||||
|
|
||||||
// Number of blocks the fee estimate will be valid for
|
|
||||||
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
|
||||||
// src/wallet/wallet2.cpp#L121
|
|
||||||
const GRACE_BLOCKS_FOR_FEE_ESTIMATE: u64 = 10;
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
pub struct EmptyResponse {}
|
pub struct EmptyResponse;
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
pub struct JsonRpcResponse<T> {
|
pub struct JsonRpcResponse<T> {
|
||||||
result: T,
|
result: T,
|
||||||
@@ -54,24 +47,15 @@ struct TransactionsResponse {
|
|||||||
txs: Vec<TransactionResponse>,
|
txs: Vec<TransactionResponse>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
pub struct OutputResponse {
|
|
||||||
pub height: usize,
|
|
||||||
pub unlocked: bool,
|
|
||||||
key: String,
|
|
||||||
mask: String,
|
|
||||||
txid: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||||
pub enum RpcError {
|
pub enum RpcError {
|
||||||
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
||||||
InternalError(&'static str),
|
InternalError(&'static str),
|
||||||
#[cfg_attr(feature = "std", error("connection error ({0})"))]
|
#[cfg_attr(feature = "std", error("connection error"))]
|
||||||
ConnectionError(String),
|
ConnectionError,
|
||||||
#[cfg_attr(feature = "std", error("invalid node ({0})"))]
|
#[cfg_attr(feature = "std", error("invalid node"))]
|
||||||
InvalidNode(String),
|
InvalidNode,
|
||||||
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
|
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
|
||||||
UnsupportedProtocol(usize),
|
UnsupportedProtocol(usize),
|
||||||
#[cfg_attr(feature = "std", error("transactions not found"))]
|
#[cfg_attr(feature = "std", error("transactions not found"))]
|
||||||
@@ -82,24 +66,21 @@ pub enum RpcError {
|
|||||||
PrunedTransaction,
|
PrunedTransaction,
|
||||||
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
|
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
|
||||||
InvalidTransaction([u8; 32]),
|
InvalidTransaction([u8; 32]),
|
||||||
#[cfg_attr(feature = "std", error("unexpected fee response"))]
|
|
||||||
InvalidFee,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid priority"))]
|
|
||||||
InvalidPriority,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
|
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
|
||||||
hex::decode(value).map_err(|_| RpcError::InvalidNode("expected hex wasn't hex".to_string()))
|
hex::decode(value).map_err(|_| RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
|
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
|
||||||
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode("hash wasn't 32-bytes".to_string()))
|
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
|
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
|
||||||
decompress_point(
|
CompressedEdwardsY(
|
||||||
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
|
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
|
||||||
)
|
)
|
||||||
|
.decompress()
|
||||||
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
|
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -121,7 +102,7 @@ fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[async_trait]
|
#[async_trait]
|
||||||
pub trait RpcConnection: Clone + Debug {
|
pub trait RpcConnection: Send + Sync + Clone + Debug {
|
||||||
/// Perform a POST request to the specified route with the specified body.
|
/// Perform a POST request to the specified route with the specified body.
|
||||||
///
|
///
|
||||||
/// The implementor is left to handle anything such as authentication.
|
/// The implementor is left to handle anything such as authentication.
|
||||||
@@ -136,12 +117,14 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
///
|
///
|
||||||
/// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via
|
/// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via
|
||||||
/// `json_rpc_call`.
|
/// `json_rpc_call`.
|
||||||
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
|
pub async fn rpc_call<Params: Send + Serialize + Debug, Response: DeserializeOwned + Debug>(
|
||||||
&self,
|
&self,
|
||||||
route: &str,
|
route: &str,
|
||||||
params: Option<Params>,
|
params: Option<Params>,
|
||||||
) -> Result<Response, RpcError> {
|
) -> Result<Response, RpcError> {
|
||||||
let res = self
|
serde_json::from_str(
|
||||||
|
std_shims::str::from_utf8(
|
||||||
|
&self
|
||||||
.0
|
.0
|
||||||
.post(
|
.post(
|
||||||
route,
|
route,
|
||||||
@@ -151,11 +134,11 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
vec![]
|
vec![]
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
.await?;
|
.await?,
|
||||||
let res_str = std_shims::str::from_utf8(&res)
|
)
|
||||||
.map_err(|_| RpcError::InvalidNode("response wasn't utf-8".to_string()))?;
|
.map_err(|_| RpcError::InvalidNode)?,
|
||||||
serde_json::from_str(res_str)
|
)
|
||||||
.map_err(|_| RpcError::InvalidNode(format!("response wasn't json: {res_str}")))
|
.map_err(|_| RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Perform a JSON-RPC call with the specified method with the provided parameters
|
/// Perform a JSON-RPC call with the specified method with the provided parameters
|
||||||
@@ -255,7 +238,7 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
|
|
||||||
// https://github.com/monero-project/monero/issues/8311
|
// https://github.com/monero-project/monero/issues/8311
|
||||||
if res.as_hex.is_empty() {
|
if res.as_hex.is_empty() {
|
||||||
match tx.prefix.inputs.first() {
|
match tx.prefix.inputs.get(0) {
|
||||||
Some(Input::Gen { .. }) => (),
|
Some(Input::Gen { .. }) => (),
|
||||||
_ => Err(RpcError::PrunedTransaction)?,
|
_ => Err(RpcError::PrunedTransaction)?,
|
||||||
}
|
}
|
||||||
@@ -264,9 +247,7 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
// This does run a few keccak256 hashes, which is pointless if the node is trusted
|
// This does run a few keccak256 hashes, which is pointless if the node is trusted
|
||||||
// In exchange, this provides resilience against invalid/malicious nodes
|
// In exchange, this provides resilience against invalid/malicious nodes
|
||||||
if tx.hash() != hashes[i] {
|
if tx.hash() != hashes[i] {
|
||||||
Err(RpcError::InvalidNode(
|
Err(RpcError::InvalidNode)?;
|
||||||
"replied with transaction wasn't the requested transaction".to_string(),
|
|
||||||
))?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(tx)
|
Ok(tx)
|
||||||
@@ -292,7 +273,7 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
|
|
||||||
let header: BlockHeaderByHeightResponse =
|
let header: BlockHeaderByHeightResponse =
|
||||||
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
|
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
|
||||||
hash_hex(&header.block_header.hash)
|
rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get a block from the node by its hash.
|
/// Get a block from the node by its hash.
|
||||||
@@ -306,38 +287,30 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
let res: BlockResponse =
|
let res: BlockResponse =
|
||||||
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
|
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
|
||||||
|
|
||||||
let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
|
let block =
|
||||||
.map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?;
|
Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)?;
|
||||||
if block.hash() != hash {
|
if block.hash() != hash {
|
||||||
Err(RpcError::InvalidNode("different block than requested (hash)".to_string()))?;
|
Err(RpcError::InvalidNode)?;
|
||||||
}
|
}
|
||||||
Ok(block)
|
Ok(block)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
|
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
|
||||||
#[derive(Deserialize, Debug)]
|
match self.get_block(self.get_block_hash(number).await?).await {
|
||||||
struct BlockResponse {
|
Ok(block) => {
|
||||||
blob: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res: BlockResponse =
|
|
||||||
self.json_rpc_call("get_block", Some(json!({ "height": number }))).await?;
|
|
||||||
|
|
||||||
let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
|
|
||||||
.map_err(|_| RpcError::InvalidNode("invalid block".to_string()))?;
|
|
||||||
|
|
||||||
// Make sure this is actually the block for this number
|
// Make sure this is actually the block for this number
|
||||||
match block.miner_tx.prefix.inputs.first() {
|
match block.miner_tx.prefix.inputs.get(0) {
|
||||||
Some(Input::Gen(actual)) => {
|
Some(Input::Gen(actual)) => {
|
||||||
if usize::try_from(*actual).unwrap() == number {
|
if usize::try_from(*actual).unwrap() == number {
|
||||||
Ok(block)
|
Ok(block)
|
||||||
} else {
|
} else {
|
||||||
Err(RpcError::InvalidNode("different block than requested (number)".to_string()))
|
Err(RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => Err(RpcError::InvalidNode(
|
Some(Input::ToKey { .. }) | None => Err(RpcError::InvalidNode),
|
||||||
"block's miner_tx didn't have an input of kind Input::Gen".to_string(),
|
}
|
||||||
)),
|
}
|
||||||
|
e => e,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -365,6 +338,7 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
txid: [u8; 32],
|
txid: [u8; 32],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct OIndexes {
|
struct OIndexes {
|
||||||
o_indexes: Vec<u64>,
|
o_indexes: Vec<u64>,
|
||||||
@@ -395,14 +369,11 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
let mut indexes: &[u8] = indexes_buf.as_ref();
|
let mut indexes: &[u8] = indexes_buf.as_ref();
|
||||||
|
|
||||||
(|| {
|
(|| {
|
||||||
let mut res = None;
|
|
||||||
let mut is_okay = false;
|
|
||||||
|
|
||||||
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
|
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
|
||||||
Err(io::Error::other("invalid header"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let read_object = |reader: &mut &[u8]| -> io::Result<Vec<u64>> {
|
let read_object = |reader: &mut &[u8]| {
|
||||||
let fields = read_byte(reader)? >> 2;
|
let fields = read_byte(reader)? >> 2;
|
||||||
|
|
||||||
for _ in 0 .. fields {
|
for _ in 0 .. fields {
|
||||||
@@ -415,7 +386,7 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
|
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
|
||||||
|
|
||||||
if (&name == b"o_indexes") && (kind != 5) {
|
if (&name == b"o_indexes") && (kind != 5) {
|
||||||
Err(io::Error::other("o_indexes weren't u64s"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let f = match kind {
|
let f = match kind {
|
||||||
@@ -442,72 +413,54 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
let len = read_epee_vi(reader)?;
|
let len = read_epee_vi(reader)?;
|
||||||
read_raw_vec(
|
read_raw_vec(
|
||||||
read_byte,
|
read_byte,
|
||||||
len.try_into().map_err(|_| io::Error::other("u64 length exceeded usize"))?,
|
len
|
||||||
|
.try_into()
|
||||||
|
.map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
|
||||||
reader,
|
reader,
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
// bool
|
// bool
|
||||||
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
||||||
// object, errors here as it shouldn't be used on this call
|
// object, errors here as it shouldn't be used on this call
|
||||||
12 => {
|
12 => |_: &mut &[u8]| {
|
||||||
|_: &mut &[u8]| Err(io::Error::other("node used object in reply to get_o_indexes"))
|
Err(io::Error::new(
|
||||||
}
|
io::ErrorKind::Other,
|
||||||
|
"node used object in reply to get_o_indexes",
|
||||||
|
))
|
||||||
|
},
|
||||||
// array, so far unused
|
// array, so far unused
|
||||||
13 => |_: &mut &[u8]| Err(io::Error::other("node used the unused array type")),
|
13 => |_: &mut &[u8]| {
|
||||||
_ => |_: &mut &[u8]| Err(io::Error::other("node used an invalid type")),
|
Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
|
||||||
|
},
|
||||||
|
_ => {
|
||||||
|
|_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut bytes_res = vec![];
|
let mut res = vec![];
|
||||||
for _ in 0 .. iters {
|
for _ in 0 .. iters {
|
||||||
bytes_res.push(f(reader)?);
|
res.push(f(reader)?);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut actual_res = Vec::with_capacity(bytes_res.len());
|
let mut actual_res = Vec::with_capacity(res.len());
|
||||||
match name.as_slice() {
|
if &name == b"o_indexes" {
|
||||||
b"o_indexes" => {
|
for o_index in res {
|
||||||
for o_index in bytes_res {
|
actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
|
||||||
actual_res.push(u64::from_le_bytes(
|
io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
|
||||||
o_index
|
})?));
|
||||||
.try_into()
|
|
||||||
.map_err(|_| io::Error::other("node didn't provide 8 bytes for a u64"))?,
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
res = Some(actual_res);
|
return Ok(actual_res);
|
||||||
}
|
|
||||||
b"status" => {
|
|
||||||
if bytes_res
|
|
||||||
.first()
|
|
||||||
.ok_or_else(|| io::Error::other("status wasn't a string"))?
|
|
||||||
.as_slice() !=
|
|
||||||
b"OK"
|
|
||||||
{
|
|
||||||
// TODO: Better handle non-OK responses
|
|
||||||
Err(io::Error::other("response wasn't OK"))?;
|
|
||||||
}
|
|
||||||
is_okay = true;
|
|
||||||
}
|
|
||||||
_ => continue,
|
|
||||||
}
|
|
||||||
|
|
||||||
if is_okay && res.is_some() {
|
|
||||||
break;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Didn't return a response with a status
|
// Didn't return a response with o_indexes
|
||||||
// (if the status wasn't okay, we would've already errored)
|
// TODO: Check if this didn't have o_indexes because it's an error response
|
||||||
if !is_okay {
|
Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
|
||||||
Err(io::Error::other("response didn't contain a status"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the Vec was empty, it would've been omitted, hence the unwrap_or
|
|
||||||
// TODO: Test against a 0-output TX, such as the ones found in block 202612
|
|
||||||
Ok(res.unwrap_or(vec![]))
|
|
||||||
};
|
};
|
||||||
|
|
||||||
read_object(&mut indexes)
|
read_object(&mut indexes)
|
||||||
})()
|
})()
|
||||||
.map_err(|_| RpcError::InvalidNode("invalid binary response".to_string()))
|
.map_err(|_| RpcError::InvalidNode)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the output distribution, from the specified height to the specified height (both
|
/// Get the output distribution, from the specified height to the specified height (both
|
||||||
@@ -517,11 +470,13 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
from: usize,
|
from: usize,
|
||||||
to: usize,
|
to: usize,
|
||||||
) -> Result<Vec<u64>, RpcError> {
|
) -> Result<Vec<u64>, RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct Distribution {
|
struct Distribution {
|
||||||
distribution: Vec<u64>,
|
distribution: Vec<u64>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct Distributions {
|
struct Distributions {
|
||||||
distributions: Vec<Distribution>,
|
distributions: Vec<Distribution>,
|
||||||
@@ -543,15 +498,27 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
Ok(distributions.distributions.swap_remove(0).distribution)
|
Ok(distributions.distributions.swap_remove(0).distribution)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the specified outputs from the RingCT (zero-amount) pool
|
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
|
||||||
pub async fn get_outs(&self, indexes: &[u64]) -> Result<Vec<OutputResponse>, RpcError> {
|
/// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
|
||||||
|
/// all Monero transactions.
|
||||||
|
pub async fn get_unlocked_outputs(
|
||||||
|
&self,
|
||||||
|
indexes: &[u64],
|
||||||
|
height: usize,
|
||||||
|
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct OutsResponse {
|
struct Out {
|
||||||
status: String,
|
key: String,
|
||||||
outs: Vec<OutputResponse>,
|
mask: String,
|
||||||
|
txid: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
let res: OutsResponse = self
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Outs {
|
||||||
|
outs: Vec<Out>,
|
||||||
|
}
|
||||||
|
|
||||||
|
let outs: Outs = self
|
||||||
.rpc_call(
|
.rpc_call(
|
||||||
"get_outs",
|
"get_outs",
|
||||||
Some(json!({
|
Some(json!({
|
||||||
@@ -564,142 +531,43 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
if res.status != "OK" {
|
let txs = self
|
||||||
Err(RpcError::InvalidNode("bad response to get_outs".to_string()))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(res.outs)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
|
|
||||||
/// timelock has been satisfied.
|
|
||||||
///
|
|
||||||
/// The timelock being satisfied is distinct from being free of the 10-block lock applied to all
|
|
||||||
/// Monero transactions.
|
|
||||||
pub async fn get_unlocked_outputs(
|
|
||||||
&self,
|
|
||||||
indexes: &[u64],
|
|
||||||
height: usize,
|
|
||||||
fingerprintable_canonical: bool,
|
|
||||||
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
|
|
||||||
let outs: Vec<OutputResponse> = self.get_outs(indexes).await?;
|
|
||||||
|
|
||||||
// Only need to fetch txs to do canonical check on timelock
|
|
||||||
let txs = if fingerprintable_canonical {
|
|
||||||
self
|
|
||||||
.get_transactions(
|
.get_transactions(
|
||||||
&outs.iter().map(|out| hash_hex(&out.txid)).collect::<Result<Vec<_>, _>>()?,
|
&outs
|
||||||
|
.outs
|
||||||
|
.iter()
|
||||||
|
.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
|
||||||
|
.collect::<Result<Vec<_>, _>>()?,
|
||||||
)
|
)
|
||||||
.await?
|
.await?;
|
||||||
} else {
|
|
||||||
Vec::new()
|
|
||||||
};
|
|
||||||
|
|
||||||
// TODO: https://github.com/serai-dex/serai/issues/104
|
// TODO: https://github.com/serai-dex/serai/issues/104
|
||||||
outs
|
outs
|
||||||
|
.outs
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(i, out)| {
|
.map(|(i, out)| {
|
||||||
// Allow keys to be invalid, though if they are, return None to trigger selection of a new
|
Ok(
|
||||||
// decoy
|
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
|
||||||
// Only valid keys can be used in CLSAG proofs, hence the need for re-selection, yet
|
.filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
|
||||||
// invalid keys may honestly exist on the blockchain
|
)
|
||||||
// Only a recent hard fork checked output keys were valid points
|
|
||||||
let Some(key) = decompress_point(
|
|
||||||
rpc_hex(&out.key)?
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| RpcError::InvalidNode("non-32-byte point".to_string()))?,
|
|
||||||
) else {
|
|
||||||
return Ok(None);
|
|
||||||
};
|
|
||||||
Ok(Some([key, rpc_point(&out.mask)?]).filter(|_| {
|
|
||||||
if fingerprintable_canonical {
|
|
||||||
Timelock::Block(height) >= txs[i].prefix.timelock
|
|
||||||
} else {
|
|
||||||
out.unlocked
|
|
||||||
}
|
|
||||||
}))
|
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_fee_v14(&self, priority: FeePriority) -> Result<Fee, RpcError> {
|
/// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
|
||||||
|
/// MUST be sanity checked.
|
||||||
|
// TODO: Take a sanity check argument
|
||||||
|
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
#[derive(Deserialize, Debug)]
|
#[derive(Deserialize, Debug)]
|
||||||
struct FeeResponseV14 {
|
struct FeeResponse {
|
||||||
status: String,
|
|
||||||
fee: u64,
|
fee: u64,
|
||||||
quantization_mask: u64,
|
quantization_mask: u64,
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
|
||||||
// src/wallet/wallet2.cpp#L7569-L7584
|
Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
|
||||||
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
|
||||||
// src/wallet/wallet2.cpp#L7660-L7661
|
|
||||||
let priority_idx =
|
|
||||||
usize::try_from(if priority.fee_priority() == 0 { 1 } else { priority.fee_priority() - 1 })
|
|
||||||
.map_err(|_| RpcError::InvalidPriority)?;
|
|
||||||
let multipliers = [1, 5, 25, 1000];
|
|
||||||
if priority_idx >= multipliers.len() {
|
|
||||||
// though not an RPC error, it seems sensible to treat as such
|
|
||||||
Err(RpcError::InvalidPriority)?;
|
|
||||||
}
|
|
||||||
let fee_multiplier = multipliers[priority_idx];
|
|
||||||
|
|
||||||
let res: FeeResponseV14 = self
|
|
||||||
.json_rpc_call(
|
|
||||||
"get_fee_estimate",
|
|
||||||
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if res.status != "OK" {
|
|
||||||
Err(RpcError::InvalidFee)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Fee { per_weight: res.fee * fee_multiplier, mask: res.quantization_mask })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the currently estimated fee from the node.
|
|
||||||
///
|
|
||||||
/// This may be manipulated to unsafe levels and MUST be sanity checked.
|
|
||||||
// TODO: Take a sanity check argument
|
|
||||||
pub async fn get_fee(&self, protocol: Protocol, priority: FeePriority) -> Result<Fee, RpcError> {
|
|
||||||
// TODO: Implement wallet2's adjust_priority which by default automatically uses a lower
|
|
||||||
// priority than provided depending on the backlog in the pool
|
|
||||||
if protocol.v16_fee() {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct FeeResponse {
|
|
||||||
status: String,
|
|
||||||
fees: Vec<u64>,
|
|
||||||
quantization_mask: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res: FeeResponse = self
|
|
||||||
.json_rpc_call(
|
|
||||||
"get_fee_estimate",
|
|
||||||
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
|
|
||||||
// src/wallet/wallet2.cpp#L7615-L7620
|
|
||||||
let priority_idx = usize::try_from(if priority.fee_priority() >= 4 {
|
|
||||||
3
|
|
||||||
} else {
|
|
||||||
priority.fee_priority().saturating_sub(1)
|
|
||||||
})
|
|
||||||
.map_err(|_| RpcError::InvalidPriority)?;
|
|
||||||
|
|
||||||
if res.status != "OK" {
|
|
||||||
Err(RpcError::InvalidFee)
|
|
||||||
} else if priority_idx >= res.fees.len() {
|
|
||||||
Err(RpcError::InvalidPriority)
|
|
||||||
} else {
|
|
||||||
Ok(Fee { per_weight: res.fees[priority_idx], mask: res.quantization_mask })
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.get_fee_v14(priority).await
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
|
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
|
||||||
@@ -730,32 +598,20 @@ impl<R: RpcConnection> Rpc<R> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Take &Address, not &str?
|
pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
|
||||||
pub async fn generate_blocks(
|
self
|
||||||
&self,
|
.rpc_call::<_, EmptyResponse>(
|
||||||
address: &str,
|
"json_rpc",
|
||||||
block_count: usize,
|
|
||||||
) -> Result<(Vec<[u8; 32]>, usize), RpcError> {
|
|
||||||
#[derive(Debug, Deserialize)]
|
|
||||||
struct BlocksResponse {
|
|
||||||
blocks: Vec<String>,
|
|
||||||
height: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res = self
|
|
||||||
.json_rpc_call::<BlocksResponse>(
|
|
||||||
"generateblocks",
|
|
||||||
Some(json!({
|
Some(json!({
|
||||||
|
"method": "generateblocks",
|
||||||
|
"params": {
|
||||||
"wallet_address": address,
|
"wallet_address": address,
|
||||||
"amount_of_blocks": block_count
|
"amount_of_blocks": block_count
|
||||||
|
},
|
||||||
})),
|
})),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
let mut blocks = Vec::with_capacity(res.blocks.len());
|
Ok(())
|
||||||
for block in res.blocks {
|
|
||||||
blocks.push(hash_hex(&block)?);
|
|
||||||
}
|
|
||||||
Ok((blocks, res.height))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,43 +4,23 @@ use std_shims::{
|
|||||||
io::{self, Read, Write},
|
io::{self, Read, Write},
|
||||||
};
|
};
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
use curve25519_dalek::{
|
||||||
|
scalar::Scalar,
|
||||||
use monero_generators::decompress_point;
|
edwards::{EdwardsPoint, CompressedEdwardsY},
|
||||||
|
};
|
||||||
|
|
||||||
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
|
||||||
mod sealed {
|
pub(crate) fn varint_len(varint: usize) -> usize {
|
||||||
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
|
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
||||||
const BITS: usize;
|
|
||||||
}
|
|
||||||
impl VarInt for u8 {
|
|
||||||
const BITS: usize = 8;
|
|
||||||
}
|
|
||||||
impl VarInt for u32 {
|
|
||||||
const BITS: usize = 32;
|
|
||||||
}
|
|
||||||
impl VarInt for u64 {
|
|
||||||
const BITS: usize = 64;
|
|
||||||
}
|
|
||||||
impl VarInt for usize {
|
|
||||||
const BITS: usize = core::mem::size_of::<usize>() * 8;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This will panic if the VarInt exceeds u64::MAX
|
|
||||||
pub(crate) fn varint_len<U: sealed::VarInt>(varint: U) -> usize {
|
|
||||||
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
|
|
||||||
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
|
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
|
||||||
w.write_all(&[*byte])
|
w.write_all(&[*byte])
|
||||||
}
|
}
|
||||||
|
|
||||||
// This will panic if the VarInt exceeds u64::MAX
|
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
||||||
pub(crate) fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
|
let mut varint = *varint;
|
||||||
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
|
|
||||||
while {
|
while {
|
||||||
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
||||||
varint >>= 7;
|
varint >>= 7;
|
||||||
@@ -77,7 +57,7 @@ pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
|||||||
values: &[T],
|
values: &[T],
|
||||||
w: &mut W,
|
w: &mut W,
|
||||||
) -> io::Result<()> {
|
) -> io::Result<()> {
|
||||||
write_varint(&values.len(), w)?;
|
write_varint(&values.len().try_into().unwrap(), w)?;
|
||||||
write_raw_vec(f, values, w)
|
write_raw_vec(f, values, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -103,23 +83,23 @@ pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
|
|||||||
read_bytes(r).map(u64::from_le_bytes)
|
read_bytes(r).map(u64::from_le_bytes)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
|
pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
|
||||||
let mut bits = 0;
|
let mut bits = 0;
|
||||||
let mut res = 0;
|
let mut res = 0;
|
||||||
while {
|
while {
|
||||||
let b = read_byte(r)?;
|
let b = read_byte(r)?;
|
||||||
if (bits != 0) && (b == 0) {
|
if (bits != 0) && (b == 0) {
|
||||||
Err(io::Error::other("non-canonical varint"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
|
||||||
}
|
}
|
||||||
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
|
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
|
||||||
Err(io::Error::other("varint overflow"))?;
|
Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
||||||
bits += 7;
|
bits += 7;
|
||||||
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
||||||
} {}
|
} {}
|
||||||
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
|
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
|
||||||
@@ -129,20 +109,24 @@ pub(crate) fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U
|
|||||||
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
|
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
|
||||||
// reduction applied
|
// reduction applied
|
||||||
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
|
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
|
||||||
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
|
Scalar::from_canonical_bytes(read_bytes(r)?)
|
||||||
.ok_or_else(|| io::Error::other("unreduced scalar"))
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
let bytes = read_bytes(r)?;
|
let bytes = read_bytes(r)?;
|
||||||
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
|
CompressedEdwardsY(bytes)
|
||||||
|
.decompress()
|
||||||
|
// Ban points which are either unreduced or -0
|
||||||
|
.filter(|point| point.compress().to_bytes() == bytes)
|
||||||
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
read_point(r)
|
read_point(r)
|
||||||
.ok()
|
.ok()
|
||||||
.filter(EdwardsPoint::is_torsion_free)
|
.filter(EdwardsPoint::is_torsion_free)
|
||||||
.ok_or_else(|| io::Error::other("invalid point"))
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
||||||
@@ -168,5 +152,5 @@ pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
|||||||
f: F,
|
f: F,
|
||||||
r: &mut R,
|
r: &mut R,
|
||||||
) -> io::Result<Vec<T>> {
|
) -> io::Result<Vec<T>> {
|
||||||
read_raw_vec(f, read_varint(r)?, r)
|
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,9 +2,7 @@ use hex_literal::hex;
|
|||||||
|
|
||||||
use rand_core::{RngCore, OsRng};
|
use rand_core::{RngCore, OsRng};
|
||||||
|
|
||||||
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
|
||||||
|
|
||||||
use monero_generators::decompress_point;
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
random_scalar,
|
random_scalar,
|
||||||
@@ -75,8 +73,8 @@ fn featured() {
|
|||||||
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
|
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
|
||||||
{
|
{
|
||||||
for _ in 0 .. 100 {
|
for _ in 0 .. 100 {
|
||||||
let spend = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
||||||
let view = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
||||||
|
|
||||||
for features in 0 .. (1 << 3) {
|
for features in 0 .. (1 << 3) {
|
||||||
const SUBADDRESS_FEATURE_BIT: u8 = 1;
|
const SUBADDRESS_FEATURE_BIT: u8 = 1;
|
||||||
@@ -144,8 +142,10 @@ fn featured_vectors() {
|
|||||||
}
|
}
|
||||||
_ => panic!("Unknown network"),
|
_ => panic!("Unknown network"),
|
||||||
};
|
};
|
||||||
let spend = decompress_point(hex::decode(vector.spend).unwrap().try_into().unwrap()).unwrap();
|
let spend =
|
||||||
let view = decompress_point(hex::decode(vector.view).unwrap().try_into().unwrap()).unwrap();
|
CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
|
||||||
|
let view =
|
||||||
|
CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();
|
||||||
|
|
||||||
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
|
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
|
||||||
assert_eq!(addr.spend, spend);
|
assert_eq!(addr.spend, spend);
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
use hex_literal::hex;
|
use hex_literal::hex;
|
||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use curve25519_dalek::scalar::Scalar;
|
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
|
||||||
use monero_generators::decompress_point;
|
|
||||||
use multiexp::BatchVerifier;
|
use multiexp::BatchVerifier;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
@@ -10,12 +9,10 @@ use crate::{
|
|||||||
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
|
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
|
||||||
};
|
};
|
||||||
|
|
||||||
mod plus;
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn bulletproofs_vector() {
|
fn bulletproofs_vector() {
|
||||||
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
|
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
|
||||||
let point = |point| decompress_point(point).unwrap();
|
let point = |point| CompressedEdwardsY(point).decompress().unwrap();
|
||||||
|
|
||||||
// Generated from Monero
|
// Generated from Monero
|
||||||
assert!(Bulletproofs::Original(OriginalStruct {
|
assert!(Bulletproofs::Original(OriginalStruct {
|
||||||
@@ -65,7 +62,7 @@ macro_rules! bulletproofs_tests {
|
|||||||
fn $name() {
|
fn $name() {
|
||||||
// Create Bulletproofs for all possible output quantities
|
// Create Bulletproofs for all possible output quantities
|
||||||
let mut verifier = BatchVerifier::new(16);
|
let mut verifier = BatchVerifier::new(16);
|
||||||
for i in 1 ..= 16 {
|
for i in 1 .. 17 {
|
||||||
let commitments = (1 ..= i)
|
let commitments = (1 ..= i)
|
||||||
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
|
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
@@ -84,7 +81,7 @@ macro_rules! bulletproofs_tests {
|
|||||||
// Check Bulletproofs errors if we try to prove for too many outputs
|
// Check Bulletproofs errors if we try to prove for too many outputs
|
||||||
let mut commitments = vec![];
|
let mut commitments = vec![];
|
||||||
for _ in 0 .. 17 {
|
for _ in 0 .. 17 {
|
||||||
commitments.push(Commitment::new(Scalar::ZERO, 0));
|
commitments.push(Commitment::new(Scalar::zero(), 0));
|
||||||
}
|
}
|
||||||
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
|
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
|
||||||
}
|
}
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
use rand_core::{RngCore, OsRng};
|
|
||||||
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
use group::ff::Field;
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Commitment,
|
|
||||||
ringct::bulletproofs::plus::aggregate_range_proof::{
|
|
||||||
AggregateRangeStatement, AggregateRangeWitness,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_aggregate_range_proof() {
|
|
||||||
let mut verifier = BatchVerifier::new(16);
|
|
||||||
for m in 1 ..= 16 {
|
|
||||||
let mut commitments = vec![];
|
|
||||||
for _ in 0 .. m {
|
|
||||||
commitments.push(Commitment::new(*Scalar::random(&mut OsRng), OsRng.next_u64()));
|
|
||||||
}
|
|
||||||
let commitment_points = commitments.iter().map(|com| EdwardsPoint(com.calculate())).collect();
|
|
||||||
let statement = AggregateRangeStatement::new(commitment_points).unwrap();
|
|
||||||
let witness = AggregateRangeWitness::new(&commitments).unwrap();
|
|
||||||
|
|
||||||
let proof = statement.clone().prove(&mut OsRng, &witness).unwrap();
|
|
||||||
statement.verify(&mut OsRng, &mut verifier, (), proof);
|
|
||||||
}
|
|
||||||
assert!(verifier.verify_vartime());
|
|
||||||
}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
#[cfg(test)]
|
|
||||||
mod weighted_inner_product;
|
|
||||||
#[cfg(test)]
|
|
||||||
mod aggregate_range_proof;
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
use group::{ff::Field, Group};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::ringct::bulletproofs::plus::{
|
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators,
|
|
||||||
weighted_inner_product::{WipStatement, WipWitness},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_zero_weighted_inner_product() {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let P = EdwardsPoint::identity();
|
|
||||||
let y = Scalar::random(&mut OsRng);
|
|
||||||
|
|
||||||
let generators = Generators::new().reduce(1);
|
|
||||||
let statement = WipStatement::new(generators, P, y);
|
|
||||||
let witness = WipWitness::new(ScalarVector::new(1), ScalarVector::new(1), Scalar::ZERO).unwrap();
|
|
||||||
|
|
||||||
let transcript = Scalar::random(&mut OsRng);
|
|
||||||
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
|
||||||
|
|
||||||
let mut verifier = BatchVerifier::new(1);
|
|
||||||
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
|
|
||||||
assert!(verifier.verify_vartime());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_weighted_inner_product() {
|
|
||||||
// P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)
|
|
||||||
let mut verifier = BatchVerifier::new(6);
|
|
||||||
let generators = Generators::new();
|
|
||||||
for i in [1, 2, 4, 8, 16, 32] {
|
|
||||||
let generators = generators.reduce(i);
|
|
||||||
let g = Generators::g();
|
|
||||||
let h = Generators::h();
|
|
||||||
assert_eq!(generators.len(), i);
|
|
||||||
let mut g_bold = vec![];
|
|
||||||
let mut h_bold = vec![];
|
|
||||||
for i in 0 .. i {
|
|
||||||
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
|
|
||||||
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
|
|
||||||
}
|
|
||||||
let g_bold = PointVector(g_bold);
|
|
||||||
let h_bold = PointVector(h_bold);
|
|
||||||
|
|
||||||
let mut a = ScalarVector::new(i);
|
|
||||||
let mut b = ScalarVector::new(i);
|
|
||||||
let alpha = Scalar::random(&mut OsRng);
|
|
||||||
|
|
||||||
let y = Scalar::random(&mut OsRng);
|
|
||||||
let mut y_vec = ScalarVector::new(g_bold.len());
|
|
||||||
y_vec[0] = y;
|
|
||||||
for i in 1 .. y_vec.len() {
|
|
||||||
y_vec[i] = y_vec[i - 1] * y;
|
|
||||||
}
|
|
||||||
|
|
||||||
for i in 0 .. i {
|
|
||||||
a[i] = Scalar::random(&mut OsRng);
|
|
||||||
b[i] = Scalar::random(&mut OsRng);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let P = g_bold.multiexp(&a) +
|
|
||||||
h_bold.multiexp(&b) +
|
|
||||||
(g * a.clone().weighted_inner_product(&b, &y_vec)) +
|
|
||||||
(h * alpha);
|
|
||||||
|
|
||||||
let statement = WipStatement::new(generators, P, y);
|
|
||||||
let witness = WipWitness::new(a, b, alpha).unwrap();
|
|
||||||
|
|
||||||
let transcript = Scalar::random(&mut OsRng);
|
|
||||||
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
|
||||||
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
|
|
||||||
}
|
|
||||||
assert!(verifier.verify_vartime());
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
use core::ops::Deref;
|
use core::ops::Deref;
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
use std::sync::{Arc, RwLock};
|
use std_shims::sync::Arc;
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
use std::sync::RwLock;
|
||||||
|
|
||||||
use zeroize::Zeroizing;
|
use zeroize::Zeroizing;
|
||||||
use rand_core::{RngCore, OsRng};
|
use rand_core::{RngCore, OsRng};
|
||||||
@@ -40,24 +42,23 @@ fn clsag() {
|
|||||||
for real in 0 .. RING_LEN {
|
for real in 0 .. RING_LEN {
|
||||||
let msg = [1; 32];
|
let msg = [1; 32];
|
||||||
|
|
||||||
let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO);
|
let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
|
||||||
let mut ring = vec![];
|
let mut ring = vec![];
|
||||||
for i in 0 .. RING_LEN {
|
for i in 0 .. RING_LEN {
|
||||||
let dest = Zeroizing::new(random_scalar(&mut OsRng));
|
let dest = Zeroizing::new(random_scalar(&mut OsRng));
|
||||||
let mask = random_scalar(&mut OsRng);
|
let mask = random_scalar(&mut OsRng);
|
||||||
let amount;
|
let amount = if i == real {
|
||||||
if i == real {
|
|
||||||
secrets = (dest.clone(), mask);
|
secrets = (dest.clone(), mask);
|
||||||
amount = AMOUNT;
|
AMOUNT
|
||||||
} else {
|
} else {
|
||||||
amount = OsRng.next_u64();
|
OsRng.next_u64()
|
||||||
}
|
};
|
||||||
ring
|
ring
|
||||||
.push([dest.deref() * ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
|
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
|
||||||
}
|
}
|
||||||
|
|
||||||
let image = generate_key_image(&secrets.0);
|
let image = generate_key_image(&secrets.0);
|
||||||
let (mut clsag, pseudo_out) = Clsag::sign(
|
let (clsag, pseudo_out) = Clsag::sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
vec![(
|
vec![(
|
||||||
secrets.0,
|
secrets.0,
|
||||||
@@ -76,12 +77,7 @@ fn clsag() {
|
|||||||
msg,
|
msg,
|
||||||
)
|
)
|
||||||
.swap_remove(0);
|
.swap_remove(0);
|
||||||
|
|
||||||
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
|
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
|
||||||
|
|
||||||
// make sure verification fails if we throw a random `c1` at it.
|
|
||||||
clsag.c1 = random_scalar(&mut OsRng);
|
|
||||||
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg).is_err());
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -95,16 +91,15 @@ fn clsag_multisig() {
|
|||||||
for i in 0 .. RING_LEN {
|
for i in 0 .. RING_LEN {
|
||||||
let dest;
|
let dest;
|
||||||
let mask;
|
let mask;
|
||||||
let amount;
|
let amount = if i == u64::from(RING_INDEX) {
|
||||||
if i != u64::from(RING_INDEX) {
|
|
||||||
dest = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
|
|
||||||
mask = random_scalar(&mut OsRng);
|
|
||||||
amount = OsRng.next_u64();
|
|
||||||
} else {
|
|
||||||
dest = keys[&Participant::new(1).unwrap()].group_key().0;
|
dest = keys[&Participant::new(1).unwrap()].group_key().0;
|
||||||
mask = randomness;
|
mask = randomness;
|
||||||
amount = AMOUNT;
|
AMOUNT
|
||||||
}
|
} else {
|
||||||
|
dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
||||||
|
mask = random_scalar(&mut OsRng);
|
||||||
|
OsRng.next_u64()
|
||||||
|
};
|
||||||
ring.push([dest, Commitment::new(mask, amount).calculate()]);
|
ring.push([dest, Commitment::new(mask, amount).calculate()]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -124,9 +119,9 @@ fn clsag_multisig() {
|
|||||||
|
|
||||||
sign(
|
sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
&algorithm,
|
algorithm.clone(),
|
||||||
keys.clone(),
|
keys.clone(),
|
||||||
algorithm_machines(&mut OsRng, &algorithm, &keys),
|
algorithm_machines(&mut OsRng, algorithm, &keys),
|
||||||
&[1; 32],
|
&[1; 32],
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,158 +0,0 @@
|
|||||||
use crate::{
|
|
||||||
wallet::{ExtraField, Extra, extra::MAX_TX_EXTRA_PADDING_COUNT},
|
|
||||||
serialize::write_varint,
|
|
||||||
};
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
|
||||||
|
|
||||||
// Borrowed tests from
|
|
||||||
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
|
|
||||||
// tests/unit_tests/test_tx_utils.cpp
|
|
||||||
|
|
||||||
const PUB_KEY_BYTES: [u8; 33] = [
|
|
||||||
1, 30, 208, 98, 162, 133, 64, 85, 83, 112, 91, 188, 89, 211, 24, 131, 39, 154, 22, 228, 80, 63,
|
|
||||||
198, 141, 173, 111, 244, 183, 4, 149, 186, 140, 230,
|
|
||||||
];
|
|
||||||
|
|
||||||
fn pub_key() -> EdwardsPoint {
|
|
||||||
CompressedEdwardsY(PUB_KEY_BYTES[1 .. PUB_KEY_BYTES.len()].try_into().expect("invalid pub key"))
|
|
||||||
.decompress()
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn test_write_buf(extra: &Extra, buf: &[u8]) {
|
|
||||||
let mut w: Vec<u8> = vec![];
|
|
||||||
Extra::write(extra, &mut w).unwrap();
|
|
||||||
assert_eq!(buf, w);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn empty_extra() {
|
|
||||||
let buf: Vec<u8> = vec![];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert!(extra.0.is_empty());
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn padding_only_size_1() {
|
|
||||||
let buf: Vec<u8> = vec![0];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::Padding(1)]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn padding_only_size_2() {
|
|
||||||
let buf: Vec<u8> = vec![0, 0];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::Padding(2)]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn padding_only_max_size() {
|
|
||||||
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::Padding(MAX_TX_EXTRA_PADDING_COUNT)]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn padding_only_exceed_max_size() {
|
|
||||||
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT + 1];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert!(extra.0.is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn invalid_padding_only() {
|
|
||||||
let buf: Vec<u8> = vec![0, 42];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert!(extra.0.is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn pub_key_only() {
|
|
||||||
let buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_nonce_only() {
|
|
||||||
let buf: Vec<u8> = vec![2, 1, 42];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::Nonce(vec![42])]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_nonce_only_wrong_size() {
|
|
||||||
let mut buf: Vec<u8> = vec![0; 20];
|
|
||||||
buf[0] = 2;
|
|
||||||
buf[1] = 255;
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert!(extra.0.is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn pub_key_and_padding() {
|
|
||||||
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
|
|
||||||
buf.extend([
|
|
||||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
|
||||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
|
||||||
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
|
||||||
]);
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key()), ExtraField::Padding(76)]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn pub_key_and_invalid_padding() {
|
|
||||||
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
|
|
||||||
buf.extend([0, 1]);
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_mysterious_minergate_only() {
|
|
||||||
let buf: Vec<u8> = vec![222, 1, 42];
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![42])]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_mysterious_minergate_only_large() {
|
|
||||||
let mut buf: Vec<u8> = vec![222];
|
|
||||||
write_varint(&512u64, &mut buf).unwrap();
|
|
||||||
buf.extend_from_slice(&vec![0; 512]);
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![0; 512])]);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_mysterious_minergate_only_wrong_size() {
|
|
||||||
let mut buf: Vec<u8> = vec![0; 20];
|
|
||||||
buf[0] = 222;
|
|
||||||
buf[1] = 255;
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert!(extra.0.is_empty());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn extra_mysterious_minergate_and_pub_key() {
|
|
||||||
let mut buf: Vec<u8> = vec![222, 1, 42];
|
|
||||||
buf.extend(PUB_KEY_BYTES.to_vec());
|
|
||||||
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
extra.0,
|
|
||||||
vec![ExtraField::MysteriousMinergate(vec![42]), ExtraField::PublicKey(pub_key())]
|
|
||||||
);
|
|
||||||
test_write_buf(&extra, &buf);
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,4 @@
|
|||||||
mod unreduced_scalar;
|
|
||||||
mod clsag;
|
mod clsag;
|
||||||
mod bulletproofs;
|
mod bulletproofs;
|
||||||
mod address;
|
mod address;
|
||||||
mod seed;
|
mod seed;
|
||||||
mod extra;
|
|
||||||
|
|||||||
@@ -6,17 +6,13 @@ use curve25519_dalek::scalar::Scalar;
|
|||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
hash,
|
hash,
|
||||||
wallet::seed::{
|
wallet::seed::{Seed, Language, classic::trim_by_lang},
|
||||||
Seed, SeedType, SeedError,
|
|
||||||
classic::{self, trim_by_lang},
|
|
||||||
polyseed,
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_classic_seed() {
|
fn test_classic_seed() {
|
||||||
struct Vector {
|
struct Vector {
|
||||||
language: classic::Language,
|
language: Language,
|
||||||
seed: String,
|
seed: String,
|
||||||
spend: String,
|
spend: String,
|
||||||
view: String,
|
view: String,
|
||||||
@@ -24,13 +20,13 @@ fn test_classic_seed() {
|
|||||||
|
|
||||||
let vectors = [
|
let vectors = [
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Chinese,
|
language: Language::Chinese,
|
||||||
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
|
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
|
||||||
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
|
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
|
||||||
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
|
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::English,
|
language: Language::English,
|
||||||
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
|
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
|
||||||
abnormal memoir nylon mostly building shrugged online ember northern \
|
abnormal memoir nylon mostly building shrugged online ember northern \
|
||||||
ruby woes dauntless boil family illness inroads northern"
|
ruby woes dauntless boil family illness inroads northern"
|
||||||
@@ -39,7 +35,7 @@ fn test_classic_seed() {
|
|||||||
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
|
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Dutch,
|
language: Language::Dutch,
|
||||||
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
|
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
|
||||||
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
|
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
|
||||||
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
|
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
|
||||||
@@ -48,7 +44,7 @@ fn test_classic_seed() {
|
|||||||
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
|
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::French,
|
language: Language::French,
|
||||||
seed: "poids vaseux tarte bazar poivre effet entier nuance \
|
seed: "poids vaseux tarte bazar poivre effet entier nuance \
|
||||||
sensuel ennui pacte osselet poudre battre alibi mouton \
|
sensuel ennui pacte osselet poudre battre alibi mouton \
|
||||||
stade paquet pliage gibier type question position projet pliage"
|
stade paquet pliage gibier type question position projet pliage"
|
||||||
@@ -57,7 +53,7 @@ fn test_classic_seed() {
|
|||||||
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
|
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Spanish,
|
language: Language::Spanish,
|
||||||
seed: "minero ocupar mirar evadir octubre cal logro miope \
|
seed: "minero ocupar mirar evadir octubre cal logro miope \
|
||||||
opaco disco ancla litio clase cuello nasal clase \
|
opaco disco ancla litio clase cuello nasal clase \
|
||||||
fiar avance deseo mente grumo negro cordón croqueta clase"
|
fiar avance deseo mente grumo negro cordón croqueta clase"
|
||||||
@@ -66,7 +62,7 @@ fn test_classic_seed() {
|
|||||||
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
|
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::German,
|
language: Language::German,
|
||||||
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
|
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
|
||||||
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
|
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
|
||||||
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
|
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
|
||||||
@@ -75,7 +71,7 @@ fn test_classic_seed() {
|
|||||||
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
|
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Italian,
|
language: Language::Italian,
|
||||||
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
|
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
|
||||||
forzare meritare litigare lezione segreto evasione votare buio \
|
forzare meritare litigare lezione segreto evasione votare buio \
|
||||||
licenza cliente dorso natale crescere vento tutelare vetta evasione"
|
licenza cliente dorso natale crescere vento tutelare vetta evasione"
|
||||||
@@ -84,7 +80,7 @@ fn test_classic_seed() {
|
|||||||
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
|
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Portuguese,
|
language: Language::Portuguese,
|
||||||
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
|
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
|
||||||
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
|
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
|
||||||
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
|
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
|
||||||
@@ -93,7 +89,7 @@ fn test_classic_seed() {
|
|||||||
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
|
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Japanese,
|
language: Language::Japanese,
|
||||||
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
|
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
|
||||||
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
|
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
|
||||||
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
|
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
|
||||||
@@ -102,7 +98,7 @@ fn test_classic_seed() {
|
|||||||
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
|
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Russian,
|
language: Language::Russian,
|
||||||
seed: "шатер икра нация ехать получать инерция доза реальный \
|
seed: "шатер икра нация ехать получать инерция доза реальный \
|
||||||
рыжий таможня лопата душа веселый клетка атлас лекция \
|
рыжий таможня лопата душа веселый клетка атлас лекция \
|
||||||
обгонять паек наивный лыжный дурак стать ежик задача паек"
|
обгонять паек наивный лыжный дурак стать ежик задача паек"
|
||||||
@@ -111,7 +107,7 @@ fn test_classic_seed() {
|
|||||||
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
|
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Esperanto,
|
language: Language::Esperanto,
|
||||||
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
|
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
|
||||||
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
|
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
|
||||||
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
|
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
|
||||||
@@ -120,7 +116,7 @@ fn test_classic_seed() {
|
|||||||
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
|
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::Lojban,
|
language: Language::Lojban,
|
||||||
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
|
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
|
||||||
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
|
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
|
||||||
blabi darno dembi janli blabi fenki bukpu burcu blabi"
|
blabi darno dembi janli blabi fenki bukpu burcu blabi"
|
||||||
@@ -129,7 +125,7 @@ fn test_classic_seed() {
|
|||||||
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
|
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
|
||||||
},
|
},
|
||||||
Vector {
|
Vector {
|
||||||
language: classic::Language::EnglishOld,
|
language: Language::EnglishOld,
|
||||||
seed: "glorious especially puff son moment add youth nowhere \
|
seed: "glorious especially puff son moment add youth nowhere \
|
||||||
throw glide grip wrong rhythm consume very swear \
|
throw glide grip wrong rhythm consume very swear \
|
||||||
bitter heavy eventually begin reason flirt type unable"
|
bitter heavy eventually begin reason flirt type unable"
|
||||||
@@ -137,53 +133,6 @@ fn test_classic_seed() {
|
|||||||
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
|
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
|
||||||
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
|
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
|
||||||
},
|
},
|
||||||
// The following seeds require the language specification in order to calculate
|
|
||||||
// a single valid checksum
|
|
||||||
Vector {
|
|
||||||
language: classic::Language::Spanish,
|
|
||||||
seed: "pluma laico atraer pintor peor cerca balde buscar \
|
|
||||||
lancha batir nulo reloj resto gemelo nevera poder columna gol \
|
|
||||||
oveja latir amplio bolero feliz fuerza nevera"
|
|
||||||
.into(),
|
|
||||||
spend: "30303983fc8d215dd020cc6b8223793318d55c466a86e4390954f373fdc7200a".into(),
|
|
||||||
view: "97c649143f3c147ba59aa5506cc09c7992c5c219bb26964442142bf97980800e".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: classic::Language::Spanish,
|
|
||||||
seed: "pluma pluma pluma pluma pluma pluma pluma pluma \
|
|
||||||
pluma pluma pluma pluma pluma pluma pluma pluma \
|
|
||||||
pluma pluma pluma pluma pluma pluma pluma pluma pluma"
|
|
||||||
.into(),
|
|
||||||
spend: "b4050000b4050000b4050000b4050000b4050000b4050000b4050000b4050000".into(),
|
|
||||||
view: "d73534f7912b395eb70ef911791a2814eb6df7ce56528eaaa83ff2b72d9f5e0f".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: classic::Language::English,
|
|
||||||
seed: "plus plus plus plus plus plus plus plus \
|
|
||||||
plus plus plus plus plus plus plus plus \
|
|
||||||
plus plus plus plus plus plus plus plus plus"
|
|
||||||
.into(),
|
|
||||||
spend: "3b0400003b0400003b0400003b0400003b0400003b0400003b0400003b040000".into(),
|
|
||||||
view: "43a8a7715eed11eff145a2024ddcc39740255156da7bbd736ee66a0838053a02".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: classic::Language::Spanish,
|
|
||||||
seed: "audio audio audio audio audio audio audio audio \
|
|
||||||
audio audio audio audio audio audio audio audio \
|
|
||||||
audio audio audio audio audio audio audio audio audio"
|
|
||||||
.into(),
|
|
||||||
spend: "ba000000ba000000ba000000ba000000ba000000ba000000ba000000ba000000".into(),
|
|
||||||
view: "1437256da2c85d029b293d8c6b1d625d9374969301869b12f37186e3f906c708".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: classic::Language::English,
|
|
||||||
seed: "audio audio audio audio audio audio audio audio \
|
|
||||||
audio audio audio audio audio audio audio audio \
|
|
||||||
audio audio audio audio audio audio audio audio audio"
|
|
||||||
.into(),
|
|
||||||
spend: "7900000079000000790000007900000079000000790000007900000079000000".into(),
|
|
||||||
view: "20bec797ab96780ae6a045dd816676ca7ed1d7c6773f7022d03ad234b581d600".into(),
|
|
||||||
},
|
|
||||||
];
|
];
|
||||||
|
|
||||||
for vector in vectors {
|
for vector in vectors {
|
||||||
@@ -197,21 +146,14 @@ fn test_classic_seed() {
|
|||||||
|
|
||||||
// Test against Monero
|
// Test against Monero
|
||||||
{
|
{
|
||||||
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
|
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
|
||||||
let seed =
|
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&vector.seed))).unwrap());
|
||||||
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(vector.seed.clone()))
|
|
||||||
.unwrap();
|
|
||||||
let trim = trim_seed(&vector.seed);
|
|
||||||
assert_eq!(
|
|
||||||
seed,
|
|
||||||
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(trim)).unwrap()
|
|
||||||
);
|
|
||||||
|
|
||||||
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
|
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
|
||||||
// For classical seeds, Monero directly uses the entropy as a spend key
|
// For classical seeds, Monero directly uses the entropy as a spend key
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy())),
|
Scalar::from_canonical_bytes(*seed.entropy()),
|
||||||
Option::<Scalar>::from(Scalar::from_canonical_bytes(spend)),
|
Scalar::from_canonical_bytes(spend)
|
||||||
);
|
);
|
||||||
|
|
||||||
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
|
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
|
||||||
@@ -221,262 +163,15 @@ fn test_classic_seed() {
|
|||||||
Scalar::from_canonical_bytes(view).unwrap()
|
Scalar::from_canonical_bytes(view).unwrap()
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
|
||||||
Seed::from_entropy(SeedType::Classic(vector.language), Zeroizing::new(spend), None)
|
|
||||||
.unwrap(),
|
|
||||||
seed
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Test against ourselves
|
// Test against ourself
|
||||||
{
|
{
|
||||||
let seed = Seed::new(&mut OsRng, SeedType::Classic(vector.language));
|
let seed = Seed::new(&mut OsRng, vector.language);
|
||||||
println!("{}. seed: {}", line!(), *seed.to_string());
|
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&seed.to_string()))).unwrap());
|
||||||
let trim = trim_seed(&seed.to_string());
|
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
|
||||||
assert_eq!(
|
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
|
||||||
seed,
|
|
||||||
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(trim)).unwrap()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
seed,
|
|
||||||
Seed::from_entropy(SeedType::Classic(vector.language), seed.entropy(), None).unwrap()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
seed,
|
|
||||||
Seed::from_string(SeedType::Classic(vector.language), seed.to_string()).unwrap()
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_polyseed() {
|
|
||||||
struct Vector {
|
|
||||||
language: polyseed::Language,
|
|
||||||
seed: String,
|
|
||||||
entropy: String,
|
|
||||||
birthday: u64,
|
|
||||||
has_prefix: bool,
|
|
||||||
has_accent: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
let vectors = [
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::English,
|
|
||||||
seed: "raven tail swear infant grief assist regular lamp \
|
|
||||||
duck valid someone little harsh puppy airport language"
|
|
||||||
.into(),
|
|
||||||
entropy: "dd76e7359a0ded37cd0ff0f3c829a5ae01673300000000000000000000000000".into(),
|
|
||||||
birthday: 1638446400,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Spanish,
|
|
||||||
seed: "eje fin parte célebre tabú pestaña lienzo puma \
|
|
||||||
prisión hora regalo lengua existir lápiz lote sonoro"
|
|
||||||
.into(),
|
|
||||||
entropy: "5a2b02df7db21fcbe6ec6df137d54c7b20fd2b00000000000000000000000000".into(),
|
|
||||||
birthday: 3118651200,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: true,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::French,
|
|
||||||
seed: "valable arracher décaler jeudi amusant dresser mener épaissir risible \
|
|
||||||
prouesse réserve ampleur ajuster muter caméra enchère"
|
|
||||||
.into(),
|
|
||||||
entropy: "11cfd870324b26657342c37360c424a14a050b00000000000000000000000000".into(),
|
|
||||||
birthday: 1679314966,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: true,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Italian,
|
|
||||||
seed: "caduco midollo copione meninge isotopo illogico riflesso tartaruga fermento \
|
|
||||||
olandese normale tristezza episodio voragine forbito achille"
|
|
||||||
.into(),
|
|
||||||
entropy: "7ecc57c9b4652d4e31428f62bec91cfd55500600000000000000000000000000".into(),
|
|
||||||
birthday: 1679316358,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Portuguese,
|
|
||||||
seed: "caverna custear azedo adeus senador apertada sedoso omitir \
|
|
||||||
sujeito aurora videira molho cartaz gesso dentista tapar"
|
|
||||||
.into(),
|
|
||||||
entropy: "45473063711376cae38f1b3eba18c874124e1d00000000000000000000000000".into(),
|
|
||||||
birthday: 1679316657,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Czech,
|
|
||||||
seed: "usmrtit nora dotaz komunita zavalit funkce mzda sotva akce \
|
|
||||||
vesta kabel herna stodola uvolnit ustrnout email"
|
|
||||||
.into(),
|
|
||||||
entropy: "7ac8a4efd62d9c3c4c02e350d32326df37821c00000000000000000000000000".into(),
|
|
||||||
birthday: 1679316898,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Korean,
|
|
||||||
seed: "전망 선풍기 국제 무궁화 설사 기름 이론적 해안 절망 예선 \
|
|
||||||
지우개 보관 절망 말기 시각 귀신"
|
|
||||||
.into(),
|
|
||||||
entropy: "684663fda420298f42ed94b2c512ed38ddf12b00000000000000000000000000".into(),
|
|
||||||
birthday: 1679317073,
|
|
||||||
has_prefix: false,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Japanese,
|
|
||||||
seed: "うちあわせ ちつじょ つごう しはい けんこう とおる てみやげ はんとし たんとう \
|
|
||||||
といれ おさない おさえる むかう ぬぐう なふだ せまる"
|
|
||||||
.into(),
|
|
||||||
entropy: "94e6665518a6286c6e3ba508a2279eb62b771f00000000000000000000000000".into(),
|
|
||||||
birthday: 1679318722,
|
|
||||||
has_prefix: false,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::ChineseTraditional,
|
|
||||||
seed: "亂 挖 斤 柄 代 圈 枝 轄 魯 論 函 開 勘 番 榮 壁".into(),
|
|
||||||
entropy: "b1594f585987ab0fd5a31da1f0d377dae5283f00000000000000000000000000".into(),
|
|
||||||
birthday: 1679426433,
|
|
||||||
has_prefix: false,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::ChineseSimplified,
|
|
||||||
seed: "啊 百 族 府 票 划 伪 仓 叶 虾 借 溜 晨 左 等 鬼".into(),
|
|
||||||
entropy: "21cdd366f337b89b8d1bc1df9fe73047c22b0300000000000000000000000000".into(),
|
|
||||||
birthday: 1679426817,
|
|
||||||
has_prefix: false,
|
|
||||||
has_accent: false,
|
|
||||||
},
|
|
||||||
// The following seed requires the language specification in order to calculate
|
|
||||||
// a single valid checksum
|
|
||||||
Vector {
|
|
||||||
language: polyseed::Language::Spanish,
|
|
||||||
seed: "impo sort usua cabi venu nobl oliv clim \
|
|
||||||
cont barr marc auto prod vaca torn fati"
|
|
||||||
.into(),
|
|
||||||
entropy: "dbfce25fe09b68a340e01c62417eeef43ad51800000000000000000000000000".into(),
|
|
||||||
birthday: 1701511650,
|
|
||||||
has_prefix: true,
|
|
||||||
has_accent: true,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
for vector in vectors {
|
|
||||||
let add_whitespace = |mut seed: String| {
|
|
||||||
seed.push(' ');
|
|
||||||
seed
|
|
||||||
};
|
|
||||||
|
|
||||||
let seed_without_accents = |seed: &str| {
|
|
||||||
seed
|
|
||||||
.split_whitespace()
|
|
||||||
.map(|w| w.chars().filter(char::is_ascii).collect::<String>())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(" ")
|
|
||||||
};
|
|
||||||
|
|
||||||
let trim_seed = |seed: &str| {
|
|
||||||
let seed_to_trim =
|
|
||||||
if vector.has_accent { seed_without_accents(seed) } else { seed.to_string() };
|
|
||||||
seed_to_trim
|
|
||||||
.split_whitespace()
|
|
||||||
.map(|w| {
|
|
||||||
let mut ascii = 0;
|
|
||||||
let mut to_take = w.len();
|
|
||||||
for (i, char) in w.chars().enumerate() {
|
|
||||||
if char.is_ascii() {
|
|
||||||
ascii += 1;
|
|
||||||
}
|
|
||||||
if ascii == polyseed::PREFIX_LEN {
|
|
||||||
// +1 to include this character, which put us at the prefix length
|
|
||||||
to_take = i + 1;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
w.chars().take(to_take).collect::<String>()
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(" ")
|
|
||||||
};
|
|
||||||
|
|
||||||
// String -> Seed
|
|
||||||
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
|
|
||||||
let seed =
|
|
||||||
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(vector.seed.clone()))
|
|
||||||
.unwrap();
|
|
||||||
let trim = trim_seed(&vector.seed);
|
|
||||||
let add_whitespace = add_whitespace(vector.seed.clone());
|
|
||||||
let seed_without_accents = seed_without_accents(&vector.seed);
|
|
||||||
|
|
||||||
// Make sure a version with added whitespace still works
|
|
||||||
let whitespaced_seed =
|
|
||||||
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(add_whitespace))
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(seed, whitespaced_seed);
|
|
||||||
// Check trimmed versions works
|
|
||||||
if vector.has_prefix {
|
|
||||||
let trimmed_seed =
|
|
||||||
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(trim)).unwrap();
|
|
||||||
assert_eq!(seed, trimmed_seed);
|
|
||||||
}
|
|
||||||
// Check versions without accents work
|
|
||||||
if vector.has_accent {
|
|
||||||
let seed_without_accents = Seed::from_string(
|
|
||||||
SeedType::Polyseed(vector.language),
|
|
||||||
Zeroizing::new(seed_without_accents),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(seed, seed_without_accents);
|
|
||||||
}
|
|
||||||
|
|
||||||
let entropy = Zeroizing::new(hex::decode(vector.entropy).unwrap().try_into().unwrap());
|
|
||||||
assert_eq!(seed.entropy(), entropy);
|
|
||||||
assert!(seed.birthday().abs_diff(vector.birthday) < polyseed::TIME_STEP);
|
|
||||||
|
|
||||||
// Entropy -> Seed
|
|
||||||
let from_entropy =
|
|
||||||
Seed::from_entropy(SeedType::Polyseed(vector.language), entropy, Some(seed.birthday()))
|
|
||||||
.unwrap();
|
|
||||||
assert_eq!(seed.to_string(), from_entropy.to_string());
|
|
||||||
|
|
||||||
// Check against ourselves
|
|
||||||
{
|
|
||||||
let seed = Seed::new(&mut OsRng, SeedType::Polyseed(vector.language));
|
|
||||||
println!("{}. seed: {}", line!(), *seed.to_string());
|
|
||||||
assert_eq!(
|
|
||||||
seed,
|
|
||||||
Seed::from_string(SeedType::Polyseed(vector.language), seed.to_string()).unwrap()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
seed,
|
|
||||||
Seed::from_entropy(
|
|
||||||
SeedType::Polyseed(vector.language),
|
|
||||||
seed.entropy(),
|
|
||||||
Some(seed.birthday())
|
|
||||||
)
|
|
||||||
.unwrap()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_invalid_polyseed() {
|
|
||||||
// This seed includes unsupported features bits and should error on decode
|
|
||||||
let seed = "include domain claim resemble urban hire lunch bird \
|
|
||||||
crucial fire best wife ring warm ignore model"
|
|
||||||
.into();
|
|
||||||
let res =
|
|
||||||
Seed::from_string(SeedType::Polyseed(polyseed::Language::English), Zeroizing::new(seed));
|
|
||||||
assert_eq!(res, Err(SeedError::UnsupportedFeatures));
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,32 +0,0 @@
|
|||||||
use curve25519_dalek::scalar::Scalar;
|
|
||||||
|
|
||||||
use crate::unreduced_scalar::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn recover_scalars() {
|
|
||||||
let test_recover = |stored: &str, recovered: &str| {
|
|
||||||
let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap());
|
|
||||||
let recovered =
|
|
||||||
Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap();
|
|
||||||
assert_eq!(stored.recover_monero_slide_scalar(), recovered);
|
|
||||||
};
|
|
||||||
|
|
||||||
// https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf
|
|
||||||
// Table 4.
|
|
||||||
test_recover(
|
|
||||||
"cb2be144948166d0a9edb831ea586da0c376efa217871505ad77f6ff80f203f8",
|
|
||||||
"b8ffd6a1aee47828808ab0d4c8524cb5c376efa217871505ad77f6ff80f20308",
|
|
||||||
);
|
|
||||||
test_recover(
|
|
||||||
"343d3df8a1051c15a400649c423dc4ed58bef49c50caef6ca4a618b80dee22f4",
|
|
||||||
"21113355bc682e6d7a9d5b3f2137a30259bef49c50caef6ca4a618b80dee2204",
|
|
||||||
);
|
|
||||||
test_recover(
|
|
||||||
"c14f75d612800ca2c1dcfa387a42c9cc086c005bc94b18d204dd61342418eba7",
|
|
||||||
"4f473804b1d27ab2c789c80ab21d034a096c005bc94b18d204dd61342418eb07",
|
|
||||||
);
|
|
||||||
test_recover(
|
|
||||||
"000102030405060708090a0b0c0d0e0f826c4f6e2329a31bc5bc320af0b2bcbb",
|
|
||||||
"a124cfd387f461bf3719e03965ee6877826c4f6e2329a31bc5bc320af0b2bc0b",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user