Mirror of https://github.com/serai-dex/serai.git, synced 2025-12-08 04:09:23 +00:00

Compare commits: d219b77bd0...custom-gen (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 6ed5592d22 |  |
.gitattributes (vendored) | 2

@@ -1,5 +1,3 @@
# Auto detect text files and perform LF normalization
* text=auto
* text eol=lf

*.pdf binary
.github/LICENSE → .github/actions/LICENSE (vendored) | 2

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2022-2025 Luke Parker
Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
.github/actions/bitcoin/action.yml (vendored) | 40

@@ -1,40 +0,0 @@
name: bitcoin-regtest
description: Spawns a regtest Bitcoin daemon

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: "30.0"

runs:
  using: "composite"
  steps:
    - name: Bitcoin Daemon Cache
      id: cache-bitcoind
      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
      with:
        path: bitcoin.tar.gz
        key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Bitcoin Daemon
      if: steps.cache-bitcoind.outputs.cache-hit != 'true'
      shell: bash
      run: |
        RUNNER_OS=linux
        RUNNER_ARCH=x86_64
        FILE=bitcoin-${{ inputs.version }}-$RUNNER_ARCH-$RUNNER_OS-gnu.tar.gz

        wget https://bitcoincore.org/bin/bitcoin-core-${{ inputs.version }}/$FILE
        mv $FILE bitcoin.tar.gz

    - name: Extract the Bitcoin Daemon
      shell: bash
      run: |
        tar xzvf bitcoin.tar.gz
        cd bitcoin-${{ inputs.version }}
        sudo mv bin/* /bin && sudo mv lib/* /lib

    - name: Bitcoin Regtest Daemon
      shell: bash
      run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/bitcoin/run.sh -txindex -daemon
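The `run.sh` orchestration script referenced above isn't part of this diff. As a rough sketch of an equivalent direct launch (the data directory and the `serai:seraidex` credentials, borrowed from the wallet-RPC action further down, are assumptions for illustration):

```bash
# Hypothetical stand-in for orchestration/dev/networks/bitcoin/run.sh:
# launch the freshly installed bitcoind on regtest, forwarding the flags
# the action appends (-txindex -daemon).
mkdir -p .bitcoin-regtest
bitcoind -regtest -datadir=.bitcoin-regtest \
  -rpcuser=serai -rpcpassword=seraidex \
  -txindex -daemon
```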
.github/actions/build-dependencies/action.yml (vendored) | 109

@@ -1,85 +1,50 @@
name: build-dependencies
description: Installs build dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    required: true
    default:

  rust-toolchain:
    description: "Rust toolchain to install"
    required: false
    default: stable

  rust-components:
    description: "Rust components to install"
    required: false
    default:

runs:
  using: "composite"
  steps:
    - name: Remove unused packages
      shell: bash
      run: |
        # Ensure the repositories are synced
        sudo apt update -y

        # Actually perform the removals
        sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
        sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
        sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"

        sudo apt remove -y --allow-remove-essential -f shim-signed *python3*
        # This removal command requires the prior removals due to unmet dependencies otherwise
        sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"

        # Reinstall python3 as a general dependency of a functional operating system
        sudo apt install -y python3 --fix-missing
      if: runner.os == 'Linux'

    - name: Remove unused packages
      shell: bash
      run: |
        (gem uninstall -aIx) || (exit 0)
        brew uninstall --force "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
        brew uninstall --force "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
        brew uninstall --force "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
        brew uninstall --force "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
        brew cleanup
      if: runner.os == 'macOS'

    - name: Install dependencies
      shell: bash
      run: |
        if [ "$RUNNER_OS" == "Linux" ]; then
          sudo apt install -y ca-certificates protobuf-compiler libclang-dev
        elif [ "$RUNNER_OS" == "Windows" ]; then
          choco install protoc
        elif [ "$RUNNER_OS" == "macOS" ]; then
          brew install protobuf llvm
          HOMEBREW_ROOT_PATH=/opt/homebrew # Apple Silicon
          if [ $(uname -m) = "x86_64" ]; then HOMEBREW_ROOT_PATH=/usr/local; fi # Intel
          ls $HOMEBREW_ROOT_PATH/opt/llvm/lib | grep "libclang.dylib" # Make sure this installed `libclang`
          echo "DYLD_LIBRARY_PATH=$HOMEBREW_ROOT_PATH/opt/llvm/lib:$DYLD_LIBRARY_PATH" >> "$GITHUB_ENV"
        fi
    - name: Install Protobuf
      uses: arduino/setup-protoc@master
      with:
        repo-token: ${{ inputs.github-token }}

    - name: Install solc
      shell: bash
      run: |
        cargo +1.91 install svm-rs --version =0.5.19
        svm install 0.8.29
        svm use 0.8.29
        pip3 install solc-select==0.2.1
        solc-select install 0.8.16
        solc-select use 0.8.16

    - name: Remove preinstalled Docker
      shell: bash
      run: |
        docker system prune -a --volumes
        sudo apt remove -y *docker*
        # Install uidmap which will be required for the explicitly installed Docker
        sudo apt install uidmap
      if: runner.os == 'Linux'

    - name: Update system dependencies
      shell: bash
      run: |
        sudo apt update -y
        sudo apt upgrade -y
        sudo apt autoremove -y
        sudo apt clean
      if: runner.os == 'Linux'

    - name: Install rootless Docker
      uses: docker/setup-docker-action@b60f85385d03ac8acfca6d9996982511d8620a19
    - name: Install Rust
      uses: dtolnay/rust-toolchain@master
      with:
        rootless: true
        set-host: true
      if: runner.os == 'Linux'
        toolchain: ${{ inputs.rust-toolchain }}
        components: ${{ inputs.rust-components }}

    # - name: Cache Rust
    #   uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
    - name: Get nightly version to use
      id: nightly
      shell: bash
      run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

    - name: Install WASM toolchain
      uses: dtolnay/rust-toolchain@master
      with:
        toolchain: ${{ steps.nightly.outputs.version }}
        targets: wasm32-unknown-unknown
.github/actions/monero-wallet-rpc/action.yml (vendored) | 49

@@ -1,49 +0,0 @@
name: monero-wallet-rpc
description: Spawns a Monero Wallet-RPC.

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: v0.18.4.3

runs:
  using: "composite"
  steps:
    - name: Monero Wallet RPC Cache
      id: cache-monero-wallet-rpc
      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
      with:
        path: monero-wallet-rpc
        key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Monero Wallet RPC
      if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
      # Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
      # to the contained folder not following the same naming scheme and
      # requiring further expansion not worth doing right now
      shell: bash
      run: |
        RUNNER_OS=${{ runner.os }}
        RUNNER_ARCH=${{ runner.arch }}

        RUNNER_OS=${RUNNER_OS,,}
        RUNNER_ARCH=${RUNNER_ARCH,,}

        RUNNER_OS=linux
        RUNNER_ARCH=x64

        FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
        wget https://downloads.getmonero.org/cli/$FILE
        tar -xvf $FILE

        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc

    - name: Monero Wallet RPC
      shell: bash
      run: |
        ./monero-wallet-rpc --allow-mismatched-daemon-version \
          --daemon-address 0.0.0.0:18081 --daemon-login serai:seraidex \
          --disable-rpc-login --rpc-bind-port 18082 \
          --wallet-dir ./ \
          --detach
.github/actions/monero/action.yml (vendored) | 12

@@ -5,16 +5,16 @@ inputs:
  version:
    description: "Version to download and run"
    required: false
    default: v0.18.4.3
    default: v0.18.0.0

runs:
  using: "composite"
  steps:
    - name: Monero Daemon Cache
      id: cache-monerod
      uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
      uses: actions/cache@v3
      with:
        path: /usr/bin/monerod
        path: monerod
        key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Monero Daemon

@@ -37,10 +37,8 @@ runs:
        wget https://downloads.getmonero.org/cli/$FILE
        tar -xvf $FILE

        sudo mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod /usr/bin/monerod
        sudo chmod 777 /usr/bin/monerod
        sudo chmod +x /usr/bin/monerod
        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod

    - name: Monero Regtest Daemon
      shell: bash
      run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/monero/run.sh --detach
      run: ./monerod --regtest --offline --fixed-difficulty=1 --detach
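Either variant leaves a daemon detached in the background. As a quick liveness check (not part of the action; the default RPC port 18081 is an assumption here), the daemon's JSON-RPC endpoint can be polled once it's up:

```bash
# Query the regtest daemon's status over Monero's JSON-RPC interface.
curl -s http://127.0.0.1:18081/json_rpc \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc":"2.0","id":"0","method":"get_info"}'
```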
.github/actions/test-dependencies/action.yml (vendored) | 27

@@ -2,37 +2,30 @@ name: test-dependencies
description: Installs test dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    required: true
    default:

  monero-version:
    description: "Monero version to download and run as a regtest node"
    required: false
    default: v0.18.4.3

  bitcoin-version:
    description: "Bitcoin version to download and run as a regtest node"
    required: false
    default: "30.0"
    default: v0.18.0.0

runs:
  using: "composite"
  steps:
    - name: Install Build Dependencies
      uses: ./.github/actions/build-dependencies
      with:
        github-token: ${{ inputs.github-token }}

    - name: Install Foundry
      uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
      uses: foundry-rs/foundry-toolchain@v1
      with:
        version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9
        cache: false
        version: nightly

    - name: Run a Monero Regtest Node
      uses: ./.github/actions/monero
      with:
        version: ${{ inputs.monero-version }}

    - name: Run a Bitcoin Regtest Node
      uses: ./.github/actions/bitcoin
      with:
        version: ${{ inputs.bitcoin-version }}

    - name: Run a Monero Wallet-RPC
      uses: ./.github/actions/monero-wallet-rpc
.github/nightly-version (vendored) | 2

@@ -1 +1 @@
nightly-2025-11-11
nightly-2022-12-01
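This file pins the nightly toolchain the rest of CI consumes; several workflows in this diff read it rather than hard-coding a version, along these lines:

```bash
# Resolve the pinned nightly, then address it explicitly (the pattern used
# by the lint and pages workflows below).
NIGHTLY=$(cat .github/nightly-version)
rustup toolchain install "$NIGHTLY" --profile minimal -c rustfmt
cargo +"$NIGHTLY" fmt -- --check
```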
.github/workflows/common-tests.yml (vendored) | 34

@@ -1,34 +0,0 @@
name: common/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"

  pull_request:
    paths:
      - "common/**"

  workflow_dispatch:

jobs:
  test-common:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p std-shims \
            -p zalloc \
            -p patchable-async-sleep \
            -p serai-db \
            -p serai-env \
            -p serai-task \
            -p simple-request
.github/workflows/coordinator-tests.yml (vendored) | 40

@@ -1,40 +0,0 @@
name: Coordinator Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "coordinator/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/coordinator/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "coordinator/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/coordinator/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run coordinator Docker tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests
.github/workflows/crypto-tests.yml (vendored) | 48

@@ -1,48 +0,0 @@
name: crypto/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"

  workflow_dispatch:

jobs:
  test-crypto:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p flexible-transcript \
            -p ff-group-tests \
            -p dalek-ff-group \
            -p minimal-ed448 \
            -p ciphersuite \
            -p ciphersuite-kp256 \
            -p multiexp \
            -p schnorr-signatures \
            -p prime-field \
            -p short-weierstrass \
            -p secq256k1 \
            -p embedwards25519 \
            -p dkg \
            -p dkg-recovery \
            -p dkg-dealer \
            -p dkg-musig \
            -p dkg-evrf \
            -p modular-frost \
            -p frost-schnorrkel
.github/workflows/daily-deny.yml (vendored) | 11

@@ -9,16 +9,19 @@ jobs:
    name: Run cargo deny
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - uses: actions/checkout@v3

      - name: Advisory Cache
        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
        uses: actions/cache@v3
        with:
          path: ~/.cargo/advisory-db
          key: rust-advisory-db

      - name: Install cargo
        uses: dtolnay/rust-toolchain@stable

      - name: Install cargo deny
        run: cargo +1.91 install cargo-deny --version =0.18.5
        run: cargo install --locked cargo-deny

      - name: Run cargo deny
        run: cargo deny -L error --all-features check --hide-inclusion-graph
        run: cargo deny -L error --all-features check
.github/workflows/full-stack-tests.yml (vendored) | 22

@@ -1,22 +0,0 @@
name: Full Stack Tests

on:
  push:
    branches:
      - develop

  pull_request:

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Full Stack Docker tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests
.github/workflows/lint.yml (vendored) | 209

@@ -1,209 +0,0 @@
name: Lint

on:
  push:
    branches:
      - develop
  pull_request:
  workflow_dispatch:

jobs:
  clippy:
    strategy:
      matrix:
        os: [ubuntu-latest, macos-15-intel, macos-latest, windows-latest]
    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Get nightly version to use
        id: nightly
        shell: bash
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install nightly rust
        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c clippy

      - name: Run Clippy
        run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module

      # Also verify the lockfile isn't dirty
      # This happens when someone edits a Cargo.toml yet doesn't do anything
      # which causes the lockfile to be updated
      # The above clippy run will cause it to be updated, so checking there's
      # no differences present now performs the desired check
      - name: Verify lockfile
        shell: bash
        run: git diff | wc -l | LC_ALL="en_US.utf8" grep -x -e "^[ ]*0"
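The lockfile check above counts diff lines and greps for zero. A terser equivalent (a sketch of an alternative, not what this workflow runs) lets git report dirtiness directly:

```bash
# Exits non-zero if the working tree differs from HEAD, e.g. because the
# preceding clippy run regenerated Cargo.lock.
git diff --exit-code
```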
  deny:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Advisory Cache
        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809
        with:
          path: ~/.cargo/advisory-db
          key: rust-advisory-db

      - name: Install cargo deny
        run: cargo +1.91 install cargo-deny --version =0.18.5

      - name: Run cargo deny
        run: cargo deny -L error --all-features check --hide-inclusion-graph

  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Get nightly version to use
        id: nightly
        shell: bash
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Install nightly rust
        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt

      - name: Run rustfmt
        run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check

      - name: Install foundry
        uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
        with:
          version: nightly-41d4e5437107f6f42c7711123890147bc736a609
          cache: false

      - name: Run forge fmt
        run: FOUNDRY_FMT_SORT_INPUTS=false FOUNDRY_FMT_LINE_LENGTH=100 FOUNDRY_FMT_TAB_WIDTH=2 FOUNDRY_FMT_BRACKET_SPACING=true FOUNDRY_FMT_INT_TYPES=preserve forge fmt --check $(find . -iname "*.sol")

  machete:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Verify all dependencies are in use
        run: |
          cargo +1.91 install cargo-machete --version =0.9.1
          cargo +1.91 machete

  msrv:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Verify claimed `rust-version`
        shell: bash
        run: |
          cargo +1.91 install cargo-msrv --version =0.18.4

          function check_msrv {
            # We `cd` into the directory passed as the first argument, but will return to the
            # directory called from.
            return_to=$(pwd)
            echo "Checking $1"
            cd $1

            # We then find the existing `rust-version` using `grep` (for the right line) and then a
            # regex (to strip to just the major and minor version).
            existing=$(cat ./Cargo.toml | grep "rust-version" | grep -Eo "[0-9]+\.[0-9]+")

            # We then backup the `Cargo.toml`, allowing us to restore it after, saving time on future
            # MSRV checks (as they'll benefit from immediately exiting if the queried version is less
            # than the declared MSRV).
            mv ./Cargo.toml ./Cargo.toml.bak

            # We then use an inverted (`-v`) grep to remove the existing `rust-version` from the
            # `Cargo.toml`, as required because else earlier versions of Rust won't even attempt to
            # compile this crate.
            cat ./Cargo.toml.bak | grep -v "rust-version" > Cargo.toml

            # We then find the actual `rust-version` using `cargo-msrv` (again stripping to just the
            # major and minor version).
            actual=$(cargo msrv find --output-format minimal | grep -Eo "^[0-9]+\.[0-9]+")

            # Finally, we compare the two.
            echo "Declared rust-version: $existing"
            echo "Actual rust-version: $actual"
            [ $existing == $actual ]
            result=$?

            # Restore the original `Cargo.toml`.
            rm Cargo.toml
            mv ./Cargo.toml.bak ./Cargo.toml

            # Return to the directory called from and return the result.
            cd $return_to
            return $result
          }

          # Check each member of the workspace
          function check_workspace {
            # Get the members array from the workspace's `Cargo.toml`
            cargo_toml_lines=$(cat ./Cargo.toml | wc -l)
            # Keep all lines after the start of the array, then keep all lines before the next "]"
            members=$(cat Cargo.toml | grep "members\ \=\ \[" -m1 -A$cargo_toml_lines | grep "]" -m1 -B$cargo_toml_lines)

            # Parse out any comments, whitespace, including comments post-fixed on the same line as an entry
            # We accomplish the latter by pruning all characters after the entry's ","
            members=$(echo "$members" | grep -Ev "^[[:space:]]*(#|$)" | awk -F',' '{print $1","}')
            # Replace the first line, which was "members = [" and is now "members = [,", with "["
            members=$(echo "$members" | sed "1s/.*/\[/")
            # Correct the last line, which was malleated to "],"
            members=$(echo "$members" | sed "$(echo "$members" | wc -l)s/\]\,/\]/")

            # Don't check the following
            # Most of these are binaries, with the exception of the Substrate runtime which has a
            # bespoke build pipeline
            members=$(echo "$members" | grep -v "networks/ethereum/relayer\"")
            members=$(echo "$members" | grep -v "message-queue\"")
            members=$(echo "$members" | grep -v "processor/bin\"")
            members=$(echo "$members" | grep -v "processor/bitcoin\"")
            members=$(echo "$members" | grep -v "processor/ethereum\"")
            members=$(echo "$members" | grep -v "processor/monero\"")
            members=$(echo "$members" | grep -v "coordinator\"")
            members=$(echo "$members" | grep -v "substrate/runtime\"")
            members=$(echo "$members" | grep -v "substrate/node\"")
            members=$(echo "$members" | grep -v "orchestration\"")

            # Don't check the tests
            members=$(echo "$members" | grep -v "mini\"")
            members=$(echo "$members" | grep -v "tests/")

            # Remove the trailing comma by replacing the last line's "," with ""
            members=$(echo "$members" | sed "$(($(echo "$members" | wc -l) - 1))s/\,//")

            echo $members | jq -r ".[]" | while read -r member; do
              check_msrv $member
              correct=$?
              if [ $correct -ne 0 ]; then
                return $correct
              fi
            done
          }
          check_workspace
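The `grep`/`sed`/`awk` pipeline above hand-parses the `members` array out of the workspace `Cargo.toml`. For comparison, a shorter sketch (an alternative, not what this job runs) could let cargo parse the manifests, assuming `jq` is present on the runner:

```bash
# List each workspace member with its declared rust-version, letting cargo
# handle manifest parsing instead of grep/sed.
cargo metadata --format-version 1 --no-deps \
  | jq -r '.packages[] | "\(.name): rust-version \(.rust_version // "unset")"'
```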
  slither:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Slither
        run: |
          python3 -m pip install slither-analyzer

          slither --include-paths ./networks/ethereum/schnorr/contracts/Schnorr.sol
          slither --include-paths ./networks/ethereum/schnorr/contracts ./networks/ethereum/schnorr/contracts/tests/Schnorr.sol
          slither processor/ethereum/deployer/contracts/Deployer.sol
          slither processor/ethereum/erc20/contracts/IERC20.sol

          cp networks/ethereum/schnorr/contracts/Schnorr.sol processor/ethereum/router/contracts/
          cp processor/ethereum/erc20/contracts/IERC20.sol processor/ethereum/router/contracts/
          cd processor/ethereum/router/contracts
          slither Router.sol
.github/workflows/message-queue-tests.yml (vendored) | 36

@@ -1,36 +0,0 @@
name: Message Queue Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "message-queue/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/message-queue/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "message-queue/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/message-queue/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run message-queue Docker tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests
.github/workflows/mini-tests.yml (vendored) | 26

@@ -1,26 +0,0 @@
name: mini/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "mini/**"

  pull_request:
    paths:
      - "mini/**"

  workflow_dispatch:

jobs:
  test-common:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai
.github/workflows/monero-tests.yaml (vendored, new file) | 56

@@ -0,0 +1,56 @@
name: Monero Tests

on:
  push:
    branches:
      - develop
    paths:
      - "coins/monero/**"

  pull_request:
    paths:
      - "coins/monero/**"

jobs:
  # Only run these once since they will be consistent regardless of any node
  unit-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Unit Tests Without Features
        run: cargo test --package monero-serai --lib

      # Doesn't run unit tests with features as the tests workflow will

  integration-tests:
    runs-on: ubuntu-latest
    # Test against all supported protocol versions
    strategy:
      matrix:
        version: [v0.17.3.2, v0.18.0.0]

    steps:
      - uses: actions/checkout@v3

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          monero-version: ${{ matrix.version }}

      - name: Run Integration Tests Without Features
        # https://github.com/rust-lang/cargo/issues/8396
        run: cargo test --package monero-serai --test '*'

      - name: Run Integration Tests
        # Don't run if the tests workflow also will
        if: ${{ matrix.version != 'v0.18.0.0' }}
        run: |
          cargo test --package monero-serai --all-features --test '*'
          cargo test --package serai-processor monero
.github/workflows/monthly-nightly-update.yml (vendored) | 4

@@ -9,7 +9,7 @@ jobs:
    name: Update nightly
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - uses: actions/checkout@v3
        with:
          submodules: "recursive"

@@ -28,7 +28,7 @@ jobs:
          git push -u origin $(date +"nightly-%Y-%m")

      - name: Pull Request
        uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410
        uses: actions/github-script@v6
        with:
          script: |
            const { repo, owner } = context.repo;
.github/workflows/networks-tests.yml (vendored) | 36

@@ -1,36 +0,0 @@
name: networks/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"

  workflow_dispatch:

jobs:
  test-networks:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p bitcoin-serai \
            -p build-solidity-contracts \
            -p ethereum-schnorr-contract \
            -p alloy-simple-request-transport \
            -p serai-ethereum-relayer \
.github/workflows/no-std.yml (vendored) | 45

@@ -1,45 +0,0 @@
name: no-std build

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "tests/no-std/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "tests/no-std/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Get nightly version to use
        id: nightly
        shell: bash
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Install RISC-V Toolchain
        run: |
          sudo apt update
          sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib
          rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal --component rust-src --target riscv32imac-unknown-none-elf

      - name: Verify no-std builds
        run: |
          CFLAGS=-I/usr/include cargo +${{ steps.nightly.outputs.version }} build --target riscv32imac-unknown-none-elf -Z build-std=core -p serai-no-std-tests
          CFLAGS=-I/usr/include cargo +${{ steps.nightly.outputs.version }} build --target riscv32imac-unknown-none-elf -Z build-std=core,alloc -p serai-no-std-tests --features "alloc"
.github/workflows/pages.yml (vendored) | 91

@@ -1,91 +0,0 @@
# MIT License
#
# Copyright (c) 2022 just-the-docs
# Copyright (c) 2022-2024 Luke Parker
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

name: Deploy Rust docs and Jekyll site to Pages

on:
  push:
    branches:
      - "develop"

  workflow_dispatch:

permissions:
  contents: read
  pages: write
  id-token: write

# Only allow one concurrent deployment
concurrency:
  group: "pages"
  cancel-in-progress: true

jobs:
  # Build job
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Setup Ruby
        uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb
        with:
          bundler-cache: true
          cache-version: 0
          working-directory: "${{ github.workspace }}/docs"
      - name: Setup Pages
        id: pages
        uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b
      - name: Build with Jekyll
        run: cd ${{ github.workspace }}/docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
        env:
          JEKYLL_ENV: production

      - name: Get nightly version to use
        id: nightly
        shell: bash
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
      - name: Build Rust docs
        run: |
          rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-docs
          RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --no-deps --all-features
          mv target/doc docs/_site/rust

      - name: Upload artifact
        uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b
        with:
          path: "docs/_site/"

  # Deployment job
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e
.github/workflows/processor-tests.yml (vendored) | 40

@@ -1,40 +0,0 @@
name: Processor Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/processor/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/processor/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run processor Docker tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests
.github/workflows/reproducible-runtime.yml (vendored) | 36

@@ -1,36 +0,0 @@
name: Reproducible Runtime

on:
  push:
    branches:
      - develop
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  pull_request:
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Reproducible Runtime tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests
.github/workflows/tests.yml (vendored) | 140

@@ -4,111 +4,77 @@ on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "coordinator/**"
      - "substrate/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "coordinator/**"
      - "substrate/**"

  workflow_dispatch:

jobs:
  test-infra:
  clippy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - uses: actions/checkout@v3

      - name: Get nightly version to use
        id: nightly
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          # Clippy requires nightly due to serai-runtime requiring it
          rust-toolchain: ${{ steps.nightly.outputs.version }}
          rust-components: clippy

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p serai-message-queue \
            -p serai-processor-messages \
            -p serai-processor-key-gen \
            -p serai-processor-view-keys \
            -p serai-processor-frost-attempt-manager \
            -p serai-processor-primitives \
            -p serai-processor-scanner \
            -p serai-processor-scheduler-primitives \
            -p serai-processor-utxo-scheduler-primitives \
            -p serai-processor-utxo-scheduler \
            -p serai-processor-transaction-chaining-scheduler \
            -p serai-processor-smart-contract-scheduler \
            -p serai-processor-signers \
            -p serai-processor-bin \
            -p serai-bitcoin-processor \
            -p serai-processor-ethereum-primitives \
            -p serai-processor-ethereum-test-primitives \
            -p serai-processor-ethereum-deployer \
            -p serai-processor-ethereum-router \
            -p serai-processor-ethereum-erc20 \
            -p serai-ethereum-processor \
            -p serai-monero-processor \
            -p tendermint-machine \
            -p tributary-sdk \
            -p serai-cosign-types \
            -p serai-cosign \
            -p serai-coordinator-substrate \
            -p serai-coordinator-tributary \
            -p serai-coordinator-p2p \
            -p serai-coordinator-libp2p-p2p \
            -p serai-coordinator \
            -p serai-orchestrator \
            -p serai-docker-tests
      - name: Run Clippy
        run: cargo clippy --all-features --tests -- -D warnings -A dead_code

  test-substrate:
  deny:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - uses: actions/checkout@v3

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
      - name: Advisory Cache
        uses: actions/cache@v3
        with:
          path: ~/.cargo/advisory-db
          key: rust-advisory-db

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p serai-primitives \
            -p serai-abi \
            -p serai-core-pallet \
            -p serai-coins-pallet \
            -p serai-validator-sets-pallet \
            -p serai-signals-pallet \
            -p serai-dex-pallet \
            -p serai-genesis-liquidity-pallet \
            -p serai-economic-security-pallet \
            -p serai-emissions-pallet \
            -p serai-in-instructions-pallet \
            -p serai-runtime \
            -p serai-node
            -p serai-substrate-tests
      - name: Install cargo
        uses: dtolnay/rust-toolchain@stable

  test-serai-client:
      - name: Install cargo deny
        run: cargo install --locked cargo-deny

      - name: Run cargo deny
        run: cargo deny -L error --all-features check

  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - uses: actions/checkout@v3

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-bitcoin
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-ethereum
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-monero
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-serai
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
        run: cargo test --all-features

  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Get nightly version to use
        id: nightly
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Install rustfmt
        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: ${{ steps.nightly.outputs.version }}
          components: rustfmt

      - name: Run rustfmt
        run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
.gitignore (vendored) | 11

@@ -1,13 +1,2 @@
target

# Don't commit any `Cargo.lock` which aren't the workspace's
Cargo.lock
!/Cargo.lock

# Don't commit any `Dockerfile`, as they're auto-generated, except the only one which isn't
Dockerfile
!orchestration/runtime/Dockerfile

.test-logs

.vscode
@@ -1,4 +1,3 @@
edition = "2021"
tab_spaces = 2

max_width = 100
Cargo.lock (generated) | 11780

File diff suppressed because it is too large
Cargo.toml | 249

@@ -1,269 +1,58 @@
[workspace]
resolver = "2"
members = [
  "common/std-shims",
  "common/zalloc",
  "common/patchable-async-sleep",
  "common/db",
  "common/env",
  "common/task",
  "common/request",

  "crypto/transcript",

  "crypto/ff-group-tests",
  "crypto/dalek-ff-group",
  "crypto/ed448",
  "crypto/dynamic-generator",
  "crypto/ciphersuite",
  "crypto/ciphersuite/kp256",

  "crypto/multiexp",

  "crypto/schnorr",

  "crypto/prime-field",
  "crypto/short-weierstrass",
  "crypto/secq256k1",
  "crypto/embedwards25519",

  "crypto/dleq",
  "crypto/dkg",
  "crypto/dkg/recovery",
  "crypto/dkg/dealer",
  "crypto/dkg/musig",
  "crypto/dkg/evrf",
  "crypto/frost",
  "crypto/schnorrkel",

  "networks/bitcoin",
  "coins/ethereum",
  "coins/monero/generators",
  "coins/monero",

  "networks/ethereum/build-contracts",
  "networks/ethereum/schnorr",
  "networks/ethereum/alloy-simple-request-transport",
  "networks/ethereum/relayer",
  "processor",

  "message-queue",
  "substrate/serai/primitives",

  "processor/messages",
  "substrate/validator-sets/primitives",
  "substrate/validator-sets/pallet",

  "processor/key-gen",
  "processor/view-keys",
  "processor/frost-attempt-manager",

  "processor/primitives",
  "processor/scanner",
  "processor/scheduler/primitives",
  "processor/scheduler/utxo/primitives",
  "processor/scheduler/utxo/standard",
  "processor/scheduler/utxo/transaction-chaining",
  "processor/scheduler/smart-contract",
  "processor/signers",

  "processor/bin",
  "processor/bitcoin",
  "processor/ethereum/primitives",
  "processor/ethereum/test-primitives",
  "processor/ethereum/deployer",
  "processor/ethereum/erc20",
  "processor/ethereum/router",
  "processor/ethereum",
  "processor/monero",

  "coordinator/tributary-sdk/tendermint",
  "coordinator/tributary-sdk",
  "coordinator/cosign/types",
  "coordinator/cosign",
  "coordinator/substrate",
  "coordinator/tributary",
  "coordinator/p2p",
  "coordinator/p2p/libp2p",
  "coordinator",

  "substrate/primitives",
  "substrate/abi",

  "substrate/core",
  "substrate/coins",
  "substrate/validator-sets",
  "substrate/signals",
  "substrate/dex",
  "substrate/genesis-liquidity",
  "substrate/economic-security",
  "substrate/emissions",
  "substrate/in-instructions",
  "substrate/tendermint/machine",
  "substrate/tendermint/primitives",
  "substrate/tendermint/client",
  "substrate/tendermint/pallet",

  "substrate/runtime",
  "substrate/node",

  "substrate/client/bitcoin",
  "substrate/client/ethereum",
  "substrate/client/monero",
  "substrate/client/serai",
  "substrate/client",

  "orchestration",

  "mini",

  "tests/no-std",

  "tests/docker",
  "tests/message-queue",
  # TODO "tests/processor",
  # TODO "tests/coordinator",
  "tests/substrate",
  # TODO "tests/full-stack",
  "tests/reproducible-runtime",
]

[profile.dev.package]
# Always compile Monero (and a variety of dependencies) with optimizations due
# to the extensive operations required for Bulletproofs
# to the unoptimized performance of Bulletproofs
[profile.dev.package]
subtle = { opt-level = 3 }

sha3 = { opt-level = 3 }
blake2 = { opt-level = 3 }
curve25519-dalek = { opt-level = 3 }

ff = { opt-level = 3 }
group = { opt-level = 3 }

crypto-bigint = { opt-level = 3 }
curve25519-dalek = { opt-level = 3 }
dalek-ff-group = { opt-level = 3 }
minimal-ed448 = { opt-level = 3 }

multiexp = { opt-level = 3 }

monero-generators = { opt-level = 3 }
monero-borromean = { opt-level = 3 }
monero-bulletproofs = { opt-level = 3 }
monero-mlsag = { opt-level = 3 }
monero-clsag = { opt-level = 3 }
monero-oxide = { opt-level = 3 }

# Always compile the eVRF DKG tree with optimizations as well
secp256k1 = { opt-level = 3 }
secq256k1 = { opt-level = 3 }
embedwards25519 = { opt-level = 3 }
generalized-bulletproofs = { opt-level = 3 }
generalized-bulletproofs-circuit-abstraction = { opt-level = 3 }
generalized-bulletproofs-ec-gadgets = { opt-level = 3 }

# revm also effectively requires being built with optimizations
revm = { opt-level = 3 }
revm-bytecode = { opt-level = 3 }
revm-context = { opt-level = 3 }
revm-context-interface = { opt-level = 3 }
revm-database = { opt-level = 3 }
revm-database-interface = { opt-level = 3 }
revm-handler = { opt-level = 3 }
revm-inspector = { opt-level = 3 }
revm-interpreter = { opt-level = 3 }
revm-precompile = { opt-level = 3 }
revm-primitives = { opt-level = 3 }
revm-state = { opt-level = 3 }
monero-serai = { opt-level = 3 }

[profile.release]
panic = "unwind"
overflow-checks = true

[patch.crates-io]
# Point to empty crates for unused crates in our tree
ark-ff-3 = { package = "ark-ff", path = "patches/ethereum/ark-ff-0.3" }
ark-ff-4 = { package = "ark-ff", path = "patches/ethereum/ark-ff-0.4" }
c-kzg = { path = "patches/ethereum/c-kzg" }
secp256k1-30 = { package = "secp256k1", path = "patches/ethereum/secp256k1-30" }

# Dependencies from monero-oxide which originate from within our own tree
std-shims = { path = "patches/std-shims" }
simple-request = { path = "patches/simple-request" }
multiexp = { path = "crypto/multiexp" }
flexible-transcript = { path = "crypto/transcript" }
ciphersuite = { path = "patches/ciphersuite" }
dalek-ff-group = { path = "crypto/dalek-ff-group" }
minimal-ed448 = { path = "crypto/ed448" }
modular-frost = { path = "crypto/frost" }

# Patch due to `std` now including the required functionality
is_terminal_polyfill = { path = "./patches/is_terminal_polyfill" }
# This has a non-deprecated `std` alternative since Rust's 2024 edition
home = { path = "patches/home" }

# Updates to the latest version
darling = { path = "patches/darling" }
thiserror = { path = "patches/thiserror" }

# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }

# directories-next was created because directories was unmaintained
# directories-next is now unmaintained while directories is maintained
# The directories author pulls in ridiculously pointless crates and prefers
# copyleft licenses
# The following two patches resolve everything
option-ext = { path = "patches/option-ext" }
directories-next = { path = "patches/directories-next" }

# Patch from a fork back to upstream
parity-bip39 = { path = "patches/parity-bip39" }

# Patch to include `FromUniformBytes<64>` over `Scalar`
k256 = { git = "https://github.com/kayabaNerve/elliptic-curves", rev = "4994c9ab163781a88cd4a49beae812a89a44e8c3" }
p256 = { git = "https://github.com/kayabaNerve/elliptic-curves", rev = "4994c9ab163781a88cd4a49beae812a89a44e8c3" }

# `jemalloc` conflicts with `mimalloc`, so patch to a `rocksdb` which never uses `jemalloc`
librocksdb-sys = { path = "patches/librocksdb-sys" }

[workspace.lints.clippy]
incompatible_msrv = "allow" # Manually verified with a GitHub workflow
manual_is_multiple_of = "allow"
unwrap_or_default = "allow"
map_unwrap_or = "allow"
needless_continue = "allow"
borrow_as_ptr = "deny"
cast_lossless = "deny"
cast_possible_truncation = "deny"
cast_possible_wrap = "deny"
cast_precision_loss = "deny"
cast_ptr_alignment = "deny"
cast_sign_loss = "deny"
checked_conversions = "deny"
cloned_instead_of_copied = "deny"
enum_glob_use = "deny"
expl_impl_clone_on_copy = "deny"
explicit_into_iter_loop = "deny"
explicit_iter_loop = "deny"
flat_map_option = "deny"
float_cmp = "deny"
fn_params_excessive_bools = "deny"
ignored_unit_patterns = "deny"
implicit_clone = "deny"
inefficient_to_string = "deny"
invalid_upcast_comparisons = "deny"
large_stack_arrays = "deny"
linkedlist = "deny"
macro_use_imports = "deny"
manual_instant_elapsed = "deny"
manual_let_else = "deny"
manual_ok_or = "deny"
manual_string_new = "deny"
match_bool = "deny"
match_same_arms = "deny"
missing_fields_in_debug = "deny"
needless_pass_by_value = "deny"
ptr_cast_constness = "deny"
range_minus_one = "deny"
range_plus_one = "deny"
redundant_closure_for_method_calls = "deny"
redundant_else = "deny"
string_add_assign = "deny"
string_slice = "deny"
unchecked_time_subtraction = "deny"
uninlined_format_args = "deny"
unnecessary_box_returns = "deny"
unnecessary_join = "deny"
unnecessary_wraps = "deny"
unnested_or_patterns = "deny"
unused_async = "deny"
unused_self = "deny"
zero_sized_map_values = "deny"

[workspace.lints.rust]
unused = "allow" # TODO: https://github.com/rust-lang/rust/issues/147648
LICENSE | 2

@@ -5,4 +5,4 @@ a full copy of the AGPL-3.0 License is included in the root of this repository
as a reference text. This copy should be provided with any distribution of a
crate licensed under the AGPL-3.0, as per its terms.

The GitHub actions/workflows (`.github`) are licensed under the MIT license.
The GitHub actions (`.github/actions`) are licensed under the MIT license.
README.md | 46

@@ -5,61 +5,35 @@ Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig
wallet.

[Getting Started](spec/Getting%20Started.md)
[Getting Started](docs/Getting%20Started.md)

### Layout

- `audits`: Audits for various parts of Serai.

- `spec`: The specification of the Serai protocol, both internally and as
  networked.

- `docs`: User-facing documentation on the Serai protocol.
- `docs`: Documentation on the Serai protocol.

- `common`: Crates containing utilities common to a variety of areas under
  Serai, none neatly fitting under another category.

- `crypto`: A series of composable cryptographic libraries built around the
  `ff`/`group` APIs, achieving a variety of tasks. These range from generic
  `ff`/`group` APIs achieving a variety of tasks. These range from generic
  infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
  needed for Bitcoin-Monero atomic swaps.

- `networks`: Various libraries intended for usage in Serai yet also by the
- `coins`: Various coin libraries intended for usage in Serai yet also by the
  wider community. This means they will always support the functionality Serai
  needs, yet won't disadvantage other use cases when possible.

- `message-queue`: An ordered message server so services can talk to each other,
  even when the other is offline.

- `processor`: A generic chain processor to process data for Serai and process
  events from Serai, executing transactions as expected and needed.

- `coordinator`: A service to manage processors and communicate over a P2P
  network with other validators.

- `substrate`: Substrate crates used to instantiate the Serai network.

- `orchestration`: Dockerfiles and scripts to deploy a Serai node/test
  environment.

- `tests`: Tests for various crates. Generally, `crate/src/tests` is used, or
  `crate/tests`, yet any tests requiring crates' binaries are placed here.

### Security

Serai hosts a bug bounty program via
[Immunefi](https://immunefi.com/bounty/serai/). For in-scope critical
vulnerabilities, we will reward whitehats with up to $30,000.

Anything not in-scope should still be submitted through Immunefi, with rewards
issued at the discretion of the Immunefi program managers.
- `deploy`: Scripts to deploy a Serai node/test environment.

### Links

- [Website](https://serai.exchange/): https://serai.exchange/
- [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
- [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org
- [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/
- [Telegram](https://t.me/SeraiDEX): https://t.me/SeraiDEX
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
- [Matrix](https://matrix.to/#/#serai:matrix.org):
  https://matrix.to/#/#serai:matrix.org
Binary file not shown.
@@ -1,7 +0,0 @@
# Cypher Stack /networks/bitcoin Audit, August 2023

This audit was over the `/networks/bitcoin` folder (at the time located at
`/coins/bitcoin`). It encompasses everything up to commit
5121ca75199dff7bd34230880a1fdd793012068c.

Please see https://github.com/cypherstack/serai-btc-audit for provenance.
@@ -1,14 +0,0 @@
# Trail of Bits Ethereum Contracts Audit, June 2025

This audit included:
- Our Schnorr contract and associated library (/networks/ethereum/schnorr)
- Our Ethereum primitives library (/processor/ethereum/primitives)
- Our Deployer contract and associated library (/processor/ethereum/deployer)
- Our ERC20 library (/processor/ethereum/erc20)
- Our Router contract and associated library (/processor/ethereum/router)

It encompasses everything up to commit 4e0c58464fc4673623938335f06e2e9ea96ca8dd.

Please see
https://github.com/trailofbits/publications/blob/30c4fa3ebf39ff8e4d23ba9567344ec9691697b5/reviews/2025-04-serai-dex-security-review.pdf
for the actual report.
Binary file not shown.
@@ -1,7 +0,0 @@
# Cypher Stack /crypto Audit, March 2023

This audit was over the /crypto folder, excluding the ed448 crate, the `Ed448`
ciphersuite in the ciphersuite crate, and the `dleq/experimental` feature. It
encompasses everything up to commit 669d2dbffc1dafb82a09d9419ea182667115df06.

Please see https://github.com/cypherstack/serai-audit for provenance.
@@ -1,50 +0,0 @@
# eVRF DKG

In 2024, the [eVRF paper](https://eprint.iacr.org/2024/397) was published to
the IACR preprint server. Within it was a one-round unbiased DKG and a
one-round unbiased threshold DKG. Unfortunately, both simply describe
communication of the secret shares as 'Alice sends $s_b$ to Bob'. This causes,
in practice, the need for an additional round of communication where all
participants confirm they received their secret shares.

Within Serai, it was posited to use the same premises as the DDH eVRF itself to
achieve a verifiable encryption scheme. This allows the secret shares to be
posted to any 'bulletin board' (such as a blockchain) and for all observers to
confirm:

- A participant participated
- The secret shares sent can be received by the intended recipient so long as
they can access the bulletin board

Additionally, Serai desired a robust scheme (albeit with a biased key as the
output, which is fine for our purposes). Accordingly, our implementation
instantiates the threshold eVRF DKG from the eVRF paper, with our own proposal
for verifiable encryption, with the caller allowed to decide the set of
participants. They may:

- Select everyone, collapsing to the non-threshold unbiased DKG from the eVRF
paper
- Select a pre-determined set, collapsing to the threshold unbiased DKG from
the eVRF paper
- Select a post-determined set (with any solution for the Common Subset
problem), achieving a robust threshold biased DKG

Note that the eVRF paper proposes using the eVRF to sample coefficients, yet
this is unnecessary when the resulting key will be biased. Any proof of
knowledge for the coefficients, as necessary for their extraction within the
security proofs, would be sufficient.

MAGIC Grants contracted HashCloak to formalize Serai's proposal for a DKG and
provide proofs for its security. This resulted in
[this paper](<./Security Proofs.pdf>).

Our implementation itself is then built on top of the audited
[`generalized-bulletproofs`](https://github.com/kayabaNerve/monero-oxide/tree/generalized-bulletproofs/audits/crypto/generalized-bulletproofs)
and
[`generalized-bulletproofs-ec-gadgets`](https://github.com/monero-oxide/monero-oxide/tree/fcmp%2B%2B/audits/fcmps).

Note we do not use the originally premised DDH eVRF, but rather the one
premised on elliptic curve divisors, the methodology of which is commented on
[here](https://github.com/monero-oxide/monero-oxide/tree/fcmp%2B%2B/audits/divisors).

However, our implementation itself is unaudited at this time.
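The three participant-selection modes above differ only in how the set is
fixed. As a rough sketch of that choice (our own illustration, not the crate's
actual API; `Participant` is a hypothetical identifier type):

```rust
/// The participant-set choices described above (illustrative only).
enum ParticipantSet<Participant> {
  /// Everyone: the non-threshold unbiased DKG from the eVRF paper.
  Everyone,
  /// Fixed in advance: the threshold unbiased DKG from the eVRF paper.
  PreDetermined(Vec<Participant>),
  /// Decided after shares are posted, via any solution to the Common Subset
  /// problem: a robust threshold DKG, at the cost of a biased key.
  PostDetermined,
}
```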
Binary file not shown.
3
coins/ethereum/.gitignore
vendored
Normal file
3
coins/ethereum/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
# solidity build outputs
cache
artifacts
37
coins/ethereum/Cargo.toml
Normal file
37
coins/ethereum/Cargo.toml
Normal file
@@ -0,0 +1,37 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
hex-literal = "0.3"
thiserror = "1"
rand_core = "0.6"

serde_json = "1.0"
serde = "1.0"

sha3 = "0.10"

group = "0.12"
k256 = { version = "0.11", features = ["arithmetic", "keccak256", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }

eyre = "0.6"

ethers = { version = "1", features = ["abigen", "ethers-solc"] }

[build-dependencies]
ethers-solc = "1"

[dev-dependencies]
tokio = { version = "1", features = ["macros"] }
@@ -1,6 +1,6 @@
AGPL-3.0-only license

Copyright (c) 2022-2025 Luke Parker
Copyright (c) 2022-2023 Luke Parker

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
9
coins/ethereum/README.md
Normal file
9
coins/ethereum/README.md
Normal file
@@ -0,0 +1,9 @@
# Ethereum

This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.

### Dependencies

- solc
- [Foundry](https://github.com/foundry-rs/foundry)
16
coins/ethereum/build.rs
Normal file
16
coins/ethereum/build.rs
Normal file
@@ -0,0 +1,16 @@
use ethers_solc::{Project, ProjectPathsConfig};

fn main() {
  println!("cargo:rerun-if-changed=contracts");
  println!("cargo:rerun-if-changed=artifacts");

  // configure the project with all its paths, solc, cache etc.
  let project = Project::builder()
    .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
    .build()
    .unwrap();
  project.compile().unwrap();

  // Tell Cargo that if a source file changes, to rerun this build script.
  project.rerun_if_sources_changed();
}
36
coins/ethereum/contracts/Schnorr.sol
Normal file
36
coins/ethereum/contracts/Schnorr.sol
Normal file
@@ -0,0 +1,36 @@
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;

// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
  // secp256k1 group order
  uint256 constant public Q =
    0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;

  // parity := public key y-coord parity (27 or 28)
  // px := public key x-coord
  // message := 32-byte message
  // s := schnorr signature
  // e := schnorr signature challenge
  function verify(
    uint8 parity,
    bytes32 px,
    bytes32 message,
    bytes32 s,
    bytes32 e
  ) public view returns (bool) {
    // ecrecover = (m, v, r, s);
    bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
    bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));

    require(sp != 0);
    // the ecrecover precompile implementation checks that the `r` and `s`
    // inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
    // check if they're zero.
    address R = ecrecover(sp, parity, px, ep);
    require(R != address(0), "ecrecover failed");
    return e == keccak256(
      abi.encodePacked(R, uint8(parity), px, block.chainid, message)
    );
  }
}
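For reference, the algebra behind this contract (our own summary of the linked
schnorr-verify trick, not text from the repository): the `ecrecover` precompile
returns

$$\mathrm{ecrecover}(h, v, r, s_{\mathrm{ec}}) = \mathrm{addr}\big(r^{-1}(s_{\mathrm{ec}}\,R_{\mathrm{ec}} - h\,G)\big)$$

where $R_{\mathrm{ec}}$ is the point with x-coordinate $r$ and y-parity $v$.
Calling it with $h = -s\,p_x$, $r = p_x$, and $s_{\mathrm{ec}} = -e\,p_x$ (all
mod $Q$), alongside the key's parity, makes $R_{\mathrm{ec}}$ the public key
$A$, so the recovered address is

$$\mathrm{addr}\big(p_x^{-1}((-e\,p_x)A - (-s\,p_x)G)\big) = \mathrm{addr}(sG - eA) = \mathrm{addr}(R)$$

for a valid Schnorr signature satisfying $sG = R + eA$. The contract then
checks $e$ is indeed the hash of $\mathrm{addr}(R)$ alongside the key, chain
ID, and message.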
52
coins/ethereum/src/contract.rs
Normal file
52
coins/ethereum/src/contract.rs
Normal file
@@ -0,0 +1,52 @@
use crate::crypto::ProcessedSignature;
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::{eyre, Result};
use std::fs::File;
use std::sync::Arc;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum EthereumError {
  #[error("failed to verify Schnorr signature")]
  VerificationError,
}

abigen!(
  Schnorr,
  "./artifacts/Schnorr.sol/Schnorr.json",
  event_derives(serde::Deserialize, serde::Serialize),
);

pub async fn deploy_schnorr_verifier_contract(
  client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
  let path = "./artifacts/Schnorr.sol/Schnorr.json";
  let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
  let abi = artifact.abi.unwrap();
  let bin = artifact.bytecode.unwrap().object;
  let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
  let contract = factory.deploy(())?.send().await?;
  let contract = Schnorr::new(contract.address(), client);
  Ok(contract)
}

pub async fn call_verify(
  contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
  params: &ProcessedSignature,
) -> Result<()> {
  if contract
    .verify(
      params.parity + 27,
      params.px.to_bytes().into(),
      params.message,
      params.s.to_bytes().into(),
      params.e.to_bytes().into(),
    )
    .call()
    .await?
  {
    Ok(())
  } else {
    Err(eyre!(EthereumError::VerificationError))
  }
}
105
coins/ethereum/src/crypto.rs
Normal file
105
coins/ethereum/src/crypto.rs
Normal file
@@ -0,0 +1,105 @@
use sha3::{Digest, Keccak256};

use group::Group;
use k256::{
  elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint, DecompressPoint},
  AffinePoint, ProjectivePoint, Scalar, U256,
};

use frost::{algorithm::Hram, curve::Secp256k1};

pub fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).try_into().unwrap()
}

pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar::from_uint_reduced(U256::from_be_slice(&keccak256(data)))
}

pub fn address(point: &ProjectivePoint) -> [u8; 20] {
  let encoded_point = point.to_encoded_point(false);
  keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}

pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
  if r.is_zero().into() || s.is_zero().into() {
    return None;
  }

  #[allow(non_snake_case)]
  let R = AffinePoint::decompress(&r.to_bytes(), v.into());
  #[allow(non_snake_case)]
  if let Some(R) = Option::<AffinePoint>::from(R) {
    #[allow(non_snake_case)]
    let R = ProjectivePoint::from(R);

    let r = r.invert().unwrap();
    let u1 = ProjectivePoint::GENERATOR * (-message * r);
    let u2 = R * (s * r);
    let key: ProjectivePoint = u1 + u2;
    if !bool::from(key.is_identity()) {
      return Some(address(&key));
    }
  }

  None
}

#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
  #[allow(non_snake_case)]
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let a_encoded_point = A.to_encoded_point(true);
    let mut a_encoded = a_encoded_point.as_ref().to_owned();
    a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
    let mut data = address(R).to_vec();
    data.append(&mut a_encoded);
    data.append(&mut m.to_vec());
    Scalar::from_uint_reduced(U256::from_be_slice(&keccak256(&data)))
  }
}

pub struct ProcessedSignature {
  pub s: Scalar,
  pub px: Scalar,
  pub parity: u8,
  pub message: [u8; 32],
  pub e: Scalar,
}

#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> (Scalar, Scalar) {
  let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
  let sr = processed_sig.s.mul(&processed_sig.px).negate();
  let er = processed_sig.e.mul(&processed_sig.px).negate();
  (sr, er)
}

#[allow(non_snake_case)]
pub fn process_signature_for_contract(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> ProcessedSignature {
  let encoded_pk = A.to_encoded_point(true);
  let px = &encoded_pk.as_ref()[1 .. 33];
  let px_scalar = Scalar::from_uint_reduced(U256::from_be_slice(px));
  let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
  ProcessedSignature {
    s,
    px: px_scalar,
    parity: &encoded_pk.as_ref()[0] - 2,
    #[allow(non_snake_case)]
    message: m,
    e,
  }
}
2
coins/ethereum/src/lib.rs
Normal file
2
coins/ethereum/src/lib.rs
Normal file
@@ -0,0 +1,2 @@
pub mod contract;
pub mod crypto;
70
coins/ethereum/tests/contract.rs
Normal file
70
coins/ethereum/tests/contract.rs
Normal file
@@ -0,0 +1,70 @@
use std::{convert::TryFrom, sync::Arc, time::Duration};

use rand_core::OsRng;

use k256::{elliptic_curve::bigint::ArrayEncoding, U256};

use ethers::{
  prelude::*,
  utils::{keccak256, Anvil, AnvilInstance},
};

use frost::{
  curve::Secp256k1,
  algorithm::Schnorr as Algo,
  tests::{key_gen, algorithm_machines, sign},
};

use ethereum_serai::{
  crypto,
  contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
};

async fn deploy_test_contract(
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
  let anvil = Anvil::new().spawn();

  let wallet: LocalWallet = anvil.keys()[0].clone().into();
  let provider =
    Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
  let chain_id = provider.get_chainid().await.unwrap().as_u32();
  let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());

  (chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
}

#[tokio::test]
async fn test_deploy_contract() {
  deploy_test_contract().await;
}

#[tokio::test]
async fn test_ecrecover_hack() {
  let (chain_id, _anvil, contract) = deploy_test_contract().await;
  let chain_id = U256::from(chain_id);

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&1].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = Algo::<Secp256k1, crypto::EthereumHram>::new();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );
  let mut processed_sig =
    crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);

  call_verify(&contract, &processed_sig).await.unwrap();

  // test invalid signature fails
  processed_sig.message[0] = 0;
  assert!(call_verify(&contract, &processed_sig).await.is_err());
}
86
coins/ethereum/tests/crypto.rs
Normal file
86
coins/ethereum/tests/crypto.rs
Normal file
@@ -0,0 +1,86 @@
use ethereum_serai::crypto::*;
use frost::curve::Secp256k1;
use k256::{
  elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
  ProjectivePoint, Scalar, U256,
};

#[test]
fn test_ecrecover() {
  use k256::ecdsa::{
    recoverable::Signature,
    signature::{Signer, Verifier},
    SigningKey, VerifyingKey,
  };
  use rand_core::OsRng;

  let private = SigningKey::random(&mut OsRng);
  let public = VerifyingKey::from(&private);

  const MESSAGE: &[u8] = b"Hello, World!";
  let sig: Signature = private.sign(MESSAGE);
  public.verify(MESSAGE, &sig).unwrap();

  assert_eq!(
    ecrecover(hash_to_scalar(MESSAGE), sig.as_ref()[64], *sig.r(), *sig.s()).unwrap(),
    address(&ProjectivePoint::from(public))
  );
}

#[test]
fn test_signing() {
  use frost::{
    algorithm::Schnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let _group_key = keys[&1].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";

  let algo = Schnorr::<Secp256k1, EthereumHram>::new();
  let _sig = sign(
    &mut OsRng,
    algo,
    keys.clone(),
    algorithm_machines(&mut OsRng, Schnorr::<Secp256k1, EthereumHram>::new(), &keys),
    MESSAGE,
  );
}

#[test]
fn test_ecrecover_hack() {
  use frost::{
    algorithm::Schnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&1].group_key();
  let group_key_encoded = group_key.to_encoded_point(true);
  let group_key_compressed = group_key_encoded.as_ref();
  let group_key_x = Scalar::from_uint_reduced(U256::from_be_slice(&group_key_compressed[1 .. 33]));

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);
  let chain_id = U256::ONE;

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = Schnorr::<Secp256k1, EthereumHram>::new();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );

  let (sr, er) =
    preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
  let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
  assert_eq!(q, address(&sig.R));
}
2
coins/ethereum/tests/mod.rs
Normal file
2
coins/ethereum/tests/mod.rs
Normal file
@@ -0,0 +1,2 @@
mod contract;
mod crypto;
62
coins/monero/Cargo.toml
Normal file
62
coins/monero/Cargo.toml
Normal file
@@ -0,0 +1,62 @@
[package]
name = "monero-serai"
version = "0.1.2-alpha"
description = "A modern Monero transaction library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
hex-literal = "0.3"
lazy_static = "1"
thiserror = "1"

rand_core = "0.6"
rand_chacha = { version = "0.3", optional = true }
rand = "0.8"
rand_distr = "0.4"

zeroize = { version = "1.5", features = ["zeroize_derive"] }
subtle = "2.4"

sha3 = "0.10"
blake2 = { version = "0.10", optional = true }

curve25519-dalek = { version = "3", features = ["std"] }

group = { version = "0.12" }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] }

transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.2", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.2", features = ["serialize"], optional = true }

monero-generators = { path = "generators", version = "0.1" }

hex = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

base58-monero = "1"
monero-epee-bin-serde = "1.0"

digest_auth = "0.3"
reqwest = { version = "0.11", features = ["json"] }

[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
monero-generators = { path = "generators", version = "0.1" }

[dev-dependencies]
tokio = { version = "1", features = ["full"] }

frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519", "tests"] }

[features]
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2023 Cypher Stack
Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
19
coins/monero/README.md
Normal file
19
coins/monero/README.md
Normal file
@@ -0,0 +1,19 @@
# monero-serai

A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pitfalls developers may
face.

monero-serai contains safety features, such as first-class acknowledgement of
the burning bug, yet also a high-level API around creating transactions.
monero-serai also offers a FROST-based multisig, which is orders of magnitude
more performant than Monero's.

monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet will not deprive functionality from other users, and may potentially leave
Serai's umbrella at some point.

Various legacy transaction formats are not currently implemented, yet
monero-serai is still increasing its support for various transaction types.
66
coins/monero/build.rs
Normal file
66
coins/monero/build.rs
Normal file
@@ -0,0 +1,66 @@
use std::{
  io::Write,
  env,
  path::Path,
  fs::{File, remove_file},
};

use dalek_ff_group::EdwardsPoint;

use monero_generators::bulletproofs_generators;

fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
  for generator in points {
    generators_string.extend(
      format!(
        "
          dalek_ff_group::EdwardsPoint(
            curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
          ),
        ",
        generator.compress().to_bytes()
      )
      .chars(),
    );
  }
}

fn generators(prefix: &'static str, path: &str) {
  let generators = bulletproofs_generators(prefix.as_bytes());
  #[allow(non_snake_case)]
  let mut G_str = "".to_string();
  serialize(&mut G_str, &generators.G);
  #[allow(non_snake_case)]
  let mut H_str = "".to_string();
  serialize(&mut H_str, &generators.H);

  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        "
          lazy_static! {{
            pub static ref GENERATORS: Generators = Generators {{
              G: [
                {G_str}
              ],
              H: [
                {H_str}
              ],
            }};
          }}
        ",
      )
      .as_bytes(),
    )
    .unwrap();
}

fn main() {
  println!("cargo:rerun-if-changed=build.rs");

  generators("bulletproof", "generators.rs");
  generators("bulletproof_plus", "generators_plus.rs");
}
24
coins/monero/generators/Cargo.toml
Normal file
24
coins/monero/generators/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
[package]
name = "monero-generators"
version = "0.1.1"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
lazy_static = "1"

subtle = "2.4"

sha3 = "0.10"

curve25519-dalek = { version = "3", features = ["std"] }

group = "0.12"
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.1.4" }
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2022-2025 Luke Parker
Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
5
coins/monero/generators/README.md
Normal file
5
coins/monero/generators/README.md
Normal file
@@ -0,0 +1,5 @@
# Monero Generators

Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.
51
coins/monero/generators/src/hash_to_point.rs
Normal file
51
coins/monero/generators/src/hash_to_point.rs
Normal file
@@ -0,0 +1,51 @@
use subtle::ConditionallySelectable;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use group::ff::{Field, PrimeField};
use dalek_ff_group::field::FieldElement;

use crate::hash;

/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
  #[allow(non_snake_case)]
  let A = FieldElement::from(486662u64);

  let v = FieldElement::from_square(hash(&bytes)).double();
  let w = v + FieldElement::one();
  let x = w.square() + (-A.square() * v);

  // This isn't the complete X, yet its initial value
  // We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
  // While inefficient, it solves API boundaries and reduces the amount of work done here
  #[allow(non_snake_case)]
  let X = {
    let u = w;
    let v = x;
    let v3 = v * v * v;
    let uv3 = u * v3;
    let v7 = v3 * v3 * v;
    let uv7 = u * v7;
    uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
  };
  let x = X.square() * x;

  let y = w - x;
  let non_zero_0 = !y.is_zero();
  let y_if_non_zero_0 = w + x;
  let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());

  let mut z = -A;
  z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
  #[allow(non_snake_case)]
  let Z = z + w;
  #[allow(non_snake_case)]
  let mut Y = z - w;

  Y *= Z.invert().unwrap();
  let mut bytes = Y.to_repr();
  bytes[31] |= sign.unwrap_u8() << 7;

  CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}
64
coins/monero/generators/src/lib.rs
Normal file
64
coins/monero/generators/src/lib.rs
Normal file
@@ -0,0 +1,64 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.

use lazy_static::lazy_static;

use sha3::{Digest, Keccak256};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_POINT,
  edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY},
};

use group::Group;
use dalek_ff_group::EdwardsPoint;

mod varint;
use varint::write_varint;

mod hash_to_point;
pub use hash_to_point::hash_to_point;

fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

lazy_static! {
  /// Monero alternate generator `H`, used for amounts in Pedersen commitments.
  pub static ref H: DalekPoint =
    CompressedEdwardsY(hash(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
      .decompress()
      .unwrap()
      .mul_by_cofactor();
}

const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;

/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
  pub G: [EdwardsPoint; MAX_MN],
  pub H: [EdwardsPoint; MAX_MN],
}

/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
  let mut res =
    Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
  for i in 0 .. MAX_MN {
    let i = 2 * i;

    let mut even = H.compress().to_bytes().to_vec();
    even.extend(dst);
    let mut odd = even.clone();

    write_varint(&i.try_into().unwrap(), &mut even).unwrap();
    write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
    res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
    res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
  }
  res
}
16
coins/monero/generators/src/varint.rs
Normal file
16
coins/monero/generators/src/varint.rs
Normal file
@@ -0,0 +1,16 @@
use std::io::{self, Write};

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut varint = *varint;
  while {
    let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    varint >>= 7;
    if varint != 0 {
      b |= VARINT_CONTINUATION_MASK;
    }
    w.write_all(&[b])?;
    varint != 0
  } {}
  Ok(())
}
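As a usage sketch of the encoding above (our own example, not a test from the
repository): the low seven bits are emitted first, with the high bit of each
byte marking whether another byte follows.

```rust
// Encoding 300 = 0b1_0010_1100: low seven bits are 0x2C, which gain the
// continuation bit to become 0xAC, followed by the remaining bits, 0x02.
let mut buf = vec![];
write_varint(&300, &mut buf).unwrap();
assert_eq!(buf, [0xAC, 0x02]);
```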
71
coins/monero/src/block.rs
Normal file
71
coins/monero/src/block.rs
Normal file
@@ -0,0 +1,71 @@
use std::io::{self, Read, Write};

use crate::{serialize::*, transaction::Transaction};

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BlockHeader {
  pub major_version: u64,
  pub minor_version: u64,
  pub timestamp: u64,
  pub previous: [u8; 32],
  pub nonce: u32,
}

impl BlockHeader {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.major_version, w)?;
    write_varint(&self.minor_version, w)?;
    write_varint(&self.timestamp, w)?;
    w.write_all(&self.previous)?;
    w.write_all(&self.nonce.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
    Ok(BlockHeader {
      major_version: read_varint(r)?,
      minor_version: read_varint(r)?,
      timestamp: read_varint(r)?,
      previous: read_bytes(r)?,
      nonce: read_bytes(r).map(u32::from_le_bytes)?,
    })
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Block {
  pub header: BlockHeader,
  pub miner_tx: Transaction,
  pub txs: Vec<[u8; 32]>,
}

impl Block {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.header.write(w)?;
    self.miner_tx.write(w)?;
    write_varint(&self.txs.len().try_into().unwrap(), w)?;
    for tx in &self.txs {
      w.write_all(tx)?;
    }
    Ok(())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
    Ok(Block {
      header: BlockHeader::read(r)?,
      miner_tx: Transaction::read(r)?,
      txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
    })
  }
}
136
coins/monero/src/lib.rs
Normal file
136
coins/monero/src/lib.rs
Normal file
@@ -0,0 +1,136 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

//! A modern Monero transaction library intended for usage in wallets. It prides
//! itself on accuracy, correctness, and removing common pitfalls developers may
//! face.

//! monero-serai contains safety features, such as first-class acknowledgement of
//! the burning bug, yet also a high-level API around creating transactions.
//! monero-serai also offers a FROST-based multisig, which is orders of magnitude
//! more performant than Monero's.

//! monero-serai was written for Serai, a decentralized exchange aiming to support
//! Monero. Despite this, monero-serai is intended to be a widely usable library,
//! accurate to Monero. monero-serai guarantees the functionality needed for Serai,
//! yet will not deprive functionality from other users, and may potentially leave
//! Serai's umbrella at some point.

//! Various legacy transaction formats are not currently implemented, yet
//! monero-serai is still increasing its support for various transaction types.

use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use sha3::{Digest, Keccak256};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  edwards::{EdwardsPoint, EdwardsBasepointTable},
};

pub use monero_generators::H;

mod serialize;

/// RingCT structs and functionality.
pub mod ringct;

/// Transaction structs.
pub mod transaction;
/// Block structs.
pub mod block;

/// Monero daemon RPC interface.
pub mod rpc;
/// Wallet functionality, enabling scanning and sending transactions.
pub mod wallet;

#[cfg(test)]
mod tests;

/// Monero protocol version. v15 is omitted as v15 was simply v14 and v16 being active at the same
/// time, with regards to the transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
  Unsupported(usize),
  v14,
  v16,
  Custom { ring_len: usize, bp_plus: bool },
}

impl Protocol {
  /// Amount of ring members under this protocol version.
  pub fn ring_len(&self) -> usize {
    match self {
      Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
      Protocol::v14 => 11,
      Protocol::v16 => 16,
      Protocol::Custom { ring_len, .. } => *ring_len,
    }
  }

  /// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
  /// This method will likely be reworked when versions not using Bulletproofs at all are added.
  pub fn bp_plus(&self) -> bool {
    match self {
      Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
      Protocol::v14 => false,
      Protocol::v16 => true,
      Protocol::Custom { bp_plus, .. } => *bp_plus,
    }
  }
}

lazy_static! {
  static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&H);
}

/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
  pub mask: Scalar,
  pub amount: u64,
}

impl Commitment {
  /// The zero commitment, defined as a mask of 1 (as to not be the identity) and a 0 amount.
  pub fn zero() -> Commitment {
    Commitment { mask: Scalar::one(), amount: 0 }
  }

  pub fn new(mask: Scalar, amount: u64) -> Commitment {
    Commitment { mask, amount }
  }

  /// Calculate a Pedersen commitment, as a point, from the transparent structure.
  pub fn calculate(&self) -> EdwardsPoint {
    (&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
  }
}
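In symbols (our own restatement of what `calculate` computes), this is the
usual Pedersen commitment over Ed25519:

$$C = x\,G + a\,H$$

where $x$ is the mask, $a$ the amount, $G$ the Ed25519 basepoint, and $H$ the
alternate generator above. The zero commitment uses $x = 1$, so $C = G$ rather
than the identity.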

/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
  let mut r = [0; 64];
  rng.fill_bytes(&mut r);
  Scalar::from_bytes_mod_order_wide(&r)
}

pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

/// Hash the provided data to a scalar via keccak256(data) % l.
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  let scalar = Scalar::from_bytes_mod_order(hash(data));
  // Monero will explicitly error in this case
  // This library acknowledges the practical impossibility of it occurring, and doesn't bother to
  // code in logic to handle it. That said, if it ever occurs, something must happen in order to
  // not generate/verify a proof we believe to be valid when it isn't
  assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
  scalar
}
150
coins/monero/src/ringct/bulletproofs/core.rs
Normal file
150
coins/monero/src/ringct/bulletproofs/core.rs
Normal file
@@ -0,0 +1,150 @@
// Required to be for this entire file, which isn't an issue, as it wouldn't bind to the static
#![allow(non_upper_case_globals)]

use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};

use subtle::{Choice, ConditionallySelectable};

use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;

use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp as multiexp_const;

pub(crate) use monero_generators::Generators;

use crate::{H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;

// Bring things into ff/group
lazy_static! {
  pub(crate) static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert().unwrap();
  pub(crate) static ref H: EdwardsPoint = EdwardsPoint(*DALEK_H);
}

pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}

// Components common between variants
pub(crate) const MAX_M: usize = 16;
pub(crate) const LOG_N: usize = 6; // 1 << 6 == N
pub(crate) const N: usize = 64;

pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  multiexp_const(pairs) * *INV_EIGHT
}

pub(crate) fn vector_exponent(
  generators: &Generators,
  a: &ScalarVector,
  b: &ScalarVector,
) -> EdwardsPoint {
  debug_assert_eq!(a.len(), b.len());
  (a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}

pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
  let slice =
    &[cache.to_bytes().as_ref(), mash.iter().cloned().flatten().collect::<Vec<_>>().as_ref()]
      .concat();
  *cache = hash_to_scalar(slice);
  *cache
}

pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
  let mut logM = 0;
  let mut M;
  while {
    M = 1 << logM;
    (M <= MAX_M) && (M < outputs)
  } {
    logM += 1;
  }

  (logM + LOG_N, M, M * N)
}

pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
  let (_, M, MN) = MN(commitments.len());

  let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
  let mut aL = ScalarVector::new(MN);
  let mut aR = ScalarVector::new(MN);

  for j in 0 .. M {
    for i in (0 .. N).rev() {
      let mut bit = Choice::from(0);
      if j < sv.len() {
        bit = Choice::from((sv[j][i / 8] >> (i % 8)) & 1);
      }
      aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::zero(), &Scalar::one(), bit);
      aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::one(), &Scalar::zero(), bit);
    }
  }

  (aL, aR)
}
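For orientation (our own summary of the standard Bulletproofs relation, not
repository text): for each commitment $j$ with amount $a_j$, the vectors built
here satisfy

$$\langle \mathbf{a}_{L,j}, \mathbf{2}^N \rangle = a_j, \qquad \mathbf{a}_R = \mathbf{a}_L - \mathbf{1}^{MN}$$

so $\mathbf{a}_L \circ \mathbf{a}_R = \mathbf{0}$. That is exactly what the
conditional selects construct: the bits of the amount for $\mathbf{a}_L$, and
those bits minus one for $\mathbf{a}_R$.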

pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
  commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
  let V = commitments.into_iter().map(|c| EdwardsPoint(c) * *INV_EIGHT).collect::<Vec<_>>();
  (hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}

pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
  rng: &mut R,
  generators: &Generators,
  aL: &ScalarVector,
  aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
  let ar = Scalar::random(rng);
  (ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * *INV_EIGHT)
}

pub(crate) fn LR_statements(
  a: &ScalarVector,
  G_i: &[EdwardsPoint],
  b: &ScalarVector,
  H_i: &[EdwardsPoint],
  cL: Scalar,
  U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
  let mut res = a
    .0
    .iter()
    .cloned()
    .zip(G_i.iter().cloned())
    .chain(b.0.iter().cloned().zip(H_i.iter().cloned()))
    .collect::<Vec<_>>();
  res.push((cL, U));
  res
}

lazy_static! {
  pub(crate) static ref TWO_N: ScalarVector = ScalarVector::powers(Scalar::from(2u8), N);
}

pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
  let mut products = vec![Scalar::zero(); 1 << w.len()];
  products[0] = winv[0];
  products[1] = w[0];
  for j in 1 .. w.len() {
    let mut slots = (1 << (j + 1)) - 1;
    while slots > 0 {
      products[slots] = products[slots / 2] * w[j];
      products[slots - 1] = products[slots / 2] * winv[j];
      slots = slots.saturating_sub(2);
    }
  }

  // Sanity check as if the above failed to populate, it'd be critical
  for w in &products {
    debug_assert!(!bool::from(w.is_zero()));
  }

  products
}
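In other words (our own restatement): with inner-product challenges
$w_0, \dots, w_{k-1}$, entry $i$ of the table caches

$$\mathrm{products}[i] = \prod_{j=0}^{k-1} w_j^{\,\epsilon_j(i)}, \qquad \epsilon_j(i) = \begin{cases} +1 & \text{if the } j\text{-th most significant bit of } i \text{ is } 1 \\ -1 & \text{otherwise} \end{cases}$$

This is the net exponent each original generator accumulates across every
folding round, which lets verification undo all the folds in a single
multiexponentiation instead of replaying them.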
175
coins/monero/src/ringct/bulletproofs/mod.rs
Normal file
175
coins/monero/src/ringct/bulletproofs/mod.rs
Normal file
@@ -0,0 +1,175 @@
#![allow(non_snake_case)]

use std::io::{self, Read, Write};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;

use crate::{Commitment, wallet::TransactionError, serialize::*};

pub(crate) mod scalar_vector;
pub(crate) mod core;
use self::core::LOG_N;

pub(crate) mod original;
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
pub(crate) mod plus;
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;

pub(crate) use self::original::OriginalStruct;
pub(crate) use self::plus::PlusStruct;

pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;

/// Bulletproofs enum, supporting the original and plus formulations.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
  Original(OriginalStruct),
  Plus(PlusStruct),
}

impl Bulletproofs {
  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
    let fields = if plus { 6 } else { 9 };

    #[allow(non_snake_case)]
    let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
    let padded_outputs = 1 << LR_len;
    LR_len += LOG_N;

    let len = (fields + (2 * LR_len)) * 32;
    len +
      if padded_outputs <= 2 {
        0
      } else {
        let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
        let size = (fields + (2 * LR_len)) * 32;
        ((base * padded_outputs) - size) * 4 / 5
      }
  }

  /// Prove the list of commitments are within [0 .. 2^64).
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: &[Commitment],
    plus: bool,
  ) -> Result<Bulletproofs, TransactionError> {
    if outputs.len() > MAX_OUTPUTS {
      return Err(TransactionError::TooManyOutputs)?;
    }
    Ok(if !plus {
      Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
    } else {
      Bulletproofs::Plus(PlusStruct::prove(rng, outputs))
    })
  }

  /// Verify the given Bulletproofs.
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.verify(rng, commitments),
      Bulletproofs::Plus(bp) => bp.verify(rng, commitments),
    }
  }

  /// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
  /// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
  /// Returns true if the Bulletproofs are sane, regardless of their validity.
  #[must_use]
  pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
    id: ID,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
      Bulletproofs::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
    }
  }

  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Bulletproofs::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.taux, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)?;
        write_scalar(&bp.a, w)?;
        write_scalar(&bp.b, w)?;
        write_scalar(&bp.t, w)
      }

      Bulletproofs::Plus(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.A1, w)?;
        write_point(&bp.B, w)?;
        write_scalar(&bp.r1, w)?;
        write_scalar(&bp.s1, w)?;
        write_scalar(&bp.d1, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)
      }
    }
  }

  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read Bulletproofs.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    Ok(Bulletproofs::Original(OriginalStruct {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      taux: read_scalar(r)?,
      mu: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
      a: read_scalar(r)?,
      b: read_scalar(r)?,
      t: read_scalar(r)?,
    }))
  }

  /// Read Bulletproofs+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    Ok(Bulletproofs::Plus(PlusStruct {
      A: read_point(r)?,
      A1: read_point(r)?,
      B: read_point(r)?,
      r1: read_scalar(r)?,
      s1: read_scalar(r)?,
      d1: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
    }))
  }
}
306
coins/monero/src/ringct/bulletproofs/original.rs
Normal file
306
coins/monero/src/ringct/bulletproofs/original.rs
Normal file
@@ -0,0 +1,306 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{Commitment, ringct::bulletproofs::core::*};

include!(concat!(env!("OUT_DIR"), "/generators.rs"));

lazy_static! {
  static ref ONE_N: ScalarVector = ScalarVector(vec![Scalar::one(); N]);
  static ref IP12: Scalar = inner_product(&ONE_N, &TWO_N);
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
  pub(crate) A: DalekPoint,
  pub(crate) S: DalekPoint,
  pub(crate) T1: DalekPoint,
  pub(crate) T2: DalekPoint,
  pub(crate) taux: DalekScalar,
  pub(crate) mu: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
  pub(crate) a: DalekScalar,
  pub(crate) b: DalekScalar,
  pub(crate) t: DalekScalar,
}

impl OriginalStruct {
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> OriginalStruct {
    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_commitments(commitments_points.clone());

    let (sL, sR) =
      ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();

    let (mut alpha, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
    let (mut rho, S) = alpha_rho(&mut *rng, &GENERATORS, &sL, &sR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let l0 = &aL - z;
    let l1 = sL;

    let mut zero_twos = Vec::with_capacity(MN);
    let zpow = ScalarVector::powers(z, M + 2);
    for j in 0 .. M {
      for i in 0 .. N {
        zero_twos.push(zpow[j + 2] * TWO_N[i]);
      }
    }

    let yMN = ScalarVector::powers(y, MN);
    let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
    let r1 = yMN * sR;

    let (T1, T2, x, mut taux) = {
      let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
      let t2 = inner_product(&l1, &r1);

      let mut tau1 = Scalar::random(&mut *rng);
      let mut tau2 = Scalar::random(&mut *rng);

      let T1 = prove_multiexp(&[(t1, *H), (tau1, EdwardsPoint::generator())]);
      let T2 = prove_multiexp(&[(t2, *H), (tau2, EdwardsPoint::generator())]);

      let x =
        hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);

      let taux = (tau2 * (x * x)) + (tau1 * x);

      tau1.zeroize();
      tau2.zeroize();
      (T1, T2, x, taux)
    };

    let mu = (x * rho) + alpha;
    alpha.zeroize();
    rho.zeroize();

    for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      taux += zpow[i + 2] * gamma;
    }

    let l = &l0 + &(l1 * x);
    let r = &r0 + &(r1 * x);

    let t = inner_product(&l, &r);

    let x_ip =
      hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);

    let mut a = l;
    let mut b = r;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
    let mut H_proof = GENERATORS.H[.. a.len()].to_vec();
    H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
    let U = *H * x_ip;

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = inner_product(&aL, &bR);
      let cR = inner_product(&aR, &bL);

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
      let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
      L.push(L_i);
      R.push(R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      a = (aL * w) + (aR * winv);
      b = (bL * winv) + (bR * w);

      if a.len() != 1 {
        G_proof = hadamard_fold(G_L, G_R, winv, w);
        H_proof = hadamard_fold(H_L, H_R, w, winv);
      }
    }

    let res = OriginalStruct {
      A: *A,
      S: *S,
      T1: *T1,
      T2: *T2,
      taux: *taux,
      mu: *mu,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
      a: *a[0],
      b: *b[0],
      t: *t,
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_commitments(commitments.iter().cloned());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);

    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let x = hash_cache(
      &mut cache,
      &[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
    );

    let x_ip = hash_cache(
      &mut cache,
      &[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
    );

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let T1 = normalize(&self.T1);
    let T2 = normalize(&self.T2);
    let A = normalize(&self.A);
    let S = normalize(&self.S);

    let commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(4 + commitments.len());

    let zpow = ScalarVector::powers(z, M + 3);
    let ip1y = ScalarVector::powers(y, M * N).sum();
    let mut k = -(zpow[2] * ip1y);
    for j in 1 ..= M {
      k -= zpow[j + 2] * *IP12;
    }
    let y1 = Scalar(self.t) - ((z * ip1y) + k);
    proof.push((-y1, *H));

    proof.push((-Scalar(self.taux), G));

    for (j, commitment) in commitments.iter().enumerate() {
      proof.push((zpow[j + 2], *commitment));
    }

    proof.push((x, T1));
    proof.push((x * x, T2));
    verifier.queue(&mut *rng, id, proof);

    proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
    let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
    proof.push((z3, *H));
    proof.push((-Scalar(self.mu), G));
|
||||
|
||||
proof.push((Scalar::one(), A));
|
||||
proof.push((x, S));
|
||||
|
||||
{
|
||||
let ypow = ScalarVector::powers(y, MN);
|
||||
let yinv = y.invert().unwrap();
|
||||
let yinvpow = ScalarVector::powers(yinv, MN);
|
||||
|
||||
let w_cache = challenge_products(&w, &winv);
|
||||
|
||||
for i in 0 .. MN {
|
||||
let g = (Scalar(self.a) * w_cache[i]) + z;
|
||||
proof.push((-g, GENERATORS.G[i]));
|
||||
|
||||
let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
|
||||
h -= ((zpow[(i / N) + 2] * TWO_N[i % N]) + (z * ypow[i])) * yinvpow[i];
|
||||
proof.push((-h, GENERATORS.H[i]));
|
||||
}
|
||||
}
|
||||
|
||||
for i in 0 .. logMN {
|
||||
proof.push((w[i] * w[i], L[i]));
|
||||
proof.push((winv[i] * winv[i], R[i]));
|
||||
}
|
||||
verifier.queue(rng, id, proof);
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
||||
&self,
|
||||
rng: &mut R,
|
||||
commitments: &[DalekPoint],
|
||||
) -> bool {
|
||||
let mut verifier = BatchVerifier::new(1);
|
||||
if self.verify_core(rng, &mut verifier, (), commitments) {
|
||||
verifier.verify_vartime()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
||||
&self,
|
||||
rng: &mut R,
|
||||
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
||||
id: ID,
|
||||
commitments: &[DalekPoint],
|
||||
) -> bool {
|
||||
self.verify_core(rng, verifier, id, commitments)
|
||||
}
|
||||
}
|
||||
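// Hedged caller-side sketch (not part of the diff): `batch_verify` only queues its two
// multiexps with the shared `BatchVerifier`; the caller settles the entire batch once.
// `proofs`, `commitment_sets`, and `rng` are hypothetical stand-ins for caller data.
//
// let mut verifier = BatchVerifier::new(proofs.len());
// for (id, (proof, commitments)) in proofs.iter().zip(&commitment_sets).enumerate() {
//   assert!(proof.batch_verify(&mut rng, &mut verifier, id, commitments));
// }
// assert!(verifier.verify_vartime());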
306
coins/monero/src/ringct/bulletproofs/plus.rs
Normal file
@@ -0,0 +1,306 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{
  Commitment, hash,
  ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
};

include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));

lazy_static! {
  static ref TRANSCRIPT: [u8; 32] =
    EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes();
}

// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
  let (cache, commitments) = hash_commitments(commitments);
  (hash_to_scalar(&[&*TRANSCRIPT as &[u8], &cache.to_bytes()].concat()), commitments)
}

// d[j*N+i] = z**(2*(j+1)) * 2**i
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
  let zpow = ScalarVector::even_powers(z, 2 * M);
  let mut d = vec![Scalar::zero(); MN];
  for j in 0 .. M {
    for i in 0 .. N {
      d[(j * N) + i] = zpow[j] * TWO_N[i];
    }
  }
  (zpow, ScalarVector(d))
}
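// Worked instance of the identity above (assuming N = 64): for M = 1 the vector is the single
// block [z^2 * 2^0, z^2 * 2^1, ..., z^2 * 2^63]; for M = 2 a second block scaled by z^4
// follows, and so on. This is the aggregated-range-proof d vector from the Bulletproofs+
// construction, flattened across the M commitments.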
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PlusStruct {
  pub(crate) A: DalekPoint,
  pub(crate) A1: DalekPoint,
  pub(crate) B: DalekPoint,
  pub(crate) r1: DalekScalar,
  pub(crate) s1: DalekScalar,
  pub(crate) d1: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
}

impl PlusStruct {
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> PlusStruct {
    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_plus(commitments_points.clone());
    let (mut alpha1, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let (zpow, d) = d(z, M, MN);

    let aL1 = aL - z;

    let ypow = ScalarVector::powers(y, MN + 2);
    let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
    y_for_d.0.reverse();
    let aR1 = (aR + z) + (y_for_d * d);

    for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      alpha1 += zpow[j] * ypow[MN + 1] * gamma;
    }

    let mut a = aL1;
    let mut b = aR1;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
    let mut H_proof = GENERATORS.H[.. a.len()].to_vec();

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = weighted_inner_product(&aL, &bR, y);
      let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);

      let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, *H);
      L_i.push((dL, G));
      let L_i = prove_multiexp(&L_i);
      L.push(L_i);

      let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, *H);
      R_i.push((dR, G));
      let R_i = prove_multiexp(&R_i);
      R.push(R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
      H_proof = hadamard_fold(H_L, H_R, w, winv);

      a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
      b = (bL * winv) + (bR * w);

      alpha1 += (dL * (w * w)) + (dR * (winv * winv));

      dL.zeroize();
      dR.zeroize();
    }

    let mut r = Scalar::random(&mut *rng);
    let mut s = Scalar::random(&mut *rng);
    let mut d = Scalar::random(&mut *rng);
    let mut eta = Scalar::random(&mut *rng);

    let A1 = prove_multiexp(&[
      (r, G_proof[0]),
      (s, H_proof[0]),
      (d, G),
      ((r * y * b[0]) + (s * y * a[0]), *H),
    ]);
    let B = prove_multiexp(&[(r * y * s, *H), (eta, G)]);
    let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);

    let r1 = (a[0] * e) + r;
    r.zeroize();
    let s1 = (b[0] * e) + s;
    s.zeroize();
    let d1 = ((d * e) + eta) + (alpha1 * (e * e));
    d.zeroize();
    eta.zeroize();
    alpha1.zeroize();

    let res = PlusStruct {
      A: *A,
      A1: *A1,
      B: *B,
      r1: *r1,
      s1: *s1,
      d1: *d1,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_plus(commitments.iter().cloned());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
    let yinv = y.invert().unwrap();
    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let A = normalize(&self.A);
    let A1 = normalize(&self.A1);
    let B = normalize(&self.B);

    let mut commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));

    let mut yMN = y;
    for _ in 0 .. logMN {
      yMN *= yMN;
    }
    let yMNy = yMN * y;

    let (zpow, d) = d(z, M, MN);
    let zsq = zpow[0];

    let esq = e * e;
    let minus_esq = -esq;
    let commitment_weight = minus_esq * yMNy;
    for (i, commitment) in commitments.drain(..).enumerate() {
      proof.push((commitment_weight * zpow[i], commitment));
    }

    // Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
    // to a single addition under vartime for the first BP verified in the batch, which is expected
    // to be much more significant
    proof.push((Scalar::one(), -B));
    proof.push((-e, A1));
    proof.push((minus_esq, A));
    proof.push((Scalar(self.d1), G));

    let d_sum = zpow.sum() * Scalar::from(u64::MAX);
    let y_sum = weighted_powers(y, MN).sum();
    proof.push((
      Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
      *H,
    ));

    let w_cache = challenge_products(&w, &winv);

    let mut e_r1_y = e * Scalar(self.r1);
    let e_s1 = e * Scalar(self.s1);
    let esq_z = esq * z;
    let minus_esq_z = -esq_z;
    let mut minus_esq_y = minus_esq * yMN;

    for i in 0 .. MN {
      proof.push((e_r1_y * w_cache[i] + esq_z, GENERATORS.G[i]));
      proof.push((
        (e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
        GENERATORS.H[i],
      ));

      e_r1_y *= yinv;
      minus_esq_y *= yinv;
    }

    for i in 0 .. logMN {
      proof.push((minus_esq * w[i] * w[i], L[i]));
      proof.push((minus_esq * winv[i] * winv[i], R[i]));
    }

    verifier.queue(rng, id, proof);
    true
  }

  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
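// Hedged sketch of how a caller might pick between the two proof versions. The exact
// `Bulletproofs` enum constructors live in the parent module and are assumptions here; only
// the `Original`/`Plus` variant names are attested by this diff (see ringct/mod.rs below).
//
// let bp = if protocol.bp_plus() {
//   Bulletproofs::Plus(PlusStruct::prove(&mut rng, &commitments))
// } else {
//   Bulletproofs::Original(OriginalStruct::prove(&mut rng, &commitments))
// };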
136
coins/monero/src/ringct/bulletproofs/scalar_vector.rs
Normal file
@@ -0,0 +1,136 @@
use core::ops::{Add, Sub, Mul, Index};

use zeroize::{Zeroize, ZeroizeOnDrop};

use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
  ($Op: ident, $op: ident, $f: expr) => {
    impl $Op<Scalar> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    impl $Op<Scalar> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    impl $Op<ScalarVector> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }

    impl $Op<&ScalarVector> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: &ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }
  };
}
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);

impl ScalarVector {
  pub(crate) fn new(len: usize) -> ScalarVector {
    ScalarVector(vec![Scalar::zero(); len])
  }

  pub(crate) fn powers(x: Scalar, len: usize) -> ScalarVector {
    debug_assert!(len != 0);

    let mut res = Vec::with_capacity(len);
    res.push(Scalar::one());
    for i in 1 .. len {
      res.push(res[i - 1] * x);
    }
    ScalarVector(res)
  }

  pub(crate) fn even_powers(x: Scalar, pow: usize) -> ScalarVector {
    debug_assert!(pow != 0);
    // Verify pow is a power of two
    debug_assert_eq!(((pow - 1) & pow), 0);

    let xsq = x * x;
    let mut res = ScalarVector(Vec::with_capacity(pow / 2));
    res.0.push(xsq);

    let mut prev = 2;
    while prev < pow {
      res.0.push(res[res.len() - 1] * xsq);
      prev += 2;
    }

    res
  }

  pub(crate) fn sum(mut self) -> Scalar {
    self.0.drain(..).sum()
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(self) -> (ScalarVector, ScalarVector) {
    let (l, r) = self.0.split_at(self.0.len() / 2);
    (ScalarVector(l.to_vec()), ScalarVector(r.to_vec()))
  }
}

impl Index<usize> for ScalarVector {
  type Output = Scalar;
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}

pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
  (a * b).sum()
}

pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
  ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
}

pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
  // y ** 0 is not used as a power
  (a * b * weighted_powers(y, a.len())).sum()
}

impl Mul<&[EdwardsPoint]> for &ScalarVector {
  type Output = EdwardsPoint;
  fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
    debug_assert_eq!(self.len(), b.len());
    multiexp(&self.0.iter().cloned().zip(b.iter().cloned()).collect::<Vec<_>>())
  }
}

pub(crate) fn hadamard_fold(
  l: &[EdwardsPoint],
  r: &[EdwardsPoint],
  a: Scalar,
  b: Scalar,
) -> Vec<EdwardsPoint> {
  let mut res = Vec::with_capacity(l.len() / 2);
  for i in 0 .. l.len() {
    res.push(multiexp(&[(a, l[i]), (b, r[i])]));
  }
  res
}
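// Minimal sanity sketch of the helpers above (a hypothetical test, not part of the diff):
#[test]
fn scalar_vector_sketch() {
  let two = Scalar::from(2u64);
  // powers(2, 4) = [1, 2, 4, 8], so <v, v> = 1 + 4 + 16 + 64 = 85
  let v = ScalarVector::powers(two, 4);
  assert_eq!(inner_product(&v, &v), Scalar::from(85u64));
  // even_powers(2, 4) = [2^2, 2^4]
  assert_eq!(ScalarVector::even_powers(two, 4).0, vec![Scalar::from(4u64), Scalar::from(16u64)]);
}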
326
coins/monero/src/ringct/clsag/mod.rs
Normal file
@@ -0,0 +1,326 @@
#![allow(non_snake_case)]

use core::ops::Deref;
use std::io::{self, Read, Write};

use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, Choice, CtOption};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
  edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};

use crate::{
  Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys, ringct::hash_to_point,
  serialize::*,
};

#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
#[cfg(feature = "multisig")]
pub(crate) use multisig::add_key_image_share;

lazy_static! {
  static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();
}

/// Errors returned when CLSAG signing fails.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum ClsagError {
  #[error("internal error ({0})")]
  InternalError(&'static str),
  #[error("invalid ring")]
  InvalidRing,
  #[error("invalid ring member (member {0}, ring size {1})")]
  InvalidRingMember(u8, u8),
  #[error("invalid commitment")]
  InvalidCommitment,
  #[error("invalid key image")]
  InvalidImage,
  #[error("invalid D")]
  InvalidD,
  #[error("invalid s")]
  InvalidS,
  #[error("invalid c1")]
  InvalidC1,
}

/// Input being signed for.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagInput {
  // The actual commitment for the true spend
  pub(crate) commitment: Commitment,
  // True spend index, offsets, and ring
  pub(crate) decoys: Decoys,
}

impl ClsagInput {
  pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
    let n = decoys.len();
    if n > u8::MAX.into() {
      Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
    }
    let n = u8::try_from(n).unwrap();
    if decoys.i >= n {
      Err(ClsagError::InvalidRingMember(decoys.i, n))?;
    }

    // Validate the commitment matches
    if decoys.ring[usize::from(decoys.i)][1] != commitment.calculate() {
      Err(ClsagError::InvalidCommitment)?;
    }

    Ok(ClsagInput { commitment, decoys })
  }
}

#[allow(clippy::large_enum_variant)]
enum Mode {
  Sign(usize, EdwardsPoint, EdwardsPoint),
  Verify(Scalar),
}

// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
// Said differences are covered via the above Mode
fn core(
  ring: &[[EdwardsPoint; 2]],
  I: &EdwardsPoint,
  pseudo_out: &EdwardsPoint,
  msg: &[u8; 32],
  D: &EdwardsPoint,
  s: &[Scalar],
  A_c1: Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
  let n = ring.len();

  let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
  let D = D * *INV_EIGHT;

  // Generate the transcript
  // Instead of generating multiple, a single transcript is created and then edited as needed
  const PREFIX: &[u8] = b"CLSAG_";
  #[rustfmt::skip]
  const AGG_0: &[u8] = b"agg_0";
  #[rustfmt::skip]
  const ROUND: &[u8] = b"round";
  const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();

  let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
  to_hash.extend(PREFIX);
  to_hash.extend(AGG_0);
  to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);

  let mut P = Vec::with_capacity(n);
  for member in ring {
    P.push(member[0]);
    to_hash.extend(member[0].compress().to_bytes());
  }

  let mut C = Vec::with_capacity(n);
  for member in ring {
    C.push(member[1] - pseudo_out);
    to_hash.extend(member[1].compress().to_bytes());
  }

  to_hash.extend(I.compress().to_bytes());
  to_hash.extend(D.compress().to_bytes());
  to_hash.extend(pseudo_out.compress().to_bytes());
  // mu_P with agg_0
  let mu_P = hash_to_scalar(&to_hash);
  // mu_C with agg_1
  to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
  let mu_C = hash_to_scalar(&to_hash);

  // Truncate it for the round transcript, altering the DST as needed
  to_hash.truncate(((2 * n) + 1) * 32);
  for i in 0 .. ROUND.len() {
    to_hash[PREFIX.len() + i] = ROUND[i];
  }
  // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
  // truncated just to add it back
  to_hash.extend(pseudo_out.compress().to_bytes());
  to_hash.extend(msg);

  // Configure the loop based on if we're signing or verifying
  let start;
  let end;
  let mut c;
  match A_c1 {
    Mode::Sign(r, A, AH) => {
      start = r + 1;
      end = r + n;
      to_hash.extend(A.compress().to_bytes());
      to_hash.extend(AH.compress().to_bytes());
      c = hash_to_scalar(&to_hash);
    }

    Mode::Verify(c1) => {
      start = 0;
      end = n;
      c = c1;
    }
  }

  // Perform the core loop
  let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
  for i in (start .. end).map(|i| i % n) {
    // This will only execute once and shouldn't need to be constant time. Making it constant
    // time, however, removes the risk of branch prediction creating timing differences
    // depending on the ring index
    c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));

    let c_p = mu_P * c;
    let c_c = mu_C * c;

    let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
    let PH = hash_to_point(P[i]);
    // Shouldn't be an issue as all of the variables in this vartime statement are public
    let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul(&[c_p, c_c]);

    to_hash.truncate(((2 * n) + 3) * 32);
    to_hash.extend(L.compress().to_bytes());
    to_hash.extend(R.compress().to_bytes());
    c = hash_to_scalar(&to_hash);
  }

  // This first tuple is needed to continue signing, the latter is the c to be tested/worked with
  ((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}
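// In equation form (standard CLSAG notation; a restatement of the loop above, not new logic):
//   c_p = mu_P * c,  c_c = mu_C * c
//   L_i = s_i * G + c_p * P_i + c_c * C_i
//   R_i = s_i * Hp(P_i) + c_p * I + c_c * D
//   c   = H(round_transcript || L_i || R_i)
// A signature is valid when this chain, started from c1, closes back to c1 after n steps.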
/// CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Clsag {
  pub D: EdwardsPoint,
  pub s: Vec<Scalar>,
  pub c1: Scalar,
}

impl Clsag {
  // Sign core is the extension of core as needed for signing, yet is shared between single signer
  // and multisig, hence why it's still core
  pub(crate) fn sign_core<R: RngCore + CryptoRng>(
    rng: &mut R,
    I: &EdwardsPoint,
    input: &ClsagInput,
    mask: Scalar,
    msg: &[u8; 32],
    A: EdwardsPoint,
    AH: EdwardsPoint,
  ) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
    let r: usize = input.decoys.i.into();

    let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
    let z = input.commitment.mask - mask;

    let H = hash_to_point(input.decoys.ring[r][0]);
    let D = H * z;
    let mut s = Vec::with_capacity(input.decoys.ring.len());
    for _ in 0 .. input.decoys.ring.len() {
      s.push(random_scalar(rng));
    }
    let ((D, p, c), c1) =
      core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));

    (Clsag { D, s, c1 }, pseudo_out, p, c * z)
  }

  /// Generate CLSAG signatures for the given inputs.
  /// `inputs` is of the form (private key, key image, input).
  /// `sum_outputs` is the sum of the outputs' commitment masks.
  pub fn sign<R: RngCore + CryptoRng>(
    rng: &mut R,
    mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
    sum_outputs: Scalar,
    msg: [u8; 32],
  ) -> Vec<(Clsag, EdwardsPoint)> {
    let mut res = Vec::with_capacity(inputs.len());
    let mut sum_pseudo_outs = Scalar::zero();
    for i in 0 .. inputs.len() {
      let mut mask = random_scalar(rng);
      if i == (inputs.len() - 1) {
        mask = sum_outputs - sum_pseudo_outs;
      } else {
        sum_pseudo_outs += mask;
      }

      let mut nonce = Zeroizing::new(random_scalar(rng));
      let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
        rng,
        &inputs[i].1,
        &inputs[i].2,
        mask,
        &msg,
        nonce.deref() * &ED25519_BASEPOINT_TABLE,
        nonce.deref() *
          hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
      );
      clsag.s[usize::from(inputs[i].2.decoys.i)] =
        (-((p * inputs[i].0.deref()) + c)) + nonce.deref();
      inputs[i].0.zeroize();
      nonce.zeroize();

      debug_assert!(clsag
        .verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
        .is_ok());

      res.push((clsag, pseudo_out));
    }

    res
  }

  /// Verify the CLSAG signature against the given Transaction data.
  pub fn verify(
    &self,
    ring: &[[EdwardsPoint; 2]],
    I: &EdwardsPoint,
    pseudo_out: &EdwardsPoint,
    msg: &[u8; 32],
  ) -> Result<(), ClsagError> {
    // Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
    // here
    if ring.is_empty() {
      Err(ClsagError::InvalidRing)?;
    }
    if ring.len() != self.s.len() {
      Err(ClsagError::InvalidS)?;
    }
    if I.is_identity() {
      Err(ClsagError::InvalidImage)?;
    }

    let D = self.D.mul_by_cofactor();
    if D.is_identity() {
      Err(ClsagError::InvalidD)?;
    }

    let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
    if c1 != self.c1 {
      Err(ClsagError::InvalidC1)?;
    }
    Ok(())
  }

  pub(crate) fn fee_weight(ring_len: usize) -> usize {
    (ring_len * 32) + 32 + 32
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_raw_vec(write_scalar, &self.s, w)?;
    w.write_all(&self.c1.to_bytes())?;
    write_point(&self.D, w)
  }

  pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
    Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
  }
}
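// Hedged caller-side sketch (`rng`, `key`, `image`, `input`, `sum_outputs`, `msg`, and `ring`
// are hypothetical inputs; the tuple layout follows the doc comment on `sign`):
//
// let sigs = Clsag::sign(&mut rng, vec![(key, image, input)], sum_outputs, msg);
// for (clsag, pseudo_out) in &sigs {
//   clsag.verify(&ring, &image, pseudo_out, &msg)?;
// }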
310
coins/monero/src/ringct/clsag/multisig.rs
Normal file
@@ -0,0 +1,310 @@
use core::{ops::Deref, fmt::Debug};
use std::{
  io::{self, Read, Write},
  sync::{Arc, RwLock},
};

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use curve25519_dalek::{
  traits::{Identity, IsIdentity},
  scalar::Scalar,
  edwards::EdwardsPoint,
};

use group::{ff::Field, Group, GroupEncoding};

use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
use frost::{
  dkg::lagrange,
  curve::Ed25519,
  FrostError, ThresholdKeys, ThresholdView,
  algorithm::{WriteAddendum, Algorithm},
};

use crate::ringct::{
  hash_to_point,
  clsag::{ClsagInput, Clsag},
};

fn dleq_transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"monero_key_image_dleq")
}

impl ClsagInput {
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    // Doesn't domain separate as this is considered part of the larger CLSAG proof

    // Ring index
    transcript.append_message(b"real_spend", [self.decoys.i]);

    // Ring
    for (i, pair) in self.decoys.ring.iter().enumerate() {
      // Doesn't include global output indexes as CLSAG doesn't care and won't be affected by them
      // They're just an unreliable reference to this data, which will be included in the message
      // if in use
      transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
      transcript.append_message(b"key", pair[0].compress().to_bytes());
      transcript.append_message(b"commitment", pair[1].compress().to_bytes())
    }

    // Doesn't include the commitment's parts as the above ring + index includes the commitment
    // The only potential malleability would be if the G/H relationship is known, breaking the
    // discrete log problem, which breaks everything already
  }
}

/// CLSAG input and the mask to use for it.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagDetails {
  input: ClsagInput,
  mask: Scalar,
}

impl ClsagDetails {
  pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
    ClsagDetails { input, mask }
  }
}

/// Addendum produced during the FROST signing process with relevant data.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
  pub(crate) key_image: dfg::EdwardsPoint,
  dleq: DLEqProof<dfg::EdwardsPoint>,
}

impl WriteAddendum for ClsagAddendum {
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
    self.dleq.write(writer)
  }
}

#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
struct Interim {
  p: Scalar,
  c: Scalar,

  clsag: Clsag,
  pseudo_out: EdwardsPoint,
}

/// FROST algorithm for producing a CLSAG signature.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct ClsagMultisig {
  transcript: RecommendedTranscript,

  pub(crate) H: EdwardsPoint,
  // Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
  // an extra round
  image: EdwardsPoint,

  details: Arc<RwLock<Option<ClsagDetails>>>,

  msg: Option<[u8; 32]>,
  interim: Option<Interim>,
}

impl ClsagMultisig {
  pub fn new(
    transcript: RecommendedTranscript,
    output_key: EdwardsPoint,
    details: Arc<RwLock<Option<ClsagDetails>>>,
  ) -> ClsagMultisig {
    ClsagMultisig {
      transcript,

      H: hash_to_point(output_key),
      image: EdwardsPoint::identity(),

      details,

      msg: None,
      interim: None,
    }
  }

  fn input(&self) -> ClsagInput {
    (*self.details.read().unwrap()).as_ref().unwrap().input.clone()
  }

  fn mask(&self) -> Scalar {
    (*self.details.read().unwrap()).as_ref().unwrap().mask
  }
}

pub(crate) fn add_key_image_share(
  image: &mut EdwardsPoint,
  generator: EdwardsPoint,
  offset: Scalar,
  included: &[u16],
  participant: u16,
  share: EdwardsPoint,
) {
  if image.is_identity() {
    *image = generator * offset;
  }
  *image += share * lagrange::<dfg::Scalar>(participant, included).0;
}
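// As an equation (a restatement of the accumulation above): with lambda_i the Lagrange
// coefficient of participant i over `included`, the image converges to
//   image = offset * generator + sum_i lambda_i * share_i
// Since each share_i = x_i * Hp(P) and generator = Hp(P) at the call site, this is
// (offset + x) * Hp(P) for the joint key x.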
impl Algorithm<Ed25519> for ClsagMultisig {
  type Transcript = RecommendedTranscript;
  type Addendum = ClsagAddendum;
  type Signature = (Clsag, EdwardsPoint);

  fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
    vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)]]
  }

  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    keys: &ThresholdKeys<Ed25519>,
  ) -> ClsagAddendum {
    ClsagAddendum {
      key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
      dleq: DLEqProof::prove(
        rng,
        // Doesn't take in a larger transcript object due to the usage of this
        // Every prover would immediately write their own DLEq proof, when they can only do so in
        // the proper order if they want to reach consensus
        // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
        // try to merge later in some form, when it should instead just merge xH (as it does)
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        keys.secret_share(),
      ),
    }
  }

  fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
    let mut bytes = [0; 32];
    reader.read_exact(&mut bytes)?;
    // dfg ensures the point is torsion free
    let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
    // Ensure this is a canonical point
    if xH.to_bytes() != bytes {
      Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
    }

    Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
  }

  fn process_addendum(
    &mut self,
    view: &ThresholdView<Ed25519>,
    l: u16,
    addendum: ClsagAddendum,
  ) -> Result<(), FrostError> {
    if self.image.is_identity() {
      self.transcript.domain_separate(b"CLSAG");
      self.input().transcript(&mut self.transcript);
      self.transcript.append_message(b"mask", self.mask().to_bytes());
    }

    self.transcript.append_message(b"participant", l.to_be_bytes());

    addendum
      .dleq
      .verify(
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        &[view.original_verification_share(l), addendum.key_image],
      )
      .map_err(|_| FrostError::InvalidPreprocess(l))?;

    self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
    add_key_image_share(
      &mut self.image,
      self.H,
      view.offset().0,
      view.included(),
      l,
      addendum.key_image.0,
    );

    Ok(())
  }

  fn transcript(&mut self) -> &mut Self::Transcript {
    &mut self.transcript
  }

  fn sign_share(
    &mut self,
    view: &ThresholdView<Ed25519>,
    nonce_sums: &[Vec<dfg::EdwardsPoint>],
    nonces: Vec<Zeroizing<dfg::Scalar>>,
    msg: &[u8],
  ) -> dfg::Scalar {
    // Use the transcript to get a seeded random number generator
    // The transcript contains private data, preventing passive adversaries from recreating this
    // process even if they have access to the commitments (specifically, the ring index being
    // signed for, along with the mask, which should require not only knowing the shared keys but
    // also the input commitment masks)
    let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

    self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));

    #[allow(non_snake_case)]
    let (clsag, pseudo_out, p, c) = Clsag::sign_core(
      &mut rng,
      &self.image,
      &self.input(),
      self.mask(),
      self.msg.as_ref().unwrap(),
      nonce_sums[0][0].0,
      nonce_sums[0][1].0,
    );
    self.interim = Some(Interim { p, c, clsag, pseudo_out });

    (-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
  }

  #[must_use]
  fn verify(
    &self,
    _: dfg::EdwardsPoint,
    _: &[Vec<dfg::EdwardsPoint>],
    sum: dfg::Scalar,
  ) -> Option<Self::Signature> {
    let interim = self.interim.as_ref().unwrap();
    let mut clsag = interim.clsag.clone();
    clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
    if clsag
      .verify(
        &self.input().decoys.ring,
        &self.image,
        &interim.pseudo_out,
        self.msg.as_ref().unwrap(),
      )
      .is_ok()
    {
      return Some((clsag, interim.pseudo_out));
    }
    None
  }

  fn verify_share(
    &self,
    verification_share: dfg::EdwardsPoint,
    nonces: &[Vec<dfg::EdwardsPoint>],
    share: dfg::Scalar,
  ) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
    let interim = self.interim.as_ref().unwrap();
    Ok(vec![
      (share, dfg::EdwardsPoint::generator()),
      (dfg::Scalar(interim.p), verification_share),
      (-dfg::Scalar::one(), nonces[0][0]),
    ])
  }
}
8
coins/monero/src/ringct/hash_to_point.rs
Normal file
@@ -0,0 +1,8 @@
use curve25519_dalek::edwards::EdwardsPoint;

pub use monero_generators::hash_to_point as raw_hash_to_point;

/// Monero's hash to point function, known as `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
  raw_hash_to_point(key.compress().to_bytes())
}
165
coins/monero/src/ringct/mod.rs
Normal file
@@ -0,0 +1,165 @@
use core::ops::Deref;
use std::io::{self, Read, Write};

use zeroize::Zeroizing;

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

pub(crate) mod hash_to_point;
pub use hash_to_point::{raw_hash_to_point, hash_to_point};

/// CLSAG struct, along with signing and verifying functionality.
pub mod clsag;
/// Bulletproofs(+) structs, along with proving and verifying functionality.
pub mod bulletproofs;

use crate::{
  Protocol,
  serialize::*,
  ringct::{clsag::Clsag, bulletproofs::Bulletproofs},
};

/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
  hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
}
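// Hedged property sketch (a hypothetical test; assumes `rand_core`'s "getrandom" feature for
// `OsRng`): a key image is a deterministic function of the key, which is what lets the network
// detect a second spend of the same output.
#[test]
fn key_image_is_deterministic() {
  let secret = Zeroizing::new(crate::random_scalar(&mut rand_core::OsRng));
  assert_eq!(generate_key_image(&secret), generate_key_image(&secret));
}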
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RctBase {
  pub fee: u64,
  pub ecdh_info: Vec<[u8; 8]>,
  pub commitments: Vec<EdwardsPoint>,
}

impl RctBase {
  pub(crate) fn fee_weight(outputs: usize) -> usize {
    1 + 8 + (outputs * (8 + 32))
  }

  pub fn write<W: Write>(&self, w: &mut W, rct_type: u8) -> io::Result<()> {
    w.write_all(&[rct_type])?;
    match rct_type {
      0 => Ok(()),
      5 | 6 => {
        write_varint(&self.fee, w)?;
        for ecdh in &self.ecdh_info {
          w.write_all(ecdh)?;
        }
        write_raw_vec(write_point, &self.commitments, w)
      }
      _ => panic!("Serializing unknown RctType's Base"),
    }
  }

  pub fn read<R: Read>(outputs: usize, r: &mut R) -> io::Result<(RctBase, u8)> {
    let rct_type = read_byte(r)?;
    Ok((
      if rct_type == 0 {
        RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] }
      } else {
        RctBase {
          fee: read_varint(r)?,
          ecdh_info: (0 .. outputs).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
          commitments: read_raw_vec(read_point, outputs, r)?,
        }
      },
      rct_type,
    ))
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum RctPrunable {
  Null,
  Clsag { bulletproofs: Vec<Bulletproofs>, clsags: Vec<Clsag>, pseudo_outs: Vec<EdwardsPoint> },
}

impl RctPrunable {
  /// RCT Type byte for a given RctPrunable struct.
  pub fn rct_type(&self) -> u8 {
    match self {
      RctPrunable::Null => 0,
      RctPrunable::Clsag { bulletproofs, .. } => {
        if matches!(bulletproofs[0], Bulletproofs::Original { .. }) {
          5
        } else {
          6
        }
      }
    }
  }

  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
    1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
      (inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      RctPrunable::Null => Ok(()),
      RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs, .. } => {
        write_vec(Bulletproofs::write, bulletproofs, w)?;
        write_raw_vec(Clsag::write, clsags, w)?;
        write_raw_vec(write_point, pseudo_outs, w)
      }
    }
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(rct_type: u8, decoys: &[usize], r: &mut R) -> io::Result<RctPrunable> {
    Ok(match rct_type {
      0 => RctPrunable::Null,
      5 | 6 => RctPrunable::Clsag {
        bulletproofs: read_vec(
          if rct_type == 5 { Bulletproofs::read } else { Bulletproofs::read_plus },
          r,
        )?,
        clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
        pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
      },
      _ => Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?,
    })
  }

  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
      RctPrunable::Clsag { bulletproofs, .. } => {
        bulletproofs.iter().try_for_each(|bp| bp.signature_write(w))
      }
    }
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RctSignatures {
  pub base: RctBase,
  pub prunable: RctPrunable,
}

impl RctSignatures {
  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
    RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.base.write(w, self.prunable.rct_type())?;
    self.prunable.write(w)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<RctSignatures> {
    let base = RctBase::read(outputs, r)?;
    Ok(RctSignatures { base: base.0, prunable: RctPrunable::read(base.1, &decoys, r)? })
  }
}
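// Hedged round-trip sketch (assumes `sig: RctSignatures`, the decoy counts used to build it,
// and `outputs`, the output count its base was serialized with):
//
// let bytes = sig.serialize();
// let mut slice = bytes.as_slice();
// assert_eq!(sig, RctSignatures::read(decoys, outputs, &mut slice).unwrap());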
517
coins/monero/src/rpc.rs
Normal file
@@ -0,0 +1,517 @@
use std::fmt::Debug;

use thiserror::Error;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};

use digest_auth::AuthContext;
use reqwest::{Client, RequestBuilder};

use crate::{
  Protocol,
  transaction::{Input, Timelock, Transaction},
  block::Block,
  wallet::Fee,
};

#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
  result: T,
}

#[derive(Deserialize, Debug)]
struct TransactionResponse {
  tx_hash: String,
  block_height: Option<usize>,
  as_hex: String,
  pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
  #[serde(default)]
  missed_tx: Vec<String>,
  txs: Vec<TransactionResponse>,
}

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
  #[error("internal error ({0})")]
  InternalError(&'static str),
  #[error("connection error")]
  ConnectionError,
  #[error("invalid node")]
  InvalidNode,
  #[error("transactions not found")]
  TransactionsNotFound(Vec<[u8; 32]>),
  #[error("invalid point ({0})")]
  InvalidPoint(String),
  #[error("pruned transaction")]
  PrunedTransaction,
  #[error("invalid transaction ({0:?})")]
  InvalidTransaction([u8; 32]),
}

fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
  hex::decode(value).map_err(|_| RpcError::InvalidNode)
}

fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
  rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
}

fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
  CompressedEdwardsY(
    rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
  )
  .decompress()
  .ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
}

#[derive(Clone, Debug)]
pub struct Rpc {
  client: Client,
  userpass: Option<(String, String)>,
  url: String,
}

impl Rpc {
  /// Create a new RPC connection.
  /// A daemon requiring authentication can be used by including the username and password in the
  /// URL.
  pub fn new(mut url: String) -> Result<Rpc, RpcError> {
    // Parse out the username and password
    let userpass = if url.contains('@') {
      let url_clone = url.clone();
      let split_url = url_clone.split('@').collect::<Vec<_>>();
      if split_url.len() != 2 {
        Err(RpcError::InvalidNode)?;
      }
      let mut userpass = split_url[0];
      url = split_url[1].to_string();

      // If there was additionally a protocol string, restore that to the daemon URL
      if userpass.contains("://") {
        let split_userpass = userpass.split("://").collect::<Vec<_>>();
        if split_userpass.len() != 2 {
          Err(RpcError::InvalidNode)?;
        }
        url = split_userpass[0].to_string() + "://" + &url;
        userpass = split_userpass[1];
      }

      let split_userpass = userpass.split(':').collect::<Vec<_>>();
      if split_userpass.len() != 2 {
        Err(RpcError::InvalidNode)?;
      }
      Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
    } else {
      None
    };

    Ok(Rpc { client: Client::new(), userpass, url })
  }
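  // Hedged usage sketch (the URL, credentials, and surrounding async context are placeholders):
  //
  // let rpc = Rpc::new("http://user:pass@127.0.0.1:18081".to_string())?;
  // let height = rpc.get_height().await?;
  // let protocol = rpc.get_protocol().await?;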
/// Perform a RPC call to the specified method with the provided parameters.
|
||||
/// This is NOT a JSON-RPC call, which use a method of "json_rpc" and are available via
|
||||
/// `json_rpc_call`.
|
||||
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
|
||||
&self,
|
||||
method: &str,
|
||||
params: Option<Params>,
|
||||
) -> Result<Response, RpcError> {
|
||||
let mut builder = self.client.post(self.url.clone() + "/" + method);
|
||||
if let Some(params) = params.as_ref() {
|
||||
builder = builder.json(params);
|
||||
}
|
||||
|
||||
self.call_tail(method, builder).await
|
||||
}
|
||||
|
||||
/// Perform a JSON-RPC call to the specified method with the provided parameters
|
||||
pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
|
||||
&self,
|
||||
method: &str,
|
||||
params: Option<Value>,
|
||||
) -> Result<Response, RpcError> {
|
||||
let mut req = json!({ "method": method });
|
||||
if let Some(params) = params {
|
||||
req.as_object_mut().unwrap().insert("params".into(), params);
|
||||
}
|
||||
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
|
||||
}
|
||||
|
||||
/// Perform a binary call to the specified method with the provided parameters.
|
||||
pub async fn bin_call<Response: DeserializeOwned + Debug>(
|
||||
&self,
|
||||
method: &str,
|
||||
params: Vec<u8>,
|
||||
) -> Result<Response, RpcError> {
|
||||
let builder = self.client.post(self.url.clone() + "/" + method).body(params.clone());
|
||||
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
|
||||
}
|
||||
|
||||
async fn call_tail<Response: DeserializeOwned + Debug>(
|
||||
&self,
|
||||
method: &str,
|
||||
mut builder: RequestBuilder,
|
||||
) -> Result<Response, RpcError> {
|
||||
if let Some((user, pass)) = &self.userpass {
|
||||
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
|
||||
// Only provide authentication if this daemon actually expects it
|
||||
if let Some(header) = req.headers().get("www-authenticate") {
|
||||
builder = builder.header(
|
||||
"Authorization",
|
||||
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
|
||||
.map_err(|_| RpcError::InvalidNode)?
|
||||
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
|
||||
user,
|
||||
pass,
|
||||
"/".to_string() + method,
|
||||
None,
|
||||
))
|
||||
.map_err(|_| RpcError::InvalidNode)?
|
||||
.to_header_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let res = builder.send().await.map_err(|_| RpcError::ConnectionError)?;
|
||||
|
||||
Ok(if !method.ends_with(".bin") {
|
||||
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
|
||||
.map_err(|_| RpcError::InternalError("Failed to parse JSON response"))?
|
||||
} else {
|
||||
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
|
||||
.map_err(|_| RpcError::InternalError("Failed to parse binary response"))?
|
||||
})
|
||||
}
|
||||
|
||||
/// Get the active blockchain protocol version.
|
||||
pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct ProtocolResponse {
|
||||
major_version: usize,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
struct LastHeaderResponse {
|
||||
block_header: ProtocolResponse,
|
||||
}
|
||||
|
||||
Ok(
|
||||
match self
|
||||
.json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
|
||||
.await?
|
||||
.block_header
|
||||
.major_version
|
||||
{
|
||||
13 | 14 => Protocol::v14,
|
||||
15 | 16 => Protocol::v16,
|
||||
version => Protocol::Unsupported(version),
|
||||
},
|
||||
)
|
||||
}

  pub async fn get_height(&self) -> Result<usize, RpcError> {
    #[derive(Deserialize, Debug)]
    struct HeightResponse {
      height: usize,
    }
    Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
  }

  pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
    if hashes.is_empty() {
      return Ok(vec![]);
    }

    let txs: TransactionsResponse = self
      .rpc_call(
        "get_transactions",
        Some(json!({
          "txs_hashes": hashes.iter().map(hex::encode).collect::<Vec<_>>()
        })),
      )
      .await?;

    if !txs.missed_tx.is_empty() {
      Err(RpcError::TransactionsNotFound(
        txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
      ))?;
    }

    txs
      .txs
      .iter()
      .map(|res| {
        let tx = Transaction::read::<&[u8]>(
          &mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
            .as_ref(),
        )
        .map_err(|_| match hash_hex(&res.tx_hash) {
          Ok(hash) => RpcError::InvalidTransaction(hash),
          Err(err) => err,
        })?;

        // https://github.com/monero-project/monero/issues/8311
        if res.as_hex.is_empty() {
          match tx.prefix.inputs.get(0) {
            Some(Input::Gen { .. }) => (),
            _ => Err(RpcError::PrunedTransaction)?,
          }
        }

        Ok(tx)
      })
      .collect()
  }

  pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
    self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
  }

  pub async fn get_transaction_block_number(&self, tx: &[u8]) -> Result<Option<usize>, RpcError> {
    let txs: TransactionsResponse =
      self.rpc_call("get_transactions", Some(json!({ "txs_hashes": [hex::encode(tx)] }))).await?;

    if !txs.missed_tx.is_empty() {
      Err(RpcError::TransactionsNotFound(
        txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
      ))?;
    }

    Ok(txs.txs[0].block_height)
  }

  pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
    #[derive(Deserialize, Debug)]
    struct BlockHeaderResponse {
      hash: String,
    }
    #[derive(Deserialize, Debug)]
    struct BlockHeaderByHeightResponse {
      block_header: BlockHeaderResponse,
    }

    let header: BlockHeaderByHeightResponse =
      self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
    rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
  }

  pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
    #[derive(Deserialize, Debug)]
    struct BlockResponse {
      blob: String,
    }

    let res: BlockResponse =
      self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;

    Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)
  }

  pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
    self.get_block(self.get_block_hash(number).await?).await
  }

  pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
    let block = self.get_block(hash).await?;
    let mut res = vec![block.miner_tx];
    res.extend(self.get_transactions(&block.txs).await?);
    Ok(res)
  }

  pub async fn get_block_transactions_by_number(
    &self,
    number: usize,
  ) -> Result<Vec<Transaction>, RpcError> {
    self.get_block_transactions(self.get_block_hash(number).await?).await
  }

  /// Get the output indexes of the specified transaction.
  pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
    #[derive(Serialize, Debug)]
    struct Request {
      txid: [u8; 32],
    }

    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct OIndexes {
      o_indexes: Vec<u64>,
      status: String,
      untrusted: bool,
      credits: usize,
      top_hash: String,
    }

    let indexes: OIndexes = self
      .bin_call(
        "get_o_indexes.bin",
        monero_epee_bin_serde::to_bytes(&Request { txid: hash }).unwrap(),
      )
      .await?;

    Ok(indexes.o_indexes)
  }

  /// Get the output distribution, from the specified start height to the specified end height
  /// (both inclusive).
  pub async fn get_output_distribution(
    &self,
    from: usize,
    to: usize,
  ) -> Result<Vec<u64>, RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct Distribution {
      distribution: Vec<u64>,
    }

    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct Distributions {
      distributions: Vec<Distribution>,
    }

    let mut distributions: Distributions = self
      .json_rpc_call(
        "get_output_distribution",
        Some(json!({
          "binary": false,
          "amounts": [0],
          "cumulative": true,
          "from_height": from,
          "to_height": to,
        })),
      )
      .await?;

    Ok(distributions.distributions.swap_remove(0).distribution)
  }
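  // Sketch of intended use, not part of this commit: this cumulative per-block count of RingCT
  // outputs is what decoy selection samples against, e.g.
  //   let distribution = rpc.get_output_distribution(0, rpc.get_height().await? - 1).await?;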

  /// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if they're
  /// unlocked.
  pub async fn get_unlocked_outputs(
    &self,
    indexes: &[u64],
    height: usize,
  ) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
    #[derive(Deserialize, Debug)]
    struct Out {
      key: String,
      mask: String,
      txid: String,
    }

    #[derive(Deserialize, Debug)]
    struct Outs {
      outs: Vec<Out>,
    }

    let outs: Outs = self
      .rpc_call(
        "get_outs",
        Some(json!({
          "get_txid": true,
          "outputs": indexes.iter().map(|o| json!({
            "amount": 0,
            "index": o
          })).collect::<Vec<_>>()
        })),
      )
      .await?;

    let txs = self
      .get_transactions(
        &outs
          .outs
          .iter()
          .map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
          .collect::<Result<Vec<_>, _>>()?,
      )
      .await?;

    // TODO: https://github.com/serai-dex/serai/issues/104
    outs
      .outs
      .iter()
      .enumerate()
      .map(|(i, out)| {
        Ok(Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
          match txs[i].prefix.timelock {
            Timelock::Block(t_height) => t_height <= height,
            _ => false,
          }
        }))
      })
      .collect()
  }

  /// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
  /// MUST be sanity checked.
  // TODO: Take a sanity check argument
  pub async fn get_fee(&self) -> Result<Fee, RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct FeeResponse {
      fee: u64,
      quantization_mask: u64,
    }

    let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
    Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
  }
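  // Hedged sketch, not in this commit: a caller-side sanity check could bound the response, e.g.
  //   let fee = rpc.get_fee().await?;
  //   assert!(fee.per_weight < SOME_LOCAL_CEILING); // SOME_LOCAL_CEILING is hypothetical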

  pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct SendRawResponse {
      status: String,
      double_spend: bool,
      fee_too_low: bool,
      invalid_input: bool,
      invalid_output: bool,
      low_mixin: bool,
      not_relayed: bool,
      overspend: bool,
      too_big: bool,
      too_few_outputs: bool,
      reason: String,
    }

    let mut buf = Vec::with_capacity(2048);
    tx.write(&mut buf).unwrap();
    let res: SendRawResponse = self
      .rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(&buf) })))
      .await?;

    if res.status != "OK" {
      Err(RpcError::InvalidTransaction(tx.hash()))?;
    }

    Ok(())
  }

  pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
    self
      .rpc_call::<_, EmptyResponse>(
        "json_rpc",
        Some(json!({
          "method": "generateblocks",
          "params": {
            "wallet_address": address,
            "amount_of_blocks": block_count
          },
        })),
      )
      .await?;

    Ok(())
  }
}
141
coins/monero/src/serialize.rs
Normal file
@@ -0,0 +1,141 @@
use std::io::{self, Read, Write};

use curve25519_dalek::{
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

pub(crate) fn varint_len(varint: usize) -> usize {
  ((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}

pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
  w.write_all(&[*byte])
}

pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut varint = *varint;
  while {
    let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    varint >>= 7;
    if varint != 0 {
      b |= VARINT_CONTINUATION_MASK;
    }
    write_byte(&b, w)?;
    varint != 0
  } {}
  Ok(())
}
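// Worked example, not part of this commit: 300 = 0b1_0010_1100 encodes as [0xAC, 0x02]
// (the low 7 bits with the continuation bit set, then the remaining bits), so
// varint_len(300) == 2 and write_varint(&300, w) emits exactly those two bytes.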

pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
  w.write_all(&scalar.to_bytes())
}

pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
  w.write_all(&point.compress().to_bytes())
}

pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
  f: F,
  values: &[T],
  w: &mut W,
) -> io::Result<()> {
  for value in values {
    f(value, w)?;
  }
  Ok(())
}

pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
  f: F,
  values: &[T],
  w: &mut W,
) -> io::Result<()> {
  write_varint(&values.len().try_into().unwrap(), w)?;
  write_raw_vec(f, values, w)
}

pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
  let mut res = [0; N];
  r.read_exact(&mut res)?;
  Ok(res)
}

pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
  Ok(read_bytes::<_, 1>(r)?[0])
}

pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
  read_bytes(r).map(u64::from_le_bytes)
}

pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
  read_bytes(r).map(u32::from_le_bytes)
}

pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut bits = 0;
  let mut res = 0;
  while {
    let b = read_byte(r)?;
    if (bits != 0) && (b == 0) {
      Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
    }
    if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
      Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
    }

    res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
    bits += 7;
    b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
  } {}
  Ok(res)
}
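// Decoding sketch, not part of this commit: [0xAC, 0x02] round-trips back to 300, while
// [0x80, 0x00] errors as a non-canonical varint (a trailing zero byte can never appear in a
// minimal encoding).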

// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
// for now. There are also further edge cases, as noted by
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
// reduction applied
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
  Scalar::from_canonical_bytes(read_bytes(r)?)
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}

pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
  let bytes = read_bytes(r)?;
  CompressedEdwardsY(bytes)
    .decompress()
    // Ban points which are either unreduced or -0
    .filter(|point| point.compress().to_bytes() == bytes)
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
}

pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
  read_point(r)
    .ok()
    .filter(|point| point.is_torsion_free())
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
}

pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
  f: F,
  len: usize,
  r: &mut R,
) -> io::Result<Vec<T>> {
  let mut res = vec![];
  for _ in 0 .. len {
    res.push(f(r)?);
  }
  Ok(res)
}

pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
  f: F,
  r: &mut R,
) -> io::Result<Vec<T>> {
  read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}
176
coins/monero/src/tests/address.rs
Normal file
@@ -0,0 +1,176 @@
use hex_literal::hex;

use rand_core::{RngCore, OsRng};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};

use crate::{
  random_scalar,
  wallet::address::{Network, AddressType, AddressMeta, MoneroAddress},
};

const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");

const STANDARD: &str =
  "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";

const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &str =
  "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
   pXSn88oBX35";

const SUB_SPEND: [u8; 32] =
  hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &str =
  "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";

const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");

#[test]
fn standard_address() {
  let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Standard);
  assert!(!addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), None);
  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SPEND);
  assert_eq!(addr.view.compress().to_bytes(), VIEW);
  assert_eq!(addr.to_string(), STANDARD);
}

#[test]
fn integrated_address() {
  let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
  assert!(!addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SPEND);
  assert_eq!(addr.view.compress().to_bytes(), VIEW);
  assert_eq!(addr.to_string(), INTEGRATED);
}

#[test]
fn subaddress() {
  let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
  assert_eq!(addr.meta.network, Network::Mainnet);
  assert_eq!(addr.meta.kind, AddressType::Subaddress);
  assert!(addr.meta.kind.subaddress());
  assert_eq!(addr.meta.kind.payment_id(), None);
  assert!(!addr.meta.kind.guaranteed());
  assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
  assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
  assert_eq!(addr.to_string(), SUBADDRESS);
}

#[test]
fn featured() {
  for (network, first) in
    [(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
  {
    for _ in 0 .. 100 {
      let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
      let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;

      for features in 0 .. (1 << 3) {
        const SUBADDRESS_FEATURE_BIT: u8 = 1;
        const INTEGRATED_FEATURE_BIT: u8 = 1 << 1;
        const GUARANTEED_FEATURE_BIT: u8 = 1 << 2;

        let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;

        let mut payment_id = [0; 8];
        OsRng.fill_bytes(&mut payment_id);
        let payment_id = Some(payment_id)
          .filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);

        let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;

        let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
        let meta = AddressMeta::new(network, kind);
        let addr = MoneroAddress::new(meta, spend, view);

        assert_eq!(addr.to_string().chars().next().unwrap(), first);
        assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);

        assert_eq!(addr.spend, spend);
        assert_eq!(addr.view, view);

        assert_eq!(addr.subaddress(), subaddress);
        assert_eq!(addr.payment_id(), payment_id);
        assert_eq!(addr.guaranteed(), guaranteed);
      }
    }
  }
}

#[test]
fn featured_vectors() {
  #[derive(serde::Deserialize)]
  struct Vector {
    address: String,

    network: String,
    spend: String,
    view: String,

    subaddress: bool,
    integrated: bool,
    payment_id: Option<[u8; 8]>,
    guaranteed: bool,
  }

  let vectors = serde_json::from_str::<Vec<Vector>>(FEATURED_JSON).unwrap();
  for vector in vectors {
    let first = vector.address.chars().next().unwrap();
    let network = match vector.network.as_str() {
      "Mainnet" => {
        assert_eq!(first, 'C');
        Network::Mainnet
      }
      "Testnet" => {
        assert_eq!(first, 'K');
        Network::Testnet
      }
      "Stagenet" => {
        assert_eq!(first, 'F');
        Network::Stagenet
      }
      _ => panic!("Unknown network"),
    };
    let spend =
      CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
    let view =
      CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();

    let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
    assert_eq!(addr.spend, spend);
    assert_eq!(addr.view, view);

    assert_eq!(addr.subaddress(), vector.subaddress);
    assert_eq!(vector.integrated, vector.payment_id.is_some());
    assert_eq!(addr.payment_id(), vector.payment_id);
    assert_eq!(addr.guaranteed(), vector.guaranteed);

    assert_eq!(
      MoneroAddress::new(
        AddressMeta::new(
          network,
          AddressType::Featured {
            subaddress: vector.subaddress,
            payment_id: vector.payment_id,
            guaranteed: vector.guaranteed
          }
        ),
        spend,
        view
      )
      .to_string(),
      vector.address
    );
  }
}
92
coins/monero/src/tests/bulletproofs.rs
Normal file
@@ -0,0 +1,92 @@
use hex_literal::hex;
use rand::rngs::OsRng;

use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;

use crate::{
  Commitment, random_scalar,
  ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
};

#[test]
fn bulletproofs_vector() {
  let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
  let point = |point| CompressedEdwardsY(point).decompress().unwrap();

  // Generated from Monero
  assert!(Bulletproofs::Original(OriginalStruct {
    A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
    S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
    T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
    T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
    taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
    mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
    L: vec![
      point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
      point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
      point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
      point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
      point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
      point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
      point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
    ],
    R: vec![
      point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
      point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
      point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
      point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
      point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
      point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
      point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
    ],
    a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
    b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
    t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
  })
  .verify(
    &mut OsRng,
    &[
      // For some reason, these vectors are * INV_EIGHT
      point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
        .mul_by_cofactor(),
      point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
        .mul_by_cofactor(),
    ]
  ));
}
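// Context, not part of this commit: Monero serializes these commitments premultiplied by the
// inverse of 8, so mul_by_cofactor (multiplication by 8) above recovers the commitments the
// proof actually ranges over.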

macro_rules! bulletproofs_tests {
  ($name: ident, $max: ident, $plus: literal) => {
    #[test]
    fn $name() {
      // Create Bulletproofs for all possible output quantities
      let mut verifier = BatchVerifier::new(16);
      for i in 1 .. 17 {
        let commitments = (1 ..= i)
          .map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
          .collect::<Vec<_>>();

        let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();

        let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
        assert!(bp.verify(&mut OsRng, &commitments));
        assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
      }
      assert!(verifier.verify_vartime());
    }

    #[test]
    fn $max() {
      // Check Bulletproofs errors if we try to prove for too many outputs
      let mut commitments = vec![];
      for _ in 0 .. 17 {
        commitments.push(Commitment::new(Scalar::zero(), 0));
      }
      assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
    }
  };
}

bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);
128
coins/monero/src/tests/clsag.rs
Normal file
@@ -0,0 +1,128 @@
use core::ops::Deref;
#[cfg(feature = "multisig")]
use std::sync::{Arc, RwLock};

use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::curve::Ed25519;

use crate::{
  Commitment, random_scalar,
  wallet::Decoys,
  ringct::{
    generate_key_image,
    clsag::{ClsagInput, Clsag},
  },
};
#[cfg(feature = "multisig")]
use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};

#[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign};

const RING_LEN: u64 = 11;
const AMOUNT: u64 = 1337;

#[cfg(feature = "multisig")]
const RING_INDEX: u8 = 3;

#[test]
fn clsag() {
  for real in 0 .. RING_LEN {
    let msg = [1; 32];

    let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
    let mut ring = vec![];
    for i in 0 .. RING_LEN {
      let dest = Zeroizing::new(random_scalar(&mut OsRng));
      let mask = random_scalar(&mut OsRng);
      let amount;
      if i == real {
        secrets = (dest.clone(), mask);
        amount = AMOUNT;
      } else {
        amount = OsRng.next_u64();
      }
      ring
        .push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
    }

    let image = generate_key_image(&secrets.0);
    let (clsag, pseudo_out) = Clsag::sign(
      &mut OsRng,
      vec![(
        secrets.0,
        image,
        ClsagInput::new(
          Commitment::new(secrets.1, AMOUNT),
          Decoys {
            i: u8::try_from(real).unwrap(),
            offsets: (1 ..= RING_LEN).into_iter().collect(),
            ring: ring.clone(),
          },
        )
        .unwrap(),
      )],
      random_scalar(&mut OsRng),
      msg,
    )
    .swap_remove(0);
    clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
  }
}

#[cfg(feature = "multisig")]
#[test]
fn clsag_multisig() {
  let keys = key_gen::<_, Ed25519>(&mut OsRng);

  let randomness = random_scalar(&mut OsRng);
  let mut ring = vec![];
  for i in 0 .. RING_LEN {
    let dest;
    let mask;
    let amount;
    if i != u64::from(RING_INDEX) {
      dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
      mask = random_scalar(&mut OsRng);
      amount = OsRng.next_u64();
    } else {
      dest = keys[&1].group_key().0;
      mask = randomness;
      amount = AMOUNT;
    }
    ring.push([dest, Commitment::new(mask, amount).calculate()]);
  }

  let mask_sum = random_scalar(&mut OsRng);
  let algorithm = ClsagMultisig::new(
    RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
    keys[&1].group_key().0,
    Arc::new(RwLock::new(Some(ClsagDetails::new(
      ClsagInput::new(
        Commitment::new(randomness, AMOUNT),
        Decoys {
          i: RING_INDEX,
          offsets: (1 ..= RING_LEN).into_iter().collect(),
          ring: ring.clone(),
        },
      )
      .unwrap(),
      mask_sum,
    )))),
  );

  sign(
    &mut OsRng,
    algorithm.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algorithm, &keys),
    &[1; 32],
  );
}
3
coins/monero/src/tests/mod.rs
Normal file
@@ -0,0 +1,3 @@
mod clsag;
mod bulletproofs;
mod address;
230
coins/monero/src/tests/vectors/featured_addresses.json
Normal file
@@ -0,0 +1,230 @@
[
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": false,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": true,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": false,
    "integrated": true,
    "payment_id": [46, 48, 134, 34, 245, 148, 243, 195],
    "guaranteed": false
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": true,
    "integrated": true,
    "payment_id": [153, 176, 98, 204, 151, 27, 197, 168],
    "guaranteed": false
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": false,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": true,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": false,
    "integrated": true,
    "payment_id": [88, 37, 149, 111, 171, 108, 120, 181],
    "guaranteed": true
  },
  {
    "address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN",
    "network": "Mainnet",
    "spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
    "view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
    "subaddress": true,
    "integrated": true,
    "payment_id": [125, 69, 155, 152, 140, 160, 157, 186],
    "guaranteed": true
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": false,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": true,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": false,
    "integrated": true,
    "payment_id": [92, 225, 118, 220, 39, 3, 72, 51],
    "guaranteed": false
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": true,
    "integrated": true,
    "payment_id": [20, 120, 47, 89, 72, 165, 233, 115],
    "guaranteed": false
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": false,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": true,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": false,
    "integrated": true,
    "payment_id": [193, 149, 123, 214, 180, 205, 195, 91],
    "guaranteed": true
  },
  {
    "address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB",
    "network": "Testnet",
    "spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
    "view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
    "subaddress": true,
    "integrated": true,
    "payment_id": [205, 170, 65, 0, 51, 175, 251, 184],
    "guaranteed": true
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": false,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": true,
    "integrated": false,
    "guaranteed": false
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": false,
    "integrated": true,
    "payment_id": [173, 149, 78, 64, 215, 211, 66, 170],
    "guaranteed": false
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": true,
    "integrated": true,
    "payment_id": [254, 159, 186, 162, 1, 8, 156, 108],
    "guaranteed": false
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": false,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": true,
    "integrated": false,
    "guaranteed": true
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": false,
    "integrated": true,
    "payment_id": [3, 115, 230, 129, 172, 108, 116, 235],
    "guaranteed": true
  },
  {
    "address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R",
    "network": "Stagenet",
    "spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
    "view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
    "subaddress": true,
    "integrated": true,
    "payment_id": [94, 122, 63, 167, 209, 225, 14, 180],
    "guaranteed": true
  }
]
310
coins/monero/src/transaction.rs
Normal file
@@ -0,0 +1,310 @@
use core::cmp::Ordering;
use std::io::{self, Read, Write};

use zeroize::Zeroize;

use curve25519_dalek::{
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

use crate::{
  Protocol, hash,
  serialize::*,
  ringct::{RctBase, RctPrunable, RctSignatures},
};

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Input {
  Gen(u64),
  ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}

impl Input {
  // Worst-case predictive len
  pub(crate) fn fee_weight(ring_len: usize) -> usize {
    // Uses 1 byte for the VarInt amount due to amount being 0
    // Uses 1 byte for the VarInt encoding of the length of the ring as well
    1 + 1 + 1 + (8 * ring_len) + 32
  }
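  // Byte-by-byte breakdown, not in the original: 1 for the input type, 1 for the zero amount
  // VarInt, 1 for the ring length VarInt, up to 8 per key offset, and 32 for the key image.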

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Input::Gen(height) => {
        w.write_all(&[255])?;
        write_varint(height, w)
      }

      Input::ToKey { amount, key_offsets, key_image } => {
        w.write_all(&[2])?;
        write_varint(amount, w)?;
        write_vec(write_varint, key_offsets, w)?;
        write_point(key_image, w)
      }
    }
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Input> {
    Ok(match read_byte(r)? {
      255 => Input::Gen(read_varint(r)?),
      2 => Input::ToKey {
        amount: read_varint(r)?,
        key_offsets: read_vec(read_varint, r)?,
        key_image: read_torsion_free_point(r)?,
      },
      _ => {
        Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
      }
    })
  }
}

// Doesn't bother moving to an enum for the unused Script classes
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Output {
  pub amount: u64,
  pub key: CompressedEdwardsY,
  pub view_tag: Option<u8>,
}

impl Output {
  pub(crate) fn fee_weight() -> usize {
    1 + 1 + 32 + 1
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.amount, w)?;
    w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
    w.write_all(&self.key.to_bytes())?;
    if let Some(view_tag) = self.view_tag {
      w.write_all(&[view_tag])?;
    }
    Ok(())
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Output> {
    let amount = read_varint(r)?;
    let view_tag = match read_byte(r)? {
      2 => false,
      3 => true,
      _ => Err(io::Error::new(
        io::ErrorKind::Other,
        "Tried to deserialize unknown/unused output type",
      ))?,
    };

    Ok(Output {
      amount,
      key: CompressedEdwardsY(read_bytes(r)?),
      view_tag: if view_tag { Some(read_byte(r)?) } else { None },
    })
  }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Timelock {
  None,
  Block(usize),
  Time(u64),
}

impl Timelock {
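  // Context, not in the original: Monero packs the timelock into one u64 where values below
  // 500,000,000 are interpreted as block heights and anything larger as a Unix timestamp;
  // from_raw below implements that split.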
  fn from_raw(raw: u64) -> Timelock {
    if raw == 0 {
      Timelock::None
    } else if raw < 500_000_000 {
      Timelock::Block(usize::try_from(raw).unwrap())
    } else {
      Timelock::Time(raw)
    }
  }

  fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(
      &match self {
        Timelock::None => 0,
        Timelock::Block(block) => (*block).try_into().unwrap(),
        Timelock::Time(time) => *time,
      },
      w,
    )
  }
}

impl PartialOrd for Timelock {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    match (self, other) {
      (Timelock::None, _) => Some(Ordering::Less),
      (Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
      (Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
      _ => None,
    }
  }
}
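// Note, not in the original: None compares as less than everything (including None), and
// Block vs Time is incomparable (returns None), so callers cannot assume a total order.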

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TransactionPrefix {
  pub version: u64,
  pub timelock: Timelock,
  pub inputs: Vec<Input>,
  pub outputs: Vec<Output>,
  pub extra: Vec<u8>,
}

impl TransactionPrefix {
  pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
    // Assumes Timelock::None since this library won't let you create a TX with a timelock
    1 + 1 +
      varint_len(inputs) +
      (inputs * Input::fee_weight(ring_len)) +
      1 +
      (outputs * Output::fee_weight()) +
      varint_len(extra) +
      extra
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.version, w)?;
    self.timelock.write(w)?;
    write_vec(Input::write, &self.inputs, w)?;
    write_vec(Output::write, &self.outputs, w)?;
    write_varint(&self.extra.len().try_into().unwrap(), w)?;
    w.write_all(&self.extra)
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<TransactionPrefix> {
    let mut prefix = TransactionPrefix {
      version: read_varint(r)?,
      timelock: Timelock::from_raw(read_varint(r)?),
      inputs: read_vec(Input::read, r)?,
      outputs: read_vec(Output::read, r)?,
      extra: vec![],
    };
    prefix.extra = read_vec(read_byte, r)?;
    Ok(prefix)
  }
}

/// Monero transaction. For version 1, rct_signatures still contains an accurate fee value.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Transaction {
  pub prefix: TransactionPrefix,
  pub signatures: Vec<(Scalar, Scalar)>,
  pub rct_signatures: RctSignatures,
}

impl Transaction {
  pub(crate) fn fee_weight(
    protocol: Protocol,
    inputs: usize,
    outputs: usize,
    extra: usize,
  ) -> usize {
    TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
      RctSignatures::fee_weight(protocol, inputs, outputs)
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.prefix.write(w)?;
    if self.prefix.version == 1 {
      for sig in &self.signatures {
        write_scalar(&sig.0, w)?;
        write_scalar(&sig.1, w)?;
      }
      Ok(())
    } else if self.prefix.version == 2 {
      self.rct_signatures.write(w)
    } else {
      panic!("Serializing a transaction with an unknown version");
    }
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Transaction> {
    let prefix = TransactionPrefix::read(r)?;
    let mut signatures = vec![];
    let mut rct_signatures = RctSignatures {
      base: RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] },
      prunable: RctPrunable::Null,
    };

    if prefix.version == 1 {
      for _ in 0 .. prefix.inputs.len() {
        signatures.push((read_scalar(r)?, read_scalar(r)?));
      }
      rct_signatures.base.fee = prefix
        .inputs
        .iter()
        .map(|input| match input {
          Input::Gen(..) => 0,
          Input::ToKey { amount, .. } => *amount,
        })
        .sum::<u64>()
        .saturating_sub(prefix.outputs.iter().map(|output| output.amount).sum());
    } else if prefix.version == 2 {
      rct_signatures = RctSignatures::read(
        prefix
          .inputs
          .iter()
          .map(|input| match input {
            Input::Gen(_) => 0,
            Input::ToKey { key_offsets, .. } => key_offsets.len(),
          })
          .collect(),
        prefix.outputs.len(),
        r,
      )?;
    } else {
      Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
    }

    Ok(Transaction { prefix, signatures, rct_signatures })
  }

  pub fn hash(&self) -> [u8; 32] {
    let mut buf = Vec::with_capacity(2048);
    if self.prefix.version == 1 {
      self.write(&mut buf).unwrap();
      hash(&buf)
    } else {
      let mut hashes = Vec::with_capacity(96);

      self.prefix.write(&mut buf).unwrap();
      hashes.extend(hash(&buf));
      buf.clear();

      self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
      hashes.extend(hash(&buf));
      buf.clear();

      match self.rct_signatures.prunable {
        RctPrunable::Null => buf.resize(32, 0),
        _ => {
          self.rct_signatures.prunable.write(&mut buf).unwrap();
          buf = hash(&buf).to_vec();
        }
      }
      hashes.extend(&buf);

      hash(&hashes)
    }
  }
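  // Summary, not in the original: a v2 transaction hash is the hash of three component hashes
  // (prefix, RingCT base, RingCT prunable), which is what lets pruned nodes retain only the
  // prunable hash yet still recompute transaction hashes.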

  /// Calculate the hash of this transaction as needed for signing it.
  pub fn signature_hash(&self) -> [u8; 32] {
    let mut buf = Vec::with_capacity(2048);
    let mut sig_hash = Vec::with_capacity(96);

    self.prefix.write(&mut buf).unwrap();
    sig_hash.extend(hash(&buf));
    buf.clear();

    self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
    sig_hash.extend(hash(&buf));
    buf.clear();

    self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
    sig_hash.extend(hash(&buf));

    hash(&sig_hash)
  }
}
313
coins/monero/src/wallet/address.rs
Normal file
@@ -0,0 +1,313 @@
use core::{marker::PhantomData, fmt::Debug};
use std::string::ToString;

use thiserror::Error;

use zeroize::Zeroize;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use base58_monero::base58::{encode_check, decode_check};

/// The network this address is for.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Network {
  Mainnet,
  Testnet,
  Stagenet,
}

/// The address type, supporting the officially documented addresses, along with
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressType {
  Standard,
  Integrated([u8; 8]),
  Subaddress,
  Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct SubaddressIndex {
  pub(crate) account: u32,
  pub(crate) address: u32,
}

impl SubaddressIndex {
  pub const fn new(account: u32, address: u32) -> Option<SubaddressIndex> {
    if (account == 0) && (address == 0) {
      return None;
    }
    Some(SubaddressIndex { account, address })
  }
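  // Note, not in the original: (0, 0) is the wallet's primary address, not a subaddress, hence
  // its rejection here.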

  pub fn account(&self) -> u32 {
    self.account
  }

  pub fn address(&self) -> u32 {
    self.address
  }
}

/// Address specification. Used internally to create addresses.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressSpec {
  Standard,
  Integrated([u8; 8]),
  Subaddress(SubaddressIndex),
  Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
}

impl AddressType {
  pub fn subaddress(&self) -> bool {
    matches!(self, AddressType::Subaddress) ||
      matches!(self, AddressType::Featured { subaddress: true, .. })
  }

  pub fn payment_id(&self) -> Option<[u8; 8]> {
    if let AddressType::Integrated(id) = self {
      Some(*id)
    } else if let AddressType::Featured { payment_id, .. } = self {
      *payment_id
    } else {
      None
    }
  }

  pub fn guaranteed(&self) -> bool {
    matches!(self, AddressType::Featured { guaranteed: true, .. })
  }
}

/// A type which returns the byte for a given address.
pub trait AddressBytes: Clone + Copy + PartialEq + Eq + Debug {
  fn network_bytes(network: Network) -> (u8, u8, u8, u8);
}

/// Address bytes for Monero.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MoneroAddressBytes;
impl AddressBytes for MoneroAddressBytes {
  fn network_bytes(network: Network) -> (u8, u8, u8, u8) {
    match network {
      Network::Mainnet => (18, 19, 42, 70),
      Network::Testnet => (53, 54, 63, 111),
      Network::Stagenet => (24, 25, 36, 86),
    }
  }
}

/// Address metadata.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct AddressMeta<B: AddressBytes> {
  _bytes: PhantomData<B>,
  pub network: Network,
  pub kind: AddressType,
}

impl<B: AddressBytes> Zeroize for AddressMeta<B> {
  fn zeroize(&mut self) {
    self.network.zeroize();
    self.kind.zeroize();
  }
}

/// Error when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum AddressError {
  #[error("invalid address byte")]
  InvalidByte,
  #[error("invalid address encoding")]
  InvalidEncoding,
  #[error("invalid length")]
  InvalidLength,
  #[error("invalid key")]
  InvalidKey,
  #[error("unknown features")]
  UnknownFeatures,
  #[error("different network than expected")]
  DifferentNetwork,
}

impl<B: AddressBytes> AddressMeta<B> {
  #[allow(clippy::wrong_self_convention)]
  fn to_byte(&self) -> u8 {
    let bytes = B::network_bytes(self.network);
    match self.kind {
      AddressType::Standard => bytes.0,
      AddressType::Integrated(_) => bytes.1,
      AddressType::Subaddress => bytes.2,
      AddressType::Featured { .. } => bytes.3,
    }
  }

  /// Create an address's metadata.
  pub fn new(network: Network, kind: AddressType) -> Self {
    AddressMeta { _bytes: PhantomData, network, kind }
  }

  // Returns an incomplete instantiation in the case of Integrated/Featured addresses
  fn from_byte(byte: u8) -> Result<Self, AddressError> {
    let mut meta = None;
    for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
      let (standard, integrated, subaddress, featured) = B::network_bytes(network);
      if let Some(kind) = match byte {
        _ if byte == standard => Some(AddressType::Standard),
        _ if byte == integrated => Some(AddressType::Integrated([0; 8])),
        _ if byte == subaddress => Some(AddressType::Subaddress),
        _ if byte == featured => {
          Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
        }
        _ => None,
      } {
        meta = Some(AddressMeta::new(network, kind));
        break;
      }
    }

    meta.ok_or(AddressError::InvalidByte)
  }

  pub fn subaddress(&self) -> bool {
    self.kind.subaddress()
  }

  pub fn payment_id(&self) -> Option<[u8; 8]> {
    self.kind.payment_id()
  }

  pub fn guaranteed(&self) -> bool {
    self.kind.guaranteed()
  }
}

/// A Monero address, composed of metadata and a spend/view key.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Address<B: AddressBytes> {
  pub meta: AddressMeta<B>,
  pub spend: EdwardsPoint,
  pub view: EdwardsPoint,
}

impl<B: AddressBytes> Zeroize for Address<B> {
  fn zeroize(&mut self) {
    self.meta.zeroize();
    self.spend.zeroize();
    self.view.zeroize();
  }
}

impl<B: AddressBytes> ToString for Address<B> {
  fn to_string(&self) -> String {
    let mut data = vec![self.meta.to_byte()];
    data.extend(self.spend.compress().to_bytes());
    data.extend(self.view.compress().to_bytes());
    if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
      // Technically should be a VarInt, yet we don't have enough features for that to be needed
      data.push(
        u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),
      );
    }
    if let Some(id) = self.meta.kind.payment_id() {
      data.extend(id);
    }
    encode_check(&data).unwrap()
  }
}
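// Worked example, not in the original: a Featured address with subaddress and guaranteed set,
// yet no payment ID, pushes the features byte 0b101 = 5 (bit 0 subaddress, bit 1 integrated,
// bit 2 guaranteed), the same layout from_str_raw parses below.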
|
||||
|
||||
impl<B: AddressBytes> Address<B> {
|
||||
pub fn new(meta: AddressMeta<B>, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
|
||||
Address { meta, spend, view }
|
||||
}
|
||||
|
||||
pub fn from_str_raw(s: &str) -> Result<Self, AddressError> {
|
||||
let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
|
||||
if raw.len() < (1 + 32 + 32) {
|
||||
Err(AddressError::InvalidLength)?;
|
||||
}
|
||||
|
||||
let mut meta = AddressMeta::from_byte(raw[0])?;
|
||||
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
|
||||
.decompress()
|
||||
.ok_or(AddressError::InvalidKey)?;
|
||||
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
|
||||
.decompress()
|
||||
.ok_or(AddressError::InvalidKey)?;
|
||||
let mut read = 65;
|
||||
|
||||
if matches!(meta.kind, AddressType::Featured { .. }) {
|
||||
if raw[read] >= (2 << 3) {
|
||||
Err(AddressError::UnknownFeatures)?;
|
||||
}
|
||||
|
||||
let subaddress = (raw[read] & 1) == 1;
|
||||
let integrated = ((raw[read] >> 1) & 1) == 1;
|
||||
let guaranteed = ((raw[read] >> 2) & 1) == 1;
|
||||
|
||||
meta.kind = AddressType::Featured {
|
||||
subaddress,
|
||||
payment_id: Some([0; 8]).filter(|_| integrated),
|
||||
guaranteed,
|
||||
};
|
||||
read += 1;
|
||||
}
|
||||
|
||||
// Update read early so we can verify the length
|
||||
if meta.kind.payment_id().is_some() {
|
||||
read += 8;
|
||||
}
|
||||
if raw.len() != read {
|
||||
Err(AddressError::InvalidLength)?;
|
||||
}
|
||||
|
||||
if let AddressType::Integrated(ref mut id) = meta.kind {
|
||||
id.copy_from_slice(&raw[(read - 8) .. read]);
|
||||
}
|
||||
if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
|
||||
id.copy_from_slice(&raw[(read - 8) .. read]);
|
||||
}
|
||||
|
||||
Ok(Address { meta, spend, view })
|
||||
}
|
||||
|
||||
pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
|
||||
Self::from_str_raw(s).and_then(|addr| {
|
||||
if addr.meta.network == network {
|
||||
Ok(addr)
|
||||
} else {
|
||||
Err(AddressError::DifferentNetwork)?
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn network(&self) -> Network {
|
||||
self.meta.network
|
||||
}
|
||||
|
||||
pub fn subaddress(&self) -> bool {
|
||||
self.meta.subaddress()
|
||||
}
|
||||
|
||||
pub fn payment_id(&self) -> Option<[u8; 8]> {
|
||||
self.meta.payment_id()
|
||||
}
|
||||
|
||||
pub fn guaranteed(&self) -> bool {
|
||||
self.meta.guaranteed()
|
||||
}
|
||||
}
|
||||
|
||||
/// Instantiation of the Address type with Monero's network bytes.
|
||||
pub type MoneroAddress = Address<MoneroAddressBytes>;
|
||||
// Allow re-interpreting of an arbitrary address as a monero address so it can be used with the
|
||||
// rest of this library. Doesn't use From as it was conflicting with From<T> for T.
|
||||
impl MoneroAddress {
|
||||
pub fn from<B: AddressBytes>(address: Address<B>) -> MoneroAddress {
|
||||
MoneroAddress::new(
|
||||
AddressMeta::new(address.meta.network, address.meta.kind),
|
||||
address.spend,
|
||||
address.view,
|
||||
)
|
||||
}
|
||||
}
|
||||
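As a quick illustration of the API above: decoding validates the Base58-check encoding, both key decompressions, and (via from_str) the network byte, while to_string reverses the process. A minimal sketch, assuming the crate is imported as monero_serai, a Network::Mainnet variant, and that AddressError derives Debug:

use monero_serai::wallet::address::{Network, MoneroAddress};

fn parse_and_reencode(s: &str) {
  // from_str checks the embedded network byte against the expected network
  match MoneroAddress::from_str(Network::Mainnet, s) {
    Ok(addr) => {
      // Round-tripping through to_string yields the original encoding
      assert_eq!(addr.to_string(), s);
      println!("subaddress: {}, payment ID: {:?}", addr.subaddress(), addr.payment_id());
    }
    Err(e) => println!("invalid address: {:?}", e),
  }
}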
247 coins/monero/src/wallet/decoys.rs Normal file
@@ -0,0 +1,247 @@
use std::{sync::Mutex, collections::HashSet};

use lazy_static::lazy_static;

use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};

use zeroize::{Zeroize, ZeroizeOnDrop};

use curve25519_dalek::edwards::EdwardsPoint;

use crate::{
  wallet::SpendableOutput,
  rpc::{RpcError, Rpc},
};

const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
const RECENT_WINDOW: usize = 15;
const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;

lazy_static! {
  static ref GAMMA: Gamma<f64> = Gamma::new(19.28, 1.0 / 1.61).unwrap();
  static ref DISTRIBUTION: Mutex<Vec<u64>> = Mutex::new(Vec::with_capacity(3000000));
}

#[allow(clippy::too_many_arguments)]
async fn select_n<R: RngCore + CryptoRng>(
  rng: &mut R,
  rpc: &Rpc,
  height: usize,
  high: u64,
  per_second: f64,
  real: &[u64],
  used: &mut HashSet<u64>,
  count: usize,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
  let mut iters = 0;
  let mut confirmed = Vec::with_capacity(count);
  // Retries on failure. Retries are obvious as decoys, yet should be minimal
  while confirmed.len() != count {
    let remaining = count - confirmed.len();
    let mut candidates = Vec::with_capacity(remaining);
    while candidates.len() != remaining {
      iters += 1;
      // This is cheap and on fresh chains, thousands of rounds may be needed
      if iters == 10000 {
        Err(RpcError::InternalError("not enough decoy candidates"))?;
      }

      // Use a gamma distribution
      let mut age = GAMMA.sample(rng).exp();
      if age > TIP_APPLICATION {
        age -= TIP_APPLICATION;
      } else {
        // f64 does not have try_from available, which is why these are written with `as`
        age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
      }

      let o = (age * per_second) as u64;
      if o < high {
        let distribution = DISTRIBUTION.lock().unwrap();
        let i = distribution.partition_point(|s| *s < (high - 1 - o));
        let prev = i.saturating_sub(1);
        let n = distribution[i] - distribution[prev];
        if n != 0 {
          let o = distribution[prev] + (rng.next_u64() % n);
          if !used.contains(&o) {
            // It will either actually be used, or is unusable and this prevents trying it again
            used.insert(o);
            candidates.push(o);
          }
        }
      }
    }

    // If this is the first time we're requesting these outputs, include the real one as well
    // Prevents the node we're connected to from having a list of known decoys and then seeing a
    // TX which uses all of them, with one additional output (the true spend)
    let mut real_indexes = HashSet::with_capacity(real.len());
    if confirmed.is_empty() {
      for real in real {
        candidates.push(*real);
      }
      // Sort candidates so the real spends aren't the ones at the end
      candidates.sort();
      for real in real {
        real_indexes.insert(candidates.binary_search(real).unwrap());
      }
    }

    for (i, output) in rpc.get_unlocked_outputs(&candidates, height).await?.iter_mut().enumerate() {
      // Don't include the real spend as a decoy, despite requesting it
      if real_indexes.contains(&i) {
        continue;
      }

      if let Some(output) = output.take() {
        confirmed.push((candidates[i], output));
      }
    }
  }

  Ok(confirmed)
}

fn offset(ring: &[u64]) -> Vec<u64> {
  let mut res = vec![ring[0]];
  res.resize(ring.len(), 0);
  for m in (1 .. ring.len()).rev() {
    res[m] = ring[m] - ring[m - 1];
  }
  res
}

/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
  pub i: u8,
  pub offsets: Vec<u64>,
  pub ring: Vec<[EdwardsPoint; 2]>,
}

impl Decoys {
  pub fn len(&self) -> usize {
    self.offsets.len()
  }

  /// Select decoys using the same distribution as Monero.
  pub async fn select<R: RngCore + CryptoRng>(
    rng: &mut R,
    rpc: &Rpc,
    ring_len: usize,
    height: usize,
    inputs: &[SpendableOutput],
  ) -> Result<Vec<Decoys>, RpcError> {
    let decoy_count = ring_len - 1;

    // Convert the inputs in question to the raw output data
    let mut real = Vec::with_capacity(inputs.len());
    let mut outputs = Vec::with_capacity(inputs.len());
    for input in inputs {
      real.push(input.global_index);
      outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
    }

    let distribution_len = {
      let distribution = DISTRIBUTION.lock().unwrap();
      distribution.len()
    };
    if distribution_len <= height {
      let extension = rpc.get_output_distribution(distribution_len, height).await?;
      DISTRIBUTION.lock().unwrap().extend(extension);
    }

    let high;
    let per_second;
    {
      let mut distribution = DISTRIBUTION.lock().unwrap();
      // If asked to use an older height than previously asked, truncate to ensure accuracy
      // Should never happen, yet risks desyncing if it did
      distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height

      high = distribution[distribution.len() - 1];
      per_second = {
        let blocks = distribution.len().min(BLOCKS_PER_YEAR);
        let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
        (outputs as f64) / ((blocks * BLOCK_TIME) as f64)
      };
    };

    let mut used = HashSet::<u64>::new();
    for o in &outputs {
      used.insert(o.0);
    }

    // TODO: Simply create a TX with less than the target amount
    if (high - MATURITY) < u64::try_from(inputs.len() * ring_len).unwrap() {
      Err(RpcError::InternalError("not enough decoy candidates"))?;
    }

    // Select all decoys for this transaction, assuming we generate a sane transaction
    // We should almost never naturally generate an insane transaction, hence why this doesn't
    // bother with an overage
    let mut decoys =
      select_n(rng, rpc, height, high, per_second, &real, &mut used, inputs.len() * decoy_count)
        .await?;
    real.zeroize();

    let mut res = Vec::with_capacity(inputs.len());
    for o in outputs {
      // Grab the decoys for this specific output
      let mut ring = decoys.drain((decoys.len() - decoy_count) ..).collect::<Vec<_>>();
      ring.push(o);
      ring.sort_by(|a, b| a.0.cmp(&b.0));

      // Sanity checks are only run when 1000 outputs are available in Monero
      // We run this check whenever the highest output index we acknowledge is > 500
      // This means we assume (for presumably test blockchains) that the height in use hasn't had
      // 500 outputs created since it, on a chain which itself isn't sufficiently mature
      // Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
      // fine for us to not have perfectly matching rules, especially since this code will loop
      // infinitely if it can't determine sanity, which is possible with sufficient inputs on
      // sufficiently small chains
      if high > 500 {
        // Make sure the TX passes the sanity check that the median output is within the last 40%
        let target_median = high * 3 / 5;
        while ring[ring_len / 2].0 < target_median {
          // If it's not, update the bottom half with new values to ensure the median only moves up
          for removed in ring.drain(0 .. (ring_len / 2)).collect::<Vec<_>>() {
            // If we removed the real spend, add it back
            if removed.0 == o.0 {
              ring.push(o);
            } else {
              // We could leave this in the used set, saving CPU time and removing low values as
              // future possibilities, yet that'd increase the amount of decoys required to create
              // this transaction, and some removed outputs may be the best options (as we drop
              // the first half, not just the bottom n)
              used.remove(&removed.0);
            }
          }

          // Select new outputs until we have a full-sized ring again
          ring.extend(
            select_n(rng, rpc, height, high, per_second, &[], &mut used, ring_len - ring.len())
              .await?,
          );
          ring.sort_by(|a, b| a.0.cmp(&b.0));
        }

        // The other sanity check rule is about duplicates, yet we already enforce unique ring
        // members
      }

      res.push(Decoys {
        // Binary searches for the real spend since we don't know where it sorted to
        i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
        offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
        ring: ring.iter().map(|output| output.1).collect(),
      });
    }

    Ok(res)
  }
}
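The core of the selection above is the age sampler: a gamma variate is exponentiated into an age in seconds, the lock window at the tip is peeled off (or a recent age is drawn uniformly), and the age is converted into a position in the cumulative output distribution via the chain's outputs-per-second rate. A standalone sketch of just that arithmetic, using the same constants; this mirrors, rather than calls, the library code:

use rand_core::{RngCore, OsRng};
use rand_distr::{Distribution, Gamma};

const LOCK_WINDOW: u64 = 10;
const RECENT_WINDOW: u64 = 15;
const BLOCK_TIME: u64 = 120;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;

// Returns a candidate position within the cumulative output distribution, if in range
fn sample_output_position(per_second: f64, high: u64) -> Option<u64> {
  // Same shape and scale parameters as the GAMMA static above
  let gamma = Gamma::new(19.28, 1.0 / 1.61).unwrap();
  let mut age = gamma.sample(&mut OsRng).exp();
  if age > TIP_APPLICATION {
    age -= TIP_APPLICATION;
  } else {
    // Too close to the tip to be spendable; resample uniformly from the recent window
    age = (OsRng.next_u64() % (RECENT_WINDOW * BLOCK_TIME)) as f64;
  }
  let o = (age * per_second) as u64;
  // Ages reaching past the distribution are rejected; the caller simply samples again
  (o < high).then(|| high - 1 - o)
}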
197 coins/monero/src/wallet/extra.rs Normal file
@@ -0,0 +1,197 @@
use core::ops::BitXor;
use std::io::{self, Read, Write};

use zeroize::Zeroize;

use curve25519_dalek::edwards::EdwardsPoint;

use crate::serialize::{
  varint_len, read_byte, read_bytes, read_varint, read_point, read_vec, write_byte, write_varint,
  write_point, write_vec,
};

pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum PaymentId {
  Unencrypted([u8; 32]),
  Encrypted([u8; 8]),
}

impl BitXor<[u8; 8]> for PaymentId {
  type Output = PaymentId;

  fn bitxor(self, bytes: [u8; 8]) -> PaymentId {
    match self {
      PaymentId::Unencrypted(_) => self,
      PaymentId::Encrypted(id) => {
        PaymentId::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())
      }
    }
  }
}

impl PaymentId {
  pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      PaymentId::Unencrypted(id) => {
        w.write_all(&[0])?;
        w.write_all(id)?;
      }
      PaymentId::Encrypted(id) => {
        w.write_all(&[1])?;
        w.write_all(id)?;
      }
    }
    Ok(())
  }

  fn read<R: Read>(r: &mut R) -> io::Result<PaymentId> {
    Ok(match read_byte(r)? {
      0 => PaymentId::Unencrypted(read_bytes(r)?),
      1 => PaymentId::Encrypted(read_bytes(r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown payment ID type"))?,
    })
  }
}

// Doesn't bother with padding nor MinerGate
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum ExtraField {
  PublicKey(EdwardsPoint),
  Nonce(Vec<u8>),
  MergeMining(usize, [u8; 32]),
  PublicKeys(Vec<EdwardsPoint>),
}

impl ExtraField {
  fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      ExtraField::PublicKey(key) => {
        w.write_all(&[1])?;
        w.write_all(&key.compress().to_bytes())?;
      }
      ExtraField::Nonce(data) => {
        w.write_all(&[2])?;
        write_vec(write_byte, data, w)?;
      }
      ExtraField::MergeMining(height, merkle) => {
        w.write_all(&[3])?;
        write_varint(&u64::try_from(*height).unwrap(), w)?;
        w.write_all(merkle)?;
      }
      ExtraField::PublicKeys(keys) => {
        w.write_all(&[4])?;
        write_vec(write_point, keys, w)?;
      }
    }
    Ok(())
  }

  fn read<R: Read>(r: &mut R) -> io::Result<ExtraField> {
    Ok(match read_byte(r)? {
      1 => ExtraField::PublicKey(read_point(r)?),
      2 => ExtraField::Nonce({
        let nonce = read_vec(read_byte, r)?;
        if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
          Err(io::Error::new(io::ErrorKind::Other, "too long nonce"))?;
        }
        nonce
      }),
      3 => ExtraField::MergeMining(
        usize::try_from(read_varint(r)?)
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "varint for height exceeds usize"))?,
        read_bytes(r)?,
      ),
      4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown extra field"))?,
    })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct Extra(Vec<ExtraField>);
impl Extra {
  pub(crate) fn keys(&self) -> Vec<EdwardsPoint> {
    let mut keys = Vec::with_capacity(2);
    for field in &self.0 {
      match field.clone() {
        ExtraField::PublicKey(key) => keys.push(key),
        ExtraField::PublicKeys(additional) => keys.extend(additional),
        _ => (),
      }
    }
    keys
  }

  pub(crate) fn payment_id(&self) -> Option<PaymentId> {
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
      }
    }
    None
  }

  pub(crate) fn data(&self) -> Vec<Vec<u8>> {
    let mut first = true;
    let mut res = vec![];
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        // Skip the first Nonce, which should be the payment ID
        if first {
          first = false;
          continue;
        }
        res.push(data.clone());
      }
    }
    res
  }

  pub(crate) fn new(mut keys: Vec<EdwardsPoint>) -> Extra {
    let mut res = Extra(Vec::with_capacity(3));
    if !keys.is_empty() {
      res.push(ExtraField::PublicKey(keys[0]));
    }
    if keys.len() > 1 {
      res.push(ExtraField::PublicKeys(keys.drain(1 ..).collect()));
    }
    res
  }

  pub(crate) fn push(&mut self, field: ExtraField) {
    self.0.push(field);
  }

  #[rustfmt::skip]
  pub(crate) fn fee_weight(outputs: usize, data: &[Vec<u8>]) -> usize {
    // PublicKey, key
    (1 + 32) +
    // PublicKeys, length, additional keys
    (1 + 1 + (outputs.saturating_sub(1) * 32)) +
    // PaymentId (Nonce), length, encrypted, ID
    (1 + 1 + 1 + 8) +
    // Nonce, length, data (if existent)
    data.iter().map(|v| 1 + varint_len(v.len()) + v.len()).sum::<usize>()
  }

  pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for field in &self.0 {
      field.write(w)?;
    }
    Ok(())
  }

  pub(crate) fn read<R: Read>(r: &mut R) -> io::Result<Extra> {
    let mut res = Extra(vec![]);
    let mut field;
    while {
      field = ExtraField::read(r);
      field.is_ok()
    } {
      res.0.push(field.unwrap());
    }
    Ok(res)
  }
}
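Since Extra is pub(crate), it isn't reachable from outside the crate, but the tags written above fully determine the wire format. As a hedged sketch with hypothetical values, the extra for one transaction key plus an encrypted payment ID, built by hand, comes out to the same 1 + 32 + 1 + 1 + 1 + 8 bytes counted by fee_weight:

// Illustrative only: reproduces the byte layout emitted by ExtraField::write
fn example_extra_bytes(tx_key: [u8; 32], encrypted_id: [u8; 8]) -> Vec<u8> {
  let mut extra = Vec::with_capacity(1 + 32 + 1 + 1 + 1 + 8);
  extra.push(1); // ExtraField::PublicKey tag
  extra.extend(tx_key); // 32-byte compressed point
  extra.push(2); // ExtraField::Nonce tag
  extra.push(1 + 8); // nonce length, a one-byte varint here
  extra.push(1); // PaymentId::Encrypted marker
  extra.extend(encrypted_id); // the 8-byte encrypted ID
  extra
}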
218 coins/monero/src/wallet/mod.rs Normal file
@@ -0,0 +1,218 @@
use core::ops::Deref;
use std::collections::{HashSet, HashMap};

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};

mod extra;
pub(crate) use extra::{PaymentId, ExtraField, Extra};

/// Address encoding and decoding functionality.
pub mod address;
use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};

mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};

pub(crate) mod decoys;
pub(crate) use decoys::Decoys;

mod send;
pub use send::{Fee, TransactionError, SignableTransaction, SignableTransactionBuilder};
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;

fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> std::cmp::Ordering {
  x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}

// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
  let mut u = b"uniqueness".to_vec();
  for input in inputs {
    match input {
      // If Gen, this should be the only input, making this loop somewhat pointless
      // This works and even if there were somehow multiple inputs, it'd be a false negative
      Input::Gen(height) => {
        write_varint(height, &mut u).unwrap();
      }
      Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
    }
  }
  hash(&u)
}

// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
// Scalar as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(
  uniqueness: Option<[u8; 32]>,
  s: &Scalar,
  P: &EdwardsPoint,
  o: usize,
) -> (u8, Scalar, [u8; 8]) {
  // 8Ra
  let mut output_derivation = (s * P).mul_by_cofactor().compress().to_bytes().to_vec();
  // || o
  write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();

  let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];
  let mut payment_id_xor = [0; 8];
  payment_id_xor
    .copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);

  // uniqueness ||
  let shared_key = if let Some(uniqueness) = uniqueness {
    [uniqueness.as_ref(), &output_derivation].concat().to_vec()
  } else {
    output_derivation
  };

  (view_tag, hash_to_scalar(&shared_key), payment_id_xor)
}

pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
  let mut amount_mask = b"amount".to_vec();
  amount_mask.extend(key.to_bytes());
  (amount ^ u64::from_le_bytes(hash(&amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
}

fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
  u64::from_le_bytes(amount_encryption(u64::from_le_bytes(amount), key))
}

pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
  let mut mask = b"commitment_mask".to_vec();
  mask.extend(shared_key.to_bytes());
  hash_to_scalar(&mask)
}

/// The private view key and public spend key, enabling scanning transactions.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct ViewPair {
  spend: EdwardsPoint,
  view: Zeroizing<Scalar>,
}

impl ViewPair {
  pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> ViewPair {
    ViewPair { spend, view }
  }

  fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
    hash_to_scalar(&Zeroizing::new(
      [
        b"SubAddr\0".as_ref(),
        Zeroizing::new(self.view.to_bytes()).as_ref(),
        &index.account().to_le_bytes(),
        &index.address().to_le_bytes(),
      ]
      .concat(),
    ))
  }

  fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
    let scalar = self.subaddress_derivation(index);
    let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
    let view = self.view.deref() * spend;
    (spend, view)
  }

  /// Returns an address with the provided specification.
  pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
    let mut spend = self.spend;
    let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;

    // construct the address meta
    let meta = match spec {
      AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
      AddressSpec::Integrated(payment_id) => {
        AddressMeta::new(network, AddressType::Integrated(payment_id))
      }
      AddressSpec::Subaddress(index) => {
        (spend, view) = self.subaddress_keys(index);
        AddressMeta::new(network, AddressType::Subaddress)
      }
      AddressSpec::Featured { subaddress, payment_id, guaranteed } => {
        if let Some(index) = subaddress {
          (spend, view) = self.subaddress_keys(index);
        }
        AddressMeta::new(
          network,
          AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed },
        )
      }
    };

    MoneroAddress::new(meta, spend, view)
  }
}

/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
  pair: ViewPair,
  // Also contains the spend key as None
  pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
  pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}

impl Zeroize for Scanner {
  fn zeroize(&mut self) {
    self.pair.zeroize();

    // These may not be effective, unfortunately
    for (mut key, mut value) in self.subaddresses.drain() {
      key.zeroize();
      value.zeroize();
    }
    if let Some(ref mut burning_bug) = self.burning_bug.take() {
      for mut output in burning_bug.drain() {
        output.zeroize();
      }
    }
  }
}

impl Drop for Scanner {
  fn drop(&mut self) {
    self.zeroize();
  }
}

impl ZeroizeOnDrop for Scanner {}

impl Scanner {
  /// Create a Scanner from a ViewPair.
  /// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
  /// When an output is successfully scanned, the output key MUST be saved to disk.
  /// When a new scanner is created, ALL saved output keys must be passed in to be secure.
  /// If None is passed, a modified shared key derivation is used which is immune to the burning
  /// bug (specifically the Guaranteed feature from Featured Addresses).
  // TODO: Should this take in a DB access handle to ensure output keys are saved?
  pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
    let mut subaddresses = HashMap::new();
    subaddresses.insert(pair.spend.compress(), None);
    Scanner { pair, subaddresses, burning_bug }
  }

  /// Register a subaddress.
  // There used to be an address function here, yet it wasn't safe. It could generate addresses
  // incompatible with the Scanner. While we could return None for that, then we have the issue
  // of runtime failures to generate an address.
  // Removing that API was the simplest option.
  pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
    let (spend, _) = self.pair.subaddress_keys(subaddress);
    self.subaddresses.insert(spend.compress(), Some(subaddress));
  }
}
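To tie the above together, a hedged usage sketch of ViewPair and address generation; the fixed scalars here are stand-ins purely for illustration, not how keys should be derived in practice:

use zeroize::Zeroizing;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
use monero_serai::wallet::{ViewPair, address::{Network, AddressSpec, SubaddressIndex}};

fn main() {
  // Hypothetical keys; real wallets derive these from entropy
  let spend = &Scalar::from(42u64) * &ED25519_BASEPOINT_TABLE; // public spend key
  let view = Zeroizing::new(Scalar::from(7u64)); // private view key
  let pair = ViewPair::new(spend, view);

  let standard = pair.address(Network::Mainnet, AddressSpec::Standard);
  // SubaddressIndex::new returns an Option, per its use in Metadata::read above
  let subaddress = SubaddressIndex::new(0, 1)
    .map(|index| pair.address(Network::Mainnet, AddressSpec::Subaddress(index)));
  println!("{} {:?}", standard.to_string(), subaddress.map(|a| a.to_string()));
}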
437 coins/monero/src/wallet/scan.rs Normal file
@@ -0,0 +1,437 @@
use std::io::{self, Read, Write};

use zeroize::{Zeroize, ZeroizeOnDrop};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

use crate::{
  Commitment,
  serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
  transaction::{Input, Timelock, Transaction},
  block::Block,
  rpc::{Rpc, RpcError},
  wallet::{
    PaymentId, Extra, address::SubaddressIndex, Scanner, uniqueness, shared_key, amount_decryption,
    commitment_mask,
  },
};

/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
  pub tx: [u8; 32],
  pub o: u8,
}

impl AbsoluteId {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.tx)?;
    w.write_all(&[self.o])
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(32 + 1);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
    Ok(AbsoluteId { tx: read_bytes(r)?, o: read_byte(r)? })
  }
}

/// The data contained within an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
  pub key: EdwardsPoint,
  /// Absolute difference between the spend key and the key in this output
  pub key_offset: Scalar,
  pub commitment: Commitment,
}

impl OutputData {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.key.compress().to_bytes())?;
    w.write_all(&self.key_offset.to_bytes())?;
    w.write_all(&self.commitment.mask.to_bytes())?;
    w.write_all(&self.commitment.amount.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(32 + 32 + 32 + 8);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
    Ok(OutputData {
      key: read_point(r)?,
      key_offset: read_scalar(r)?,
      commitment: Commitment::new(read_scalar(r)?, read_u64(r)?),
    })
  }
}

/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
  /// The subaddress this output was sent to.
  pub subaddress: Option<SubaddressIndex>,
  /// The payment ID included with this output.
  /// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't included.
  // Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should
  // have one, making it simplest to leave as-is.
  pub payment_id: [u8; 8],
  /// Arbitrary data encoded in TX extra.
  pub arbitrary_data: Vec<Vec<u8>>,
}

impl Metadata {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    if let Some(subaddress) = self.subaddress {
      w.write_all(&[1])?;
      w.write_all(&subaddress.account().to_le_bytes())?;
      w.write_all(&subaddress.address().to_le_bytes())?;
    } else {
      w.write_all(&[0])?;
    }
    w.write_all(&self.payment_id)?;

    w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
    for part in &self.arbitrary_data {
      w.write_all(&[u8::try_from(part.len()).unwrap()])?;
      w.write_all(part)?;
    }
    Ok(())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(1 + 8 + 1);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
    let subaddress = if read_byte(r)? == 1 {
      Some(
        SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
      )
    } else {
      None
    };

    Ok(Metadata {
      subaddress,
      payment_id: read_bytes(r)?,
      arbitrary_data: {
        let mut data = vec![];
        for _ in 0 .. read_u32(r)? {
          let len = read_byte(r)?;
          data.push(read_raw_vec(read_byte, usize::from(len), r)?);
        }
        data
      },
    })
  }
}

/// A received output, defined as its absolute ID, data, and metadata.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ReceivedOutput {
  pub absolute: AbsoluteId,
  pub data: OutputData,
  pub metadata: Metadata,
}

impl ReceivedOutput {
  pub fn key(&self) -> EdwardsPoint {
    self.data.key
  }

  pub fn key_offset(&self) -> Scalar {
    self.data.key_offset
  }

  pub fn commitment(&self) -> Commitment {
    self.data.commitment.clone()
  }

  pub fn arbitrary_data(&self) -> &[Vec<u8>] {
    &self.metadata.arbitrary_data
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.absolute.write(w)?;
    self.data.write(w)?;
    self.metadata.write(w)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
    Ok(ReceivedOutput {
      absolute: AbsoluteId::read(r)?,
      data: OutputData::read(r)?,
      metadata: Metadata::read(r)?,
    })
  }
}

/// A spendable output, defined as a received output and its index on the Monero blockchain.
/// This index is dependent on the Monero blockchain and will only be known once the output is
/// included within a block. This may change if there's a reorganization.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
  pub output: ReceivedOutput,
  pub global_index: u64,
}

impl SpendableOutput {
  /// Update the spendable output's global index. This is intended to be called if a
  /// re-organization occurred.
  pub async fn refresh_global_index(&mut self, rpc: &Rpc) -> Result<(), RpcError> {
    self.global_index =
      rpc.get_o_indexes(self.output.absolute.tx).await?[usize::from(self.output.absolute.o)];
    Ok(())
  }

  pub async fn from(rpc: &Rpc, output: ReceivedOutput) -> Result<SpendableOutput, RpcError> {
    let mut output = SpendableOutput { output, global_index: 0 };
    output.refresh_global_index(rpc).await?;
    Ok(output)
  }

  pub fn key(&self) -> EdwardsPoint {
    self.output.key()
  }

  pub fn key_offset(&self) -> Scalar {
    self.output.key_offset()
  }

  pub fn commitment(&self) -> Commitment {
    self.output.commitment()
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.output.write(w)?;
    w.write_all(&self.global_index.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<SpendableOutput> {
    Ok(SpendableOutput { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
  }
}

/// A collection of timelocked outputs, either received or spendable.
#[derive(Zeroize)]
pub struct Timelocked<O: Clone + Zeroize>(Timelock, Vec<O>);
impl<O: Clone + Zeroize> Drop for Timelocked<O> {
  fn drop(&mut self) {
    self.zeroize();
  }
}
impl<O: Clone + Zeroize> ZeroizeOnDrop for Timelocked<O> {}

impl<O: Clone + Zeroize> Timelocked<O> {
  pub fn timelock(&self) -> Timelock {
    self.0
  }

  /// Return the outputs if they're not timelocked, or an empty vector if they are.
  pub fn not_locked(&self) -> Vec<O> {
    if self.0 == Timelock::None {
      return self.1.clone();
    }
    vec![]
  }

  /// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
  pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
    // If the Timelocks are comparable, return the outputs if they're now unlocked
    self.0.partial_cmp(&timelock).filter(|_| self.0 <= timelock).map(|_| self.1.clone())
  }

  pub fn ignore_timelock(&self) -> Vec<O> {
    self.1.clone()
  }
}

impl Scanner {
  /// Scan a transaction to discover the received outputs.
  pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
    let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
    let keys;
    let extra = if let Ok(extra) = extra {
      keys = extra.keys();
      extra
    } else {
      return Timelocked(tx.prefix.timelock, vec![]);
    };
    let payment_id = extra.payment_id();

    let mut res = vec![];
    for (o, output) in tx.prefix.outputs.iter().enumerate() {
      // https://github.com/serai-dex/serai/issues/106
      if let Some(burning_bug) = self.burning_bug.as_ref() {
        if burning_bug.contains(&output.key) {
          continue;
        }
      }

      let output_key = output.key.decompress();
      if output_key.is_none() {
        continue;
      }
      let output_key = output_key.unwrap();

      for key in &keys {
        let (view_tag, shared_key, payment_id_xor) = shared_key(
          if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
          &self.pair.view,
          key,
          o,
        );

        let payment_id =
          if let Some(PaymentId::Encrypted(id)) = payment_id.map(|id| id ^ payment_id_xor) {
            id
          } else {
            payment_id_xor
          };

        if let Some(actual_view_tag) = output.view_tag {
          if actual_view_tag != view_tag {
            continue;
          }
        }

        // P - shared == spend
        let subaddress = self
          .subaddresses
          .get(&(output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
        if subaddress.is_none() {
          continue;
        }
        let subaddress = *subaddress.unwrap();

        // If the key has torsion, subtracting the non-torsioned shared key will leave a torsioned
        // key. We will not have a torsioned key in our HashMap of keys, so we wouldn't identify it
        // as ours.
        // If we did though, it'd enable bypassing the included burning bug protection
        debug_assert!(output_key.is_torsion_free());

        let mut key_offset = shared_key;
        if let Some(subaddress) = subaddress {
          key_offset += self.pair.subaddress_derivation(subaddress);
        }
        // Since we've found an output to us, get its amount
        let mut commitment = Commitment::zero();

        // Miner transaction
        if output.amount != 0 {
          commitment.amount = output.amount;
        // Regular transaction
        } else {
          let amount = match tx.rct_signatures.base.ecdh_info.get(o) {
            Some(amount) => amount_decryption(*amount, shared_key),
            // This should never happen, yet it may be possible with miner transactions?
            // Using get just decreases the possibility of a panic and lets us move on in that case
            None => break,
          };

          // Rebuild the commitment to verify it
          commitment = Commitment::new(commitment_mask(shared_key), amount);
          // If this is a malicious commitment, move to the next output
          // Any other R value will calculate to a different spend key and is therefore ignorable
          if Some(&commitment.calculate()) != tx.rct_signatures.base.commitments.get(o) {
            break;
          }
        }

        if commitment.amount != 0 {
          res.push(ReceivedOutput {
            absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },

            data: OutputData { key: output_key, key_offset, commitment },

            metadata: Metadata { subaddress, payment_id, arbitrary_data: extra.data() },
          });

          if let Some(burning_bug) = self.burning_bug.as_mut() {
            burning_bug.insert(output.key);
          }
        }
        // Break to prevent public keys from being included multiple times, triggering multiple
        // inclusions of the same output
        break;
      }
    }

    Timelocked(tx.prefix.timelock, res)
  }

  /// Scan a block to obtain its spendable outputs. It's the presence in a block that gives these
  /// transactions their global index, and this must be batched, as asking for the index of
  /// specific transactions is a dead giveaway for which transactions you successfully scanned.
  /// This function obtains the output indexes for the miner transaction, incrementing from there
  /// instead.
  pub async fn scan(
    &mut self,
    rpc: &Rpc,
    block: &Block,
  ) -> Result<Vec<Timelocked<SpendableOutput>>, RpcError> {
    let mut index = rpc.get_o_indexes(block.miner_tx.hash()).await?[0];
    let mut txs = vec![block.miner_tx.clone()];
    txs.extend(rpc.get_transactions(&block.txs).await?);

    let map = |mut timelock: Timelocked<ReceivedOutput>, index| {
      if timelock.1.is_empty() {
        None
      } else {
        Some(Timelocked(
          timelock.0,
          timelock
            .1
            .drain(..)
            .map(|output| SpendableOutput {
              global_index: index + u64::from(output.absolute.o),
              output,
            })
            .collect(),
        ))
      }
    };

    let mut res = vec![];
    for tx in txs.drain(..) {
      if let Some(timelock) = map(self.scan_transaction(&tx), index) {
        res.push(timelock);
      }
      index += u64::try_from(
        tx.prefix
          .outputs
          .iter()
          // Filter to miner TX outputs/0-amount outputs since we're tracking the 0-amount index
          // This will fail to scan blocks containing pre-RingCT miner TXs
          .filter(|output| {
            matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..))) || (output.amount == 0)
          })
          .count(),
      )
      .unwrap()
    }
    Ok(res)
  }
}
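A hedged sketch of the scanning flow documented above; the block-fetching call is an assumption (adjust it to the Rpc's actual method), and an async runtime such as tokio is presumed:

use std::collections::HashSet;
use monero_serai::{rpc::Rpc, wallet::{ViewPair, Scanner}};

async fn scan_range(rpc: &Rpc, pair: ViewPair, from: usize, to: usize) {
  // Passing the set of previously seen output keys (none yet, here) enables the
  // burning bug protection described in Scanner::from_view
  let mut scanner = Scanner::from_view(pair, Some(HashSet::new()));
  for height in from ..= to {
    // Hypothetical fetch-by-height helper; not necessarily this Rpc's real API
    let block = rpc.get_block(height).await.unwrap();
    for timelocked in scanner.scan(rpc, &block).await.unwrap() {
      // Only consider outputs which aren't timelocked
      for output in timelocked.not_locked() {
        println!("output {} at global index {}", output.commitment().amount, output.global_index);
      }
    }
  }
}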
125 coins/monero/src/wallet/send/builder.rs Normal file
@@ -0,0 +1,125 @@
use std::sync::{Arc, RwLock};

use zeroize::{Zeroize, ZeroizeOnDrop};

use crate::{
  Protocol,
  wallet::{
    address::MoneroAddress, Fee, SpendableOutput, SignableTransaction, TransactionError,
    extra::MAX_TX_EXTRA_NONCE_SIZE,
  },
};

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SignableTransactionBuilderInternal {
  protocol: Protocol,
  fee: Fee,

  inputs: Vec<SpendableOutput>,
  payments: Vec<(MoneroAddress, u64)>,
  change_address: Option<MoneroAddress>,
  data: Vec<Vec<u8>>,
}

impl SignableTransactionBuilderInternal {
  // Takes in the change address so users don't miss that they have to manually set one
  // If they don't, all leftover funds will become part of the fee
  fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
    Self { protocol, fee, inputs: vec![], payments: vec![], change_address, data: vec![] }
  }

  fn add_input(&mut self, input: SpendableOutput) {
    self.inputs.push(input);
  }
  fn add_inputs(&mut self, inputs: &[SpendableOutput]) {
    self.inputs.extend(inputs.iter().cloned());
  }

  fn add_payment(&mut self, dest: MoneroAddress, amount: u64) {
    self.payments.push((dest, amount));
  }
  fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) {
    self.payments.extend(payments);
  }

  fn add_data(&mut self, data: Vec<u8>) {
    self.data.push(data);
  }
}

/// A Transaction Builder for Monero transactions.
/// All methods provided will modify self while also returning a shallow copy, enabling efficient
/// chaining with a clean API.
/// In order to fork the builder at some point, clone will still return a deep copy.
#[derive(Debug)]
pub struct SignableTransactionBuilder(Arc<RwLock<SignableTransactionBuilderInternal>>);
impl Clone for SignableTransactionBuilder {
  fn clone(&self) -> Self {
    Self(Arc::new(RwLock::new((*self.0.read().unwrap()).clone())))
  }
}

impl PartialEq for SignableTransactionBuilder {
  fn eq(&self, other: &Self) -> bool {
    *self.0.read().unwrap() == *other.0.read().unwrap()
  }
}
impl Eq for SignableTransactionBuilder {}

impl Zeroize for SignableTransactionBuilder {
  fn zeroize(&mut self) {
    self.0.write().unwrap().zeroize()
  }
}

impl SignableTransactionBuilder {
  fn shallow_copy(&self) -> Self {
    Self(self.0.clone())
  }

  pub fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
    Self(Arc::new(RwLock::new(SignableTransactionBuilderInternal::new(
      protocol,
      fee,
      change_address,
    ))))
  }

  pub fn add_input(&mut self, input: SpendableOutput) -> Self {
    self.0.write().unwrap().add_input(input);
    self.shallow_copy()
  }
  pub fn add_inputs(&mut self, inputs: &[SpendableOutput]) -> Self {
    self.0.write().unwrap().add_inputs(inputs);
    self.shallow_copy()
  }

  pub fn add_payment(&mut self, dest: MoneroAddress, amount: u64) -> Self {
    self.0.write().unwrap().add_payment(dest, amount);
    self.shallow_copy()
  }
  pub fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) -> Self {
    self.0.write().unwrap().add_payments(payments);
    self.shallow_copy()
  }

  pub fn add_data(&mut self, data: Vec<u8>) -> Result<Self, TransactionError> {
    if data.len() > MAX_TX_EXTRA_NONCE_SIZE {
      Err(TransactionError::TooMuchData)?;
    }
    self.0.write().unwrap().add_data(data);
    Ok(self.shallow_copy())
  }

  pub fn build(self) -> Result<SignableTransaction, TransactionError> {
    let read = self.0.read().unwrap();
    SignableTransaction::new(
      read.protocol,
      read.inputs.clone(),
      read.payments.clone(),
      read.change_address,
      read.data.clone(),
      read.fee,
    )
  }
}
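Because every method returns a shallow copy of the shared internal state, the builder chains cleanly. A hedged usage sketch with hypothetical fee values (in practice the fee rate comes from the RPC's estimate):

use monero_serai::{
  Protocol,
  wallet::{Fee, SpendableOutput, SignableTransactionBuilder, address::MoneroAddress},
};

fn build_example(change: MoneroAddress, dest: MoneroAddress, input: SpendableOutput) {
  // Hypothetical fee rate purely for illustration
  let fee = Fee { per_weight: 8000, mask: 10000 };
  let signable = SignableTransactionBuilder::new(Protocol::v16, fee, Some(change))
    .add_input(input)
    .add_payment(dest, 1_000_000_000_000) // 1 XMR in atomic units
    .build();
  // Errors (no inputs, too much data, ...) surface here rather than per-call
  assert!(signable.is_ok());
}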
407 coins/monero/src/wallet/send/mod.rs Normal file
@@ -0,0 +1,407 @@
|
||||
use core::ops::Deref;
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
use rand::seq::SliceRandom;
|
||||
|
||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||
|
||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
use frost::FrostError;
|
||||
|
||||
use crate::{
|
||||
Protocol, Commitment, random_scalar,
|
||||
ringct::{
|
||||
generate_key_image,
|
||||
clsag::{ClsagError, ClsagInput, Clsag},
|
||||
bulletproofs::{MAX_OUTPUTS, Bulletproofs},
|
||||
RctBase, RctPrunable, RctSignatures,
|
||||
},
|
||||
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
|
||||
rpc::{Rpc, RpcError},
|
||||
wallet::{
|
||||
address::MoneroAddress, SpendableOutput, Decoys, PaymentId, ExtraField, Extra, key_image_sort,
|
||||
uniqueness, shared_key, commitment_mask, amount_encryption, extra::MAX_TX_EXTRA_NONCE_SIZE,
|
||||
},
|
||||
};
|
||||
|
||||
mod builder;
|
||||
pub use builder::SignableTransactionBuilder;
|
||||
|
||||
#[cfg(feature = "multisig")]
|
||||
mod multisig;
|
||||
#[cfg(feature = "multisig")]
|
||||
pub use multisig::TransactionMachine;
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||
struct SendOutput {
|
||||
R: EdwardsPoint,
|
||||
view_tag: u8,
|
||||
dest: EdwardsPoint,
|
||||
commitment: Commitment,
|
||||
amount: [u8; 8],
|
||||
}
|
||||
|
||||
impl SendOutput {
|
||||
fn new<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
unique: [u8; 32],
|
||||
output: (usize, (MoneroAddress, u64)),
|
||||
) -> (SendOutput, Option<[u8; 8]>) {
|
||||
let o = output.0;
|
||||
let output = output.1;
|
||||
|
||||
let r = random_scalar(rng);
|
||||
let (view_tag, shared_key, payment_id_xor) =
|
||||
shared_key(Some(unique).filter(|_| output.0.meta.kind.guaranteed()), &r, &output.0.view, o);
|
||||
|
||||
(
|
||||
SendOutput {
|
||||
R: if !output.0.meta.kind.subaddress() {
|
||||
&r * &ED25519_BASEPOINT_TABLE
|
||||
} else {
|
||||
r * output.0.spend
|
||||
},
|
||||
view_tag,
|
||||
dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + output.0.spend),
|
||||
commitment: Commitment::new(commitment_mask(shared_key), output.1),
|
||||
amount: amount_encryption(output.1, shared_key),
|
||||
},
|
||||
output
|
||||
.0
|
||||
.payment_id()
|
||||
.map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||
pub enum TransactionError {
|
||||
#[error("multiple addresses with payment IDs")]
|
||||
MultiplePaymentIds,
|
||||
#[error("no inputs")]
|
||||
NoInputs,
|
||||
#[error("no outputs")]
|
||||
NoOutputs,
|
||||
#[error("only one output and no change address")]
|
||||
NoChange,
|
||||
#[error("too many outputs")]
|
||||
TooManyOutputs,
|
||||
#[error("too much data")]
|
||||
TooMuchData,
|
||||
#[error("not enough funds (in {0}, out {1})")]
|
||||
NotEnoughFunds(u64, u64),
|
||||
#[error("wrong spend private key")]
|
||||
WrongPrivateKey,
|
||||
#[error("rpc error ({0})")]
|
||||
RpcError(RpcError),
|
||||
#[error("clsag error ({0})")]
|
||||
ClsagError(ClsagError),
|
||||
#[error("invalid transaction ({0})")]
|
||||
InvalidTransaction(RpcError),
|
||||
#[cfg(feature = "multisig")]
|
||||
#[error("frost error {0}")]
|
||||
FrostError(FrostError),
|
||||
}
|
||||
|
||||
async fn prepare_inputs<R: RngCore + CryptoRng>(
|
||||
rng: &mut R,
|
||||
rpc: &Rpc,
|
||||
ring_len: usize,
|
||||
inputs: &[SpendableOutput],
|
||||
spend: &Zeroizing<Scalar>,
|
||||
tx: &mut Transaction,
|
||||
) -> Result<Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>, TransactionError> {
|
||||
let mut signable = Vec::with_capacity(inputs.len());
|
||||
|
||||
// Select decoys
|
||||
let decoys = Decoys::select(
|
||||
rng,
|
||||
rpc,
|
||||
ring_len,
|
||||
rpc.get_height().await.map_err(TransactionError::RpcError)? - 10,
|
||||
inputs,
|
||||
)
|
||||
.await
|
||||
.map_err(TransactionError::RpcError)?;
|
||||
|
||||
for (i, input) in inputs.iter().enumerate() {
|
||||
let input_spend = Zeroizing::new(input.key_offset() + spend.deref());
|
||||
let image = generate_key_image(&input_spend);
|
||||
signable.push((
|
||||
input_spend,
|
||||
image,
|
||||
ClsagInput::new(input.commitment().clone(), decoys[i].clone())
|
||||
.map_err(TransactionError::ClsagError)?,
|
||||
));
|
||||
|
||||
tx.prefix.inputs.push(Input::ToKey {
|
||||
amount: 0,
|
||||
key_offsets: decoys[i].offsets.clone(),
|
||||
key_image: signable[i].1,
|
||||
});
|
||||
}
|
||||
|
||||
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
|
||||
tx.prefix.inputs.sort_by(|x, y| {
|
||||
if let (Input::ToKey { key_image: x, .. }, Input::ToKey { key_image: y, .. }) = (x, y) {
|
||||
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
|
||||
} else {
|
||||
panic!("Input wasn't ToKey")
|
||||
}
|
||||
});
|
||||
|
||||
Ok(signable)
|
||||
}
|
||||
|
||||
/// Fee struct, defined as a per-unit cost and a mask for rounding purposes.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub struct Fee {
|
||||
pub per_weight: u64,
|
||||
pub mask: u64,
|
||||
}
|
||||
|
||||
impl Fee {
|
||||
pub fn calculate(&self, weight: usize) -> u64 {
|
||||
((((self.per_weight * u64::try_from(weight).unwrap()) - 1) / self.mask) + 1) * self.mask
|
||||
}
|
||||
}
|
||||
|
||||
/// A signable transaction, either in a single-signer or multisig context.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||
pub struct SignableTransaction {
|
||||
protocol: Protocol,
|
||||
inputs: Vec<SpendableOutput>,
|
||||
payments: Vec<(MoneroAddress, u64)>,
|
||||
data: Vec<Vec<u8>>,
|
||||
fee: u64,
|
||||
}
|
||||
|
||||
impl SignableTransaction {
|
||||
/// Create a signable transaction. If the change address is specified, leftover funds will be
|
||||
/// sent to it. If the change address isn't specified, up to 16 outputs may be specified, using
|
||||
/// any leftover funds as a bonus to the fee. The optional data field will be embedded in TX
|
||||
/// extra.
|
||||
pub fn new(
|
||||
protocol: Protocol,
|
||||
inputs: Vec<SpendableOutput>,
|
||||
mut payments: Vec<(MoneroAddress, u64)>,
|
||||
change_address: Option<MoneroAddress>,
|
||||
data: Vec<Vec<u8>>,
|
||||
fee_rate: Fee,
|
||||
) -> Result<SignableTransaction, TransactionError> {
|
||||
// Make sure there's only one payment ID
|
||||
{
|
||||
let mut payment_ids = 0;
|
||||
let mut count = |addr: MoneroAddress| {
|
||||
if addr.payment_id().is_some() {
|
||||
payment_ids += 1
|
||||
}
|
||||
};
|
||||
for payment in &payments {
|
||||
count(payment.0);
|
||||
}
|
||||
if let Some(change) = change_address {
|
||||
count(change);
|
||||
}
|
||||
if payment_ids > 1 {
|
||||
Err(TransactionError::MultiplePaymentIds)?;
|
||||
}
|
||||
}
|
||||
|
||||
if inputs.is_empty() {
|
||||
Err(TransactionError::NoInputs)?;
|
||||
}
|
||||
if payments.is_empty() {
|
||||
Err(TransactionError::NoOutputs)?;
|
||||
}
|
||||
|
||||
for part in &data {
|
||||
if part.len() > MAX_TX_EXTRA_NONCE_SIZE {
|
||||
Err(TransactionError::TooMuchData)?;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO TX MAX SIZE
|
||||
|
||||
// If we don't have two outputs, as required by Monero, add a second
|
||||
let mut change = payments.len() == 1;
|
||||
if change && change_address.is_none() {
|
||||
Err(TransactionError::NoChange)?;
|
||||
}
|
||||
let outputs = payments.len() + usize::from(change);
|
||||
|
||||
// Calculate the extra length
|
||||
let extra = Extra::fee_weight(outputs, data.as_ref());
|
||||
|
||||
// Calculate the fee.
|
||||
let mut fee =
|
||||
fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs, extra));
|
||||
|
||||
// Make sure we have enough funds
|
||||
let in_amount = inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
|
||||
let mut out_amount = payments.iter().map(|payment| payment.1).sum::<u64>() + fee;
|
||||
if in_amount < out_amount {
|
||||
Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
|
||||
}
|
||||
|
||||
// If we have yet to add a change output, do so if it's economically viable
|
||||
if (!change) && change_address.is_some() && (in_amount != out_amount) {
|
||||
// Check even with the new fee, there's remaining funds
|
||||
let change_fee =
|
||||
fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs + 1, extra)) -
|
||||
fee;
|
||||
if (out_amount + change_fee) < in_amount {
|
||||
change = true;
|
||||
out_amount += change_fee;
|
||||
fee += change_fee;
|
||||
}
|
||||
}
|
||||
|
||||
if change {
|
||||
payments.push((change_address.unwrap(), in_amount - out_amount));
|
||||
}
|
||||
|
||||
if payments.len() > MAX_OUTPUTS {
|
||||
Err(TransactionError::TooManyOutputs)?;
|
||||
}
|
||||
|
||||
Ok(SignableTransaction { protocol, inputs, payments, data, fee })
|
||||
}
|
||||
|
||||
fn prepare_transaction<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
    uniqueness: [u8; 32],
  ) -> (Transaction, Scalar) {
    // Shuffle the payments
    self.payments.shuffle(rng);

    // Actually create the outputs
    let mut outputs = Vec::with_capacity(self.payments.len());
    let mut id = None;
    for payment in self.payments.drain(..).enumerate() {
      let (output, payment_id) = SendOutput::new(rng, uniqueness, payment);
      outputs.push(output);
      id = id.or(payment_id);
    }

    // Include a random payment ID if we don't actually have one
    // This prevents transactions from leaking whether or not they're sending to an integrated
    // address
    let id = if let Some(id) = id {
      id
    } else {
      let mut id = [0; 8];
      rng.fill_bytes(&mut id);
      id
    };

    let commitments = outputs.iter().map(|output| output.commitment.clone()).collect::<Vec<_>>();
    let sum = commitments.iter().map(|commitment| commitment.mask).sum();

    // Safe due to the constructor checking MAX_OUTPUTS
    let bp = Bulletproofs::prove(rng, &commitments, self.protocol.bp_plus()).unwrap();

    // Create the TX extra
    let extra = {
      let mut extra = Extra::new(outputs.iter().map(|output| output.R).collect());

      let mut id_vec = Vec::with_capacity(1 + 8);
      PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
      extra.push(ExtraField::Nonce(id_vec));

      // Include data if present
      for part in self.data.drain(..) {
        extra.push(ExtraField::Nonce(part));
      }

      let mut serialized =
        Vec::with_capacity(Extra::fee_weight(outputs.len(), self.data.as_ref()));
      extra.write(&mut serialized).unwrap();
      serialized
    };

    let mut tx_outputs = Vec::with_capacity(outputs.len());
    let mut ecdh_info = Vec::with_capacity(outputs.len());
    for output in &outputs {
      tx_outputs.push(Output {
        amount: 0,
        key: output.dest.compress(),
        view_tag: Some(output.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
      });
      ecdh_info.push(output.amount);
    }

    (
      Transaction {
        prefix: TransactionPrefix {
          version: 2,
          timelock: Timelock::None,
          inputs: vec![],
          outputs: tx_outputs,
          extra,
        },
        signatures: vec![],
        rct_signatures: RctSignatures {
          base: RctBase {
            fee: self.fee,
            ecdh_info,
            commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
          },
          prunable: RctPrunable::Clsag {
            bulletproofs: vec![bp],
            clsags: vec![],
            pseudo_outs: vec![],
          },
        },
      },
      sum,
    )
  }

  /// Sign this transaction.
  pub async fn sign<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
    rpc: &Rpc,
    spend: &Zeroizing<Scalar>,
  ) -> Result<Transaction, TransactionError> {
    let mut images = Vec::with_capacity(self.inputs.len());
    for input in &self.inputs {
      let mut offset = Zeroizing::new(spend.deref() + input.key_offset());
      if (offset.deref() * &ED25519_BASEPOINT_TABLE) != input.key() {
        Err(TransactionError::WrongPrivateKey)?;
      }

      images.push(generate_key_image(&offset));
      offset.zeroize();
    }
    images.sort_by(key_image_sort);

    let (mut tx, mask_sum) = self.prepare_transaction(
      rng,
      uniqueness(
        &images
          .iter()
          .map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
          .collect::<Vec<_>>(),
      ),
    );

    let signable =
      prepare_inputs(rng, rpc, self.protocol.ring_len(), &self.inputs, spend, &mut tx).await?;

    let clsag_pairs = Clsag::sign(rng, signable, mask_sum, tx.signature_hash());
    match tx.rct_signatures.prunable {
      RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
      RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
        clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
        pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1).collect::<Vec<_>>());
      }
    }
    Ok(tx)
  }
}
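For context, the intended single-signer flow pairs the above with the SignableTransactionBuilder exercised by the tests later in this diff. A minimal sketch, assuming a reachable regtest daemon; the bindings `change`, `spendable_output`, `dest`, and the amount are illustrative, not part of this commit:

// Sketch only: builds a one-payment transaction, signs it, and publishes it.
let mut builder = SignableTransactionBuilder::new(
  rpc.get_protocol().await.unwrap(), // current hard fork rules
  rpc.get_fee().await.unwrap(),      // fee rate
  Some(change),                      // change address
);
builder.add_input(spendable_output);
builder.add_payment(dest, 1_000_000); // amount in atomic units (assumption)
let signed = builder.build().unwrap().sign(&mut OsRng, &rpc, &spend).await.unwrap();
rpc.publish_transaction(&signed).await.unwrap();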
409
coins/monero/src/wallet/send/multisig.rs
Normal file
@@ -0,0 +1,409 @@
use std::{
  io::{self, Read},
  sync::{Arc, RwLock},
  collections::HashMap,
};

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use group::ff::Field;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
use dalek_ff_group as dfg;

use transcript::{Transcript, RecommendedTranscript};
use frost::{
  curve::Ed25519,
  FrostError, ThresholdKeys,
  sign::{
    Writable, Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine,
    SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
  },
};

use crate::{
  random_scalar,
  ringct::{
    clsag::{ClsagInput, ClsagDetails, ClsagAddendum, ClsagMultisig, add_key_image_share},
    RctPrunable,
  },
  transaction::{Input, Transaction},
  rpc::Rpc,
  wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness},
};

/// FROST signing machine to produce a signed transaction.
pub struct TransactionMachine {
  signable: SignableTransaction,
  i: u16,
  transcript: RecommendedTranscript,

  decoys: Vec<Decoys>,

  // Hashed key and scalar offset
  key_images: Vec<(EdwardsPoint, Scalar)>,
  inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
  clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
}

pub struct TransactionSignMachine {
  signable: SignableTransaction,
  i: u16,
  transcript: RecommendedTranscript,

  decoys: Vec<Decoys>,

  key_images: Vec<(EdwardsPoint, Scalar)>,
  inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
  clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,

  our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
}

pub struct TransactionSignatureMachine {
  tx: Transaction,
  clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
}

impl SignableTransaction {
  /// Create a FROST signing machine out of this signable transaction.
  /// The height is the Monero blockchain height to synchronize around.
  pub async fn multisig(
    self,
    rpc: &Rpc,
    keys: ThresholdKeys<Ed25519>,
    mut transcript: RecommendedTranscript,
    height: usize,
  ) -> Result<TransactionMachine, TransactionError> {
    let mut inputs = vec![];
    for _ in 0 .. self.inputs.len() {
      // Doesn't resize as that would use a single Arc for the entire Vec
      inputs.push(Arc::new(RwLock::new(None)));
    }
    let mut clsags = vec![];

    // Create an RNG out of the input shared keys, which either requires the view key or being
    // every sender, and the payments (address and amount), which a passive adversary may be able
    // to know depending on how these transactions are coordinated
    // Being every sender would already let you note rings which happen to use your transactions
    // multiple times, already breaking privacy there

    transcript.domain_separate(b"monero_transaction");
    // Include the height we're using for our data
    // The data itself will be included, making this unnecessary, yet a lot of this is technically
    // unnecessary. Anything which further increases security at almost no cost should be followed
    transcript.append_message(b"height", u64::try_from(height).unwrap().to_le_bytes());
    // Also include the spend_key, as below only the key offset is included, so this transcripts
    // the sum product
    // Useful as transcripting the sum product effectively transcripts the key image, further
    // guaranteeing the one-time properties noted below
    transcript.append_message(b"spend_key", keys.group_key().0.compress().to_bytes());
    for input in &self.inputs {
      // These outputs can only be spent once. Therefore, it forces all RNGs derived from this
      // transcript (such as the one used to create one-time keys) to be unique
      transcript.append_message(b"input_hash", input.output.absolute.tx);
      transcript.append_message(b"input_output_index", [input.output.absolute.o]);
      // Not including this, with a doxxed list of payments, would allow brute forcing the inputs
      // to determine RNG seeds and therefore the true spends
      transcript.append_message(b"input_shared_key", input.key_offset().to_bytes());
    }
    for payment in &self.payments {
      transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
      transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
    }

    let mut key_images = vec![];
    for (i, input) in self.inputs.iter().enumerate() {
      // Check this is the right set of keys
      let offset = keys.offset(dfg::Scalar(input.key_offset()));
      if offset.group_key().0 != input.key() {
        Err(TransactionError::WrongPrivateKey)?;
      }

      let clsag = ClsagMultisig::new(transcript.clone(), input.key(), inputs[i].clone());
      key_images.push((
        clsag.H,
        keys.current_offset().unwrap_or_else(dfg::Scalar::zero).0 + self.inputs[i].key_offset(),
      ));
      clsags.push(AlgorithmMachine::new(clsag, offset).map_err(TransactionError::FrostError)?);
    }

    // Select decoys
    // Ideally, this would be done post entropy, instead of now, yet doing so would require sign
    // to be async, which isn't preferable. This should be suitably competent though
    // While this inability means we could immediately create the input, moving it out of the
    // Arc RwLock, keeping it within an Arc RwLock keeps our options flexible
    let decoys = Decoys::select(
      // Using a seeded RNG with a specific height, committed to above, should make these decoys
      // committed to. They'll also be committed to later via the TX message as a whole
      &mut ChaCha20Rng::from_seed(transcript.rng_seed(b"decoys")),
      rpc,
      self.protocol.ring_len(),
      height,
      &self.inputs,
    )
    .await
    .map_err(TransactionError::RpcError)?;

    Ok(TransactionMachine {
      signable: self,
      i: keys.params().i(),
      transcript,

      decoys,

      key_images,
      inputs,
      clsags,
    })
  }
}

impl PreprocessMachine for TransactionMachine {
  type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
  type Signature = Transaction;
  type SignMachine = TransactionSignMachine;

  fn preprocess<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
  ) -> (TransactionSignMachine, Self::Preprocess) {
    // Iterate over each CLSAG, calling preprocess
    let mut preprocesses = Vec::with_capacity(self.clsags.len());
    let clsags = self
      .clsags
      .drain(..)
      .map(|clsag| {
        let (clsag, preprocess) = clsag.preprocess(rng);
        preprocesses.push(preprocess);
        clsag
      })
      .collect();
    let our_preprocess = preprocesses.clone();

    // We could add further entropy here, and previous versions of this library did so
    // As of right now, the multisig's key, the inputs being spent, and the FROST data itself
    // will be used for RNG seeds. In order to recreate these RNG seeds, breaking privacy,
    // counterparties must have knowledge of the multisig, either the view key or access to the
    // coordination layer, and then access to the actual FROST signing process
    // If the commitments are sent in plain text, then entropy here would also be, making it not
    // increase privacy. If they're not sent in plain text, or are otherwise inaccessible, they
    // already offer sufficient entropy. That's why further entropy is not included

    (
      TransactionSignMachine {
        signable: self.signable,
        i: self.i,
        transcript: self.transcript,

        decoys: self.decoys,

        key_images: self.key_images,
        inputs: self.inputs,
        clsags,

        our_preprocess,
      },
      preprocesses,
    )
  }
}

impl SignMachine<Transaction> for TransactionSignMachine {
  type Params = ();
  type Keys = ThresholdKeys<Ed25519>;
  type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
  type SignatureShare = Vec<SignatureShare<Ed25519>>;
  type SignatureMachine = TransactionSignatureMachine;

  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn from_cache(
    _: (),
    _: ThresholdKeys<Ed25519>,
    _: CachedPreprocess,
  ) -> Result<Self, FrostError> {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
    self.clsags.iter().map(|clsag| clsag.read_preprocess(reader)).collect()
  }

  fn sign(
    mut self,
    mut commitments: HashMap<u16, Self::Preprocess>,
    msg: &[u8],
  ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
    if !msg.is_empty() {
      Err(FrostError::InternalError(
        "message was passed to the TransactionMachine when it generates its own",
      ))?;
    }

    // Find out who's included
    // This may not be a valid set of signers, yet the algorithm machine will error if it's not
    commitments.remove(&self.i); // Remove, if it was included for some reason
    let mut included = commitments.keys().copied().collect::<Vec<_>>();
    included.push(self.i);
    included.sort_unstable();

    // Convert the unified commitments to a Vec of the individual commitments
    let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
    let mut commitments = (0 .. self.clsags.len())
      .map(|c| {
        included
          .iter()
          .map(|l| {
            // Add all commitments to the transcript for their entropy
            // While each CLSAG will do this as they need to for security, they have their own
            // transcripts cloned from this TX's initial premise's transcript. For our TX
            // transcript to have the CLSAG data for entropy, it'll have to be added here
            // ourselves
            self.transcript.append_message(b"participant", (*l).to_be_bytes());

            let preprocess = if *l == self.i {
              self.our_preprocess[c].clone()
            } else {
              commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
            };

            {
              let mut buf = vec![];
              preprocess.write(&mut buf).unwrap();
              self.transcript.append_message(b"preprocess", buf);
            }

            // While here, calculate the key image
            // Clsag will parse/calculate/validate this as needed, yet doing so here as well
            // provides the easiest API overall, as this is where the TX is (which needs the key
            // images in its message), along with where the outputs are determined (where our
            // outputs may need these in order to guarantee uniqueness)
            add_key_image_share(
              &mut images[c],
              self.key_images[c].0,
              self.key_images[c].1,
              &included,
              *l,
              preprocess.addendum.key_image.0,
            );

            Ok((*l, preprocess))
          })
          .collect::<Result<HashMap<_, _>, _>>()
      })
      .collect::<Result<Vec<_>, _>>()?;

    // Remove our preprocess, which shouldn't be here. It was just the easiest way to implement
    // the above
    for map in commitments.iter_mut() {
      map.remove(&self.i);
    }

    // Create the actual transaction
    let (mut tx, output_masks) = {
      let mut sorted_images = images.clone();
      sorted_images.sort_by(key_image_sort);

      self.signable.prepare_transaction(
        &mut ChaCha20Rng::from_seed(self.transcript.rng_seed(b"transaction_keys_bulletproofs")),
        uniqueness(
          &sorted_images
            .iter()
            .map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
            .collect::<Vec<_>>(),
        ),
      )
    };

    // Sort the inputs, as expected
    let mut sorted = Vec::with_capacity(self.clsags.len());
    while !self.clsags.is_empty() {
      sorted.push((
        images.swap_remove(0),
        self.signable.inputs.swap_remove(0),
        self.decoys.swap_remove(0),
        self.inputs.swap_remove(0),
        self.clsags.swap_remove(0),
        commitments.swap_remove(0),
      ));
    }
    sorted.sort_by(|x, y| key_image_sort(&x.0, &y.0));

    let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks"));
    let mut sum_pseudo_outs = Scalar::zero();
    while !sorted.is_empty() {
      let value = sorted.remove(0);

      let mut mask = random_scalar(&mut rng);
      if sorted.is_empty() {
        mask = output_masks - sum_pseudo_outs;
      } else {
        sum_pseudo_outs += mask;
      }

      tx.prefix.inputs.push(Input::ToKey {
        amount: 0,
        key_offsets: value.2.offsets.clone(),
        key_image: value.0,
      });

      *value.3.write().unwrap() = Some(ClsagDetails::new(
        ClsagInput::new(value.1.commitment().clone(), value.2).map_err(|_| {
          panic!("Signing an input which isn't present in the ring we created for it")
        })?,
        mask,
      ));

      self.clsags.push(value.4);
      commitments.push(value.5);
    }

    let msg = tx.signature_hash();

    // Iterate over each CLSAG, calling sign
    let mut shares = Vec::with_capacity(self.clsags.len());
    let clsags = self
      .clsags
      .drain(..)
      .map(|clsag| {
        let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
        shares.push(share);
        Ok(clsag)
      })
      .collect::<Result<_, _>>()?;

    Ok((TransactionSignatureMachine { tx, clsags }, shares))
  }
}

impl SignatureMachine<Transaction> for TransactionSignatureMachine {
  type SignatureShare = Vec<SignatureShare<Ed25519>>;

  fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
    self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
  }

  fn complete(
    mut self,
    shares: HashMap<u16, Self::SignatureShare>,
  ) -> Result<Transaction, FrostError> {
    let mut tx = self.tx;
    match tx.rct_signatures.prunable {
      RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
      RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
        for (c, clsag) in self.clsags.drain(..).enumerate() {
          let (clsag, pseudo_out) = clsag.complete(
            shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
          )?;
          clsags.push(clsag);
          pseudo_outs.push(pseudo_out);
        }
      }
    }
    Ok(tx)
  }
}
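The machines above follow the standard FROST shape: each signer preprocesses, exchanges preprocesses, produces a signature share, and any participant completes. A minimal sketch of driving them by hand, with the message passing elided and `keys`, `transcript`, and `height` as in the tests below (the `commitments` and `shares` maps are assumed to be filled by your transport layer):

// Sketch only: one signer's view of the three rounds.
let machine = tx.clone().multisig(&rpc, keys, transcript, height).await.unwrap();
let (machine, my_preprocess) = machine.preprocess(&mut OsRng);
// ... broadcast my_preprocess, collect the other signers' into `commitments` ...
let (machine, my_share) = machine.sign(commitments, &[]).unwrap(); // msg must be empty
// ... broadcast my_share, collect the other signers' into `shares` ...
let tx: Transaction = machine.complete(shares).unwrap();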
72
coins/monero/tests/add_data.rs
Normal file
@@ -0,0 +1,72 @@
use monero_serai::{wallet::TransactionError, transaction::Transaction};

mod runner;

test!(
  add_single_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      let arbitrary_data = vec![b'\0'; 254];

      // Make sure we can add data to the TX
      let result = builder.add_data(arbitrary_data.clone());
      assert!(result.is_ok());

      builder.add_payment(addr, 5);
      (builder.build().unwrap(), (arbitrary_data,))
    },
    |_, tx: Transaction, mut scanner: Scanner, data: (Vec<u8>,)| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.arbitrary_data()[0], data.0);
    },
  ),
);

test!(
  add_multiple_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      let data = vec![b'\0'; 254];

      // Add data to the TX multiple times
      for _ in 0 .. 5 {
        let result = builder.add_data(data.clone());
        assert!(result.is_ok());
      }

      builder.add_payment(addr, 5);
      (builder.build().unwrap(), data)
    },
    |_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.arbitrary_data(), vec![data; 5]);
    },
  ),
);

test!(
  add_single_data_more_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      // Create data which is bigger than 255 bytes
      let mut data = vec![b'a'; 256];

      // Make sure we get an error if we try to add it to the TX
      assert_eq!(builder.add_data(data.clone()), Err(TransactionError::TooMuchData));

      // Reduce the data size and retry. The data will now be exactly 255 bytes long
      data.pop();
      assert!(builder.add_data(data.clone()).is_ok());

      builder.add_payment(addr, 5);
      (builder.build().unwrap(), data)
    },
    |_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.arbitrary_data(), vec![data]);
    },
  ),
);
280
coins/monero/tests/runner.rs
Normal file
@@ -0,0 +1,280 @@
use core::ops::Deref;
use std::collections::HashSet;

use lazy_static::lazy_static;

use zeroize::Zeroizing;
use rand_core::OsRng;

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

use tokio::sync::Mutex;

use monero_serai::{
  Protocol, random_scalar,
  wallet::{
    ViewPair, Scanner,
    address::{Network, AddressType, AddressSpec, AddressMeta, MoneroAddress},
    SpendableOutput,
  },
  rpc::Rpc,
};

pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
  let spend = random_scalar(&mut OsRng);
  let spend_pub = &spend * &ED25519_BASEPOINT_TABLE;
  let view = Zeroizing::new(random_scalar(&mut OsRng));
  (
    spend,
    ViewPair::new(spend_pub, view.clone()),
    MoneroAddress {
      meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
      spend: spend_pub,
      view: view.deref() * &ED25519_BASEPOINT_TABLE,
    },
  )
}

// TODO: Support transactions already on-chain
// TODO: Don't have a side effect of mining more blocks than needed under race conditions
// TODO: Mine as many blocks as needed instead of the default 10 blocks
pub async fn mine_until_unlocked(rpc: &Rpc, addr: &str, tx_hash: [u8; 32]) {
  // Mine until the TX is in a block
  let mut height = rpc.get_height().await.unwrap();
  let mut found = false;
  while !found {
    let block = rpc.get_block_by_number(height - 1).await.unwrap();
    found = match block.txs.iter().find(|&&x| x == tx_hash) {
      Some(_) => true,
      None => {
        rpc.generate_blocks(addr, 1).await.unwrap();
        height += 1;
        false
      }
    }
  }

  // Mine 9 more blocks to unlock the TX
  rpc.generate_blocks(addr, 9).await.unwrap();
}

// Mines 60 blocks and returns an unlocked miner TX output.
pub async fn get_miner_tx_output(rpc: &Rpc, view: &ViewPair) -> SpendableOutput {
  let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));

  // Mine 60 blocks to unlock a miner TX
  let start = rpc.get_height().await.unwrap();
  rpc
    .generate_blocks(&view.address(Network::Mainnet, AddressSpec::Standard).to_string(), 60)
    .await
    .unwrap();

  let block = rpc.get_block_by_number(start).await.unwrap();
  scanner.scan(rpc, &block).await.unwrap().swap_remove(0).ignore_timelock().swap_remove(0)
}

pub async fn rpc() -> Rpc {
  let rpc = Rpc::new("http://127.0.0.1:18081".to_string()).unwrap();

  // Only run once
  if rpc.get_height().await.unwrap() != 1 {
    return rpc;
  }

  let addr = MoneroAddress {
    meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
    spend: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
    view: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
  }
  .to_string();

  // Mine 40 blocks to ensure decoy availability
  rpc.generate_blocks(&addr, 40).await.unwrap();
  assert!(!matches!(rpc.get_protocol().await.unwrap(), Protocol::Unsupported(_)));

  rpc
}

lazy_static! {
  pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}

#[macro_export]
macro_rules! async_sequential {
  ($(async fn $name: ident() $body: block)*) => {
    $(
      #[tokio::test]
      async fn $name() {
        let guard = runner::SEQUENTIAL.lock().await;
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
            drop(guard);
            Err(err).unwrap()
          }
        }).await;
      }
    )*
  }
}

#[macro_export]
macro_rules! test {
  (
    $name: ident,
    (
      $first_tx: expr,
      $first_checks: expr,
    ),
    $((
      $tx: expr,
      $checks: expr,
    )$(,)?),*
  ) => {
    async_sequential! {
      async fn $name() {
        use core::{ops::Deref, any::Any};
        use std::collections::HashSet;
        #[cfg(feature = "multisig")]
        use std::collections::HashMap;

        use zeroize::Zeroizing;
        use rand_core::OsRng;

        use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;

        #[cfg(feature = "multisig")]
        use transcript::{Transcript, RecommendedTranscript};
        #[cfg(feature = "multisig")]
        use frost::{
          curve::Ed25519,
          tests::{THRESHOLD, key_gen},
        };

        use monero_serai::{
          random_scalar,
          wallet::{
            address::{Network, AddressSpec}, ViewPair, Scanner, SignableTransaction,
            SignableTransactionBuilder,
          },
        };

        use runner::{random_address, rpc, mine_until_unlocked, get_miner_tx_output};

        type Builder = SignableTransactionBuilder;

        // Run each function as both a single signer and as a multisig
        #[allow(clippy::redundant_closure_call)]
        for multisig in [false, true] {
          // Only run the multisig variant if multisig is enabled
          if multisig {
            #[cfg(not(feature = "multisig"))]
            continue;
          }

          let spend = Zeroizing::new(random_scalar(&mut OsRng));
          #[cfg(feature = "multisig")]
          let keys = key_gen::<_, Ed25519>(&mut OsRng);

          let spend_pub = if !multisig {
            spend.deref() * &ED25519_BASEPOINT_TABLE
          } else {
            #[cfg(not(feature = "multisig"))]
            panic!("Multisig branch called without the multisig feature");
            #[cfg(feature = "multisig")]
            keys[&1].group_key().0
          };

          let rpc = rpc().await;

          let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng)));
          let addr = view.address(Network::Mainnet, AddressSpec::Standard);

          let miner_tx = get_miner_tx_output(&rpc, &view).await;

          let builder = SignableTransactionBuilder::new(
            rpc.get_protocol().await.unwrap(),
            rpc.get_fee().await.unwrap(),
            Some(random_address().2),
          );

          let sign = |tx: SignableTransaction| {
            let rpc = rpc.clone();
            let spend = spend.clone();
            #[cfg(feature = "multisig")]
            let keys = keys.clone();
            async move {
              if !multisig {
                tx.sign(&mut OsRng, &rpc, &spend).await.unwrap()
              } else {
                #[cfg(not(feature = "multisig"))]
                panic!("Multisig branch called without the multisig feature");
                #[cfg(feature = "multisig")]
                {
                  let mut machines = HashMap::new();
                  for i in 1 ..= THRESHOLD {
                    machines.insert(
                      i,
                      tx
                        .clone()
                        .multisig(
                          &rpc,
                          keys[&i].clone(),
                          RecommendedTranscript::new(b"Monero Serai Test Transaction"),
                          rpc.get_height().await.unwrap() - 10,
                        )
                        .await
                        .unwrap(),
                    );
                  }

                  frost::tests::sign_without_caching(&mut OsRng, machines, &[])
                }
              }
            }
          };

          // TODO: Generate a distinct wallet for each transaction to prevent overlap
          let next_addr = addr;

          let temp = Box::new({
            let mut builder = builder.clone();
            builder.add_input(miner_tx);
            let (tx, state) = ($first_tx)(rpc.clone(), builder, next_addr).await;

            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
            ($first_checks)(rpc.clone(), tx, scanner, state).await
          });
          #[allow(unused_variables, unused_mut, unused_assignments)]
          let mut carried_state: Box<dyn Any> = temp;

          $(
            let (tx, state) = ($tx)(
              rpc.clone(),
              builder.clone(),
              next_addr,
              *carried_state.downcast().unwrap()
            ).await;

            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            #[allow(unused_assignments)]
            {
              let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
              carried_state = Box::new(($checks)(rpc.clone(), tx, scanner, state).await);
            }
          )*
        }
      }
    }
  };
}
300
coins/monero/tests/scan.rs
Normal file
@@ -0,0 +1,300 @@
use rand::RngCore;

use monero_serai::{transaction::Transaction, wallet::address::SubaddressIndex};

mod runner;

test!(
  scan_standard_address,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      builder.add_payment(view.address(Network::Mainnet, AddressSpec::Standard), 5);
      (builder.build().unwrap(), scanner)
    },
    |_, tx: Transaction, _, mut state: Scanner| async move {
      let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  scan_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(0, 1).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      scanner.register_subaddress(subaddress);

      builder.add_payment(view.address(Network::Mainnet, AddressSpec::Subaddress(subaddress)), 5);
      (builder.build().unwrap(), (scanner, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.subaddress, Some(state.1));
    },
  ),
);

test!(
  scan_integrated_address,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));

      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(view.address(Network::Mainnet, AddressSpec::Integrated(payment_id)), 5);
      (builder.build().unwrap(), (scanner, payment_id))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
    },
  ),
);

test!(
  scan_featured_standard,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: false },
        ),
        5,
      );
      (builder.build().unwrap(), scanner)
    },
    |_, tx: Transaction, _, mut state: Scanner| async move {
      let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  scan_featured_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(0, 2).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      scanner.register_subaddress(subaddress);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: None,
            guaranteed: false,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.subaddress, Some(state.1));
    },
  ),
);

test!(
  scan_featured_integrated,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: None,
            payment_id: Some(payment_id),
            guaranteed: false,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
    },
  ),
);

test!(
  scan_featured_integrated_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(0, 3).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      scanner.register_subaddress(subaddress);

      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: Some(payment_id),
            guaranteed: false,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
      assert_eq!(output.metadata.subaddress, Some(state.2));
    },
  ),
);

test!(
  scan_guaranteed_standard,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), None);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true },
        ),
        5,
      );
      (builder.build().unwrap(), scanner)
    },
    |_, tx: Transaction, _, mut state: Scanner| async move {
      let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  scan_guaranteed_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(1, 0).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), None);
      scanner.register_subaddress(subaddress);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: None,
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.subaddress, Some(state.1));
    },
  ),
);

test!(
  scan_guaranteed_integrated,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), None);
      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: None,
            payment_id: Some(payment_id),
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
    },
  ),
);

test!(
  scan_guaranteed_integrated_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(1, 1).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), None);
      scanner.register_subaddress(subaddress);

      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: Some(payment_id),
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
      assert_eq!(output.metadata.subaddress, Some(state.2));
    },
  ),
);
51
coins/monero/tests/send.rs
Normal file
@@ -0,0 +1,51 @@
use monero_serai::{
  wallet::{ReceivedOutput, SpendableOutput},
  transaction::Transaction,
};

mod runner;

test!(
  spend_miner_output,
  (
    |_, mut builder: Builder, addr| async move {
      builder.add_payment(addr, 5);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  spend_multiple_outputs,
  (
    |_, mut builder: Builder, addr| async move {
      builder.add_payment(addr, 1000000000000);
      builder.add_payment(addr, 2000000000000);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let mut outputs = scanner.scan_transaction(&tx).not_locked();
      outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
      assert_eq!(outputs[0].commitment().amount, 1000000000000);
      assert_eq!(outputs[1].commitment().amount, 2000000000000);
      outputs
    },
  ),
  (
    |rpc, mut builder: Builder, addr, mut outputs: Vec<ReceivedOutput>| async move {
      for output in outputs.drain(..) {
        builder.add_input(SpendableOutput::from(&rpc, output).await.unwrap());
      }
      builder.add_payment(addr, 6);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 6);
    },
  ),
);
@@ -1,25 +0,0 @@
[package]
name = "serai-db"
version = "0.1.1"
description = "A simple database trait and backends for it"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/common/db"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
rust-version = "1.65"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
parity-db = { version = "0.5", default-features = false, features = ["arc"], optional = true }
rocksdb = { version = "0.24", default-features = false, features = ["zstd"], optional = true }

[features]
parity-db = ["dep:parity-db"]
rocksdb = ["dep:rocksdb"]
@@ -1,8 +0,0 @@
# Serai DB

An inefficient, minimal abstraction around databases.

The abstraction offers `get`, `put`, and `del` with helper functions and macros
built on top. Database iteration is not offered, forcing the caller to manually
implement indexing schemes. This ensures wide compatibility across abstracted
databases.
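As a quick illustration of that surface, a sketch using the in-memory backend defined later in this diff (key and value bytes are illustrative):

// Sketch only:
use serai_db::{Get, DbTxn, Db, MemDb};

let mut db = MemDb::new();
let mut txn = db.txn();
txn.put(b"key", b"value");
assert_eq!(txn.get(b"key"), Some(b"value".to_vec())); // visible within the txn
txn.del(b"key");
txn.commit();
assert_eq!(db.get(b"key"), None); // the delete was committed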
@@ -1,179 +0,0 @@
#[doc(hidden)]
pub fn serai_db_key(
  db_dst: &'static [u8],
  item_dst: &'static [u8],
  key: impl AsRef<[u8]>,
) -> Vec<u8> {
  let db_len = u8::try_from(db_dst.len()).unwrap();
  let dst_len = u8::try_from(item_dst.len()).unwrap();
  [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
}

/// Creates a series of structs which provide namespacing for keys
///
/// # Description
///
/// Creates a unit struct and a default implementation for the `key`, `get`, and `set` functions.
/// The macro uses a syntax similar to defining a function. Parameters are concatenated to produce
/// a key; they must be `borsh`-serializable. The return type is used to automatically
/// (de)serialize the database value bytes using `borsh`.
///
/// # Arguments
///
/// * `db_name` - A database name
/// * `field_name` - An item name
/// * `args` - Comma-separated list of key arguments
/// * `field_type` - The return type
///
/// # Example
///
/// ```ignore
/// create_db!(
///   TributariesDb {
///     AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64,
///     ExpiredDb: (genesis: [u8; 32]) -> Vec<u8>
///   }
/// )
/// ```
#[macro_export]
macro_rules! create_db {
  ($db_name: ident {
    $(
      $field_name: ident:
        $(<$($generic_name: tt: $generic_type: tt),+>)?(
          $($arg: ident: $arg_type: ty),*
        ) -> $field_type: ty$(,)?
    )*
  }) => {
    $(
      #[derive(Clone, Debug)]
      pub(crate) struct $field_name$(
        <$($generic_name: $generic_type),+>
      )?$(
        (core::marker::PhantomData<($($generic_name),+)>)
      )?;
      impl$(<$($generic_name: $generic_type),+>)? $field_name$(<$($generic_name),+>)? {
        pub(crate) fn key($($arg: $arg_type),*) -> Vec<u8> {
          $crate::serai_db_key(
            stringify!($db_name).as_bytes(),
            stringify!($field_name).as_bytes(),
            &borsh::to_vec(&($($arg),*)).unwrap(),
          )
        }
        pub(crate) fn set(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*,
          data: &$field_type
        ) {
          let key = Self::key($($arg),*);
          txn.put(&key, borsh::to_vec(data).unwrap());
        }
        pub(crate) fn get(
          getter: &impl Get,
          $($arg: $arg_type),*
        ) -> Option<$field_type> {
          getter.get(Self::key($($arg),*)).map(|data| {
            borsh::from_slice(data.as_ref()).unwrap()
          })
        }
        // Returns a PhantomData of all generic types so if the generic was only used in the
        // value, not the keys, this doesn't have unused generic types
        #[allow(dead_code)]
        pub(crate) fn del(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> core::marker::PhantomData<($($($generic_name),+)?)> {
          txn.del(&Self::key($($arg),*));
          core::marker::PhantomData
        }

        pub(crate) fn take(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let key = Self::key($($arg),*);
          let res = txn.get(&key).map(|data| borsh::from_slice(data.as_ref()).unwrap());
          if res.is_some() {
            txn.del(key);
          }
          res
        }
      }
    )*
  };
}
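A hypothetical expansion-side usage (a sketch: `MyDb` and `ValuesDb` are illustrative names, and since the generated functions are `pub(crate)`, this lives in the defining crate):

// Sketch only:
create_db!(
  MyDb {
    ValuesDb: (id: u64) -> Vec<u8>
  }
);

let mut db = MemDb::new();
let mut txn = db.txn();
ValuesDb::set(&mut txn, 5, &vec![1, 2, 3]);
assert_eq!(ValuesDb::get(&txn, 5), Some(vec![1, 2, 3]));
assert_eq!(ValuesDb::take(&mut txn, 5), Some(vec![1, 2, 3])); // take also deletes
assert_eq!(ValuesDb::get(&txn, 5), None);
txn.commit();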
#[macro_export]
macro_rules! db_channel {
  ($db_name: ident {
    $($field_name: ident:
      $(<$($generic_name: tt: $generic_type: tt),+>)?(
        $($arg: ident: $arg_type: ty),*
      ) -> $field_type: ty$(,)?
    )*
  }) => {
    $(
      create_db! {
        $db_name {
          $field_name: $(<$($generic_name: $generic_type),+>)?(
            $($arg: $arg_type,)*
            index: u32
          ) -> $field_type
        }
      }

      impl$(<$($generic_name: $generic_type),+>)? $field_name$(<$($generic_name),+>)? {
        pub(crate) fn send(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
          , value: &$field_type
        ) {
          // Use index 0 to store the amount of messages sent
          let messages_sent_key = Self::key($($arg,)* 0);
          let messages_sent = txn.get(&messages_sent_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);
          txn.put(&messages_sent_key, (messages_sent + 1).to_le_bytes());

          // + 2 as index 1 is used for the amount of messages read
          // Using distinct counters enables send to be called without mutating anything recv
          // may read at the same time
          let index_to_use = messages_sent + 2;

          Self::set(txn, $($arg,)* index_to_use, value);
        }
        pub(crate) fn peek(
          getter: &impl Get
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let messages_recvd_key = Self::key($($arg,)* 1);
          let messages_recvd = getter.get(&messages_recvd_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);

          let index_to_read = messages_recvd + 2;

          Self::get(getter, $($arg,)* index_to_read)
        }
        pub(crate) fn try_recv(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let messages_recvd_key = Self::key($($arg,)* 1);
          let messages_recvd = txn.get(&messages_recvd_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);

          let index_to_read = messages_recvd + 2;

          let res = Self::get(txn, $($arg,)* index_to_read);
          if res.is_some() {
            Self::del(txn, $($arg,)* index_to_read);
            txn.put(&messages_recvd_key, (messages_recvd + 1).to_le_bytes());
          }
          res
        }
      }
    )*
  };
}
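In effect, each channel keys a sent counter at index 0, a received counter at index 1, and the messages themselves at indices 2 and up, giving FIFO semantics over plain key-value storage. A sketch of the resulting behavior (illustrative names, same crate as the macro invocation):

// Sketch only:
db_channel!(
  MyDb {
    EventsDb: (genesis: [u8; 32]) -> Vec<u8>
  }
);

let genesis = [0; 32];
let mut db = MemDb::new();
let mut txn = db.txn();
EventsDb::send(&mut txn, genesis, &vec![1]);
EventsDb::send(&mut txn, genesis, &vec![2]);
assert_eq!(EventsDb::peek(&txn, genesis), Some(vec![1])); // peek doesn't advance
assert_eq!(EventsDb::try_recv(&mut txn, genesis), Some(vec![1]));
assert_eq!(EventsDb::try_recv(&mut txn, genesis), Some(vec![2]));
assert_eq!(EventsDb::try_recv(&mut txn, genesis), None); // channel drained
txn.commit();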
@@ -1,56 +0,0 @@
mod create_db;
pub use create_db::*;

mod mem;
pub use mem::*;

#[cfg(feature = "rocksdb")]
mod rocks;
#[cfg(feature = "rocksdb")]
pub use rocks::{RocksDB, new_rocksdb};

#[cfg(feature = "parity-db")]
mod parity_db;
#[cfg(feature = "parity-db")]
pub use parity_db::{ParityDb, new_parity_db};

/// An object implementing `get`.
pub trait Get {
  /// Get a value from the database.
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>>;
}

/// An atomic database transaction.
///
/// A transaction is only required to atomically commit. It is not required that two `Get` calls
/// made with the same transaction return the same result, if another transaction wrote to that
/// key.
///
/// If two transactions are created, and both write (including deletions) to the same key,
/// behavior is undefined. The transaction may block, deadlock, panic, overwrite one of the two
/// values randomly, or take any other action, at time of write or at time of commit.
#[must_use]
pub trait DbTxn: Send + Get {
  /// Write a value to this key.
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>);
  /// Delete the value from this key.
  fn del(&mut self, key: impl AsRef<[u8]>);
  /// Commit this transaction.
  fn commit(self);
}

/// A database supporting atomic transactions.
pub trait Db: 'static + Send + Sync + Clone + Get {
  /// The type representing a database transaction.
  type Transaction<'a>: DbTxn;
  /// Calculate a key for a database entry.
  ///
  /// Keys are separated by the database, the item within the database, and the item's key itself.
  fn key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
    let db_len = u8::try_from(db_dst.len()).unwrap();
    let dst_len = u8::try_from(item_dst.len()).unwrap();
    [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
  }
  /// Open a new transaction.
  fn txn(&mut self) -> Self::Transaction<'_>;
}
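Concretely, the length-prefixed layout means `key(b"db", b"item", b"k")` yields `[2, b'd', b'b', 4, b'i', b't', b'e', b'm', b'k']`: one byte for each destination's length, then its bytes, then the raw item key. Because both destinations are length-prefixed, distinct (database, item) pairs can never collide even when their concatenated bytes would match.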
@@ -1,80 +0,0 @@
use core::fmt::Debug;
use std::{
  sync::{Arc, RwLock},
  collections::{HashSet, HashMap},
};

use crate::*;

/// An atomic operation for the in-memory database.
#[must_use]
#[derive(PartialEq, Eq, Debug)]
pub struct MemDbTxn<'a>(&'a MemDb, HashMap<Vec<u8>, Vec<u8>>, HashSet<Vec<u8>>);

impl Get for MemDbTxn<'_> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    if self.2.contains(key.as_ref()) {
      return None;
    }
    self
      .1
      .get(key.as_ref())
      .cloned()
      .or_else(|| self.0 .0.read().unwrap().get(key.as_ref()).cloned())
  }
}
impl DbTxn for MemDbTxn<'_> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.2.remove(key.as_ref());
    self.1.insert(key.as_ref().to_vec(), value.as_ref().to_vec());
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.1.remove(key.as_ref());
    self.2.insert(key.as_ref().to_vec());
  }
  fn commit(mut self) {
    let mut db = self.0 .0.write().unwrap();
    for (key, value) in self.1.drain() {
      db.insert(key, value);
    }
    for key in self.2 {
      db.remove(&key);
    }
  }
}

/// An in-memory database.
#[derive(Clone, Debug)]
pub struct MemDb(Arc<RwLock<HashMap<Vec<u8>, Vec<u8>>>>);

impl PartialEq for MemDb {
  fn eq(&self, other: &MemDb) -> bool {
    *self.0.read().unwrap() == *other.0.read().unwrap()
  }
}
impl Eq for MemDb {}

impl Default for MemDb {
  fn default() -> MemDb {
    MemDb(Arc::new(RwLock::new(HashMap::new())))
  }
}

impl MemDb {
  /// Create a new in-memory database.
  pub fn new() -> MemDb {
    MemDb::default()
  }
}

impl Get for MemDb {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    self.0.read().unwrap().get(key.as_ref()).cloned()
  }
}
impl Db for MemDb {
  type Transaction<'a> = MemDbTxn<'a>;
  fn txn(&mut self) -> MemDbTxn<'_> {
    MemDbTxn(self, HashMap::new(), HashSet::new())
  }
}
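`MemDbTxn` buffers writes in its own map (field `.1`) and deletions in a set (field `.2`), overlaying the shared map until commit. A sketch of the resulting isolation (bindings are illustrative):

// Sketch only:
let mut db = MemDb::new();
let reader = db.clone(); // clones the Arc; both handles share the underlying map
let mut txn = db.txn();
txn.put(b"k", b"v");
assert_eq!(reader.get(b"k"), None); // uncommitted writes aren't visible outside the txn
txn.commit();
assert_eq!(reader.get(b"k"), Some(b"v".to_vec())); // visible once committed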
@@ -1,47 +0,0 @@
use std::sync::Arc;

pub use ::parity_db::{Options, Db as ParityDb};

use crate::*;

#[must_use]
pub struct Transaction<'a>(&'a Arc<ParityDb>, Vec<(u8, Vec<u8>, Option<Vec<u8>>)>);

impl Get for Transaction<'_> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    let mut res = self.0.get(&key);
    for change in &self.1 {
      if change.1 == key.as_ref() {
        res.clone_from(&change.2);
      }
    }
    res
  }
}
impl DbTxn for Transaction<'_> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.1.push((0, key.as_ref().to_vec(), Some(value.as_ref().to_vec())))
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.1.push((0, key.as_ref().to_vec(), None))
  }
  fn commit(self) {
    self.0.commit(self.1).unwrap()
  }
}

impl Get for Arc<ParityDb> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    ParityDb::get(self, 0, key.as_ref()).unwrap()
  }
}
impl Db for Arc<ParityDb> {
  type Transaction<'a> = Transaction<'a>;
  fn txn(&mut self) -> Self::Transaction<'_> {
    Transaction(self, vec![])
  }
}

pub fn new_parity_db(path: &str) -> Arc<ParityDb> {
  Arc::new(
    ParityDb::open_or_create(&Options::with_columns(std::path::Path::new(path), 1)).unwrap(),
  )
}
@@ -1,66 +0,0 @@
use std::sync::Arc;

use rocksdb::{
  DBCompressionType, ThreadMode, SingleThreaded, LogLevel, WriteOptions,
  Transaction as RocksTransaction, Options, OptimisticTransactionDB,
};

use crate::*;

#[must_use]
pub struct Transaction<'a, T: ThreadMode>(
  RocksTransaction<'a, OptimisticTransactionDB<T>>,
  &'a OptimisticTransactionDB<T>,
);

impl<T: ThreadMode> Get for Transaction<'_, T> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    self.0.get(key).expect("couldn't read from RocksDB via transaction")
  }
}
impl<T: ThreadMode> DbTxn for Transaction<'_, T> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.0.put(key, value).expect("couldn't write to RocksDB via transaction")
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.0.delete(key).expect("couldn't delete from RocksDB via transaction")
  }
  fn commit(self) {
    self.0.commit().expect("couldn't commit to RocksDB via transaction");
    self.1.flush_wal(true).expect("couldn't flush RocksDB WAL");
    self.1.flush().expect("couldn't flush RocksDB");
  }
}

impl<T: ThreadMode> Get for Arc<OptimisticTransactionDB<T>> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    OptimisticTransactionDB::get(self, key).expect("couldn't read from RocksDB")
  }
}
impl<T: Send + ThreadMode + 'static> Db for Arc<OptimisticTransactionDB<T>> {
  type Transaction<'a> = Transaction<'a, T>;
  fn txn(&mut self) -> Self::Transaction<'_> {
    let mut opts = WriteOptions::default();
    opts.set_sync(true);
    Transaction(self.transaction_opt(&opts, &Default::default()), &**self)
  }
}

pub type RocksDB = Arc<OptimisticTransactionDB<SingleThreaded>>;
pub fn new_rocksdb(path: &str) -> RocksDB {
  let mut options = Options::default();
  options.create_if_missing(true);
  options.set_compression_type(DBCompressionType::Zstd);

  options.set_wal_compression_type(DBCompressionType::Zstd);
  // 10 MB
  options.set_max_total_wal_size(10 * 1024 * 1024);
  options.set_wal_size_limit_mb(10);

  options.set_log_level(LogLevel::Warn);
  // 1 MB
  options.set_max_log_file_size(1024 * 1024);
  options.set_recycle_log_file_num(1);

  Arc::new(OptimisticTransactionDB::open(&options, path).unwrap())
}
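Note the durability posture here: transactions are opened with `set_sync(true)`, and `commit` additionally flushes the WAL and memtables, trading write throughput for crash safety. A usage sketch (the path is illustrative):

// Sketch only:
let mut db = new_rocksdb("./serai-rocksdb");
let mut txn = db.txn();
txn.put(b"key", b"value");
txn.commit(); // synced, WAL flushed, and memtables flushed before returning
assert_eq!(db.get(b"key"), Some(b"value".to_vec()));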
17
common/env/Cargo.toml
vendored
@@ -1,17 +0,0 @@
[package]
name = "serai-env"
version = "0.1.0"
description = "A common library for Serai apps to access environment variables"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/common/env"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
rust-version = "1.64"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true
9
common/env/src/lib.rs
vendored
@@ -1,9 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]

// Obtain a variable from the Serai environment/secret store.
pub fn var(variable: &str) -> Option<String> {
  // TODO: Move this to a proper secret store
  // TODO: Unset this variable
  std::env::var(variable).ok()
}
@@ -1,20 +0,0 @@
[package]
name = "patchable-async-sleep"
version = "0.1.0"
description = "An async sleep function, patchable to the preferred runtime"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/common/patchable-async-sleep"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["async", "sleep", "tokio", "smol", "async-std"]
edition = "2021"
rust-version = "1.70"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
tokio = { version = "1", default-features = false, features = ["time"] }
Some files were not shown because too many files have changed in this diff.