mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-09 12:49:23 +00:00
Compare commits
6 Commits
aggressive
...
firo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d2e5d9184d | ||
|
|
9b3985e120 | ||
|
|
c3cc8d51b7 | ||
|
|
e3ff4f7af6 | ||
|
|
a770e29b0c | ||
|
|
6d9221d56c |
5
.gitattributes
vendored
5
.gitattributes
vendored
@@ -1,5 +0,0 @@
|
|||||||
# Auto detect text files and perform LF normalization
|
|
||||||
* text=auto
|
|
||||||
* text eol=lf
|
|
||||||
|
|
||||||
*.pdf binary
|
|
||||||
21
.github/actions/LICENSE
vendored
21
.github/actions/LICENSE
vendored
@@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
47
.github/actions/bitcoin/action.yml
vendored
47
.github/actions/bitcoin/action.yml
vendored
@@ -1,47 +0,0 @@
|
|||||||
name: bitcoin-regtest
|
|
||||||
description: Spawns a regtest Bitcoin daemon
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
version:
|
|
||||||
description: "Version to download and run"
|
|
||||||
required: false
|
|
||||||
default: 24.0.1
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- name: Bitcoin Daemon Cache
|
|
||||||
id: cache-bitcoind
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: bitcoin.tar.gz
|
|
||||||
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
|
||||||
|
|
||||||
- name: Download the Bitcoin Daemon
|
|
||||||
if: steps.cache-bitcoind.outputs.cache-hit != 'true'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
RUNNER_OS=linux
|
|
||||||
RUNNER_ARCH=x86_64
|
|
||||||
FILE=bitcoin-${{ inputs.version }}-$RUNNER_ARCH-$RUNNER_OS-gnu.tar.gz
|
|
||||||
|
|
||||||
wget https://bitcoincore.org/bin/bitcoin-core-${{ inputs.version }}/$FILE
|
|
||||||
mv $FILE bitcoin.tar.gz
|
|
||||||
|
|
||||||
- name: Extract the Bitcoin Daemon
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
tar xzvf bitcoin.tar.gz
|
|
||||||
cd bitcoin-${{ inputs.version }}
|
|
||||||
sudo mv bin/* /bin && sudo mv lib/* /lib
|
|
||||||
|
|
||||||
- name: Bitcoin Regtest Daemon
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
RPC_USER=serai
|
|
||||||
RPC_PASS=seraidex
|
|
||||||
|
|
||||||
bitcoind -txindex -regtest \
|
|
||||||
-rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
|
|
||||||
-rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
|
|
||||||
-daemon
|
|
||||||
43
.github/actions/build-dependencies/action.yml
vendored
43
.github/actions/build-dependencies/action.yml
vendored
@@ -1,43 +0,0 @@
|
|||||||
name: build-dependencies
|
|
||||||
description: Installs build dependencies for Serai
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
github-token:
|
|
||||||
description: "GitHub token to install Protobuf with"
|
|
||||||
require: true
|
|
||||||
default:
|
|
||||||
|
|
||||||
rust-toolchain:
|
|
||||||
description: "Rust toolchain to install"
|
|
||||||
required: false
|
|
||||||
default: stable
|
|
||||||
|
|
||||||
rust-components:
|
|
||||||
description: "Rust components to install"
|
|
||||||
required: false
|
|
||||||
default:
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- name: Install Protobuf
|
|
||||||
uses: arduino/setup-protoc@v2.0.0
|
|
||||||
with:
|
|
||||||
repo-token: ${{ inputs.github-token }}
|
|
||||||
|
|
||||||
- name: Install solc
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
pip3 install solc-select==0.2.1
|
|
||||||
solc-select install 0.8.16
|
|
||||||
solc-select use 0.8.16
|
|
||||||
|
|
||||||
- name: Install Rust
|
|
||||||
uses: dtolnay/rust-toolchain@master
|
|
||||||
with:
|
|
||||||
toolchain: ${{ inputs.rust-toolchain }}
|
|
||||||
components: ${{ inputs.rust-components }}
|
|
||||||
targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
|
|
||||||
|
|
||||||
# - name: Cache Rust
|
|
||||||
# uses: Swatinem/rust-cache@v2
|
|
||||||
44
.github/actions/monero-wallet-rpc/action.yml
vendored
44
.github/actions/monero-wallet-rpc/action.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
name: monero-wallet-rpc
|
|
||||||
description: Spawns a Monero Wallet-RPC.
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
version:
|
|
||||||
description: "Version to download and run"
|
|
||||||
required: false
|
|
||||||
default: v0.18.2.0
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- name: Monero Wallet RPC Cache
|
|
||||||
id: cache-monero-wallet-rpc
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: monero-wallet-rpc
|
|
||||||
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
|
||||||
|
|
||||||
- name: Download the Monero Wallet RPC
|
|
||||||
if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
|
|
||||||
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
|
|
||||||
# to the contained folder not following the same naming scheme and
|
|
||||||
# requiring further expansion not worth doing right now
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
RUNNER_OS=${{ runner.os }}
|
|
||||||
RUNNER_ARCH=${{ runner.arch }}
|
|
||||||
|
|
||||||
RUNNER_OS=${RUNNER_OS,,}
|
|
||||||
RUNNER_ARCH=${RUNNER_ARCH,,}
|
|
||||||
|
|
||||||
RUNNER_OS=linux
|
|
||||||
RUNNER_ARCH=x64
|
|
||||||
|
|
||||||
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
|
|
||||||
wget https://downloads.getmonero.org/cli/$FILE
|
|
||||||
tar -xvf $FILE
|
|
||||||
|
|
||||||
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc
|
|
||||||
|
|
||||||
- name: Monero Wallet RPC
|
|
||||||
shell: bash
|
|
||||||
run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach
|
|
||||||
44
.github/actions/monero/action.yml
vendored
44
.github/actions/monero/action.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
name: monero-regtest
|
|
||||||
description: Spawns a regtest Monero daemon
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
version:
|
|
||||||
description: "Version to download and run"
|
|
||||||
required: false
|
|
||||||
default: v0.18.2.0
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- name: Monero Daemon Cache
|
|
||||||
id: cache-monerod
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: monerod
|
|
||||||
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
|
||||||
|
|
||||||
- name: Download the Monero Daemon
|
|
||||||
if: steps.cache-monerod.outputs.cache-hit != 'true'
|
|
||||||
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
|
|
||||||
# to the contained folder not following the same naming scheme and
|
|
||||||
# requiring further expansion not worth doing right now
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
RUNNER_OS=${{ runner.os }}
|
|
||||||
RUNNER_ARCH=${{ runner.arch }}
|
|
||||||
|
|
||||||
RUNNER_OS=${RUNNER_OS,,}
|
|
||||||
RUNNER_ARCH=${RUNNER_ARCH,,}
|
|
||||||
|
|
||||||
RUNNER_OS=linux
|
|
||||||
RUNNER_ARCH=x64
|
|
||||||
|
|
||||||
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
|
|
||||||
wget https://downloads.getmonero.org/cli/$FILE
|
|
||||||
tar -xvf $FILE
|
|
||||||
|
|
||||||
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
|
|
||||||
|
|
||||||
- name: Monero Regtest Daemon
|
|
||||||
shell: bash
|
|
||||||
run: ./monerod --regtest --offline --fixed-difficulty=1 --detach
|
|
||||||
44
.github/actions/test-dependencies/action.yml
vendored
44
.github/actions/test-dependencies/action.yml
vendored
@@ -1,44 +0,0 @@
|
|||||||
name: test-dependencies
|
|
||||||
description: Installs test dependencies for Serai
|
|
||||||
|
|
||||||
inputs:
|
|
||||||
github-token:
|
|
||||||
description: "GitHub token to install Protobuf with"
|
|
||||||
require: true
|
|
||||||
default:
|
|
||||||
|
|
||||||
monero-version:
|
|
||||||
description: "Monero version to download and run as a regtest node"
|
|
||||||
required: false
|
|
||||||
default: v0.18.2.0
|
|
||||||
|
|
||||||
bitcoin-version:
|
|
||||||
description: "Bitcoin version to download and run as a regtest node"
|
|
||||||
required: false
|
|
||||||
default: 24.0.1
|
|
||||||
|
|
||||||
runs:
|
|
||||||
using: "composite"
|
|
||||||
steps:
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ inputs.github-token }}
|
|
||||||
|
|
||||||
- name: Install Foundry
|
|
||||||
uses: foundry-rs/foundry-toolchain@v1
|
|
||||||
with:
|
|
||||||
version: nightly
|
|
||||||
|
|
||||||
- name: Run a Monero Regtest Node
|
|
||||||
uses: ./.github/actions/monero
|
|
||||||
with:
|
|
||||||
version: ${{ inputs.monero-version }}
|
|
||||||
|
|
||||||
- name: Run a Bitcoin Regtest Node
|
|
||||||
uses: ./.github/actions/bitcoin
|
|
||||||
with:
|
|
||||||
version: ${{ inputs.bitcoin-version }}
|
|
||||||
|
|
||||||
- name: Run a Monero Wallet-RPC
|
|
||||||
uses: ./.github/actions/monero-wallet-rpc
|
|
||||||
1
.github/nightly-version
vendored
1
.github/nightly-version
vendored
@@ -1 +0,0 @@
|
|||||||
nightly-2023-07-01
|
|
||||||
27
.github/workflows/daily-deny.yml
vendored
27
.github/workflows/daily-deny.yml
vendored
@@ -1,27 +0,0 @@
|
|||||||
name: Daily Deny Check
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 * * *"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
deny:
|
|
||||||
name: Run cargo deny
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Advisory Cache
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: ~/.cargo/advisory-db
|
|
||||||
key: rust-advisory-db
|
|
||||||
|
|
||||||
- name: Install cargo
|
|
||||||
uses: dtolnay/rust-toolchain@stable
|
|
||||||
|
|
||||||
- name: Install cargo deny
|
|
||||||
run: cargo install --locked cargo-deny
|
|
||||||
|
|
||||||
- name: Run cargo deny
|
|
||||||
run: cargo deny -L error --all-features check
|
|
||||||
59
.github/workflows/monero-tests.yaml
vendored
59
.github/workflows/monero-tests.yaml
vendored
@@ -1,59 +0,0 @@
|
|||||||
name: Monero Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
paths:
|
|
||||||
- "coins/monero/**"
|
|
||||||
- "processor/**"
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "coins/monero/**"
|
|
||||||
- "processor/**"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Only run these once since they will be consistent regardless of any node
|
|
||||||
unit-tests:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Test Dependencies
|
|
||||||
uses: ./.github/actions/test-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Run Unit Tests Without Features
|
|
||||||
run: cargo test --package monero-serai --lib
|
|
||||||
|
|
||||||
# Doesn't run unit tests with features as the tests workflow will
|
|
||||||
|
|
||||||
integration-tests:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
# Test against all supported protocol versions
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
version: [v0.17.3.2, v0.18.2.0]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Test Dependencies
|
|
||||||
uses: ./.github/actions/test-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
monero-version: ${{ matrix.version }}
|
|
||||||
|
|
||||||
- name: Run Integration Tests Without Features
|
|
||||||
# Runs with the binaries feature so the binaries build
|
|
||||||
# https://github.com/rust-lang/cargo/issues/8396
|
|
||||||
run: cargo test --package monero-serai --features binaries --test '*'
|
|
||||||
|
|
||||||
- name: Run Integration Tests
|
|
||||||
# Don't run if the the tests workflow also will
|
|
||||||
if: ${{ matrix.version != 'v0.18.2.0' }}
|
|
||||||
run: |
|
|
||||||
cargo test --package monero-serai --all-features --test '*'
|
|
||||||
cargo test --package serai-processor --all-features monero
|
|
||||||
53
.github/workflows/monthly-nightly-update.yml
vendored
53
.github/workflows/monthly-nightly-update.yml
vendored
@@ -1,53 +0,0 @@
|
|||||||
name: Monthly Nightly Update
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 1 * *"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update:
|
|
||||||
name: Update nightly
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: "recursive"
|
|
||||||
|
|
||||||
- name: Write nightly version
|
|
||||||
run: echo $(date +"nightly-%Y-%m"-01) > .github/nightly-version
|
|
||||||
|
|
||||||
- name: Create the commit
|
|
||||||
run: |
|
|
||||||
git config user.name "GitHub Actions"
|
|
||||||
git config user.email "<>"
|
|
||||||
|
|
||||||
git checkout -b $(date +"nightly-%Y-%m")
|
|
||||||
|
|
||||||
git add .github/nightly-version
|
|
||||||
git commit -m "Update nightly"
|
|
||||||
git push -u origin $(date +"nightly-%Y-%m")
|
|
||||||
|
|
||||||
- name: Pull Request
|
|
||||||
uses: actions/github-script@v6
|
|
||||||
with:
|
|
||||||
script: |
|
|
||||||
const { repo, owner } = context.repo;
|
|
||||||
|
|
||||||
const result = await github.rest.pulls.create({
|
|
||||||
title: (new Date()).toLocaleString(
|
|
||||||
false,
|
|
||||||
{ month: "long", year: "numeric" }
|
|
||||||
) + " - Rust Nightly Update",
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
head: "nightly-" + (new Date()).toISOString().split("-").splice(0, 2).join("-"),
|
|
||||||
base: "develop",
|
|
||||||
body: "PR auto-generated by a GitHub workflow."
|
|
||||||
});
|
|
||||||
|
|
||||||
github.rest.issues.addLabels({
|
|
||||||
owner,
|
|
||||||
repo,
|
|
||||||
issue_number: result.data.number,
|
|
||||||
labels: ["improvement"]
|
|
||||||
});
|
|
||||||
21
.github/workflows/no-std.yml
vendored
21
.github/workflows/no-std.yml
vendored
@@ -1,21 +0,0 @@
|
|||||||
name: no-std build
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
pull_request:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Install Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ inputs.github-token }}
|
|
||||||
|
|
||||||
- name: Verify no-std builds
|
|
||||||
run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf
|
|
||||||
85
.github/workflows/tests.yml
vendored
85
.github/workflows/tests.yml
vendored
@@ -1,85 +0,0 @@
|
|||||||
name: Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- develop
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
clippy:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Get nightly version to use
|
|
||||||
id: nightly
|
|
||||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Build Dependencies
|
|
||||||
uses: ./.github/actions/build-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
rust-toolchain: ${{ steps.nightly.outputs.version }}
|
|
||||||
rust-components: clippy
|
|
||||||
|
|
||||||
- name: Run Clippy
|
|
||||||
# Allow dbg_macro when run locally, yet not when pushed
|
|
||||||
run: cargo clippy --all-features --all-targets -- -D clippy::dbg_macro $(grep "\S" ../../clippy-config | grep -v "#")
|
|
||||||
|
|
||||||
deny:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Advisory Cache
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: ~/.cargo/advisory-db
|
|
||||||
key: rust-advisory-db
|
|
||||||
|
|
||||||
- name: Install cargo
|
|
||||||
uses: dtolnay/rust-toolchain@stable
|
|
||||||
|
|
||||||
- name: Install cargo deny
|
|
||||||
run: cargo install --locked cargo-deny
|
|
||||||
|
|
||||||
- name: Run cargo deny
|
|
||||||
run: cargo deny -L error --all-features check
|
|
||||||
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Test Dependencies
|
|
||||||
uses: ./.github/actions/test-dependencies
|
|
||||||
with:
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build node
|
|
||||||
run: |
|
|
||||||
cd substrate/node
|
|
||||||
cargo build
|
|
||||||
|
|
||||||
- name: Run Tests
|
|
||||||
run: GITHUB_CI=true cargo test --all-features
|
|
||||||
|
|
||||||
fmt:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Get nightly version to use
|
|
||||||
id: nightly
|
|
||||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Install rustfmt
|
|
||||||
uses: dtolnay/rust-toolchain@master
|
|
||||||
with:
|
|
||||||
toolchain: ${{ steps.nightly.outputs.version }}
|
|
||||||
components: rustfmt
|
|
||||||
|
|
||||||
- name: Run rustfmt
|
|
||||||
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
|
||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,2 +1,2 @@
|
|||||||
target
|
target
|
||||||
.vscode
|
Cargo.lock
|
||||||
|
|||||||
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
[submodule "coins/monero/c/monero"]
|
||||||
|
path = coins/monero/c/monero
|
||||||
|
url = https://github.com/monero-project/monero
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
tab_spaces = 2
|
|
||||||
|
|
||||||
max_width = 100
|
|
||||||
# Let the developer decide based on the 100 char line limit
|
|
||||||
use_small_heuristics = "Max"
|
|
||||||
|
|
||||||
error_on_line_overflow = true
|
|
||||||
error_on_unformatted = true
|
|
||||||
|
|
||||||
imports_granularity = "Crate"
|
|
||||||
reorder_imports = false
|
|
||||||
reorder_modules = false
|
|
||||||
|
|
||||||
unstable_features = true
|
|
||||||
spaces_around_ranges = true
|
|
||||||
binop_separator = "Back"
|
|
||||||
661
AGPL-3.0
661
AGPL-3.0
@@ -1,661 +0,0 @@
|
|||||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
|
||||||
Version 3, 19 November 2007
|
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies
|
|
||||||
of this license document, but changing it is not allowed.
|
|
||||||
|
|
||||||
Preamble
|
|
||||||
|
|
||||||
The GNU Affero General Public License is a free, copyleft license for
|
|
||||||
software and other kinds of works, specifically designed to ensure
|
|
||||||
cooperation with the community in the case of network server software.
|
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
|
||||||
to take away your freedom to share and change the works. By contrast,
|
|
||||||
our General Public Licenses are intended to guarantee your freedom to
|
|
||||||
share and change all versions of a program--to make sure it remains free
|
|
||||||
software for all its users.
|
|
||||||
|
|
||||||
When we speak of free software, we are referring to freedom, not
|
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
|
||||||
have the freedom to distribute copies of free software (and charge for
|
|
||||||
them if you wish), that you receive source code or can get it if you
|
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
Developers that use our General Public Licenses protect your rights
|
|
||||||
with two steps: (1) assert copyright on the software, and (2) offer
|
|
||||||
you this License which gives you legal permission to copy, distribute
|
|
||||||
and/or modify the software.
|
|
||||||
|
|
||||||
A secondary benefit of defending all users' freedom is that
|
|
||||||
improvements made in alternate versions of the program, if they
|
|
||||||
receive widespread use, become available for other developers to
|
|
||||||
incorporate. Many developers of free software are heartened and
|
|
||||||
encouraged by the resulting cooperation. However, in the case of
|
|
||||||
software used on network servers, this result may fail to come about.
|
|
||||||
The GNU General Public License permits making a modified version and
|
|
||||||
letting the public access it on a server without ever releasing its
|
|
||||||
source code to the public.
|
|
||||||
|
|
||||||
The GNU Affero General Public License is designed specifically to
|
|
||||||
ensure that, in such cases, the modified source code becomes available
|
|
||||||
to the community. It requires the operator of a network server to
|
|
||||||
provide the source code of the modified version running there to the
|
|
||||||
users of that server. Therefore, public use of a modified version, on
|
|
||||||
a publicly accessible server, gives the public access to the source
|
|
||||||
code of the modified version.
|
|
||||||
|
|
||||||
An older license, called the Affero General Public License and
|
|
||||||
published by Affero, was designed to accomplish similar goals. This is
|
|
||||||
a different license, not a version of the Affero GPL, but Affero has
|
|
||||||
released a new version of the Affero GPL which permits relicensing under
|
|
||||||
this license.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and
|
|
||||||
modification follow.
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
0. Definitions.
|
|
||||||
|
|
||||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
|
||||||
works, such as semiconductor masks.
|
|
||||||
|
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
|
||||||
"recipients" may be individuals or organizations.
|
|
||||||
|
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
|
||||||
in a fashion requiring copyright permission, other than the making of an
|
|
||||||
exact copy. The resulting work is called a "modified version" of the
|
|
||||||
earlier work or a work "based on" the earlier work.
|
|
||||||
|
|
||||||
A "covered work" means either the unmodified Program or a work based
|
|
||||||
on the Program.
|
|
||||||
|
|
||||||
To "propagate" a work means to do anything with it that, without
|
|
||||||
permission, would make you directly or secondarily liable for
|
|
||||||
infringement under applicable copyright law, except executing it on a
|
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
|
||||||
distribution (with or without modification), making available to the
|
|
||||||
public, and in some countries other activities as well.
|
|
||||||
|
|
||||||
To "convey" a work means any kind of propagation that enables other
|
|
||||||
parties to make or receive copies. Mere interaction with a user through
|
|
||||||
a computer network, with no transfer of a copy, is not conveying.
|
|
||||||
|
|
||||||
An interactive user interface displays "Appropriate Legal Notices"
|
|
||||||
to the extent that it includes a convenient and prominently visible
|
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
|
||||||
tells the user that there is no warranty for the work (except to the
|
|
||||||
extent that warranties are provided), that licensees may convey the
|
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
|
||||||
menu, a prominent item in the list meets this criterion.
|
|
||||||
|
|
||||||
1. Source Code.
|
|
||||||
|
|
||||||
The "source code" for a work means the preferred form of the work
|
|
||||||
for making modifications to it. "Object code" means any non-source
|
|
||||||
form of a work.
|
|
||||||
|
|
||||||
A "Standard Interface" means an interface that either is an official
|
|
||||||
standard defined by a recognized standards body, or, in the case of
|
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
|
||||||
|
|
||||||
The "System Libraries" of an executable work include anything, other
|
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
|
||||||
packaging a Major Component, but which is not part of that Major
|
|
||||||
Component, and (b) serves only to enable use of the work with that
|
|
||||||
Major Component, or to implement a Standard Interface for which an
|
|
||||||
implementation is available to the public in source code form. A
|
|
||||||
"Major Component", in this context, means a major essential component
|
|
||||||
(kernel, window system, and so on) of the specific operating system
|
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
|
||||||
produce the work, or an object code interpreter used to run it.
|
|
||||||
|
|
||||||
The "Corresponding Source" for a work in object code form means all
|
|
||||||
the source code needed to generate, install, and (for an executable
|
|
||||||
work) run the object code and to modify the work, including scripts to
|
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
|
||||||
programs which are used unmodified in performing those activities but
|
|
||||||
which are not part of the work. For example, Corresponding Source
|
|
||||||
includes interface definition files associated with source files for
|
|
||||||
the work, and the source code for shared libraries and dynamically
|
|
||||||
linked subprograms that the work is specifically designed to require,
|
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
|
||||||
|
|
||||||
The Corresponding Source need not include anything that users
|
|
||||||
can regenerate automatically from other parts of the Corresponding
|
|
||||||
Source.
|
|
||||||
|
|
||||||
The Corresponding Source for a work in source code form is that
|
|
||||||
same work.
|
|
||||||
|
|
||||||
2. Basic Permissions.
|
|
||||||
|
|
||||||
All rights granted under this License are granted for the term of
|
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
|
||||||
permission to run the unmodified Program. The output from running a
|
|
||||||
covered work is covered by this License only if the output, given its
|
|
||||||
content, constitutes a covered work. This License acknowledges your
|
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
|
||||||
|
|
||||||
You may make, run and propagate covered works that you do not
|
|
||||||
convey, without conditions so long as your license otherwise remains
|
|
||||||
in force. You may convey covered works to others for the sole purpose
|
|
||||||
of having them make modifications exclusively for you, or provide you
|
|
||||||
with facilities for running those works, provided that you comply with
|
|
||||||
the terms of this License in conveying all material for which you do
|
|
||||||
not control copyright. Those thus making or running the covered works
|
|
||||||
for you must do so exclusively on your behalf, under your direction
|
|
||||||
and control, on terms that prohibit them from making any copies of
|
|
||||||
your copyrighted material outside their relationship with you.
|
|
||||||
|
|
||||||
Conveying under any other circumstances is permitted solely under
|
|
||||||
the conditions stated below. Sublicensing is not allowed; section 10
|
|
||||||
makes it unnecessary.
|
|
||||||
|
|
||||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
|
||||||
No covered work shall be deemed part of an effective technological
|
|
||||||
measure under any applicable law fulfilling obligations under article
|
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
|
||||||
similar laws prohibiting or restricting circumvention of such
|
|
||||||
measures.
|
|
||||||
|
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
|
||||||
circumvention of technological measures to the extent such circumvention
|
|
||||||
is effected by exercising rights under this License with respect to
|
|
||||||
the covered work, and you disclaim any intention to limit operation or
|
|
||||||
modification of the work as a means of enforcing, against the work's
|
|
||||||
users, your or third parties' legal rights to forbid circumvention of
|
|
||||||
technological measures.
|
|
||||||
|
|
||||||
4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these conditions:
|
|
||||||
|
|
||||||
a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
|
|
||||||
b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under section
|
|
||||||
7. This requirement modifies the requirement in section 4 to
|
|
||||||
"keep intact all notices".
|
|
||||||
|
|
||||||
c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
|
|
||||||
d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms
|
|
||||||
of sections 4 and 5, provided that you also convey the
|
|
||||||
machine-readable Corresponding Source under the terms of this License,
|
|
||||||
in one of these ways:
|
|
||||||
|
|
||||||
a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
|
|
||||||
b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the
|
|
||||||
Corresponding Source from a network server at no charge.
|
|
||||||
|
|
||||||
c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
|
|
||||||
d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
|
|
||||||
e) Convey the object code using peer-to-peer transmission, provided
|
|
||||||
you inform other peers where the object code and Corresponding
|
|
||||||
Source of the work are being offered to the general public at no
|
|
||||||
charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal, family,
|
|
||||||
or household purposes, or (2) anything designed or sold for incorporation
|
|
||||||
into a dwelling. In determining whether a product is a consumer product,
|
|
||||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
|
||||||
product received by a particular user, "normally used" refers to a
|
|
||||||
typical or common use of that class of product, regardless of the status
|
|
||||||
of the particular user or of the way in which the particular user
|
|
||||||
actually uses, or expects or is expected to use, the product. A product
|
|
||||||
is a consumer product regardless of whether the product has substantial
|
|
||||||
commercial, industrial or non-consumer uses, unless such uses represent
|
|
||||||
the only significant mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to install
|
|
||||||
and execute modified versions of a covered work in that User Product from
|
|
||||||
a modified version of its Corresponding Source. The information must
|
|
||||||
suffice to ensure that the continued functioning of the modified object
|
|
||||||
code is in no case prevented or interfered with solely because
|
|
||||||
modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or updates
|
|
||||||
for a work that has been modified or installed by the recipient, or for
|
|
||||||
the User Product in which it has been modified or installed. Access to a
|
|
||||||
network may be denied when the modification itself materially and
|
|
||||||
adversely affects the operation of the network or violates the rules and
|
|
||||||
protocols for communication across the network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders of
|
|
||||||
that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
|
|
||||||
b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
|
|
||||||
c) Prohibiting misrepresentation of the origin of that material, or
|
|
||||||
requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
|
|
||||||
d) Limiting the use for publicity purposes of names of licensors or
|
|
||||||
authors of the material; or
|
|
||||||
|
|
||||||
e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
|
|
||||||
f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions of
|
|
||||||
it) with contractual assumptions of liability to the recipient, for
|
|
||||||
any liability that these contractual assumptions directly impose on
|
|
||||||
those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions;
|
|
||||||
the above requirements apply either way.
|
|
||||||
|
|
||||||
8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your
|
|
||||||
license from a particular copyright holder is reinstated (a)
|
|
||||||
provisionally, unless and until the copyright holder explicitly and
|
|
||||||
finally terminates your license, and (b) permanently, if the copyright
|
|
||||||
holder fails to notify you of the violation by some reasonable means
|
|
||||||
prior to 60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or
|
|
||||||
run a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims
|
|
||||||
owned or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within
|
|
||||||
the scope of its coverage, prohibits the exercise of, or is
|
|
||||||
conditioned on the non-exercise of one or more of the rights that are
|
|
||||||
specifically granted under this License. You may not convey a covered
|
|
||||||
work if you are a party to an arrangement with a third party that is
|
|
||||||
in the business of distributing software, under which you make payment
|
|
||||||
to the third party based on the extent of your activity of conveying
|
|
||||||
the work, and under which the third party grants, to any of the
|
|
||||||
parties who would receive the covered work from you, a discriminatory
|
|
||||||
patent license (a) in connection with copies of the covered work
|
|
||||||
conveyed by you (or copies made from those copies), or (b) primarily
|
|
||||||
for and in connection with specific products or compilations that
|
|
||||||
contain the covered work, unless you entered into that arrangement,
|
|
||||||
or that patent license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under this
|
|
||||||
License and any other pertinent obligations, then as a consequence you may
|
|
||||||
not convey it at all. For example, if you agree to terms that obligate you
|
|
||||||
to collect a royalty for further conveying from those to whom you convey
|
|
||||||
the Program, the only way you could satisfy both those terms and this
|
|
||||||
License would be to refrain entirely from conveying the Program.
|
|
||||||
|
|
||||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, if you modify the
|
|
||||||
Program, your modified version must prominently offer all users
|
|
||||||
interacting with it remotely through a computer network (if your version
|
|
||||||
supports such interaction) an opportunity to receive the Corresponding
|
|
||||||
Source of your version by providing access to the Corresponding Source
|
|
||||||
from a network server at no charge, through some standard or customary
|
|
||||||
means of facilitating copying of software. This Corresponding Source
|
|
||||||
shall include the Corresponding Source for any work covered by version 3
|
|
||||||
of the GNU General Public License that is incorporated pursuant to the
|
|
||||||
following paragraph.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the work with which it is combined will remain governed by version
|
|
||||||
3 of the GNU General Public License.
|
|
||||||
|
|
||||||
14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions of
|
|
||||||
the GNU Affero General Public License from time to time. Such new versions
|
|
||||||
will be similar in spirit to the present version, but may differ in detail to
|
|
||||||
address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the
|
|
||||||
Program specifies that a certain numbered version of the GNU Affero General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU Affero General Public License, you may choose any version ever published
|
|
||||||
by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future
|
|
||||||
versions of the GNU Affero General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
|
||||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
|
||||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
|
||||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
|
||||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
|
||||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
|
||||||
|
|
||||||
16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
|
||||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
|
||||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
|
||||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
|
||||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
|
||||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
|
||||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
|
||||||
SUCH DAMAGES.
|
|
||||||
|
|
||||||
17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest
|
|
||||||
to attach them to the start of each source file to most effectively
|
|
||||||
state the exclusion of warranty; and each file should have at least
|
|
||||||
the "copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License as published by
|
|
||||||
the Free Software Foundation, either version 3 of the License, or
|
|
||||||
(at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper mail.
|
|
||||||
|
|
||||||
If your software can interact with users remotely through a computer
|
|
||||||
network, you should also make sure that it provides a way for users to
|
|
||||||
get its source. For example, if your program is a web application, its
|
|
||||||
interface could display a "Source" link that leads users to an archive
|
|
||||||
of the code. There are many ways you could offer source, and different
|
|
||||||
solutions will be better for different programs; see section 13 for the
|
|
||||||
specific requirements.
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or school,
|
|
||||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
|
||||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
|
||||||
<https://www.gnu.org/licenses/>.
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
# Contributing
|
|
||||||
|
|
||||||
Contributions come in a variety of forms. Developing Serai, helping document it,
|
|
||||||
using its libraries in another project, using and testing it, and simply sharing
|
|
||||||
it are all valuable ways of contributing.
|
|
||||||
|
|
||||||
This document will specifically focus on contributions to this repository in the
|
|
||||||
form of code and documentation.
|
|
||||||
|
|
||||||
### Rules
|
|
||||||
|
|
||||||
- Stable native Rust, nightly wasm and tools.
|
|
||||||
- `cargo fmt` must be used.
|
|
||||||
- `cargo clippy` must pass, except for the ignored rules (`type_complexity` and
|
|
||||||
`dead_code`).
|
|
||||||
- The CI must pass.
|
|
||||||
|
|
||||||
- Only use uppercase variable names when relevant to cryptography.
|
|
||||||
|
|
||||||
- Use a two-space ident when possible.
|
|
||||||
- Put a space after comment markers.
|
|
||||||
- Don't use multiple newlines between sections of code.
|
|
||||||
- Have a newline before EOF.
|
|
||||||
|
|
||||||
### Guidelines
|
|
||||||
|
|
||||||
- Sort inputs as core, std, third party, and then Serai.
|
|
||||||
- Comment code reasonably.
|
|
||||||
- Include tests for new features.
|
|
||||||
- Sign commits.
|
|
||||||
|
|
||||||
### Submission
|
|
||||||
|
|
||||||
All submissions should be through GitHub. Contributions to a crate will be
|
|
||||||
licensed according to the crate's existing license, with the crate's copyright
|
|
||||||
holders (distinct from authors) having the right to re-license the crate via a
|
|
||||||
unanimous decision.
|
|
||||||
12192
Cargo.lock
generated
12192
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
60
Cargo.toml
60
Cargo.toml
@@ -1,72 +1,16 @@
|
|||||||
[workspace]
|
[workspace]
|
||||||
members = [
|
|
||||||
"common/std-shims",
|
|
||||||
"common/zalloc",
|
|
||||||
"common/db",
|
|
||||||
|
|
||||||
|
members = [
|
||||||
"crypto/transcript",
|
"crypto/transcript",
|
||||||
|
|
||||||
"crypto/ff-group-tests",
|
|
||||||
"crypto/dalek-ff-group",
|
"crypto/dalek-ff-group",
|
||||||
"crypto/ed448",
|
|
||||||
"crypto/ciphersuite",
|
|
||||||
|
|
||||||
"crypto/multiexp",
|
"crypto/multiexp",
|
||||||
|
|
||||||
"crypto/schnorr",
|
|
||||||
"crypto/dleq",
|
"crypto/dleq",
|
||||||
"crypto/dkg",
|
|
||||||
"crypto/frost",
|
"crypto/frost",
|
||||||
"crypto/schnorrkel",
|
|
||||||
|
|
||||||
"coins/ethereum",
|
|
||||||
"coins/monero/generators",
|
|
||||||
"coins/monero",
|
"coins/monero",
|
||||||
|
"coins/firo",
|
||||||
|
|
||||||
"message-queue",
|
|
||||||
|
|
||||||
"processor/messages",
|
|
||||||
"processor",
|
"processor",
|
||||||
|
|
||||||
"coordinator/tributary/tendermint",
|
|
||||||
"coordinator/tributary",
|
|
||||||
"coordinator",
|
|
||||||
|
|
||||||
"substrate/primitives",
|
|
||||||
|
|
||||||
"substrate/tokens/primitives",
|
|
||||||
"substrate/tokens/pallet",
|
|
||||||
|
|
||||||
"substrate/in-instructions/primitives",
|
|
||||||
"substrate/in-instructions/pallet",
|
|
||||||
|
|
||||||
"substrate/validator-sets/primitives",
|
|
||||||
"substrate/validator-sets/pallet",
|
|
||||||
|
|
||||||
"substrate/runtime",
|
|
||||||
"substrate/node",
|
|
||||||
|
|
||||||
"substrate/client",
|
|
||||||
|
|
||||||
"tests/no-std",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
# Always compile Monero (and a variety of dependencies) with optimizations due
|
|
||||||
# to the extensive operations required for Bulletproofs
|
|
||||||
[profile.dev.package]
|
|
||||||
subtle = { opt-level = 3 }
|
|
||||||
curve25519-dalek = { opt-level = 3 }
|
|
||||||
|
|
||||||
ff = { opt-level = 3 }
|
|
||||||
group = { opt-level = 3 }
|
|
||||||
|
|
||||||
crypto-bigint = { opt-level = 3 }
|
|
||||||
dalek-ff-group = { opt-level = 3 }
|
|
||||||
minimal-ed448 = { opt-level = 3 }
|
|
||||||
|
|
||||||
multiexp = { opt-level = 3 }
|
|
||||||
|
|
||||||
monero-serai = { opt-level = 3 }
|
|
||||||
|
|
||||||
[profile.release]
|
|
||||||
panic = "unwind"
|
|
||||||
|
|||||||
8
LICENSE
8
LICENSE
@@ -1,8 +0,0 @@
|
|||||||
Serai crates are licensed under one of two licenses, either MIT or AGPL-3.0,
|
|
||||||
depending on the crate in question. Each crate declares their license in their
|
|
||||||
`Cargo.toml` and includes a `LICENSE` file detailing its status. Additionally,
|
|
||||||
a full copy of the AGPL-3.0 License is included in the root of this repository
|
|
||||||
as a reference text. This copy should be provided with any distribution of a
|
|
||||||
crate licensed under the AGPL-3.0, as per its terms.
|
|
||||||
|
|
||||||
The GitHub actions (`.github/actions`) are licensed under the MIT license.
|
|
||||||
38
README.md
38
README.md
@@ -1,44 +1,22 @@
|
|||||||
# Serai
|
# Serai
|
||||||
|
|
||||||
Serai is a new DEX, built from the ground up, initially planning on listing
|
Serai is a new DEX, built from the ground up, initially planning on listing
|
||||||
Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
|
Bitcoin, Ethereum, Monero, DAI, and USDC, offering a liquidity pool trading
|
||||||
experience. Funds are stored in an economically secured threshold-multisig
|
experience. Funds are stored in an economically secured threshold multisig
|
||||||
wallet.
|
wallet.
|
||||||
|
|
||||||
[Getting Started](docs/Getting%20Started.md)
|
|
||||||
|
|
||||||
### Layout
|
### Layout
|
||||||
|
|
||||||
- `audits`: Audits for various parts of Serai.
|
- `docs` - Documentation on the Serai protocol.
|
||||||
|
|
||||||
- `docs`: Documentation on the Serai protocol.
|
- `coins` - Various coin libraries intended for usage in Serai yet also by the
|
||||||
|
wider community. This means they will always support the functionality Serai
|
||||||
|
needs, yet won't disadvantage other use cases when possible.
|
||||||
|
|
||||||
- `common`: Crates containing utilities common to a variety of areas under
|
- `crypto` - A series of composable cryptographic libraries built around the
|
||||||
Serai, none neatly fitting under another category.
|
|
||||||
|
|
||||||
- `crypto`: A series of composable cryptographic libraries built around the
|
|
||||||
`ff`/`group` APIs achieving a variety of tasks. These range from generic
|
`ff`/`group` APIs achieving a variety of tasks. These range from generic
|
||||||
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
||||||
needed for Bitcoin-Monero atomic swaps.
|
needed for Bitcoin-Monero atomic swaps.
|
||||||
|
|
||||||
- `coins`: Various coin libraries intended for usage in Serai yet also by the
|
- `processor` - A generic chain processor to process data for Serai and process
|
||||||
wider community. This means they will always support the functionality Serai
|
|
||||||
needs, yet won't disadvantage other use cases when possible.
|
|
||||||
|
|
||||||
- `processor`: A generic chain processor to process data for Serai and process
|
|
||||||
events from Serai, executing transactions as expected and needed.
|
events from Serai, executing transactions as expected and needed.
|
||||||
|
|
||||||
- `coordinator`: A service to manage processors and communicate over a P2P
|
|
||||||
network with other validators.
|
|
||||||
|
|
||||||
- `substrate`: Substrate crates used to instantiate the Serai network.
|
|
||||||
|
|
||||||
- `deploy`: Scripts to deploy a Serai node/test environment.
|
|
||||||
|
|
||||||
### Links
|
|
||||||
|
|
||||||
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
|
|
||||||
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
|
|
||||||
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
|
|
||||||
- [Matrix](https://matrix.to/#/#serai:matrix.org):
|
|
||||||
https://matrix.to/#/#serai:matrix.org
|
|
||||||
|
|||||||
Binary file not shown.
@@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2023 Cypher Stack
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
# Cypher Stack /crypto Audit, March 2023
|
|
||||||
|
|
||||||
This audit was over the /crypto folder, excluding the ed448 crate, the `Ed448`
|
|
||||||
ciphersuite in the ciphersuite crate, and the `dleq/experimental` feature. It is
|
|
||||||
encompassing up to commit 669d2dbffc1dafb82a09d9419ea182667115df06.
|
|
||||||
|
|
||||||
Please see https://github.com/cypherstack/serai-audit for provenance.
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
# No warnings allowed
|
|
||||||
-D warnings
|
|
||||||
|
|
||||||
# nursery
|
|
||||||
-D clippy::nursery
|
|
||||||
# Erratic and unhelpful
|
|
||||||
-A clippy::missing_const_for_fn
|
|
||||||
# Too many false/irrelevant positives
|
|
||||||
-A clippy::redundant_pub_crate
|
|
||||||
# Flags on any debug_assert using an RNG
|
|
||||||
-A clippy::debug_assert_with_mut_call
|
|
||||||
# Stylistic preference
|
|
||||||
-A clippy::option_if_let_else
|
|
||||||
|
|
||||||
# pedantic
|
|
||||||
-D clippy::unnecessary_wraps
|
|
||||||
-D clippy::unused_async
|
|
||||||
-D clippy::unused_self
|
|
||||||
|
|
||||||
# restrictions
|
|
||||||
|
|
||||||
# Safety
|
|
||||||
-D clippy::as_conversions
|
|
||||||
-D clippy::disallowed_script_idents
|
|
||||||
-D clippy::wildcard_enum_match_arm
|
|
||||||
|
|
||||||
# Clarity
|
|
||||||
-D clippy::assertions_on_result_states
|
|
||||||
-D clippy::deref_by_slicing
|
|
||||||
-D clippy::empty_structs_with_brackets
|
|
||||||
-D clippy::get_unwrap
|
|
||||||
-D clippy::rest_pat_in_fully_bound_structs
|
|
||||||
-D clippy::semicolon_inside_block
|
|
||||||
-D clippy::tests_outside_test_module
|
|
||||||
|
|
||||||
# Quality
|
|
||||||
-D clippy::format_push_string
|
|
||||||
-D clippy::string_to_string
|
|
||||||
|
|
||||||
# These potentially should be enabled in the future
|
|
||||||
# -D clippy::missing_errors_doc
|
|
||||||
# -D clippy::missing_panics_doc
|
|
||||||
# -D clippy::doc_markdown
|
|
||||||
|
|
||||||
# TODO: Enable this
|
|
||||||
# -D clippy::cargo
|
|
||||||
|
|
||||||
# Not in nightly yet
|
|
||||||
# -D clippy::redundant_type_annotations
|
|
||||||
# -D clippy::big_endian_bytes
|
|
||||||
# -D clippy::host_endian_bytes
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "bitcoin-serai"
|
|
||||||
version = "0.2.0"
|
|
||||||
description = "A Bitcoin library for FROST-signing transactions"
|
|
||||||
license = "MIT"
|
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
lazy_static = "1"
|
|
||||||
thiserror = "1"
|
|
||||||
|
|
||||||
zeroize = "^1.5"
|
|
||||||
rand_core = "0.6"
|
|
||||||
|
|
||||||
sha2 = "0.10"
|
|
||||||
|
|
||||||
secp256k1 = { version = "0.27", features = ["global-context"] }
|
|
||||||
bitcoin = { version = "0.30", features = ["serde"] }
|
|
||||||
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits"] }
|
|
||||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"] }
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["secp256k1"] }
|
|
||||||
|
|
||||||
hex = "0.4"
|
|
||||||
serde = { version = "1", features = ["derive"] }
|
|
||||||
serde_json = "1"
|
|
||||||
reqwest = { version = "0.11", features = ["json"] }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
|
|
||||||
|
|
||||||
tokio = { version = "1", features = ["full"] }
|
|
||||||
|
|
||||||
[features]
|
|
||||||
hazmat = []
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
# bitcoin-serai
|
|
||||||
|
|
||||||
An application of [modular-frost](https://docs.rs/modular-frost) to Bitcoin
|
|
||||||
transactions, enabling extremely-efficient multisigs.
|
|
||||||
@@ -1,160 +0,0 @@
|
|||||||
use core::fmt::Debug;
|
|
||||||
use std::io;
|
|
||||||
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
|
|
||||||
use zeroize::Zeroizing;
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use transcript::Transcript;
|
|
||||||
|
|
||||||
use secp256k1::schnorr::Signature;
|
|
||||||
use k256::{
|
|
||||||
elliptic_curve::{
|
|
||||||
ops::Reduce,
|
|
||||||
sec1::{Tag, ToEncodedPoint},
|
|
||||||
},
|
|
||||||
U256, Scalar, ProjectivePoint,
|
|
||||||
};
|
|
||||||
use frost::{
|
|
||||||
curve::{Ciphersuite, Secp256k1},
|
|
||||||
Participant, ThresholdKeys, ThresholdView, FrostError,
|
|
||||||
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
|
||||||
};
|
|
||||||
|
|
||||||
use bitcoin::key::XOnlyPublicKey;
|
|
||||||
|
|
||||||
/// Get the x coordinate of a non-infinity, even point. Panics on invalid input.
|
|
||||||
pub fn x(key: &ProjectivePoint) -> [u8; 32] {
|
|
||||||
let encoded = key.to_encoded_point(true);
|
|
||||||
assert_eq!(encoded.tag(), Tag::CompressedEvenY, "x coordinate of odd key");
|
|
||||||
(*encoded.x().expect("point at infinity")).into()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
|
|
||||||
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
|
|
||||||
XOnlyPublicKey::from_slice(&x(key)).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Make a point even by adding the generator until it is even. Returns the even point and the
|
|
||||||
/// amount of additions required.
|
|
||||||
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
|
||||||
let mut c = 0;
|
|
||||||
while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
|
|
||||||
key += ProjectivePoint::GENERATOR;
|
|
||||||
c += 1;
|
|
||||||
}
|
|
||||||
(key, c)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
|
|
||||||
///
|
|
||||||
/// If passed an odd nonce, it will have the generator added until it is even.
|
|
||||||
#[derive(Clone, Copy, Debug)]
|
|
||||||
pub struct Hram {}
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
impl HramTrait<Secp256k1> for Hram {
|
|
||||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
|
||||||
// Convert the nonce to be even
|
|
||||||
let (R, _) = make_even(*R);
|
|
||||||
|
|
||||||
let mut data = Sha256::new();
|
|
||||||
data.update(*TAG_HASH);
|
|
||||||
data.update(*TAG_HASH);
|
|
||||||
data.update(x(&R));
|
|
||||||
data.update(x(A));
|
|
||||||
data.update(m);
|
|
||||||
|
|
||||||
Scalar::reduce(U256::from_be_slice(&data.finalize()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// BIP-340 Schnorr signature algorithm.
|
|
||||||
///
|
|
||||||
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
|
||||||
#[derive(Clone)]
|
|
||||||
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
|
||||||
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
|
||||||
/// Construct a Schnorr algorithm continuing the specified transcript.
|
|
||||||
pub fn new(transcript: T) -> Schnorr<T> {
|
|
||||||
Schnorr(FrostSchnorr::new(transcript))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
|
||||||
type Transcript = T;
|
|
||||||
type Addendum = ();
|
|
||||||
type Signature = Signature;
|
|
||||||
|
|
||||||
fn transcript(&mut self) -> &mut Self::Transcript {
|
|
||||||
self.0.transcript()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
|
|
||||||
self.0.nonces()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
|
||||||
&mut self,
|
|
||||||
rng: &mut R,
|
|
||||||
keys: &ThresholdKeys<Secp256k1>,
|
|
||||||
) {
|
|
||||||
self.0.preprocess_addendum(rng, keys)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
|
|
||||||
self.0.read_addendum(reader)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn process_addendum(
|
|
||||||
&mut self,
|
|
||||||
view: &ThresholdView<Secp256k1>,
|
|
||||||
i: Participant,
|
|
||||||
addendum: (),
|
|
||||||
) -> Result<(), FrostError> {
|
|
||||||
self.0.process_addendum(view, i, addendum)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sign_share(
|
|
||||||
&mut self,
|
|
||||||
params: &ThresholdView<Secp256k1>,
|
|
||||||
nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
|
|
||||||
nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
|
|
||||||
msg: &[u8],
|
|
||||||
) -> <Secp256k1 as Ciphersuite>::F {
|
|
||||||
self.0.sign_share(params, nonce_sums, nonces, msg)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
fn verify(
|
|
||||||
&self,
|
|
||||||
group_key: ProjectivePoint,
|
|
||||||
nonces: &[Vec<ProjectivePoint>],
|
|
||||||
sum: Scalar,
|
|
||||||
) -> Option<Self::Signature> {
|
|
||||||
self.0.verify(group_key, nonces, sum).map(|mut sig| {
|
|
||||||
// Make the R of the final signature even
|
|
||||||
let offset;
|
|
||||||
(sig.R, offset) = make_even(sig.R);
|
|
||||||
// s = r + cx. Since we added to the r, add to s
|
|
||||||
sig.s += Scalar::from(offset);
|
|
||||||
// Convert to a secp256k1 signature
|
|
||||||
Signature::from_slice(&sig.serialize()[1 ..]).unwrap()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn verify_share(
|
|
||||||
&self,
|
|
||||||
verification_share: ProjectivePoint,
|
|
||||||
nonces: &[Vec<ProjectivePoint>],
|
|
||||||
share: Scalar,
|
|
||||||
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
|
|
||||||
self.0.verify_share(verification_share, nonces, share)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
|
||||||
#![doc = include_str!("../README.md")]
|
|
||||||
|
|
||||||
/// The bitcoin Rust library.
|
|
||||||
pub use bitcoin;
|
|
||||||
|
|
||||||
/// Cryptographic helpers.
|
|
||||||
#[cfg(feature = "hazmat")]
|
|
||||||
pub mod crypto;
|
|
||||||
#[cfg(not(feature = "hazmat"))]
|
|
||||||
pub(crate) mod crypto;
|
|
||||||
|
|
||||||
/// Wallet functionality to create transactions.
|
|
||||||
pub mod wallet;
|
|
||||||
/// A minimal asynchronous Bitcoin RPC client.
|
|
||||||
pub mod rpc;
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests;
|
|
||||||
@@ -1,145 +0,0 @@
|
|||||||
use core::fmt::Debug;
|
|
||||||
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
use serde::{Deserialize, de::DeserializeOwned};
|
|
||||||
use serde_json::json;
|
|
||||||
|
|
||||||
use bitcoin::{
|
|
||||||
hashes::{Hash, hex::FromHex},
|
|
||||||
consensus::encode,
|
|
||||||
Txid, Transaction, BlockHash, Block,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Deserialize)]
|
|
||||||
pub struct Error {
|
|
||||||
code: isize,
|
|
||||||
message: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Deserialize)]
|
|
||||||
#[serde(untagged)]
|
|
||||||
enum RpcResponse<T> {
|
|
||||||
Ok { result: T },
|
|
||||||
Err { error: Error },
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A minimal asynchronous Bitcoin RPC client.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Rpc(String);
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
|
||||||
pub enum RpcError {
|
|
||||||
#[error("couldn't connect to node")]
|
|
||||||
ConnectionError,
|
|
||||||
#[error("request had an error: {0:?}")]
|
|
||||||
RequestError(Error),
|
|
||||||
#[error("node sent an invalid response")]
|
|
||||||
InvalidResponse,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Rpc {
|
|
||||||
pub async fn new(url: String) -> Result<Rpc, RpcError> {
|
|
||||||
let rpc = Rpc(url);
|
|
||||||
// Make an RPC request to verify the node is reachable and sane
|
|
||||||
rpc.get_latest_block_number().await?;
|
|
||||||
Ok(rpc)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Perform an arbitrary RPC call.
|
|
||||||
pub async fn rpc_call<Response: DeserializeOwned + Debug>(
|
|
||||||
&self,
|
|
||||||
method: &str,
|
|
||||||
params: serde_json::Value,
|
|
||||||
) -> Result<Response, RpcError> {
|
|
||||||
let client = reqwest::Client::new();
|
|
||||||
let res = client
|
|
||||||
.post(&self.0)
|
|
||||||
.json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.map_err(|_| RpcError::ConnectionError)?
|
|
||||||
.text()
|
|
||||||
.await
|
|
||||||
.map_err(|_| RpcError::ConnectionError)?;
|
|
||||||
|
|
||||||
let res: RpcResponse<Response> =
|
|
||||||
serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
|
|
||||||
match res {
|
|
||||||
RpcResponse::Ok { result } => Ok(result),
|
|
||||||
RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the latest block's number.
|
|
||||||
///
|
|
||||||
/// The genesis block's 'number' is zero. They increment from there.
|
|
||||||
pub async fn get_latest_block_number(&self) -> Result<usize, RpcError> {
|
|
||||||
// getblockcount doesn't return the amount of blocks on the current chain, yet the "height"
|
|
||||||
// of the current chain. The "height" of the current chain is defined as the "height" of the
|
|
||||||
// tip block of the current chain. The "height" of a block is defined as the amount of blocks
|
|
||||||
// present when the block was created. Accordingly, the genesis block has height 0, and
|
|
||||||
// getblockcount will return 0 when it's only the only block, despite their being one block.
|
|
||||||
self.rpc_call("getblockcount", json!([])).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the hash of a block by the block's number.
|
|
||||||
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
|
||||||
let mut hash = *self
|
|
||||||
.rpc_call::<BlockHash>("getblockhash", json!([number]))
|
|
||||||
.await?
|
|
||||||
.as_raw_hash()
|
|
||||||
.as_byte_array();
|
|
||||||
// bitcoin stores the inner bytes in reverse order.
|
|
||||||
hash.reverse();
|
|
||||||
Ok(hash)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a block's number by its hash.
|
|
||||||
pub async fn get_block_number(&self, hash: &[u8; 32]) -> Result<usize, RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Number {
|
|
||||||
height: usize,
|
|
||||||
}
|
|
||||||
Ok(self.rpc_call::<Number>("getblockheader", json!([hex::encode(hash)])).await?.height)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a block by its hash.
|
|
||||||
pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
|
|
||||||
let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
|
|
||||||
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
|
|
||||||
let block: Block = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
|
|
||||||
|
|
||||||
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
|
|
||||||
block_hash.reverse();
|
|
||||||
if hash != &block_hash {
|
|
||||||
Err(RpcError::InvalidResponse)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(block)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Publish a transaction.
|
|
||||||
pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
|
|
||||||
let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
|
|
||||||
if txid != tx.txid() {
|
|
||||||
Err(RpcError::InvalidResponse)?;
|
|
||||||
}
|
|
||||||
Ok(txid)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a transaction by its hash.
|
|
||||||
pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
|
|
||||||
let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
|
|
||||||
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
|
|
||||||
let tx: Transaction = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
|
|
||||||
|
|
||||||
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
|
|
||||||
tx_hash.reverse();
|
|
||||||
if hash != &tx_hash {
|
|
||||||
Err(RpcError::InvalidResponse)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(tx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
|
|
||||||
use secp256k1::{SECP256K1, Message};
|
|
||||||
|
|
||||||
use k256::Scalar;
|
|
||||||
use transcript::{Transcript, RecommendedTranscript};
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
Participant,
|
|
||||||
tests::{algorithm_machines, key_gen, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
bitcoin::hashes::{Hash as HashTrait, sha256::Hash},
|
|
||||||
crypto::{x_only, make_even, Schnorr},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_algorithm() {
|
|
||||||
let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
|
|
||||||
for (_, keys) in keys.iter_mut() {
|
|
||||||
let (_, offset) = make_even(keys.group_key());
|
|
||||||
*keys = keys.offset(Scalar::from(offset));
|
|
||||||
}
|
|
||||||
|
|
||||||
let algo =
|
|
||||||
Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
algo.clone(),
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, algo, &keys),
|
|
||||||
&Sha256::digest(MESSAGE),
|
|
||||||
);
|
|
||||||
|
|
||||||
SECP256K1
|
|
||||||
.verify_schnorr(
|
|
||||||
&sig,
|
|
||||||
&Message::from(Hash::hash(MESSAGE)),
|
|
||||||
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
|
||||||
)
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
mod crypto;
|
|
||||||
@@ -1,160 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io::{self, Read, Write},
|
|
||||||
collections::HashMap,
|
|
||||||
};
|
|
||||||
|
|
||||||
use k256::{
|
|
||||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
|
||||||
Scalar, ProjectivePoint,
|
|
||||||
};
|
|
||||||
use frost::{
|
|
||||||
curve::{Ciphersuite, Secp256k1},
|
|
||||||
ThresholdKeys,
|
|
||||||
};
|
|
||||||
|
|
||||||
use bitcoin::{
|
|
||||||
consensus::encode::{Decodable, serialize},
|
|
||||||
key::TweakedPublicKey,
|
|
||||||
OutPoint, ScriptBuf, TxOut, Transaction, Block, Network, Address,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::crypto::{x_only, make_even};
|
|
||||||
|
|
||||||
mod send;
|
|
||||||
pub use send::*;
|
|
||||||
|
|
||||||
/// Tweak keys to ensure they're usable with Bitcoin.
|
|
||||||
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
|
||||||
let (_, offset) = make_even(keys.group_key());
|
|
||||||
keys.offset(Scalar::from(offset))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Return the Taproot address for a public key.
|
|
||||||
pub fn address(network: Network, key: ProjectivePoint) -> Option<Address> {
|
|
||||||
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(Address::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)), network))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A spendable output.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct ReceivedOutput {
|
|
||||||
// The scalar offset to obtain the key usable to spend this output.
|
|
||||||
offset: Scalar,
|
|
||||||
// The output to spend.
|
|
||||||
output: TxOut,
|
|
||||||
// The TX ID and vout of the output to spend.
|
|
||||||
outpoint: OutPoint,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ReceivedOutput {
|
|
||||||
/// The offset for this output.
|
|
||||||
pub fn offset(&self) -> Scalar {
|
|
||||||
self.offset
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The outpoint for this output.
|
|
||||||
pub fn outpoint(&self) -> &OutPoint {
|
|
||||||
&self.outpoint
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The value of this output.
|
|
||||||
pub fn value(&self) -> u64 {
|
|
||||||
self.output.value
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read a ReceivedOutput from a generic satisfying Read.
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
|
||||||
Ok(ReceivedOutput {
|
|
||||||
offset: Secp256k1::read_F(r)?,
|
|
||||||
output: TxOut::consensus_decode(r)
|
|
||||||
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
|
|
||||||
outpoint: OutPoint::consensus_decode(r)
|
|
||||||
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Write a ReceivedOutput to a generic satisfying Write.
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
w.write_all(&self.offset.to_bytes())?;
|
|
||||||
w.write_all(&serialize(&self.output))?;
|
|
||||||
w.write_all(&serialize(&self.outpoint))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Serialize a ReceivedOutput to a Vec<u8>.
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
let mut res = vec![];
|
|
||||||
self.write(&mut res).unwrap();
|
|
||||||
res
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A transaction scanner capable of being used with HDKD schemes.
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Scanner {
|
|
||||||
key: ProjectivePoint,
|
|
||||||
scripts: HashMap<ScriptBuf, Scalar>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Scanner {
|
|
||||||
/// Construct a Scanner for a key.
|
|
||||||
///
|
|
||||||
/// Returns None if this key can't be scanned for.
|
|
||||||
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
|
||||||
let mut scripts = HashMap::new();
|
|
||||||
// Uses Network::Bitcoin since network is irrelevant here
|
|
||||||
scripts.insert(address(Network::Bitcoin, key)?.script_pubkey(), Scalar::ZERO);
|
|
||||||
Some(Scanner { key, scripts })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Register an offset to scan for.
|
|
||||||
///
|
|
||||||
/// Due to Bitcoin's requirement that points are even, not every offset may be used.
|
|
||||||
/// If an offset isn't usable, it will be incremented until it is. If this offset is already
|
|
||||||
/// present, None is returned. Else, Some(offset) will be, with the used offset.
|
|
||||||
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
|
|
||||||
loop {
|
|
||||||
match address(Network::Bitcoin, self.key + (ProjectivePoint::GENERATOR * offset)) {
|
|
||||||
Some(address) => {
|
|
||||||
let script = address.script_pubkey();
|
|
||||||
if self.scripts.contains_key(&script) {
|
|
||||||
None?;
|
|
||||||
}
|
|
||||||
self.scripts.insert(script, offset);
|
|
||||||
return Some(offset);
|
|
||||||
}
|
|
||||||
None => offset += Scalar::ONE,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Scan a transaction.
|
|
||||||
pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
|
|
||||||
let mut res = vec![];
|
|
||||||
for (vout, output) in tx.output.iter().enumerate() {
|
|
||||||
if let Some(offset) = self.scripts.get(&output.script_pubkey) {
|
|
||||||
res.push(ReceivedOutput {
|
|
||||||
offset: *offset,
|
|
||||||
output: output.clone(),
|
|
||||||
outpoint: OutPoint::new(tx.txid(), u32::try_from(vout).unwrap()),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Scan a block.
|
|
||||||
///
|
|
||||||
/// This will also scan the coinbase transaction which is bound by maturity. If received outputs
|
|
||||||
/// must be immediately spendable, a post-processing pass is needed to remove those outputs.
|
|
||||||
/// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
|
|
||||||
pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
|
|
||||||
let mut res = vec![];
|
|
||||||
for tx in &block.txdata {
|
|
||||||
res.extend(self.scan_transaction(tx));
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,382 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io::{self, Read},
|
|
||||||
collections::HashMap,
|
|
||||||
};
|
|
||||||
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use transcript::{Transcript, RecommendedTranscript};
|
|
||||||
|
|
||||||
use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
|
|
||||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
|
||||||
|
|
||||||
use bitcoin::{
|
|
||||||
sighash::{TapSighashType, SighashCache, Prevouts},
|
|
||||||
absolute::LockTime,
|
|
||||||
script::{PushBytesBuf, ScriptBuf},
|
|
||||||
OutPoint, Sequence, Witness, TxIn, TxOut, Transaction, Network, Address,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
crypto::Schnorr,
|
|
||||||
wallet::{address, ReceivedOutput},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[rustfmt::skip]
|
|
||||||
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.h#L27
|
|
||||||
const MAX_STANDARD_TX_WEIGHT: u64 = 400_000;
|
|
||||||
|
|
||||||
#[rustfmt::skip]
|
|
||||||
//https://github.com/bitcoin/bitcoin/blob/a245429d680eb95cf4c0c78e58e63e3f0f5d979a/src/test/transaction_tests.cpp#L815-L816
|
|
||||||
const DUST: u64 = 674;
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
|
||||||
pub enum TransactionError {
|
|
||||||
#[error("no inputs were specified")]
|
|
||||||
NoInputs,
|
|
||||||
#[error("no outputs were created")]
|
|
||||||
NoOutputs,
|
|
||||||
#[error("a specified payment's amount was less than bitcoin's required minimum")]
|
|
||||||
DustPayment,
|
|
||||||
#[error("too much data was specified")]
|
|
||||||
TooMuchData,
|
|
||||||
#[error("not enough funds for these payments")]
|
|
||||||
NotEnoughFunds,
|
|
||||||
#[error("transaction was too large")]
|
|
||||||
TooLargeTransaction,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A signable transaction, clone-able across attempts.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct SignableTransaction {
|
|
||||||
tx: Transaction,
|
|
||||||
offsets: Vec<Scalar>,
|
|
||||||
prevouts: Vec<TxOut>,
|
|
||||||
needed_fee: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SignableTransaction {
|
|
||||||
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
|
||||||
// Expand this a full transaction in order to use the bitcoin library's weight function
|
|
||||||
let mut tx = Transaction {
|
|
||||||
version: 2,
|
|
||||||
lock_time: LockTime::ZERO,
|
|
||||||
input: vec![
|
|
||||||
TxIn {
|
|
||||||
// This is a fixed size
|
|
||||||
// See https://developer.bitcoin.org/reference/transactions.html#raw-transaction-format
|
|
||||||
previous_output: OutPoint::default(),
|
|
||||||
// This is empty for a Taproot spend
|
|
||||||
script_sig: ScriptBuf::new(),
|
|
||||||
// This is fixed size, yet we do use Sequence::MAX
|
|
||||||
sequence: Sequence::MAX,
|
|
||||||
// Our witnesses contains a single 64-byte signature
|
|
||||||
witness: Witness::from_slice(&[vec![0; 64]])
|
|
||||||
};
|
|
||||||
inputs
|
|
||||||
],
|
|
||||||
output: payments
|
|
||||||
.iter()
|
|
||||||
// The payment is a fixed size so we don't have to use it here
|
|
||||||
// The script pub key is not of a fixed size and does have to be used here
|
|
||||||
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
|
|
||||||
.collect(),
|
|
||||||
};
|
|
||||||
if let Some(change) = change {
|
|
||||||
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
|
||||||
// the value is fixed size (so any value could be used here)
|
|
||||||
tx.output.push(TxOut { value: 0, script_pubkey: change.script_pubkey() });
|
|
||||||
}
|
|
||||||
u64::try_from(tx.weight()).unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
|
|
||||||
/// construction.
|
|
||||||
///
|
|
||||||
/// The actual fee this transaction will use is `sum(inputs) - sum(outputs)`.
|
|
||||||
pub fn needed_fee(&self) -> u64 {
|
|
||||||
self.needed_fee
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a new SignableTransaction.
|
|
||||||
///
|
|
||||||
/// If a change address is specified, any leftover funds will be sent to it if the leftover funds
|
|
||||||
/// exceed the minimum output amount. If a change address isn't specified, all leftover funds
|
|
||||||
/// will become part of the paid fee.
|
|
||||||
///
|
|
||||||
/// If data is specified, an OP_RETURN output will be added with it.
|
|
||||||
pub fn new(
|
|
||||||
mut inputs: Vec<ReceivedOutput>,
|
|
||||||
payments: &[(Address, u64)],
|
|
||||||
change: Option<Address>,
|
|
||||||
data: Option<Vec<u8>>,
|
|
||||||
fee_per_weight: u64,
|
|
||||||
) -> Result<SignableTransaction, TransactionError> {
|
|
||||||
if inputs.is_empty() {
|
|
||||||
Err(TransactionError::NoInputs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if payments.is_empty() && change.is_none() && data.is_none() {
|
|
||||||
Err(TransactionError::NoOutputs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (_, amount) in payments {
|
|
||||||
if *amount < DUST {
|
|
||||||
Err(TransactionError::DustPayment)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if data.as_ref().map(|data| data.len()).unwrap_or(0) > 80 {
|
|
||||||
Err(TransactionError::TooMuchData)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let input_sat = inputs.iter().map(|input| input.output.value).sum::<u64>();
|
|
||||||
let offsets = inputs.iter().map(|input| input.offset).collect();
|
|
||||||
let tx_ins = inputs
|
|
||||||
.iter()
|
|
||||||
.map(|input| TxIn {
|
|
||||||
previous_output: input.outpoint,
|
|
||||||
script_sig: ScriptBuf::new(),
|
|
||||||
sequence: Sequence::MAX,
|
|
||||||
witness: Witness::new(),
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
|
||||||
let mut tx_outs = payments
|
|
||||||
.iter()
|
|
||||||
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
// Add the OP_RETURN output
|
|
||||||
if let Some(data) = data {
|
|
||||||
tx_outs.push(TxOut {
|
|
||||||
value: 0,
|
|
||||||
script_pubkey: ScriptBuf::new_op_return(
|
|
||||||
&PushBytesBuf::try_from(data)
|
|
||||||
.expect("data didn't fit into PushBytes depsite being checked"),
|
|
||||||
),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
|
|
||||||
let mut needed_fee = fee_per_weight * weight;
|
|
||||||
if input_sat < (payment_sat + needed_fee) {
|
|
||||||
Err(TransactionError::NotEnoughFunds)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there's a change address, check if there's change to give it
|
|
||||||
if let Some(change) = change.as_ref() {
|
|
||||||
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
|
|
||||||
let fee_with_change = fee_per_weight * weight_with_change;
|
|
||||||
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
|
||||||
if value >= DUST {
|
|
||||||
tx_outs.push(TxOut { value, script_pubkey: change.script_pubkey() });
|
|
||||||
weight = weight_with_change;
|
|
||||||
needed_fee = fee_with_change;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if tx_outs.is_empty() {
|
|
||||||
Err(TransactionError::NoOutputs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if weight > MAX_STANDARD_TX_WEIGHT {
|
|
||||||
Err(TransactionError::TooLargeTransaction)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(SignableTransaction {
|
|
||||||
tx: Transaction { version: 2, lock_time: LockTime::ZERO, input: tx_ins, output: tx_outs },
|
|
||||||
offsets,
|
|
||||||
prevouts: inputs.drain(..).map(|input| input.output).collect(),
|
|
||||||
needed_fee,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Create a multisig machine for this transaction.
|
|
||||||
///
|
|
||||||
/// Returns None if the wrong keys are used.
|
|
||||||
pub fn multisig(
|
|
||||||
self,
|
|
||||||
keys: ThresholdKeys<Secp256k1>,
|
|
||||||
mut transcript: RecommendedTranscript,
|
|
||||||
) -> Option<TransactionMachine> {
|
|
||||||
transcript.domain_separate(b"bitcoin_transaction");
|
|
||||||
transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());
|
|
||||||
|
|
||||||
// Transcript the inputs and outputs
|
|
||||||
let tx = &self.tx;
|
|
||||||
for input in &tx.input {
|
|
||||||
transcript.append_message(b"input_hash", input.previous_output.txid);
|
|
||||||
transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
|
|
||||||
}
|
|
||||||
for payment in &tx.output {
|
|
||||||
transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
|
|
||||||
transcript.append_message(b"output_amount", payment.value.to_le_bytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut sigs = vec![];
|
|
||||||
for i in 0 .. tx.input.len() {
|
|
||||||
let mut transcript = transcript.clone();
|
|
||||||
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
|
||||||
|
|
||||||
let offset = keys.clone().offset(self.offsets[i]);
|
|
||||||
if address(Network::Bitcoin, offset.group_key())?.script_pubkey() !=
|
|
||||||
self.prevouts[i].script_pubkey
|
|
||||||
{
|
|
||||||
None?;
|
|
||||||
}
|
|
||||||
|
|
||||||
sigs.push(AlgorithmMachine::new(
|
|
||||||
Schnorr::new(transcript),
|
|
||||||
keys.clone().offset(self.offsets[i]),
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
Some(TransactionMachine { tx: self, sigs })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A FROST signing machine to produce a Bitcoin transaction.
|
|
||||||
///
|
|
||||||
/// This does not support caching its preprocess. When sign is called, the message must be empty.
|
|
||||||
/// This will panic if it isn't.
|
|
||||||
pub struct TransactionMachine {
|
|
||||||
tx: SignableTransaction,
|
|
||||||
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PreprocessMachine for TransactionMachine {
|
|
||||||
type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
|
|
||||||
type Signature = Transaction;
|
|
||||||
type SignMachine = TransactionSignMachine;
|
|
||||||
|
|
||||||
fn preprocess<R: RngCore + CryptoRng>(
|
|
||||||
mut self,
|
|
||||||
rng: &mut R,
|
|
||||||
) -> (Self::SignMachine, Self::Preprocess) {
|
|
||||||
let mut preprocesses = Vec::with_capacity(self.sigs.len());
|
|
||||||
let sigs = self
|
|
||||||
.sigs
|
|
||||||
.drain(..)
|
|
||||||
.map(|sig| {
|
|
||||||
let (sig, preprocess) = sig.preprocess(rng);
|
|
||||||
preprocesses.push(preprocess);
|
|
||||||
sig
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
(TransactionSignMachine { tx: self.tx, sigs }, preprocesses)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct TransactionSignMachine {
|
|
||||||
tx: SignableTransaction,
|
|
||||||
sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SignMachine<Transaction> for TransactionSignMachine {
|
|
||||||
type Params = ();
|
|
||||||
type Keys = ThresholdKeys<Secp256k1>;
|
|
||||||
type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
|
|
||||||
type SignatureShare = Vec<SignatureShare<Secp256k1>>;
|
|
||||||
type SignatureMachine = TransactionSignatureMachine;
|
|
||||||
|
|
||||||
fn cache(self) -> CachedPreprocess {
|
|
||||||
unimplemented!(
|
|
||||||
"Bitcoin transactions don't support caching their preprocesses due to {}",
|
|
||||||
"being already bound to a specific transaction"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn from_cache(
|
|
||||||
_: (),
|
|
||||||
_: ThresholdKeys<Secp256k1>,
|
|
||||||
_: CachedPreprocess,
|
|
||||||
) -> Result<Self, FrostError> {
|
|
||||||
unimplemented!(
|
|
||||||
"Bitcoin transactions don't support caching their preprocesses due to {}",
|
|
||||||
"being already bound to a specific transaction"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
|
|
||||||
self.sigs.iter().map(|sig| sig.read_preprocess(reader)).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sign(
|
|
||||||
mut self,
|
|
||||||
commitments: HashMap<Participant, Self::Preprocess>,
|
|
||||||
msg: &[u8],
|
|
||||||
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
|
||||||
if !msg.is_empty() {
|
|
||||||
panic!("message was passed to the TransactionMachine when it generates its own");
|
|
||||||
}
|
|
||||||
|
|
||||||
let commitments = (0 .. self.sigs.len())
|
|
||||||
.map(|c| {
|
|
||||||
commitments
|
|
||||||
.iter()
|
|
||||||
.map(|(l, commitments)| (*l, commitments[c].clone()))
|
|
||||||
.collect::<HashMap<_, _>>()
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let mut cache = SighashCache::new(&self.tx.tx);
|
|
||||||
// Sign committing to all inputs
|
|
||||||
let prevouts = Prevouts::All(&self.tx.prevouts);
|
|
||||||
|
|
||||||
let mut shares = Vec::with_capacity(self.sigs.len());
|
|
||||||
let sigs = self
|
|
||||||
.sigs
|
|
||||||
.drain(..)
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, sig)| {
|
|
||||||
let (sig, share) = sig.sign(
|
|
||||||
commitments[i].clone(),
|
|
||||||
cache
|
|
||||||
.taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
|
|
||||||
.unwrap()
|
|
||||||
.as_ref(),
|
|
||||||
)?;
|
|
||||||
shares.push(share);
|
|
||||||
Ok(sig)
|
|
||||||
})
|
|
||||||
.collect::<Result<_, _>>()?;
|
|
||||||
|
|
||||||
Ok((TransactionSignatureMachine { tx: self.tx.tx, sigs }, shares))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct TransactionSignatureMachine {
|
|
||||||
tx: Transaction,
|
|
||||||
sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
|
|
||||||
type SignatureShare = Vec<SignatureShare<Secp256k1>>;
|
|
||||||
|
|
||||||
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
|
|
||||||
self.sigs.iter().map(|sig| sig.read_share(reader)).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn complete(
|
|
||||||
mut self,
|
|
||||||
mut shares: HashMap<Participant, Self::SignatureShare>,
|
|
||||||
) -> Result<Transaction, FrostError> {
|
|
||||||
for (input, schnorr) in self.tx.input.iter_mut().zip(self.sigs.drain(..)) {
|
|
||||||
let sig = schnorr.complete(
|
|
||||||
shares.iter_mut().map(|(l, shares)| (*l, shares.remove(0))).collect::<HashMap<_, _>>(),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let mut witness = Witness::new();
|
|
||||||
witness.push(sig.as_ref());
|
|
||||||
input.witness = witness;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(self.tx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
use bitcoin_serai::{bitcoin::hashes::Hash as HashTrait, rpc::RpcError};
|
|
||||||
|
|
||||||
mod runner;
|
|
||||||
use runner::rpc;
|
|
||||||
|
|
||||||
async_sequential! {
|
|
||||||
async fn test_rpc() {
|
|
||||||
let rpc = rpc().await;
|
|
||||||
|
|
||||||
// Test get_latest_block_number and get_block_hash by round tripping them
|
|
||||||
let latest = rpc.get_latest_block_number().await.unwrap();
|
|
||||||
let hash = rpc.get_block_hash(latest).await.unwrap();
|
|
||||||
assert_eq!(rpc.get_block_number(&hash).await.unwrap(), latest);
|
|
||||||
|
|
||||||
// Test this actually is the latest block number by checking asking for the next block's errors
|
|
||||||
assert!(matches!(rpc.get_block_hash(latest + 1).await, Err(RpcError::RequestError(_))));
|
|
||||||
|
|
||||||
// Test get_block by checking the received block's hash matches the request
|
|
||||||
let block = rpc.get_block(&hash).await.unwrap();
|
|
||||||
// Hashes are stored in reverse. It's bs from Satoshi
|
|
||||||
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
|
|
||||||
block_hash.reverse();
|
|
||||||
assert_eq!(hash, block_hash);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
use bitcoin_serai::rpc::Rpc;
|
|
||||||
|
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
lazy_static::lazy_static! {
|
|
||||||
pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
pub(crate) async fn rpc() -> Rpc {
|
|
||||||
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await.unwrap();
|
|
||||||
|
|
||||||
// If this node has already been interacted with, clear its chain
|
|
||||||
if rpc.get_latest_block_number().await.unwrap() > 0 {
|
|
||||||
rpc
|
|
||||||
.rpc_call(
|
|
||||||
"invalidateblock",
|
|
||||||
serde_json::json!([hex::encode(rpc.get_block_hash(1).await.unwrap())]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
rpc
|
|
||||||
}
|
|
||||||
|
|
||||||
#[macro_export]
|
|
||||||
macro_rules! async_sequential {
|
|
||||||
($(async fn $name: ident() $body: block)*) => {
|
|
||||||
$(
|
|
||||||
#[tokio::test]
|
|
||||||
async fn $name() {
|
|
||||||
let guard = runner::SEQUENTIAL.lock().await;
|
|
||||||
let local = tokio::task::LocalSet::new();
|
|
||||||
local.run_until(async move {
|
|
||||||
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
|
|
||||||
drop(guard);
|
|
||||||
Err(err).unwrap()
|
|
||||||
}
|
|
||||||
}).await;
|
|
||||||
}
|
|
||||||
)*
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,348 +0,0 @@
|
|||||||
use std::collections::HashMap;
|
|
||||||
|
|
||||||
use rand_core::{RngCore, OsRng};
|
|
||||||
|
|
||||||
use transcript::{Transcript, RecommendedTranscript};
|
|
||||||
|
|
||||||
use k256::{
|
|
||||||
elliptic_curve::{
|
|
||||||
group::{ff::Field, Group},
|
|
||||||
sec1::{Tag, ToEncodedPoint},
|
|
||||||
},
|
|
||||||
Scalar, ProjectivePoint,
|
|
||||||
};
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
Participant, ThresholdKeys,
|
|
||||||
tests::{THRESHOLD, key_gen, sign_without_caching},
|
|
||||||
};
|
|
||||||
|
|
||||||
use bitcoin_serai::{
|
|
||||||
bitcoin::{
|
|
||||||
hashes::Hash as HashTrait,
|
|
||||||
blockdata::opcodes::all::OP_RETURN,
|
|
||||||
script::{PushBytesBuf, Instruction, Instructions, Script},
|
|
||||||
OutPoint, TxOut, Transaction, Network, Address,
|
|
||||||
},
|
|
||||||
wallet::{tweak_keys, address, ReceivedOutput, Scanner, TransactionError, SignableTransaction},
|
|
||||||
rpc::Rpc,
|
|
||||||
};
|
|
||||||
|
|
||||||
mod runner;
|
|
||||||
use runner::rpc;
|
|
||||||
|
|
||||||
const FEE: u64 = 20;
|
|
||||||
|
|
||||||
fn is_even(key: ProjectivePoint) -> bool {
|
|
||||||
key.to_encoded_point(true).tag() == Tag::CompressedEvenY
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint) -> ReceivedOutput {
|
|
||||||
let block_number = rpc.get_latest_block_number().await.unwrap() + 1;
|
|
||||||
|
|
||||||
rpc
|
|
||||||
.rpc_call::<Vec<String>>(
|
|
||||||
"generatetoaddress",
|
|
||||||
serde_json::json!([1, address(Network::Regtest, key).unwrap()]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
// Mine until maturity
|
|
||||||
rpc
|
|
||||||
.rpc_call::<Vec<String>>(
|
|
||||||
"generatetoaddress",
|
|
||||||
serde_json::json!([100, Address::p2sh(Script::empty(), Network::Regtest).unwrap()]),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
let block = rpc.get_block(&rpc.get_block_hash(block_number).await.unwrap()).await.unwrap();
|
|
||||||
|
|
||||||
let mut outputs = scanner.scan_block(&block);
|
|
||||||
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
|
|
||||||
|
|
||||||
assert_eq!(outputs.len(), 1);
|
|
||||||
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
|
|
||||||
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
|
|
||||||
outputs[0]
|
|
||||||
);
|
|
||||||
|
|
||||||
outputs.swap_remove(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
|
|
||||||
let mut keys = key_gen(&mut OsRng);
|
|
||||||
for (_, keys) in keys.iter_mut() {
|
|
||||||
*keys = tweak_keys(keys);
|
|
||||||
}
|
|
||||||
let key = keys.values().next().unwrap().group_key();
|
|
||||||
(keys, key)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn sign(
|
|
||||||
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
|
||||||
tx: SignableTransaction,
|
|
||||||
) -> Transaction {
|
|
||||||
let mut machines = HashMap::new();
|
|
||||||
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
|
|
||||||
machines.insert(
|
|
||||||
i,
|
|
||||||
tx.clone()
|
|
||||||
.multisig(keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
|
|
||||||
.unwrap(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
sign_without_caching(&mut OsRng, machines, &[])
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_tweak_keys() {
|
|
||||||
let mut even = false;
|
|
||||||
let mut odd = false;
|
|
||||||
|
|
||||||
// Generate keys until we get an even set and an odd set
|
|
||||||
while !(even && odd) {
|
|
||||||
let mut keys = key_gen(&mut OsRng).drain().next().unwrap().1;
|
|
||||||
if is_even(keys.group_key()) {
|
|
||||||
// Tweaking should do nothing
|
|
||||||
assert_eq!(tweak_keys(&keys).group_key(), keys.group_key());
|
|
||||||
|
|
||||||
even = true;
|
|
||||||
} else {
|
|
||||||
let tweaked = tweak_keys(&keys).group_key();
|
|
||||||
assert_ne!(tweaked, keys.group_key());
|
|
||||||
// Tweaking should produce an even key
|
|
||||||
assert!(is_even(tweaked));
|
|
||||||
|
|
||||||
// Verify it uses the smallest possible offset
|
|
||||||
while keys.group_key().to_encoded_point(true).tag() == Tag::CompressedOddY {
|
|
||||||
keys = keys.offset(Scalar::ONE);
|
|
||||||
}
|
|
||||||
assert_eq!(tweaked, keys.group_key());
|
|
||||||
|
|
||||||
odd = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async_sequential! {
|
|
||||||
async fn test_scanner() {
|
|
||||||
// Test Scanners are creatable for even keys.
|
|
||||||
for _ in 0 .. 128 {
|
|
||||||
let key = ProjectivePoint::random(&mut OsRng);
|
|
||||||
assert_eq!(Scanner::new(key).is_some(), is_even(key));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut key = ProjectivePoint::random(&mut OsRng);
|
|
||||||
while !is_even(key) {
|
|
||||||
key += ProjectivePoint::GENERATOR;
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
let mut scanner = Scanner::new(key).unwrap();
|
|
||||||
for _ in 0 .. 128 {
|
|
||||||
let mut offset = Scalar::random(&mut OsRng);
|
|
||||||
let registered = scanner.register_offset(offset).unwrap();
|
|
||||||
// Registering this again should return None
|
|
||||||
assert!(scanner.register_offset(offset).is_none());
|
|
||||||
|
|
||||||
// We can only register offsets resulting in even keys
|
|
||||||
// Make this even
|
|
||||||
while !is_even(key + (ProjectivePoint::GENERATOR * offset)) {
|
|
||||||
offset += Scalar::ONE;
|
|
||||||
}
|
|
||||||
// Ensure it matches the registered offset
|
|
||||||
assert_eq!(registered, offset);
|
|
||||||
// Assert registering this again fails
|
|
||||||
assert!(scanner.register_offset(offset).is_none());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let rpc = rpc().await;
|
|
||||||
let mut scanner = Scanner::new(key).unwrap();
|
|
||||||
|
|
||||||
assert_eq!(send_and_get_output(&rpc, &scanner, key).await.offset(), Scalar::ZERO);
|
|
||||||
|
|
||||||
// Register an offset and test receiving to it
|
|
||||||
let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
|
||||||
assert_eq!(
|
|
||||||
send_and_get_output(&rpc, &scanner, key + (ProjectivePoint::GENERATOR * offset))
|
|
||||||
.await
|
|
||||||
.offset(),
|
|
||||||
offset
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn test_transaction_errors() {
|
|
||||||
let (_, key) = keys();
|
|
||||||
|
|
||||||
let rpc = rpc().await;
|
|
||||||
let scanner = Scanner::new(key).unwrap();
|
|
||||||
|
|
||||||
let output = send_and_get_output(&rpc, &scanner, key).await;
|
|
||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
|
||||||
|
|
||||||
let inputs = vec![output];
|
|
||||||
let addr = || address(Network::Regtest, key).unwrap();
|
|
||||||
let payments = vec![(addr(), 1000)];
|
|
||||||
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(vec![], &payments, None, None, FEE),
|
|
||||||
Err(TransactionError::NoInputs)
|
|
||||||
);
|
|
||||||
|
|
||||||
// No change
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
|
||||||
// Consolidation TX
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
|
|
||||||
// Data
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
|
||||||
// No outputs
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &[], None, None, FEE),
|
|
||||||
Err(TransactionError::NoOutputs),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &[(addr(), 1)], None, None, FEE),
|
|
||||||
Err(TransactionError::DustPayment),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 80]), FEE).is_ok()
|
|
||||||
);
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 81]), FEE),
|
|
||||||
Err(TransactionError::TooMuchData),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
|
|
||||||
Err(TransactionError::NotEnoughFunds),
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, 0),
|
|
||||||
Err(TransactionError::TooLargeTransaction),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn test_send() {
|
|
||||||
let (keys, key) = keys();
|
|
||||||
|
|
||||||
let rpc = rpc().await;
|
|
||||||
let mut scanner = Scanner::new(key).unwrap();
|
|
||||||
|
|
||||||
// Get inputs, one not offset and one offset
|
|
||||||
let output = send_and_get_output(&rpc, &scanner, key).await;
|
|
||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
|
||||||
|
|
||||||
let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
|
||||||
let offset_key = key + (ProjectivePoint::GENERATOR * offset);
|
|
||||||
let offset_output = send_and_get_output(&rpc, &scanner, offset_key).await;
|
|
||||||
assert_eq!(offset_output.offset(), offset);
|
|
||||||
|
|
||||||
// Declare payments, change, fee
|
|
||||||
let payments = [
|
|
||||||
(address(Network::Regtest, key).unwrap(), 1005),
|
|
||||||
(address(Network::Regtest, offset_key).unwrap(), 1007)
|
|
||||||
];
|
|
||||||
|
|
||||||
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
|
||||||
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
|
||||||
let change_addr = address(Network::Regtest, change_key).unwrap();
|
|
||||||
|
|
||||||
// Create and sign the TX
|
|
||||||
let tx = SignableTransaction::new(
|
|
||||||
vec![output.clone(), offset_output.clone()],
|
|
||||||
&payments,
|
|
||||||
Some(change_addr.clone()),
|
|
||||||
None,
|
|
||||||
FEE
|
|
||||||
).unwrap();
|
|
||||||
let needed_fee = tx.needed_fee();
|
|
||||||
let tx = sign(&keys, tx);
|
|
||||||
|
|
||||||
assert_eq!(tx.output.len(), 3);
|
|
||||||
|
|
||||||
// Ensure we can scan it
|
|
||||||
let outputs = scanner.scan_transaction(&tx);
|
|
||||||
for (o, output) in outputs.iter().enumerate() {
|
|
||||||
assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
|
|
||||||
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_eq!(outputs[0].offset(), Scalar::ZERO);
|
|
||||||
assert_eq!(outputs[1].offset(), offset);
|
|
||||||
assert_eq!(outputs[2].offset(), change_offset);
|
|
||||||
|
|
||||||
// Make sure the payments were properly created
|
|
||||||
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
|
||||||
assert_eq!(output, &TxOut { script_pubkey: payment.0.script_pubkey(), value: payment.1 });
|
|
||||||
assert_eq!(scanned.value(), payment.1 );
|
|
||||||
}
|
|
||||||
|
|
||||||
// Make sure the change is correct
|
|
||||||
assert_eq!(needed_fee, u64::try_from(tx.weight()).unwrap() * FEE);
|
|
||||||
let input_value = output.value() + offset_output.value();
|
|
||||||
let output_value = tx.output.iter().map(|output| output.value).sum::<u64>();
|
|
||||||
assert_eq!(input_value - output_value, needed_fee);
|
|
||||||
|
|
||||||
let change_amount =
|
|
||||||
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
|
||||||
assert_eq!(
|
|
||||||
tx.output[2],
|
|
||||||
TxOut { script_pubkey: change_addr.script_pubkey(), value: change_amount },
|
|
||||||
);
|
|
||||||
|
|
||||||
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
|
||||||
// effectively test
|
|
||||||
rpc.send_raw_transaction(&tx).await.unwrap();
|
|
||||||
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
|
|
||||||
hash.reverse();
|
|
||||||
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn test_data() {
|
|
||||||
let (keys, key) = keys();
|
|
||||||
|
|
||||||
let rpc = rpc().await;
|
|
||||||
let scanner = Scanner::new(key).unwrap();
|
|
||||||
|
|
||||||
let output = send_and_get_output(&rpc, &scanner, key).await;
|
|
||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
|
||||||
|
|
||||||
let data_len = 60 + usize::try_from(OsRng.next_u64() % 21).unwrap();
|
|
||||||
let mut data = vec![0; data_len];
|
|
||||||
OsRng.fill_bytes(&mut data);
|
|
||||||
|
|
||||||
let tx = sign(
|
|
||||||
&keys,
|
|
||||||
SignableTransaction::new(
|
|
||||||
vec![output],
|
|
||||||
&[],
|
|
||||||
address(Network::Regtest, key),
|
|
||||||
Some(data.clone()),
|
|
||||||
FEE
|
|
||||||
).unwrap()
|
|
||||||
);
|
|
||||||
|
|
||||||
assert!(tx.output[0].script_pubkey.is_op_return());
|
|
||||||
let check = |mut instructions: Instructions| {
|
|
||||||
assert_eq!(instructions.next().unwrap().unwrap(), Instruction::Op(OP_RETURN));
|
|
||||||
assert_eq!(
|
|
||||||
instructions.next().unwrap().unwrap(),
|
|
||||||
Instruction::PushBytes(&PushBytesBuf::try_from(data.clone()).unwrap()),
|
|
||||||
);
|
|
||||||
assert!(instructions.next().is_none());
|
|
||||||
};
|
|
||||||
check(tx.output[0].script_pubkey.instructions());
|
|
||||||
check(tx.output[0].script_pubkey.instructions_minimal());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
3
coins/ethereum/.gitignore
vendored
3
coins/ethereum/.gitignore
vendored
@@ -1,3 +0,0 @@
|
|||||||
# solidity build outputs
|
|
||||||
cache
|
|
||||||
artifacts
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "ethereum-serai"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
|
|
||||||
license = "AGPL-3.0-only"
|
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
|
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
|
||||||
edition = "2021"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
|
||||||
all-features = true
|
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
thiserror = "1"
|
|
||||||
rand_core = "0.6"
|
|
||||||
|
|
||||||
serde_json = "1"
|
|
||||||
serde = "1"
|
|
||||||
|
|
||||||
sha2 = "0.10"
|
|
||||||
sha3 = "0.10"
|
|
||||||
|
|
||||||
group = "0.13"
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits", "ecdsa"] }
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
|
||||||
|
|
||||||
eyre = "0.6"
|
|
||||||
|
|
||||||
ethers = { version = "2", default-features = false, features = ["abigen", "ethers-solc"] }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
ethers-solc = "2"
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio = { version = "1", features = ["macros"] }
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
AGPL-3.0-only license
|
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License Version 3 as
|
|
||||||
published by the Free Software Foundation.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# Ethereum
|
|
||||||
|
|
||||||
This package contains Ethereum-related functionality, specifically deploying and
|
|
||||||
interacting with Serai contracts.
|
|
||||||
|
|
||||||
### Dependencies
|
|
||||||
|
|
||||||
- solc
|
|
||||||
- [Foundry](https://github.com/foundry-rs/foundry)
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
use ethers_solc::{Project, ProjectPathsConfig};
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
println!("cargo:rerun-if-changed=contracts");
|
|
||||||
println!("cargo:rerun-if-changed=artifacts");
|
|
||||||
|
|
||||||
// configure the project with all its paths, solc, cache etc.
|
|
||||||
let project = Project::builder()
|
|
||||||
.paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
|
|
||||||
.build()
|
|
||||||
.unwrap();
|
|
||||||
project.compile().unwrap();
|
|
||||||
|
|
||||||
// Tell Cargo that if a source file changes, to rerun this build script.
|
|
||||||
project.rerun_if_sources_changed();
|
|
||||||
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
//SPDX-License-Identifier: AGPLv3
|
|
||||||
pragma solidity ^0.8.0;
|
|
||||||
|
|
||||||
// see https://github.com/noot/schnorr-verify for implementation details
|
|
||||||
contract Schnorr {
|
|
||||||
// secp256k1 group order
|
|
||||||
uint256 constant public Q =
|
|
||||||
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
|
||||||
|
|
||||||
// parity := public key y-coord parity (27 or 28)
|
|
||||||
// px := public key x-coord
|
|
||||||
// message := 32-byte message
|
|
||||||
// s := schnorr signature
|
|
||||||
// e := schnorr signature challenge
|
|
||||||
function verify(
|
|
||||||
uint8 parity,
|
|
||||||
bytes32 px,
|
|
||||||
bytes32 message,
|
|
||||||
bytes32 s,
|
|
||||||
bytes32 e
|
|
||||||
) public view returns (bool) {
|
|
||||||
// ecrecover = (m, v, r, s);
|
|
||||||
bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
|
||||||
bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));
|
|
||||||
|
|
||||||
require(sp != 0);
|
|
||||||
// the ecrecover precompile implementation checks that the `r` and `s`
|
|
||||||
// inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
|
|
||||||
// check if they're zero.will make me
|
|
||||||
address R = ecrecover(sp, parity, px, ep);
|
|
||||||
require(R != address(0), "ecrecover failed");
|
|
||||||
return e == keccak256(
|
|
||||||
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
use crate::crypto::ProcessedSignature;
|
|
||||||
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
|
|
||||||
use eyre::{eyre, Result};
|
|
||||||
use std::fs::File;
|
|
||||||
use std::sync::Arc;
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
|
||||||
pub enum EthereumError {
|
|
||||||
#[error("failed to verify Schnorr signature")]
|
|
||||||
VerificationError,
|
|
||||||
}
|
|
||||||
|
|
||||||
abigen!(
|
|
||||||
Schnorr,
|
|
||||||
"./artifacts/Schnorr.sol/Schnorr.json",
|
|
||||||
event_derives(serde::Deserialize, serde::Serialize),
|
|
||||||
);
|
|
||||||
|
|
||||||
pub async fn deploy_schnorr_verifier_contract(
|
|
||||||
client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
|
|
||||||
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
|
|
||||||
let path = "./artifacts/Schnorr.sol/Schnorr.json";
|
|
||||||
let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
|
|
||||||
let abi = artifact.abi.unwrap();
|
|
||||||
let bin = artifact.bytecode.unwrap().object;
|
|
||||||
let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
|
|
||||||
let contract = factory.deploy(())?.send().await?;
|
|
||||||
let contract = Schnorr::new(contract.address(), client);
|
|
||||||
Ok(contract)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn call_verify(
|
|
||||||
contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
|
|
||||||
params: &ProcessedSignature,
|
|
||||||
) -> Result<()> {
|
|
||||||
if contract
|
|
||||||
.verify(
|
|
||||||
params.parity + 27,
|
|
||||||
params.px.to_bytes().into(),
|
|
||||||
params.message,
|
|
||||||
params.s.to_bytes().into(),
|
|
||||||
params.e.to_bytes().into(),
|
|
||||||
)
|
|
||||||
.call()
|
|
||||||
.await?
|
|
||||||
{
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(eyre!(EthereumError::VerificationError))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,107 +0,0 @@
|
|||||||
use sha3::{Digest, Keccak256};
|
|
||||||
|
|
||||||
use group::Group;
|
|
||||||
use k256::{
|
|
||||||
elliptic_curve::{
|
|
||||||
bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
|
|
||||||
},
|
|
||||||
AffinePoint, ProjectivePoint, Scalar, U256,
|
|
||||||
};
|
|
||||||
|
|
||||||
use frost::{algorithm::Hram, curve::Secp256k1};
|
|
||||||
|
|
||||||
pub fn keccak256(data: &[u8]) -> [u8; 32] {
|
|
||||||
Keccak256::digest(data).try_into().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
|
|
||||||
let encoded_point = point.to_encoded_point(false);
|
|
||||||
keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
|
||||||
if r.is_zero().into() || s.is_zero().into() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = AffinePoint::decompress(&r.to_bytes(), v.into());
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
if let Some(R) = Option::<AffinePoint>::from(R) {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = ProjectivePoint::from(R);
|
|
||||||
|
|
||||||
let r = r.invert().unwrap();
|
|
||||||
let u1 = ProjectivePoint::GENERATOR * (-message * r);
|
|
||||||
let u2 = R * (s * r);
|
|
||||||
let key: ProjectivePoint = u1 + u2;
|
|
||||||
if !bool::from(key.is_identity()) {
|
|
||||||
return Some(address(&key));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Default)]
|
|
||||||
pub struct EthereumHram {}
|
|
||||||
impl Hram<Secp256k1> for EthereumHram {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
|
||||||
let a_encoded_point = A.to_encoded_point(true);
|
|
||||||
let mut a_encoded = a_encoded_point.as_ref().to_owned();
|
|
||||||
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
|
|
||||||
let mut data = address(R).to_vec();
|
|
||||||
data.append(&mut a_encoded);
|
|
||||||
data.append(&mut m.to_vec());
|
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct ProcessedSignature {
|
|
||||||
pub s: Scalar,
|
|
||||||
pub px: Scalar,
|
|
||||||
pub parity: u8,
|
|
||||||
pub message: [u8; 32],
|
|
||||||
pub e: Scalar,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub fn preprocess_signature_for_ecrecover(
|
|
||||||
m: [u8; 32],
|
|
||||||
R: &ProjectivePoint,
|
|
||||||
s: Scalar,
|
|
||||||
A: &ProjectivePoint,
|
|
||||||
chain_id: U256,
|
|
||||||
) -> (Scalar, Scalar) {
|
|
||||||
let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
|
|
||||||
let sr = processed_sig.s.mul(&processed_sig.px).negate();
|
|
||||||
let er = processed_sig.e.mul(&processed_sig.px).negate();
|
|
||||||
(sr, er)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub fn process_signature_for_contract(
|
|
||||||
m: [u8; 32],
|
|
||||||
R: &ProjectivePoint,
|
|
||||||
s: Scalar,
|
|
||||||
A: &ProjectivePoint,
|
|
||||||
chain_id: U256,
|
|
||||||
) -> ProcessedSignature {
|
|
||||||
let encoded_pk = A.to_encoded_point(true);
|
|
||||||
let px = &encoded_pk.as_ref()[1 .. 33];
|
|
||||||
let px_scalar = Scalar::reduce(U256::from_be_slice(px));
|
|
||||||
let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
|
|
||||||
ProcessedSignature {
|
|
||||||
s,
|
|
||||||
px: px_scalar,
|
|
||||||
parity: &encoded_pk.as_ref()[0] - 2,
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
message: m,
|
|
||||||
e,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
pub mod contract;
|
|
||||||
pub mod crypto;
|
|
||||||
@@ -1,71 +0,0 @@
|
|||||||
use std::{convert::TryFrom, sync::Arc, time::Duration};
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256};
|
|
||||||
|
|
||||||
use ethers::{
|
|
||||||
prelude::*,
|
|
||||||
utils::{keccak256, Anvil, AnvilInstance},
|
|
||||||
};
|
|
||||||
|
|
||||||
use frost::{
|
|
||||||
curve::Secp256k1,
|
|
||||||
Participant,
|
|
||||||
algorithm::IetfSchnorr,
|
|
||||||
tests::{key_gen, algorithm_machines, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
use ethereum_serai::{
|
|
||||||
crypto,
|
|
||||||
contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
|
|
||||||
};
|
|
||||||
|
|
||||||
async fn deploy_test_contract(
|
|
||||||
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
|
|
||||||
let anvil = Anvil::new().spawn();
|
|
||||||
|
|
||||||
let wallet: LocalWallet = anvil.keys()[0].clone().into();
|
|
||||||
let provider =
|
|
||||||
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
|
|
||||||
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
|
||||||
let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());
|
|
||||||
|
|
||||||
(chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_deploy_contract() {
|
|
||||||
deploy_test_contract().await;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn test_ecrecover_hack() {
|
|
||||||
let (chain_id, _anvil, contract) = deploy_test_contract().await;
|
|
||||||
let chain_id = U256::from(chain_id);
|
|
||||||
|
|
||||||
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
|
||||||
let group_key = keys[&Participant::new(1).unwrap()].group_key();
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
algo.clone(),
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
let mut processed_sig =
|
|
||||||
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
|
|
||||||
|
|
||||||
call_verify(&contract, &processed_sig).await.unwrap();
|
|
||||||
|
|
||||||
// test invalid signature fails
|
|
||||||
processed_sig.message[0] = 0;
|
|
||||||
assert!(call_verify(&contract, &processed_sig).await.is_err());
|
|
||||||
}
|
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
use k256::{
|
|
||||||
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
|
|
||||||
ProjectivePoint, Scalar, U256,
|
|
||||||
};
|
|
||||||
use frost::{curve::Secp256k1, Participant};
|
|
||||||
|
|
||||||
use ethereum_serai::crypto::*;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ecrecover() {
|
|
||||||
use rand_core::OsRng;
|
|
||||||
use sha2::Sha256;
|
|
||||||
use sha3::{Digest, Keccak256};
|
|
||||||
use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};
|
|
||||||
|
|
||||||
let private = SigningKey::random(&mut OsRng);
|
|
||||||
let public = VerifyingKey::from(&private);
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let (sig, recovery_id) = private
|
|
||||||
.as_nonzero_scalar()
|
|
||||||
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
|
|
||||||
.unwrap();
|
|
||||||
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
|
||||||
{
|
|
||||||
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
|
|
||||||
}
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
|
|
||||||
.unwrap(),
|
|
||||||
address(&ProjectivePoint::from(public.as_affine()))
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_signing() {
|
|
||||||
use frost::{
|
|
||||||
algorithm::IetfSchnorr,
|
|
||||||
tests::{algorithm_machines, key_gen, sign},
|
|
||||||
};
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
|
||||||
let _group_key = keys[&Participant::new(1).unwrap()].group_key();
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let _sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, IetfSchnorr::<Secp256k1, EthereumHram>::ietf(), &keys),
|
|
||||||
MESSAGE,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_ecrecover_hack() {
|
|
||||||
use frost::{
|
|
||||||
algorithm::IetfSchnorr,
|
|
||||||
tests::{algorithm_machines, key_gen, sign},
|
|
||||||
};
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
|
||||||
let group_key = keys[&Participant::new(1).unwrap()].group_key();
|
|
||||||
let group_key_encoded = group_key.to_encoded_point(true);
|
|
||||||
let group_key_compressed = group_key_encoded.as_ref();
|
|
||||||
let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));
|
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
let chain_id = U256::ONE;
|
|
||||||
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
|
||||||
let sig = sign(
|
|
||||||
&mut OsRng,
|
|
||||||
algo.clone(),
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
|
|
||||||
let (sr, er) =
|
|
||||||
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
|
|
||||||
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
|
|
||||||
assert_eq!(q, address(&sig.R));
|
|
||||||
}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
mod contract;
|
|
||||||
mod crypto;
|
|
||||||
30
coins/firo/Cargo.toml
Normal file
30
coins/firo/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[package]
|
||||||
|
name = "firo"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "A modern Firo wallet library"
|
||||||
|
license = "MIT"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
lazy_static = "1"
|
||||||
|
thiserror = "1"
|
||||||
|
|
||||||
|
rand_core = "0.6"
|
||||||
|
rand_chacha = { version = "0.3", optional = true }
|
||||||
|
|
||||||
|
sha2 = "0.10"
|
||||||
|
|
||||||
|
ff = "0.12"
|
||||||
|
group = "0.12"
|
||||||
|
k256 = { version = "0.11", features = ["arithmetic"] }
|
||||||
|
|
||||||
|
blake2 = { version = "0.10", optional = true }
|
||||||
|
transcript = { path = "../../crypto/transcript", package = "flexible-transcript", features = ["recommended"], optional = true }
|
||||||
|
frost = { path = "../../crypto/frost", package = "modular-frost", features = ["secp256k1"], optional = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
rand = "0.8"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
multisig = ["blake2", "transcript", "frost", "rand_chacha"]
|
||||||
4
coins/firo/src/lib.rs
Normal file
4
coins/firo/src/lib.rs
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
pub mod spark;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
183
coins/firo/src/spark/chaum/mod.rs
Normal file
183
coins/firo/src/spark/chaum/mod.rs
Normal file
@@ -0,0 +1,183 @@
|
|||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use sha2::{Digest, Sha512};
|
||||||
|
|
||||||
|
use ff::Field;
|
||||||
|
use group::{Group, GroupEncoding};
|
||||||
|
use k256::{
|
||||||
|
elliptic_curve::{bigint::{ArrayEncoding, U512}, ops::Reduce},
|
||||||
|
Scalar, ProjectivePoint
|
||||||
|
};
|
||||||
|
|
||||||
|
use crate::spark::{F, G, H, U, GENERATORS_TRANSCRIPT};
|
||||||
|
|
||||||
|
#[cfg(feature = "frost")]
|
||||||
|
mod multisig;
|
||||||
|
#[cfg(feature = "frost")]
|
||||||
|
pub use multisig::ChaumMultisig;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct ChaumStatement {
|
||||||
|
context: Vec<u8>,
|
||||||
|
S_T: Vec<(ProjectivePoint, ProjectivePoint)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChaumStatement {
|
||||||
|
pub fn new(context: Vec<u8>, S_T: Vec<(ProjectivePoint, ProjectivePoint)>) -> ChaumStatement {
|
||||||
|
ChaumStatement { context, S_T }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn transcript(&self) -> Vec<u8> {
|
||||||
|
let mut transcript = self.context.clone();
|
||||||
|
for S_T in &self.S_T {
|
||||||
|
transcript.extend(S_T.0.to_bytes());
|
||||||
|
transcript.extend(S_T.1.to_bytes());
|
||||||
|
}
|
||||||
|
transcript
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct ChaumWitness {
|
||||||
|
statement: ChaumStatement,
|
||||||
|
xz: Vec<(Scalar, Scalar)>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChaumWitness {
|
||||||
|
pub fn new(statement: ChaumStatement, xz: Vec<(Scalar, Scalar)>) -> ChaumWitness {
|
||||||
|
assert!(statement.S_T.len() != 0);
|
||||||
|
assert_eq!(statement.S_T.len(), xz.len());
|
||||||
|
ChaumWitness { statement, xz }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
|
pub(crate) struct ChaumCommitments {
|
||||||
|
A1: ProjectivePoint,
|
||||||
|
A2: Vec<ProjectivePoint>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChaumCommitments {
|
||||||
|
fn transcript(&self) -> Vec<u8> {
|
||||||
|
let mut transcript = Vec::with_capacity((self.A2.len() + 1) * 33);
|
||||||
|
transcript.extend(self.A1.to_bytes());
|
||||||
|
for A in &self.A2 {
|
||||||
|
transcript.extend(A.to_bytes());
|
||||||
|
}
|
||||||
|
transcript
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
|
pub struct ChaumProof {
|
||||||
|
commitments: ChaumCommitments,
|
||||||
|
t1: Vec<Scalar>,
|
||||||
|
t2: Scalar,
|
||||||
|
t3: Scalar
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChaumProof {
|
||||||
|
fn r_t_commitments<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
witness: &ChaumWitness
|
||||||
|
) -> (Vec<Scalar>, Scalar, ChaumCommitments) {
|
||||||
|
let len = witness.xz.len();
|
||||||
|
let mut rs = Vec::with_capacity(len);
|
||||||
|
let mut r_sum = Scalar::zero();
|
||||||
|
|
||||||
|
let mut commitments = ChaumCommitments {
|
||||||
|
A1: ProjectivePoint::IDENTITY,
|
||||||
|
A2: Vec::with_capacity(len)
|
||||||
|
};
|
||||||
|
|
||||||
|
for (_, T) in &witness.statement.S_T {
|
||||||
|
let r = Scalar::random(&mut *rng);
|
||||||
|
r_sum += r;
|
||||||
|
commitments.A2.push(T * &r);
|
||||||
|
rs.push(r);
|
||||||
|
}
|
||||||
|
|
||||||
|
let t = Scalar::random(&mut *rng);
|
||||||
|
commitments.A1 = (*F * r_sum) + (*H * t);
|
||||||
|
|
||||||
|
(rs, t, commitments)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn t_prove(
|
||||||
|
witness: &ChaumWitness,
|
||||||
|
rs: &[Scalar],
|
||||||
|
mut t3: Scalar,
|
||||||
|
commitments: ChaumCommitments,
|
||||||
|
nonces: &[Scalar],
|
||||||
|
y: &Scalar
|
||||||
|
) -> (Scalar, ChaumProof) {
|
||||||
|
let challenge = ChaumProof::challenge(&witness.statement, &commitments);
|
||||||
|
let mut t1 = Vec::with_capacity(rs.len());
|
||||||
|
let mut t2 = Scalar::zero();
|
||||||
|
|
||||||
|
let mut accum = challenge;
|
||||||
|
for (i, (x, z)) in witness.xz.iter().enumerate() {
|
||||||
|
t1.push(rs[i] + (accum * x));
|
||||||
|
t2 += nonces[i] + (accum * y);
|
||||||
|
t3 += accum * z;
|
||||||
|
accum *= challenge;
|
||||||
|
}
|
||||||
|
|
||||||
|
(challenge, ChaumProof { commitments, t1, t2, t3 })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn challenge(statement: &ChaumStatement, commitments: &ChaumCommitments) -> Scalar {
|
||||||
|
let mut transcript = b"Chaum".to_vec();
|
||||||
|
transcript.extend(&*GENERATORS_TRANSCRIPT);
|
||||||
|
transcript.extend(&statement.transcript());
|
||||||
|
transcript.extend(&commitments.transcript());
|
||||||
|
Scalar::from_uint_reduced(U512::from_be_byte_array(Sha512::digest(transcript)))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn prove<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
witness: &ChaumWitness,
|
||||||
|
y: &Scalar
|
||||||
|
) -> ChaumProof {
|
||||||
|
let len = witness.xz.len();
|
||||||
|
let (rs, t3, mut commitments) = Self::r_t_commitments(rng, witness);
|
||||||
|
|
||||||
|
let mut s_sum = Scalar::zero();
|
||||||
|
let mut ss = Vec::with_capacity(len);
|
||||||
|
for i in 0 .. len {
|
||||||
|
let s = Scalar::random(&mut *rng);
|
||||||
|
s_sum += s;
|
||||||
|
commitments.A2[i] += *G * s;
|
||||||
|
ss.push(s);
|
||||||
|
}
|
||||||
|
commitments.A1 += *G * s_sum;
|
||||||
|
|
||||||
|
let (_, proof) = Self::t_prove(&witness, &rs, t3, commitments, &ss, y);
|
||||||
|
proof
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn verify(&self, statement: &ChaumStatement) -> bool {
|
||||||
|
let len = statement.S_T.len();
|
||||||
|
assert_eq!(len, self.commitments.A2.len());
|
||||||
|
assert_eq!(len, self.t1.len());
|
||||||
|
|
||||||
|
let challenge = Self::challenge(&statement, &self.commitments);
|
||||||
|
|
||||||
|
let mut one = self.commitments.A1 - ((*G * self.t2) + (*H * self.t3));
|
||||||
|
let mut two = -(*G * self.t2);
|
||||||
|
|
||||||
|
let mut accum = challenge;
|
||||||
|
for i in 0 .. len {
|
||||||
|
one += statement.S_T[i].0 * accum;
|
||||||
|
one -= *F * self.t1[i];
|
||||||
|
|
||||||
|
two += self.commitments.A2[i] + (*U * accum);
|
||||||
|
two -= statement.S_T[i].1 * self.t1[i];
|
||||||
|
accum *= challenge;
|
||||||
|
}
|
||||||
|
|
||||||
|
one.is_identity().into() && two.is_identity().into()
|
||||||
|
}
|
||||||
|
}
|
||||||
132
coins/firo/src/spark/chaum/multisig.rs
Normal file
132
coins/firo/src/spark/chaum/multisig.rs
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
use std::io::Read;
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
||||||
|
use rand_chacha::ChaCha12Rng;
|
||||||
|
|
||||||
|
use ff::Field;
|
||||||
|
use k256::{Scalar, ProjectivePoint};
|
||||||
|
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
use frost::{curve::Secp256k1, FrostError, FrostView, algorithm::Algorithm};
|
||||||
|
|
||||||
|
use crate::spark::{G, GENERATORS_TRANSCRIPT, chaum::{ChaumWitness, ChaumProof}};
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct ChaumMultisig {
|
||||||
|
transcript: RecommendedTranscript,
|
||||||
|
len: usize,
|
||||||
|
witness: ChaumWitness,
|
||||||
|
|
||||||
|
challenge: Scalar,
|
||||||
|
proof: Option<ChaumProof>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ChaumMultisig {
|
||||||
|
pub fn new(mut transcript: RecommendedTranscript, witness: ChaumWitness) -> ChaumMultisig {
|
||||||
|
transcript.domain_separate(b"Chaum");
|
||||||
|
transcript.append_message(b"generators", &*GENERATORS_TRANSCRIPT);
|
||||||
|
transcript.append_message(b"statement", &witness.statement.transcript());
|
||||||
|
for (x, z) in &witness.xz {
|
||||||
|
transcript.append_message(b"x", &x.to_bytes());
|
||||||
|
transcript.append_message(b"z", &z.to_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
|
let len = witness.xz.len();
|
||||||
|
ChaumMultisig {
|
||||||
|
transcript,
|
||||||
|
len,
|
||||||
|
witness,
|
||||||
|
|
||||||
|
challenge: Scalar::zero(),
|
||||||
|
proof: None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Algorithm<Secp256k1> for ChaumMultisig {
|
||||||
|
type Transcript = RecommendedTranscript;
|
||||||
|
type Signature = ChaumProof;
|
||||||
|
|
||||||
|
fn transcript(&mut self) -> &mut Self::Transcript {
|
||||||
|
&mut self.transcript
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
|
||||||
|
vec![vec![*G]; self.len]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||||
|
&mut self,
|
||||||
|
_: &mut R,
|
||||||
|
_: &FrostView<Secp256k1>
|
||||||
|
) -> Vec<u8> {
|
||||||
|
vec![]
|
||||||
|
}
|
||||||
|
|
||||||
|
fn process_addendum<Re: Read>(
|
||||||
|
&mut self,
|
||||||
|
_: &FrostView<Secp256k1>,
|
||||||
|
_: u16,
|
||||||
|
_: &mut Re
|
||||||
|
) -> Result<(), FrostError> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sign_share(
|
||||||
|
&mut self,
|
||||||
|
view: &FrostView<Secp256k1>,
|
||||||
|
nonce_sums: &[Vec<ProjectivePoint>],
|
||||||
|
nonces: &[Scalar],
|
||||||
|
_: &[u8]
|
||||||
|
) -> Scalar {
|
||||||
|
let (rs, t3, mut commitments) = ChaumProof::r_t_commitments(
|
||||||
|
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"r_t")),
|
||||||
|
&self.witness
|
||||||
|
);
|
||||||
|
|
||||||
|
for i in 0 .. self.len {
|
||||||
|
commitments.A2[i] += nonce_sums[i][0];
|
||||||
|
}
|
||||||
|
commitments.A1 += nonce_sums.iter().map(|sum| sum[0]).sum::<ProjectivePoint>();
|
||||||
|
|
||||||
|
let (challenge, proof) = ChaumProof::t_prove(
|
||||||
|
&self.witness,
|
||||||
|
&rs,
|
||||||
|
t3,
|
||||||
|
commitments,
|
||||||
|
nonces,
|
||||||
|
&view.secret_share()
|
||||||
|
);
|
||||||
|
self.challenge = challenge;
|
||||||
|
let t2 = proof.t2;
|
||||||
|
self.proof = Some(proof);
|
||||||
|
t2
|
||||||
|
}
|
||||||
|
|
||||||
|
fn verify(
|
||||||
|
&self,
|
||||||
|
_: ProjectivePoint,
|
||||||
|
_: &[Vec<ProjectivePoint>],
|
||||||
|
sum: Scalar
|
||||||
|
) -> Option<Self::Signature> {
|
||||||
|
let mut proof = self.proof.clone().unwrap();
|
||||||
|
proof.t2 = sum;
|
||||||
|
Some(proof).filter(|proof| proof.verify(&self.witness.statement))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn verify_share(
|
||||||
|
&self,
|
||||||
|
_: u16,
|
||||||
|
verification_share: ProjectivePoint,
|
||||||
|
nonces: &[Vec<ProjectivePoint>],
|
||||||
|
share: Scalar
|
||||||
|
) -> bool {
|
||||||
|
let mut t2 = ProjectivePoint::IDENTITY;
|
||||||
|
let mut accum = self.challenge;
|
||||||
|
for i in 0 .. self.len {
|
||||||
|
t2 += nonces[i][0] + (verification_share * accum);
|
||||||
|
accum *= self.challenge;
|
||||||
|
}
|
||||||
|
(*G * share) == t2
|
||||||
|
}
|
||||||
|
}
|
||||||
42
coins/firo/src/spark/mod.rs
Normal file
42
coins/firo/src/spark/mod.rs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
use lazy_static::lazy_static;
|
||||||
|
|
||||||
|
use sha2::{Digest, Sha256};
|
||||||
|
|
||||||
|
use group::GroupEncoding;
|
||||||
|
use k256::{ProjectivePoint, CompressedPoint};
|
||||||
|
|
||||||
|
pub mod chaum;
|
||||||
|
|
||||||
|
// Extremely basic hash to curve, which should not be used, yet which offers the needed generators
|
||||||
|
fn generator(letter: u8) -> ProjectivePoint {
|
||||||
|
if letter == b'G' {
|
||||||
|
return ProjectivePoint::GENERATOR;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut point = [2; 33];
|
||||||
|
let mut g = b"Generator ".to_vec();
|
||||||
|
|
||||||
|
let mut res;
|
||||||
|
while {
|
||||||
|
g.push(letter);
|
||||||
|
point[1..].copy_from_slice(&Sha256::digest(&g));
|
||||||
|
res = ProjectivePoint::from_bytes(&CompressedPoint::from(point));
|
||||||
|
res.is_none().into()
|
||||||
|
} {}
|
||||||
|
res.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
pub static ref F: ProjectivePoint = generator(b'F');
|
||||||
|
pub static ref G: ProjectivePoint = generator(b'G');
|
||||||
|
pub static ref H: ProjectivePoint = generator(b'H');
|
||||||
|
pub static ref U: ProjectivePoint = generator(b'U');
|
||||||
|
pub static ref GENERATORS_TRANSCRIPT: Vec<u8> = {
|
||||||
|
let mut transcript = Vec::with_capacity(4 * 33);
|
||||||
|
transcript.extend(&F.to_bytes());
|
||||||
|
transcript.extend(&G.to_bytes());
|
||||||
|
transcript.extend(&H.to_bytes());
|
||||||
|
transcript.extend(&U.to_bytes());
|
||||||
|
transcript
|
||||||
|
};
|
||||||
|
}
|
||||||
72
coins/firo/src/tests/mod.rs
Normal file
72
coins/firo/src/tests/mod.rs
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
use rand::rngs::OsRng;
|
||||||
|
|
||||||
|
use ff::Field;
|
||||||
|
use k256::Scalar;
|
||||||
|
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
use frost::{curve::Secp256k1, tests::{key_gen, algorithm_machines, sign}};
|
||||||
|
|
||||||
|
use crate::spark::{F, G, H, U, chaum::*};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn chaum() {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut S_T = vec![];
|
||||||
|
let mut xz = vec![];
|
||||||
|
let y = Scalar::random(&mut OsRng);
|
||||||
|
for _ in 0 .. 2 {
|
||||||
|
let x = Scalar::random(&mut OsRng);
|
||||||
|
let z = Scalar::random(&mut OsRng);
|
||||||
|
|
||||||
|
S_T.push((
|
||||||
|
(*F * x) + (*G * y) + (*H * z),
|
||||||
|
// U = (x * T) + (y * G)
|
||||||
|
// T = (U - (y * G)) * x^-1
|
||||||
|
(*U - (*G * y)) * x.invert().unwrap()
|
||||||
|
));
|
||||||
|
|
||||||
|
xz.push((x, z));
|
||||||
|
}
|
||||||
|
|
||||||
|
let statement = ChaumStatement::new(b"Hello, World!".to_vec(), S_T);
|
||||||
|
let witness = ChaumWitness::new(statement.clone(), xz);
|
||||||
|
assert!(ChaumProof::prove(&mut OsRng, &witness, &y).verify(&statement));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "multisig")]
|
||||||
|
#[test]
|
||||||
|
fn chaum_multisig() {
|
||||||
|
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut S_T = vec![];
|
||||||
|
let mut xz = vec![];
|
||||||
|
for _ in 0 .. 5 {
|
||||||
|
let x = Scalar::random(&mut OsRng);
|
||||||
|
let z = Scalar::random(&mut OsRng);
|
||||||
|
|
||||||
|
S_T.push((
|
||||||
|
(*F * x) + keys[&1].group_key() + (*H * z),
|
||||||
|
(*U - keys[&1].group_key()) * x.invert().unwrap()
|
||||||
|
));
|
||||||
|
|
||||||
|
xz.push((x, z));
|
||||||
|
}
|
||||||
|
|
||||||
|
let statement = ChaumStatement::new(b"Hello, Multisig World!".to_vec(), S_T);
|
||||||
|
let witness = ChaumWitness::new(statement.clone(), xz);
|
||||||
|
|
||||||
|
assert!(
|
||||||
|
sign(
|
||||||
|
&mut OsRng,
|
||||||
|
algorithm_machines(
|
||||||
|
&mut OsRng,
|
||||||
|
ChaumMultisig::new(RecommendedTranscript::new(b"Firo Serai Chaum Test"), witness),
|
||||||
|
&keys
|
||||||
|
),
|
||||||
|
&[]
|
||||||
|
).verify(&statement)
|
||||||
|
);
|
||||||
|
}
|
||||||
1
coins/monero/.gitignore
vendored
Normal file
1
coins/monero/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
c/.build
|
||||||
@@ -1,107 +1,52 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "monero-serai"
|
name = "monero-serai"
|
||||||
version = "0.1.4-alpha"
|
version = "0.1.0"
|
||||||
description = "A modern Monero transaction library"
|
description = "A modern Monero wallet library"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[build-dependencies]
|
||||||
all-features = true
|
cc = "1.0"
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }
|
hex-literal = "0.3"
|
||||||
|
lazy_static = "1"
|
||||||
|
thiserror = "1"
|
||||||
|
|
||||||
async-trait = { version = "0.1", default-features = false }
|
rand_core = "0.6"
|
||||||
thiserror = { version = "1", optional = true }
|
rand_chacha = { version = "0.3", optional = true }
|
||||||
|
rand = "0.8"
|
||||||
|
rand_distr = "0.4"
|
||||||
|
|
||||||
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
subtle = "2.4"
|
||||||
subtle = { version = "^2.4", default-features = false }
|
|
||||||
|
|
||||||
rand_core = { version = "0.6", default-features = false }
|
tiny-keccak = { version = "2", features = ["keccak"] }
|
||||||
# Used to send transactions
|
blake2 = { version = "0.10", optional = true }
|
||||||
rand = { version = "0.8", default-features = false }
|
|
||||||
rand_chacha = { version = "0.3", default-features = false }
|
|
||||||
# Used to select decoys
|
|
||||||
rand_distr = { version = "0.4", default-features = false }
|
|
||||||
|
|
||||||
crc = { version = "3", default-features = false }
|
curve25519-dalek = { version = "3", features = ["std"] }
|
||||||
sha3 = { version = "0.10", default-features = false }
|
|
||||||
|
|
||||||
curve25519-dalek = { version = "^3.2", default-features = false }
|
group = { version = "0.12" }
|
||||||
|
dalek-ff-group = { path = "../../crypto/dalek-ff-group" }
|
||||||
|
|
||||||
# Used for the hash to curve, along with the more complicated proofs
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", features = ["recommended"], optional = true }
|
||||||
group = { version = "0.13", default-features = false }
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["ed25519"], optional = true }
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
|
dleq = { package = "dleq-serai", path = "../../crypto/dleq", features = ["serialize"], optional = true }
|
||||||
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }
|
|
||||||
|
|
||||||
# Needed for multisig
|
hex = "0.4"
|
||||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
|
serde_json = "1.0"
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
|
|
||||||
|
|
||||||
monero-generators = { path = "generators", version = "0.3", default-features = false }
|
base58-monero = "1"
|
||||||
|
monero-epee-bin-serde = "1.0"
|
||||||
|
monero = "0.16"
|
||||||
|
|
||||||
futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }
|
reqwest = { version = "0.11", features = ["json"] }
|
||||||
|
|
||||||
hex-literal = "0.4"
|
|
||||||
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
|
||||||
serde = { version = "1", default-features = false, features = ["derive"] }
|
|
||||||
serde_json = { version = "1", default-features = false, features = ["alloc"] }
|
|
||||||
|
|
||||||
base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }
|
|
||||||
|
|
||||||
# Used for the provided RPC
|
|
||||||
digest_auth = { version = "0.3", optional = true }
|
|
||||||
reqwest = { version = "0.11", features = ["json"], optional = true }
|
|
||||||
|
|
||||||
# Used for the binaries
|
|
||||||
tokio = { version = "1", features = ["full"], optional = true }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
|
|
||||||
monero-generators = { path = "generators", version = "0.3", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio = { version = "1", features = ["full"] }
|
|
||||||
monero-rpc = "0.3"
|
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = [
|
|
||||||
"std-shims/std",
|
|
||||||
|
|
||||||
"thiserror",
|
|
||||||
|
|
||||||
"zeroize/std",
|
|
||||||
"subtle/std",
|
|
||||||
|
|
||||||
"rand_core/std",
|
|
||||||
"rand_chacha/std",
|
|
||||||
"rand/std",
|
|
||||||
"rand_distr/std",
|
|
||||||
|
|
||||||
"sha3/std",
|
|
||||||
|
|
||||||
"curve25519-dalek/std",
|
|
||||||
|
|
||||||
"multiexp/std",
|
|
||||||
|
|
||||||
"monero-generators/std",
|
|
||||||
|
|
||||||
"futures/std",
|
|
||||||
|
|
||||||
"hex/std",
|
|
||||||
"serde/std",
|
|
||||||
"serde_json/std",
|
|
||||||
]
|
|
||||||
|
|
||||||
http_rpc = ["digest_auth", "reqwest"]
|
|
||||||
multisig = ["transcript", "frost", "dleq", "std"]
|
|
||||||
binaries = ["tokio"]
|
|
||||||
experimental = []
|
experimental = []
|
||||||
|
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]
|
||||||
|
|
||||||
default = ["std", "http_rpc"]
|
[dev-dependencies]
|
||||||
|
sha2 = "0.10"
|
||||||
|
tokio = { version = "1", features = ["full"] }
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
Copyright (c) 2022 Luke Parker
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -4,46 +4,4 @@ A modern Monero transaction library intended for usage in wallets. It prides
|
|||||||
itself on accuracy, correctness, and removing common pit falls developers may
|
itself on accuracy, correctness, and removing common pit falls developers may
|
||||||
face.
|
face.
|
||||||
|
|
||||||
monero-serai also offers the following features:
|
Threshold multisignature support is available via the `multisig` feature.
|
||||||
|
|
||||||
- Featured Addresses
|
|
||||||
- A FROST-based multisig orders of magnitude more performant than Monero's
|
|
||||||
|
|
||||||
### Purpose and support
|
|
||||||
|
|
||||||
monero-serai was written for Serai, a decentralized exchange aiming to support
|
|
||||||
Monero. Despite this, monero-serai is intended to be a widely usable library,
|
|
||||||
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
|
|
||||||
yet will not deprive functionality from other users.
|
|
||||||
|
|
||||||
Various legacy transaction formats are not currently implemented, yet we are
|
|
||||||
willing to add support for them. There aren't active development efforts around
|
|
||||||
them however.
|
|
||||||
|
|
||||||
### Caveats
|
|
||||||
|
|
||||||
This library DOES attempt to do the following:
|
|
||||||
|
|
||||||
- Create on-chain transactions identical to how wallet2 would (unless told not
|
|
||||||
to)
|
|
||||||
- Not be detectable as monero-serai when scanning outputs
|
|
||||||
- Not reveal spent outputs to the connected RPC node
|
|
||||||
|
|
||||||
This library DOES NOT attempt to do the following:
|
|
||||||
|
|
||||||
- Have identical RPC behavior when creating transactions
|
|
||||||
- Be a wallet
|
|
||||||
|
|
||||||
This means that monero-serai shouldn't be fingerprintable on-chain. It also
|
|
||||||
shouldn't be fingerprintable if a targeted attack occurs to detect if the
|
|
||||||
receiving wallet is monero-serai or wallet2. It also should be generally safe
|
|
||||||
for usage with remote nodes.
|
|
||||||
|
|
||||||
It won't hide from remote nodes it's monero-serai however, potentially
|
|
||||||
allowing a remote node to profile you. The implications of this are left to the
|
|
||||||
user to consider.
|
|
||||||
|
|
||||||
It also won't act as a wallet, just as a transaction library. wallet2 has
|
|
||||||
several *non-transaction-level* policies, such as always attempting to use two
|
|
||||||
inputs to create transactions. These are considered out of scope to
|
|
||||||
monero-serai.
|
|
||||||
|
|||||||
@@ -1,67 +1,72 @@
|
|||||||
use std::{
|
use std::{env, path::Path, process::Command};
|
||||||
io::Write,
|
|
||||||
env,
|
|
||||||
path::Path,
|
|
||||||
fs::{File, remove_file},
|
|
||||||
};
|
|
||||||
|
|
||||||
use dalek_ff_group::EdwardsPoint;
|
|
||||||
|
|
||||||
use monero_generators::bulletproofs_generators;
|
|
||||||
|
|
||||||
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
|
|
||||||
for generator in points {
|
|
||||||
generators_string.extend(
|
|
||||||
format!(
|
|
||||||
"
|
|
||||||
dalek_ff_group::EdwardsPoint(
|
|
||||||
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
|
|
||||||
),
|
|
||||||
",
|
|
||||||
generator.compress().to_bytes()
|
|
||||||
)
|
|
||||||
.chars(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn generators(prefix: &'static str, path: &str) {
|
|
||||||
let generators = bulletproofs_generators(prefix.as_bytes());
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut G_str = String::new();
|
|
||||||
serialize(&mut G_str, &generators.G);
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut H_str = String::new();
|
|
||||||
serialize(&mut H_str, &generators.H);
|
|
||||||
|
|
||||||
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
|
|
||||||
let _ = remove_file(&path);
|
|
||||||
File::create(&path)
|
|
||||||
.unwrap()
|
|
||||||
.write_all(
|
|
||||||
format!(
|
|
||||||
"
|
|
||||||
pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
|
||||||
pub fn GENERATORS() -> &'static Generators {{
|
|
||||||
GENERATORS_CELL.get_or_init(|| Generators {{
|
|
||||||
G: [
|
|
||||||
{G_str}
|
|
||||||
],
|
|
||||||
H: [
|
|
||||||
{H_str}
|
|
||||||
],
|
|
||||||
}})
|
|
||||||
}}
|
|
||||||
",
|
|
||||||
)
|
|
||||||
.as_bytes(),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
println!("cargo:rerun-if-changed=build.rs");
|
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
|
||||||
|
panic!("git failed to init submodules");
|
||||||
|
}
|
||||||
|
|
||||||
generators("bulletproof", "generators.rs");
|
if !Command ::new("mkdir").args(&["-p", ".build"])
|
||||||
generators("bulletproof_plus", "generators_plus.rs");
|
.current_dir(&Path::new("c")).status().unwrap().success() {
|
||||||
|
panic!("failed to create a directory to track build progress");
|
||||||
|
}
|
||||||
|
|
||||||
|
let out_dir = &env::var("OUT_DIR").unwrap();
|
||||||
|
|
||||||
|
// Use a file to signal if Monero was already built, as that should never be rebuilt
|
||||||
|
// If the signaling file was deleted, run this script again to rebuild Monero though
|
||||||
|
println!("cargo:rerun-if-changed=c/.build/monero");
|
||||||
|
if !Path::new("c/.build/monero").exists() {
|
||||||
|
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
|
||||||
|
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
|
||||||
|
panic!("make failed to build Monero. Please check your dependencies");
|
||||||
|
}
|
||||||
|
|
||||||
|
if !Command::new("touch").arg("monero")
|
||||||
|
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
|
||||||
|
panic!("failed to create a file to label Monero as built");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("cargo:rerun-if-changed=c/wrapper.cpp");
|
||||||
|
cc::Build::new()
|
||||||
|
.static_flag(true)
|
||||||
|
.warnings(false)
|
||||||
|
.extra_warnings(false)
|
||||||
|
.flag("-Wno-deprecated-declarations")
|
||||||
|
|
||||||
|
.include("c/monero/external/supercop/include")
|
||||||
|
.include("c/monero/contrib/epee/include")
|
||||||
|
.include("c/monero/src")
|
||||||
|
.include("c/monero/build/release/generated_include")
|
||||||
|
|
||||||
|
.define("AUTO_INITIALIZE_EASYLOGGINGPP", None)
|
||||||
|
.include("c/monero/external/easylogging++")
|
||||||
|
.file("c/monero/external/easylogging++/easylogging++.cc")
|
||||||
|
|
||||||
|
.file("c/monero/src/common/aligned.c")
|
||||||
|
.file("c/monero/src/common/perf_timer.cpp")
|
||||||
|
|
||||||
|
.include("c/monero/src/crypto")
|
||||||
|
.file("c/monero/src/crypto/crypto-ops-data.c")
|
||||||
|
.file("c/monero/src/crypto/crypto-ops.c")
|
||||||
|
.file("c/monero/src/crypto/keccak.c")
|
||||||
|
.file("c/monero/src/crypto/hash.c")
|
||||||
|
|
||||||
|
.include("c/monero/src/device")
|
||||||
|
.file("c/monero/src/device/device_default.cpp")
|
||||||
|
|
||||||
|
.include("c/monero/src/ringct")
|
||||||
|
.file("c/monero/src/ringct/rctCryptoOps.c")
|
||||||
|
.file("c/monero/src/ringct/rctTypes.cpp")
|
||||||
|
.file("c/monero/src/ringct/rctOps.cpp")
|
||||||
|
.file("c/monero/src/ringct/multiexp.cc")
|
||||||
|
.file("c/monero/src/ringct/bulletproofs.cc")
|
||||||
|
.file("c/monero/src/ringct/rctSigs.cpp")
|
||||||
|
|
||||||
|
.file("c/wrapper.cpp")
|
||||||
|
.compile("wrapper");
|
||||||
|
|
||||||
|
println!("cargo:rustc-link-search={}", out_dir);
|
||||||
|
println!("cargo:rustc-link-lib=wrapper");
|
||||||
|
println!("cargo:rustc-link-lib=stdc++");
|
||||||
}
|
}
|
||||||
|
|||||||
1
coins/monero/c/monero
Submodule
1
coins/monero/c/monero
Submodule
Submodule coins/monero/c/monero added at 424e4de16b
158
coins/monero/c/wrapper.cpp
Normal file
158
coins/monero/c/wrapper.cpp
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
#include <mutex>
|
||||||
|
|
||||||
|
#include "device/device_default.hpp"
|
||||||
|
|
||||||
|
#include "ringct/bulletproofs.h"
|
||||||
|
#include "ringct/rctSigs.h"
|
||||||
|
|
||||||
|
typedef std::lock_guard<std::mutex> lock;
|
||||||
|
|
||||||
|
std::mutex rng_mutex;
|
||||||
|
uint8_t rng_entropy[64];
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
void rng(uint8_t* seed) {
|
||||||
|
// Set the first half to the seed
|
||||||
|
memcpy(rng_entropy, seed, 32);
|
||||||
|
// Set the second half to the hash of a DST to ensure a lack of collisions
|
||||||
|
crypto::cn_fast_hash("RNG_entropy_seed", 16, (char*) &rng_entropy[32]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C" void monero_wide_reduce(uint8_t* value);
|
||||||
|
namespace crypto {
|
||||||
|
void generate_random_bytes_not_thread_safe(size_t n, void* value) {
|
||||||
|
size_t written = 0;
|
||||||
|
while (written != n) {
|
||||||
|
uint8_t hash[32];
|
||||||
|
crypto::cn_fast_hash(rng_entropy, 64, (char*) hash);
|
||||||
|
// Step the RNG by setting the latter half to the most recent result
|
||||||
|
// Does not leak the RNG, even if the values are leaked (which they are
|
||||||
|
// expected to be) due to the first half remaining constant and
|
||||||
|
// undisclosed
|
||||||
|
memcpy(&rng_entropy[32], hash, 32);
|
||||||
|
|
||||||
|
size_t next = n - written;
|
||||||
|
if (next > 32) {
|
||||||
|
next = 32;
|
||||||
|
}
|
||||||
|
memcpy(&((uint8_t*) value)[written], hash, next);
|
||||||
|
written += next;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void random32_unbiased(unsigned char *bytes) {
|
||||||
|
uint8_t value[64];
|
||||||
|
generate_random_bytes_not_thread_safe(64, value);
|
||||||
|
monero_wide_reduce(value);
|
||||||
|
memcpy(bytes, value, 32);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
void c_hash_to_point(uint8_t* point) {
|
||||||
|
rct::key key_point;
|
||||||
|
ge_p3 e_p3;
|
||||||
|
memcpy(key_point.bytes, point, 32);
|
||||||
|
rct::hash_to_p3(e_p3, key_point);
|
||||||
|
ge_p3_tobytes(point, &e_p3);
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t* c_generate_bp(uint8_t* seed, uint8_t len, uint64_t* a, uint8_t* m) {
|
||||||
|
lock guard(rng_mutex);
|
||||||
|
rng(seed);
|
||||||
|
|
||||||
|
rct::keyV masks;
|
||||||
|
std::vector<uint64_t> amounts;
|
||||||
|
masks.resize(len);
|
||||||
|
amounts.resize(len);
|
||||||
|
for (uint8_t i = 0; i < len; i++) {
|
||||||
|
memcpy(masks[i].bytes, m + (i * 32), 32);
|
||||||
|
amounts[i] = a[i];
|
||||||
|
}
|
||||||
|
|
||||||
|
rct::Bulletproof bp = rct::bulletproof_PROVE(amounts, masks);
|
||||||
|
|
||||||
|
std::stringstream ss;
|
||||||
|
binary_archive<true> ba(ss);
|
||||||
|
::serialization::serialize(ba, bp);
|
||||||
|
uint8_t* res = (uint8_t*) calloc(ss.str().size(), 1);
|
||||||
|
memcpy(res, ss.str().data(), ss.str().size());
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool c_verify_bp(
|
||||||
|
uint8_t* seed,
|
||||||
|
uint s_len,
|
||||||
|
uint8_t* s,
|
||||||
|
uint8_t c_len,
|
||||||
|
uint8_t* c
|
||||||
|
) {
|
||||||
|
// BPs are batch verified which use RNG based weights to ensure individual
|
||||||
|
// integrity
|
||||||
|
// That's why this must also have control over RNG, to prevent interrupting
|
||||||
|
// multisig signing while not using known seeds. Considering this doesn't
|
||||||
|
// actually define a batch, and it's only verifying a single BP,
|
||||||
|
// it'd probably be fine, but...
|
||||||
|
lock guard(rng_mutex);
|
||||||
|
rng(seed);
|
||||||
|
|
||||||
|
rct::Bulletproof bp;
|
||||||
|
std::stringstream ss;
|
||||||
|
std::string str;
|
||||||
|
str.assign((char*) s, (size_t) s_len);
|
||||||
|
ss << str;
|
||||||
|
binary_archive<false> ba(ss);
|
||||||
|
::serialization::serialize(ba, bp);
|
||||||
|
if (!ss.good()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
bp.V.resize(c_len);
|
||||||
|
for (uint8_t i = 0; i < c_len; i++) {
|
||||||
|
memcpy(bp.V[i].bytes, &c[i * 32], 32);
|
||||||
|
}
|
||||||
|
|
||||||
|
try { return rct::bulletproof_VERIFY(bp); } catch(...) { return false; }
|
||||||
|
}
|
||||||
|
|
||||||
|
bool c_verify_clsag(
|
||||||
|
uint s_len,
|
||||||
|
uint8_t* s,
|
||||||
|
uint8_t k_len,
|
||||||
|
uint8_t* k,
|
||||||
|
uint8_t* I,
|
||||||
|
uint8_t* p,
|
||||||
|
uint8_t* m
|
||||||
|
) {
|
||||||
|
rct::clsag clsag;
|
||||||
|
std::stringstream ss;
|
||||||
|
std::string str;
|
||||||
|
str.assign((char*) s, (size_t) s_len);
|
||||||
|
ss << str;
|
||||||
|
binary_archive<false> ba(ss);
|
||||||
|
::serialization::serialize(ba, clsag);
|
||||||
|
if (!ss.good()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
rct::ctkeyV keys;
|
||||||
|
keys.resize(k_len);
|
||||||
|
for (uint8_t i = 0; i < k_len; i++) {
|
||||||
|
memcpy(keys[i].dest.bytes, &k[(i * 2) * 32], 32);
|
||||||
|
memcpy(keys[i].mask.bytes, &k[((i * 2) + 1) * 32], 32);
|
||||||
|
}
|
||||||
|
|
||||||
|
memcpy(clsag.I.bytes, I, 32);
|
||||||
|
|
||||||
|
rct::key pseudo_out;
|
||||||
|
memcpy(pseudo_out.bytes, p, 32);
|
||||||
|
|
||||||
|
rct::key msg;
|
||||||
|
memcpy(msg.bytes, m, 32);
|
||||||
|
|
||||||
|
try {
|
||||||
|
return verRctCLSAGSimple(msg, clsag, keys, pseudo_out);
|
||||||
|
} catch(...) { return false; }
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "monero-generators"
|
|
||||||
version = "0.3.0"
|
|
||||||
description = "Monero's hash_to_point and generators"
|
|
||||||
license = "MIT"
|
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
|
||||||
edition = "2021"
|
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
|
||||||
all-features = true
|
|
||||||
rustdoc-args = ["--cfg", "docsrs"]
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
|
|
||||||
|
|
||||||
subtle = { version = "^2.4", default-features = false }
|
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false }
|
|
||||||
|
|
||||||
curve25519-dalek = { version = "3", default-features = false }
|
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
|
||||||
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }
|
|
||||||
|
|
||||||
[features]
|
|
||||||
std = ["std-shims/std"]
|
|
||||||
default = ["std"]
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
# Monero Generators
|
|
||||||
|
|
||||||
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
|
||||||
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
|
||||||
`hash_to_point` here, is included, as needed to generate generators.
|
|
||||||
|
|
||||||
This library is usable under no_std when the `alloc` feature is enabled.
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
use subtle::ConditionallySelectable;
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
|
||||||
|
|
||||||
use group::ff::{Field, PrimeField};
|
|
||||||
use dalek_ff_group::FieldElement;
|
|
||||||
|
|
||||||
use crate::hash;
|
|
||||||
|
|
||||||
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
|
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
|
||||||
#[allow(non_snake_case, clippy::unreadable_literal)]
|
|
||||||
let A = FieldElement::from(486662u64);
|
|
||||||
|
|
||||||
let v = FieldElement::from_square(hash(&bytes)).double();
|
|
||||||
let w = v + FieldElement::ONE;
|
|
||||||
let x = w.square() + (-A.square() * v);
|
|
||||||
|
|
||||||
// This isn't the complete X, yet its initial value
|
|
||||||
// We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
|
|
||||||
// While inefficient, it solves API boundaries and reduces the amount of work done here
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let X = {
|
|
||||||
let u = w;
|
|
||||||
let v = x;
|
|
||||||
let v3 = v * v * v;
|
|
||||||
let uv3 = u * v3;
|
|
||||||
let v7 = v3 * v3 * v;
|
|
||||||
let uv7 = u * v7;
|
|
||||||
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
|
|
||||||
};
|
|
||||||
let x = X.square() * x;
|
|
||||||
|
|
||||||
let y = w - x;
|
|
||||||
let non_zero_0 = !y.is_zero();
|
|
||||||
let y_if_non_zero_0 = w + x;
|
|
||||||
let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());
|
|
||||||
|
|
||||||
let mut z = -A;
|
|
||||||
z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let Z = z + w;
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut Y = z - w;
|
|
||||||
|
|
||||||
Y *= Z.invert().unwrap();
|
|
||||||
let mut bytes = Y.to_repr();
|
|
||||||
bytes[31] |= sign.unwrap_u8() << 7;
|
|
||||||
|
|
||||||
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
|
|
||||||
}
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
|
||||||
//!
|
|
||||||
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
|
||||||
//! `hash_to_point` here, is included, as needed to generate generators.
|
|
||||||
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
|
|
||||||
use std_shims::sync::OnceLock;
|
|
||||||
|
|
||||||
use sha3::{Digest, Keccak256};
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};
|
|
||||||
|
|
||||||
use group::{Group, GroupEncoding};
|
|
||||||
use dalek_ff_group::EdwardsPoint;
|
|
||||||
|
|
||||||
mod varint;
|
|
||||||
use varint::write_varint;
|
|
||||||
|
|
||||||
mod hash_to_point;
|
|
||||||
pub use hash_to_point::hash_to_point;
|
|
||||||
|
|
||||||
fn hash(data: &[u8]) -> [u8; 32] {
|
|
||||||
Keccak256::digest(data).into()
|
|
||||||
}
|
|
||||||
|
|
||||||
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
|
|
||||||
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub fn H() -> DalekPoint {
|
|
||||||
*H_CELL.get_or_init(|| {
|
|
||||||
CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
|
|
||||||
.decompress()
|
|
||||||
.unwrap()
|
|
||||||
.mul_by_cofactor()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
|
|
||||||
/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
|
|
||||||
H_POW_2_CELL.get_or_init(|| {
|
|
||||||
let mut res = [H(); 64];
|
|
||||||
for i in 1 .. 64 {
|
|
||||||
res[i] = res[i - 1] + res[i - 1];
|
|
||||||
}
|
|
||||||
res
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const MAX_M: usize = 16;
|
|
||||||
const N: usize = 64;
|
|
||||||
const MAX_MN: usize = MAX_M * N;
|
|
||||||
|
|
||||||
/// Container struct for Bulletproofs(+) generators.
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
pub struct Generators {
|
|
||||||
pub G: [EdwardsPoint; MAX_MN],
|
|
||||||
pub H: [EdwardsPoint; MAX_MN],
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
|
||||||
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
|
||||||
let mut res =
|
|
||||||
Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
|
|
||||||
for i in 0 .. MAX_MN {
|
|
||||||
let i = 2 * i;
|
|
||||||
|
|
||||||
let mut even = H().compress().to_bytes().to_vec();
|
|
||||||
even.extend(dst);
|
|
||||||
let mut odd = even.clone();
|
|
||||||
|
|
||||||
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
|
|
||||||
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
|
|
||||||
res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
|
|
||||||
res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
use std_shims::io::{self, Write};
|
|
||||||
|
|
||||||
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
|
||||||
|
|
||||||
#[allow(clippy::trivially_copy_pass_by_ref)] // &u64 is needed for API consistency
|
|
||||||
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
|
||||||
let mut varint = *varint;
|
|
||||||
while {
|
|
||||||
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
|
||||||
varint >>= 7;
|
|
||||||
if varint != 0 {
|
|
||||||
b |= VARINT_CONTINUATION_MASK;
|
|
||||||
}
|
|
||||||
w.write_all(&[b])?;
|
|
||||||
varint != 0
|
|
||||||
} {}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
@@ -1,155 +0,0 @@
|
|||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use serde::Deserialize;
|
|
||||||
use serde_json::json;
|
|
||||||
|
|
||||||
use monero_serai::{
|
|
||||||
transaction::Transaction,
|
|
||||||
block::Block,
|
|
||||||
rpc::{Rpc, HttpRpc},
|
|
||||||
};
|
|
||||||
|
|
||||||
use tokio::task::JoinHandle;
|
|
||||||
|
|
||||||
async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
|
|
||||||
let hash = rpc.get_block_hash(block_i).await.expect("couldn't get block {block_i}'s hash");
|
|
||||||
|
|
||||||
// TODO: Grab the JSON to also check it was deserialized correctly
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct BlockResponse {
|
|
||||||
blob: String,
|
|
||||||
}
|
|
||||||
let res: BlockResponse = rpc
|
|
||||||
.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) })))
|
|
||||||
.await
|
|
||||||
.expect("couldn't get block {block} via block.hash()");
|
|
||||||
|
|
||||||
let blob = hex::decode(res.blob).expect("node returned non-hex block");
|
|
||||||
let block = Block::read(&mut blob.as_slice()).expect("couldn't deserialize block {block_i}");
|
|
||||||
assert_eq!(block.hash(), hash, "hash differs");
|
|
||||||
assert_eq!(block.serialize(), blob, "serialization differs");
|
|
||||||
|
|
||||||
let txs_len = 1 + block.txs.len();
|
|
||||||
|
|
||||||
if !block.txs.is_empty() {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct TransactionResponse {
|
|
||||||
tx_hash: String,
|
|
||||||
as_hex: String,
|
|
||||||
}
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct TransactionsResponse {
|
|
||||||
#[serde(default)]
|
|
||||||
missed_tx: Vec<String>,
|
|
||||||
txs: Vec<TransactionResponse>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
|
|
||||||
let mut all_txs = vec![];
|
|
||||||
while !hashes_hex.is_empty() {
|
|
||||||
let txs: TransactionsResponse = rpc
|
|
||||||
.rpc_call(
|
|
||||||
"get_transactions",
|
|
||||||
Some(json!({
|
|
||||||
"txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
.expect("couldn't call get_transactions");
|
|
||||||
assert!(txs.missed_tx.is_empty());
|
|
||||||
all_txs.extend(txs.txs);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs.into_iter()) {
|
|
||||||
assert_eq!(
|
|
||||||
tx_res.tx_hash,
|
|
||||||
hex::encode(tx_hash),
|
|
||||||
"node returned a transaction with different hash"
|
|
||||||
);
|
|
||||||
|
|
||||||
let tx = Transaction::read(
|
|
||||||
&mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
|
|
||||||
)
|
|
||||||
.expect("couldn't deserialize transaction");
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
hex::encode(tx.serialize()),
|
|
||||||
tx_res.as_hex,
|
|
||||||
"Transaction serialization was different"
|
|
||||||
);
|
|
||||||
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
println!("Deserialized, hashed, and reserialized {block_i} with {} TXs", txs_len);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::main]
|
|
||||||
async fn main() {
|
|
||||||
let args = std::env::args().collect::<Vec<String>>();
|
|
||||||
|
|
||||||
// Read start block as the first arg
|
|
||||||
let mut block_i = args[1].parse::<usize>().expect("invalid start block");
|
|
||||||
|
|
||||||
// How many blocks to work on at once
|
|
||||||
let async_parallelism: usize =
|
|
||||||
args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");
|
|
||||||
|
|
||||||
// Read further args as RPC URLs
|
|
||||||
let default_nodes = vec![
|
|
||||||
"http://xmr-node.cakewallet.com:18081".to_string(),
|
|
||||||
"https://node.sethforprivacy.com".to_string(),
|
|
||||||
];
|
|
||||||
let mut specified_nodes = vec![];
|
|
||||||
{
|
|
||||||
let mut i = 0;
|
|
||||||
loop {
|
|
||||||
let Some(node) = args.get(3 + i) else { break };
|
|
||||||
specified_nodes.push(node.clone());
|
|
||||||
i += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };
|
|
||||||
|
|
||||||
let rpc = |url: String| {
|
|
||||||
HttpRpc::new(url.clone())
|
|
||||||
.unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
|
|
||||||
};
|
|
||||||
let main_rpc = rpc(nodes[0].clone());
|
|
||||||
let mut rpcs = vec![];
|
|
||||||
for i in 0 .. async_parallelism {
|
|
||||||
rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone())));
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut rpc_i = 0;
|
|
||||||
let mut handles: Vec<JoinHandle<()>> = vec![];
|
|
||||||
let mut height = 0;
|
|
||||||
loop {
|
|
||||||
let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
|
|
||||||
if new_height == height {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
height = new_height;
|
|
||||||
|
|
||||||
while block_i < height {
|
|
||||||
if handles.len() >= async_parallelism {
|
|
||||||
// Guarantee one handle is complete
|
|
||||||
handles.swap_remove(0).await.unwrap();
|
|
||||||
|
|
||||||
// Remove all of the finished handles
|
|
||||||
let mut i = 0;
|
|
||||||
while i < handles.len() {
|
|
||||||
if handles[i].is_finished() {
|
|
||||||
handles.swap_remove(i).await.unwrap();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
i += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
|
|
||||||
rpc_i = (rpc_i + 1) % rpcs.len();
|
|
||||||
block_i += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,31 +1,19 @@
|
|||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
hash,
|
|
||||||
merkle::merkle_root,
|
|
||||||
serialize::*,
|
serialize::*,
|
||||||
transaction::{Input, Transaction},
|
transaction::Transaction
|
||||||
};
|
};
|
||||||
|
|
||||||
const CORRECT_BLOCK_HASH_202612: [u8; 32] =
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a");
|
|
||||||
const EXISTING_BLOCK_HASH_202612: [u8; 32] =
|
|
||||||
hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698");
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct BlockHeader {
|
pub struct BlockHeader {
|
||||||
pub major_version: u64,
|
pub major_version: u64,
|
||||||
pub minor_version: u64,
|
pub minor_version: u64,
|
||||||
pub timestamp: u64,
|
pub timestamp: u64,
|
||||||
pub previous: [u8; 32],
|
pub previous: [u8; 32],
|
||||||
pub nonce: u32,
|
pub nonce: u32
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BlockHeader {
|
impl BlockHeader {
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
write_varint(&self.major_version, w)?;
|
write_varint(&self.major_version, w)?;
|
||||||
write_varint(&self.minor_version, w)?;
|
write_varint(&self.minor_version, w)?;
|
||||||
write_varint(&self.timestamp, w)?;
|
write_varint(&self.timestamp, w)?;
|
||||||
@@ -33,41 +21,30 @@ impl BlockHeader {
|
|||||||
w.write_all(&self.nonce.to_le_bytes())
|
w.write_all(&self.nonce.to_le_bytes())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
|
||||||
let mut serialized = vec![];
|
Ok(
|
||||||
self.write(&mut serialized).unwrap();
|
BlockHeader {
|
||||||
serialized
|
major_version: read_varint(r)?,
|
||||||
}
|
minor_version: read_varint(r)?,
|
||||||
|
timestamp: read_varint(r)?,
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
|
||||||
Ok(Self {
|
nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
|
||||||
major_version: read_varint(r)?,
|
}
|
||||||
minor_version: read_varint(r)?,
|
)
|
||||||
timestamp: read_varint(r)?,
|
|
||||||
previous: read_bytes(r)?,
|
|
||||||
nonce: read_bytes(r).map(u32::from_le_bytes)?,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub struct Block {
|
pub struct Block {
|
||||||
pub header: BlockHeader,
|
pub header: BlockHeader,
|
||||||
pub miner_tx: Transaction,
|
pub miner_tx: Transaction,
|
||||||
pub txs: Vec<[u8; 32]>,
|
pub txs: Vec<[u8; 32]>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Block {
|
impl Block {
|
||||||
pub fn number(&self) -> usize {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
match self.miner_tx.prefix.inputs.get(0) {
|
self.header.serialize(w)?;
|
||||||
Some(Input::Gen(number)) => (*number).try_into().unwrap(),
|
self.miner_tx.serialize(w)?;
|
||||||
_ => panic!("invalid block, miner TX didn't have a Input::Gen"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
self.header.write(w)?;
|
|
||||||
self.miner_tx.write(w)?;
|
|
||||||
write_varint(&self.txs.len().try_into().unwrap(), w)?;
|
write_varint(&self.txs.len().try_into().unwrap(), w)?;
|
||||||
for tx in &self.txs {
|
for tx in &self.txs {
|
||||||
w.write_all(tx)?;
|
w.write_all(tx)?;
|
||||||
@@ -75,42 +52,15 @@ impl Block {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn tx_merkle_root(&self) -> [u8; 32] {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
|
||||||
merkle_root(self.miner_tx.hash(), &self.txs)
|
Ok(
|
||||||
}
|
Block {
|
||||||
|
header: BlockHeader::deserialize(r)?,
|
||||||
fn serialize_hashable(&self) -> Vec<u8> {
|
miner_tx: Transaction::deserialize(r)?,
|
||||||
let mut blob = self.header.serialize();
|
txs: (0 .. read_varint(r)?).map(
|
||||||
blob.extend_from_slice(&self.tx_merkle_root());
|
|_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
|
||||||
write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
|
).collect::<Result<_, _>>()?
|
||||||
|
}
|
||||||
let mut out = Vec::with_capacity(8 + blob.len());
|
)
|
||||||
write_varint(&u64::try_from(blob.len()).unwrap(), &mut out).unwrap();
|
|
||||||
out.append(&mut blob);
|
|
||||||
|
|
||||||
out
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn hash(&self) -> [u8; 32] {
|
|
||||||
let hash = hash(&self.serialize_hashable());
|
|
||||||
if hash == CORRECT_BLOCK_HASH_202612 {
|
|
||||||
return EXISTING_BLOCK_HASH_202612;
|
|
||||||
};
|
|
||||||
|
|
||||||
hash
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
let mut serialized = vec![];
|
|
||||||
self.write(&mut serialized).unwrap();
|
|
||||||
serialized
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self {
|
|
||||||
header: BlockHeader::read(r)?,
|
|
||||||
miner_tx: Transaction::read(r)?,
|
|
||||||
txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
76
coins/monero/src/frost.rs
Normal file
76
coins/monero/src/frost.rs
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
use std::io::Read;
|
||||||
|
|
||||||
|
use thiserror::Error;
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
use group::{Group, GroupEncoding};
|
||||||
|
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
use dalek_ff_group as dfg;
|
||||||
|
use dleq::{Generators, DLEqProof};
|
||||||
|
|
||||||
|
#[derive(Clone, Error, Debug)]
|
||||||
|
pub enum MultisigError {
|
||||||
|
#[error("internal error ({0})")]
|
||||||
|
InternalError(String),
|
||||||
|
#[error("invalid discrete log equality proof")]
|
||||||
|
InvalidDLEqProof(u16),
|
||||||
|
#[error("invalid key image {0}")]
|
||||||
|
InvalidKeyImage(u16)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn transcript() -> RecommendedTranscript {
|
||||||
|
RecommendedTranscript::new(b"monero_key_image_dleq")
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
|
||||||
|
rng: &mut R,
|
||||||
|
H: EdwardsPoint,
|
||||||
|
x: Scalar
|
||||||
|
) -> Vec<u8> {
|
||||||
|
let mut res = Vec::with_capacity(64);
|
||||||
|
DLEqProof::prove(
|
||||||
|
rng,
|
||||||
|
// Doesn't take in a larger transcript object due to the usage of this
|
||||||
|
// Every prover would immediately write their own DLEq proof, when they can only do so in
|
||||||
|
// the proper order if they want to reach consensus
|
||||||
|
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
|
||||||
|
// merge later in some form, when it should instead just merge xH (as it does)
|
||||||
|
&mut transcript(),
|
||||||
|
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
|
||||||
|
dfg::Scalar(x)
|
||||||
|
).serialize(&mut res).unwrap();
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub(crate) fn read_dleq<Re: Read>(
|
||||||
|
serialized: &mut Re,
|
||||||
|
H: EdwardsPoint,
|
||||||
|
l: u16,
|
||||||
|
xG: dfg::EdwardsPoint
|
||||||
|
) -> Result<dfg::EdwardsPoint, MultisigError> {
|
||||||
|
let mut bytes = [0; 32];
|
||||||
|
serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
|
||||||
|
// dfg ensures the point is torsion free
|
||||||
|
let xH = Option::<dfg::EdwardsPoint>::from(
|
||||||
|
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
|
||||||
|
)?;
|
||||||
|
// Ensure this is a canonical point
|
||||||
|
if xH.to_bytes() != bytes {
|
||||||
|
Err(MultisigError::InvalidDLEqProof(l))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
DLEqProof::<dfg::EdwardsPoint>::deserialize(
|
||||||
|
serialized
|
||||||
|
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
|
||||||
|
&mut transcript(),
|
||||||
|
Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
|
||||||
|
(xG, xH)
|
||||||
|
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
|
||||||
|
|
||||||
|
Ok(xH)
|
||||||
|
}
|
||||||
@@ -1,178 +1,100 @@
|
|||||||
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
use std::slice;
|
||||||
#![doc = include_str!("../README.md")]
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
|
||||||
|
|
||||||
#[cfg(not(feature = "std"))]
|
|
||||||
#[macro_use]
|
|
||||||
extern crate alloc;
|
|
||||||
|
|
||||||
use std_shims::{sync::OnceLock, io};
|
|
||||||
|
|
||||||
|
use lazy_static::lazy_static;
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use subtle::ConstantTimeEq;
|
||||||
|
|
||||||
use sha3::{Digest, Keccak256};
|
use tiny_keccak::{Hasher, Keccak};
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
use curve25519_dalek::{
|
||||||
|
constants::ED25519_BASEPOINT_TABLE,
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
|
||||||
|
};
|
||||||
|
|
||||||
pub use monero_generators::H;
|
#[cfg(feature = "multisig")]
|
||||||
|
pub mod frost;
|
||||||
mod merkle;
|
|
||||||
|
|
||||||
mod serialize;
|
mod serialize;
|
||||||
use serialize::{read_byte, read_u16};
|
|
||||||
|
|
||||||
/// RingCT structs and functionality.
|
|
||||||
pub mod ringct;
|
pub mod ringct;
|
||||||
use ringct::RctType;
|
|
||||||
|
|
||||||
/// Transaction structs.
|
|
||||||
pub mod transaction;
|
pub mod transaction;
|
||||||
/// Block structs.
|
|
||||||
pub mod block;
|
pub mod block;
|
||||||
|
|
||||||
/// Monero daemon RPC interface.
|
|
||||||
pub mod rpc;
|
pub mod rpc;
|
||||||
/// Wallet functionality, enabling scanning and sending transactions.
|
|
||||||
pub mod wallet;
|
pub mod wallet;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|
||||||
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
|
lazy_static! {
|
||||||
|
static ref H: EdwardsPoint = CompressedEdwardsY(
|
||||||
|
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
|
||||||
|
).decompress().unwrap();
|
||||||
|
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Function from libsodium our subsection of Monero relies on. Implementing it here means we don't
|
||||||
|
// need to link against libsodium
|
||||||
|
#[no_mangle]
|
||||||
|
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
|
||||||
|
isize::from(
|
||||||
|
slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
|
||||||
|
) - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
|
||||||
|
// generation function, and in order to not use Monero code (which would require propagating its
|
||||||
|
// license), the function was rewritten. It was rewritten with wide reduction, instead of rejection
|
||||||
|
// sampling however, hence the need for this function
|
||||||
|
#[no_mangle]
|
||||||
|
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
|
||||||
|
let res = Scalar::from_bytes_mod_order_wide(
|
||||||
|
std::slice::from_raw_parts(value, 64).try_into().unwrap()
|
||||||
|
);
|
||||||
|
for (i, b) in res.to_bytes().iter().enumerate() {
|
||||||
|
value.add(i).write(*b);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub(crate) fn INV_EIGHT() -> Scalar {
|
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||||
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Monero protocol version.
|
|
||||||
///
|
|
||||||
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
|
|
||||||
/// transactions supported. Accordingly, v16 should be used during v15.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
#[allow(non_camel_case_types)]
|
|
||||||
pub enum Protocol {
|
|
||||||
v14,
|
|
||||||
v16,
|
|
||||||
Custom { ring_len: usize, bp_plus: bool, optimal_rct_type: RctType },
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Protocol {
|
|
||||||
/// Amount of ring members under this protocol version.
|
|
||||||
pub const fn ring_len(&self) -> usize {
|
|
||||||
match self {
|
|
||||||
Self::v14 => 11,
|
|
||||||
Self::v16 => 16,
|
|
||||||
Self::Custom { ring_len, .. } => *ring_len,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
|
|
||||||
///
|
|
||||||
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
|
|
||||||
pub const fn bp_plus(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::v14 => false,
|
|
||||||
Self::v16 => true,
|
|
||||||
Self::Custom { bp_plus, .. } => *bp_plus,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Make this an Option when we support pre-RCT protocols
|
|
||||||
pub const fn optimal_rct_type(&self) -> RctType {
|
|
||||||
match self {
|
|
||||||
Self::v14 => RctType::Clsag,
|
|
||||||
Self::v16 => RctType::BulletproofsPlus,
|
|
||||||
Self::Custom { optimal_rct_type, .. } => *optimal_rct_type,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
Self::v14 => w.write_all(&[0, 14]),
|
|
||||||
Self::v16 => w.write_all(&[0, 16]),
|
|
||||||
Self::Custom { ring_len, bp_plus, optimal_rct_type } => {
|
|
||||||
// Custom, version 0
|
|
||||||
w.write_all(&[1, 0])?;
|
|
||||||
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
|
|
||||||
w.write_all(&[u8::from(*bp_plus)])?;
|
|
||||||
w.write_all(&[optimal_rct_type.to_byte()])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(match read_byte(r)? {
|
|
||||||
// Monero protocol
|
|
||||||
0 => match read_byte(r)? {
|
|
||||||
14 => Self::v14,
|
|
||||||
16 => Self::v16,
|
|
||||||
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized monero protocol"))?,
|
|
||||||
},
|
|
||||||
// Custom
|
|
||||||
1 => match read_byte(r)? {
|
|
||||||
0 => Self::Custom {
|
|
||||||
ring_len: read_u16(r)?.into(),
|
|
||||||
bp_plus: match read_byte(r)? {
|
|
||||||
0 => false,
|
|
||||||
1 => true,
|
|
||||||
_ => Err(io::Error::new(io::ErrorKind::Other, "invalid bool serialization"))?,
|
|
||||||
},
|
|
||||||
optimal_rct_type: RctType::from_byte(read_byte(r)?)
|
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RctType serialization"))?,
|
|
||||||
},
|
|
||||||
_ => {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "unrecognized custom protocol serialization"))?
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized protocol serialization"))?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Transparent structure representing a Pedersen commitment's contents.
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
|
||||||
pub struct Commitment {
|
pub struct Commitment {
|
||||||
pub mask: Scalar,
|
pub mask: Scalar,
|
||||||
pub amount: u64,
|
pub amount: u64
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Commitment {
|
impl Commitment {
|
||||||
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
|
pub fn zero() -> Commitment {
|
||||||
pub fn zero() -> Self {
|
Commitment { mask: Scalar::one(), amount: 0}
|
||||||
Self { mask: Scalar::one(), amount: 0 }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(mask: Scalar, amount: u64) -> Self {
|
pub fn new(mask: Scalar, amount: u64) -> Commitment {
|
||||||
Self { mask, amount }
|
Commitment { mask, amount }
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
|
|
||||||
pub fn calculate(&self) -> EdwardsPoint {
|
pub fn calculate(&self) -> EdwardsPoint {
|
||||||
(&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
|
(&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
|
// Allows using a modern rand as dalek's is notoriously dated
|
||||||
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
|
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
|
||||||
let mut r = [0; 64];
|
let mut r = [0; 64];
|
||||||
rng.fill_bytes(&mut r);
|
rng.fill_bytes(&mut r);
|
||||||
Scalar::from_bytes_mod_order_wide(&r)
|
Scalar::from_bytes_mod_order_wide(&r)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
|
pub fn hash(data: &[u8]) -> [u8; 32] {
|
||||||
Keccak256::digest(data).into()
|
let mut keccak = Keccak::v256();
|
||||||
|
keccak.update(data);
|
||||||
|
let mut res = [0; 32];
|
||||||
|
keccak.finalize(&mut res);
|
||||||
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Hash the provided data to a scalar via keccak256(data) % l.
|
|
||||||
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
let scalar = Scalar::from_bytes_mod_order(hash(data));
|
Scalar::from_bytes_mod_order(hash(&data))
|
||||||
// Monero will explicitly error in this case
|
|
||||||
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
|
||||||
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
|
||||||
// not generate/verify a proof we believe to be valid when it isn't
|
|
||||||
assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
|
|
||||||
scalar
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,55 +0,0 @@
|
|||||||
use std_shims::vec::Vec;
|
|
||||||
|
|
||||||
use crate::hash;
|
|
||||||
|
|
||||||
pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
|
|
||||||
match leafs.len() {
|
|
||||||
0 => root,
|
|
||||||
1 => hash(&[root, leafs[0]].concat()),
|
|
||||||
_ => {
|
|
||||||
let mut hashes = Vec::with_capacity(1 + leafs.len());
|
|
||||||
hashes.push(root);
|
|
||||||
hashes.extend(leafs);
|
|
||||||
|
|
||||||
// Monero preprocess this so the length is a power of 2
|
|
||||||
let mut high_pow_2 = 4; // 4 is the lowest value this can be
|
|
||||||
while high_pow_2 < hashes.len() {
|
|
||||||
high_pow_2 *= 2;
|
|
||||||
}
|
|
||||||
let low_pow_2 = high_pow_2 / 2;
|
|
||||||
|
|
||||||
// Merge right-most hashes until we're at the low_pow_2
|
|
||||||
{
|
|
||||||
let overage = hashes.len() - low_pow_2;
|
|
||||||
let mut rightmost = hashes.drain((low_pow_2 - overage) ..);
|
|
||||||
// This is true since we took overage from beneath and above low_pow_2, taking twice as
|
|
||||||
// many elements as overage
|
|
||||||
debug_assert_eq!(rightmost.len() % 2, 0);
|
|
||||||
|
|
||||||
let mut paired_hashes = Vec::with_capacity(overage);
|
|
||||||
while let Some(left) = rightmost.next() {
|
|
||||||
let right = rightmost.next().unwrap();
|
|
||||||
paired_hashes.push(hash(&[left.as_ref(), &right].concat()));
|
|
||||||
}
|
|
||||||
drop(rightmost);
|
|
||||||
|
|
||||||
hashes.extend(paired_hashes);
|
|
||||||
assert_eq!(hashes.len(), low_pow_2);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Do a traditional pairing off
|
|
||||||
let mut new_hashes = Vec::with_capacity(hashes.len() / 2);
|
|
||||||
while hashes.len() > 1 {
|
|
||||||
let mut i = 0;
|
|
||||||
while i < hashes.len() {
|
|
||||||
new_hashes.push(hash(&[hashes[i], hashes[i + 1]].concat()));
|
|
||||||
i += 2;
|
|
||||||
}
|
|
||||||
|
|
||||||
hashes = new_hashes;
|
|
||||||
new_hashes = Vec::with_capacity(hashes.len() / 2);
|
|
||||||
}
|
|
||||||
hashes[0]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
use core::fmt::Debug;
|
|
||||||
use std_shims::io::{self, Read, Write};
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
use curve25519_dalek::{traits::Identity, scalar::Scalar};
|
|
||||||
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
use monero_generators::H_pow_2;
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
use crate::hash_to_scalar;
|
|
||||||
use crate::serialize::*;
|
|
||||||
|
|
||||||
/// 64 Borromean ring signatures.
|
|
||||||
///
|
|
||||||
/// This type keeps the data as raw bytes as Monero has some transactions with unreduced scalars in
|
|
||||||
/// this field. While we could use `from_bytes_mod_order`, we'd then not be able to encode this
|
|
||||||
/// back into it's original form.
|
|
||||||
///
|
|
||||||
/// Those scalars also have a custom reduction algorithm...
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct BorromeanSignatures {
|
|
||||||
pub s0: [[u8; 32]; 64],
|
|
||||||
pub s1: [[u8; 32]; 64],
|
|
||||||
pub ee: [u8; 32],
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BorromeanSignatures {
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self { s0: read_array(read_bytes, r)?, s1: read_array(read_bytes, r)?, ee: read_bytes(r)? })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
for s0 in &self.s0 {
|
|
||||||
w.write_all(s0)?;
|
|
||||||
}
|
|
||||||
for s1 in &self.s1 {
|
|
||||||
w.write_all(s1)?;
|
|
||||||
}
|
|
||||||
w.write_all(&self.ee)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
|
|
||||||
let mut transcript = [0; 2048];
|
|
||||||
for i in 0 .. 64 {
|
|
||||||
// TODO: These aren't the correct reduction
|
|
||||||
// TODO: Can either of these be tightened?
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
|
||||||
&Scalar::from_bytes_mod_order(self.ee),
|
|
||||||
&keys_a[i],
|
|
||||||
&Scalar::from_bytes_mod_order(self.s0[i]),
|
|
||||||
);
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
|
||||||
&hash_to_scalar(LL.compress().as_bytes()),
|
|
||||||
&keys_b[i],
|
|
||||||
&Scalar::from_bytes_mod_order(self.s1[i]),
|
|
||||||
);
|
|
||||||
transcript[i .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: This isn't the correct reduction
|
|
||||||
// TODO: Can this be tightened to from_canonical_bytes?
|
|
||||||
hash_to_scalar(&transcript) == Scalar::from_bytes_mod_order(self.ee)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A range proof premised on Borromean ring signatures.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct BorromeanRange {
|
|
||||||
pub sigs: BorromeanSignatures,
|
|
||||||
pub bit_commitments: [EdwardsPoint; 64],
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BorromeanRange {
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self { sigs: BorromeanSignatures::read(r)?, bit_commitments: read_array(read_point, r)? })
|
|
||||||
}
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
self.sigs.write(w)?;
|
|
||||||
write_raw_vec(write_point, &self.bit_commitments, w)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
#[must_use]
|
|
||||||
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
|
||||||
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let H_pow_2 = H_pow_2();
|
|
||||||
let mut commitments_sub_one = [EdwardsPoint::identity(); 64];
|
|
||||||
for i in 0 .. 64 {
|
|
||||||
commitments_sub_one[i] = self.bit_commitments[i] - H_pow_2[i];
|
|
||||||
}
|
|
||||||
|
|
||||||
self.sigs.verify(&self.bit_commitments, &commitments_sub_one)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
161
coins/monero/src/ringct/bulletproofs.rs
Normal file
161
coins/monero/src/ringct/bulletproofs.rs
Normal file
@@ -0,0 +1,161 @@
|
|||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
use crate::{Commitment, wallet::TransactionError, serialize::*};
|
||||||
|
|
||||||
|
pub(crate) const MAX_OUTPUTS: usize = 16;
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
|
pub struct Bulletproofs {
|
||||||
|
pub A: EdwardsPoint,
|
||||||
|
pub S: EdwardsPoint,
|
||||||
|
pub T1: EdwardsPoint,
|
||||||
|
pub T2: EdwardsPoint,
|
||||||
|
pub taux: Scalar,
|
||||||
|
pub mu: Scalar,
|
||||||
|
pub L: Vec<EdwardsPoint>,
|
||||||
|
pub R: Vec<EdwardsPoint>,
|
||||||
|
pub a: Scalar,
|
||||||
|
pub b: Scalar,
|
||||||
|
pub t: Scalar
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bulletproofs {
|
||||||
|
pub(crate) fn fee_weight(outputs: usize) -> usize {
|
||||||
|
let proofs = 6 + usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
||||||
|
let len = (9 + (2 * proofs)) * 32;
|
||||||
|
|
||||||
|
let mut clawback = 0;
|
||||||
|
let padded = 1 << (proofs - 6);
|
||||||
|
if padded > 2 {
|
||||||
|
const BP_BASE: usize = 368;
|
||||||
|
clawback = ((BP_BASE * padded) - len) * 4 / 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
len + clawback
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
|
||||||
|
if outputs.len() > MAX_OUTPUTS {
|
||||||
|
return Err(TransactionError::TooManyOutputs)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut seed = [0; 32];
|
||||||
|
rng.fill_bytes(&mut seed);
|
||||||
|
|
||||||
|
let masks = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect::<Vec<_>>();
|
||||||
|
let amounts = outputs.iter().map(|commitment| commitment.amount).collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let res;
|
||||||
|
unsafe {
|
||||||
|
#[link(name = "wrapper")]
|
||||||
|
extern "C" {
|
||||||
|
fn free(ptr: *const u8);
|
||||||
|
fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
|
||||||
|
}
|
||||||
|
|
||||||
|
let ptr = c_generate_bp(
|
||||||
|
seed.as_ptr(),
|
||||||
|
u8::try_from(outputs.len()).unwrap(),
|
||||||
|
amounts.as_ptr(),
|
||||||
|
masks.as_ptr()
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut len = 6 * 32;
|
||||||
|
len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
|
||||||
|
res = Bulletproofs::deserialize(
|
||||||
|
// Wrap in a cursor to provide a mutable Reader
|
||||||
|
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
|
||||||
|
).expect("Couldn't deserialize Bulletproofs from Monero");
|
||||||
|
free(ptr);
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
||||||
|
if commitments.len() > 16 {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut seed = [0; 32];
|
||||||
|
rng.fill_bytes(&mut seed);
|
||||||
|
|
||||||
|
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
|
||||||
|
self.serialize(&mut serialized).unwrap();
|
||||||
|
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|
||||||
|
|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
|
||||||
|
).collect();
|
||||||
|
|
||||||
|
unsafe {
|
||||||
|
#[link(name = "wrapper")]
|
||||||
|
extern "C" {
|
||||||
|
fn c_verify_bp(
|
||||||
|
seed: *const u8,
|
||||||
|
serialized_len: usize,
|
||||||
|
serialized: *const u8,
|
||||||
|
commitments_len: u8,
|
||||||
|
commitments: *const [u8; 32]
|
||||||
|
) -> bool;
|
||||||
|
}
|
||||||
|
|
||||||
|
c_verify_bp(
|
||||||
|
seed.as_ptr(),
|
||||||
|
serialized.len(),
|
||||||
|
serialized.as_ptr(),
|
||||||
|
u8::try_from(commitments.len()).unwrap(),
|
||||||
|
commitments.as_ptr()
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize_core<
|
||||||
|
W: std::io::Write,
|
||||||
|
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
|
||||||
|
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
|
||||||
|
write_point(&self.A, w)?;
|
||||||
|
write_point(&self.S, w)?;
|
||||||
|
write_point(&self.T1, w)?;
|
||||||
|
write_point(&self.T2, w)?;
|
||||||
|
write_scalar(&self.taux, w)?;
|
||||||
|
write_scalar(&self.mu, w)?;
|
||||||
|
specific_write_vec(&self.L, w)?;
|
||||||
|
specific_write_vec(&self.R, w)?;
|
||||||
|
write_scalar(&self.a, w)?;
|
||||||
|
write_scalar(&self.b, w)?;
|
||||||
|
write_scalar(&self.t, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
|
self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
|
self.serialize_core(w, |points, w| write_vec(write_point, points, w))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
|
||||||
|
let bp = Bulletproofs {
|
||||||
|
A: read_point(r)?,
|
||||||
|
S: read_point(r)?,
|
||||||
|
T1: read_point(r)?,
|
||||||
|
T2: read_point(r)?,
|
||||||
|
taux: read_scalar(r)?,
|
||||||
|
mu: read_scalar(r)?,
|
||||||
|
L: read_vec(read_point, r)?,
|
||||||
|
R: read_vec(read_point, r)?,
|
||||||
|
a: read_scalar(r)?,
|
||||||
|
b: read_scalar(r)?,
|
||||||
|
t: read_scalar(r)?
|
||||||
|
};
|
||||||
|
|
||||||
|
if bp.L.len() != bp.R.len() {
|
||||||
|
Err(std::io::Error::new(std::io::ErrorKind::Other, "mismatched L/R len"))?;
|
||||||
|
}
|
||||||
|
Ok(bp)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
use std_shims::{vec::Vec, sync::OnceLock};
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use subtle::{Choice, ConditionallySelectable};
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;
|
|
||||||
|
|
||||||
use group::{ff::Field, Group};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use multiexp::multiexp as multiexp_const;
|
|
||||||
|
|
||||||
pub(crate) use monero_generators::Generators;
|
|
||||||
|
|
||||||
use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
|
|
||||||
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
pub(crate) fn INV_EIGHT() -> Scalar {
|
|
||||||
Scalar(DALEK_INV_EIGHT())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[inline]
|
|
||||||
pub(crate) fn H() -> EdwardsPoint {
|
|
||||||
EdwardsPoint(DALEK_H())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
|
|
||||||
Scalar(dalek_hash(data))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Components common between variants

// Maximum amount of commitments provable within a single proof
pub(crate) const MAX_M: usize = 16;
// log2 of the amount of bits a single range proof covers
pub(crate) const LOG_N: usize = 6; // 1 << LOG_N == N
// Amount of bits proven per commitment (values are in [0 .. 2^64))
pub(crate) const N: usize = 64;
|
|
||||||
|
|
||||||
pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
|
|
||||||
multiexp_const(pairs) * INV_EIGHT()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn vector_exponent(
|
|
||||||
generators: &Generators,
|
|
||||||
a: &ScalarVector,
|
|
||||||
b: &ScalarVector,
|
|
||||||
) -> EdwardsPoint {
|
|
||||||
debug_assert_eq!(a.len(), b.len());
|
|
||||||
(a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
|
|
||||||
let slice =
|
|
||||||
&[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
|
|
||||||
.concat();
|
|
||||||
*cache = hash_to_scalar(slice);
|
|
||||||
*cache
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
|
|
||||||
let mut logM = 0;
|
|
||||||
let mut M;
|
|
||||||
while {
|
|
||||||
M = 1 << logM;
|
|
||||||
(M <= MAX_M) && (M < outputs)
|
|
||||||
} {
|
|
||||||
logM += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
(logM + LOG_N, M, M * N)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
|
|
||||||
let (_, M, MN) = MN(commitments.len());
|
|
||||||
|
|
||||||
let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
|
|
||||||
let mut aL = ScalarVector::new(MN);
|
|
||||||
let mut aR = ScalarVector::new(MN);
|
|
||||||
|
|
||||||
for j in 0 .. M {
|
|
||||||
for i in (0 .. N).rev() {
|
|
||||||
let bit =
|
|
||||||
if j < sv.len() { Choice::from((sv[j][i / 8] >> (i % 8)) & 1) } else { Choice::from(0) };
|
|
||||||
aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
|
|
||||||
aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(aL, aR)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
|
|
||||||
commitments: C,
|
|
||||||
) -> (Scalar, Vec<EdwardsPoint>) {
|
|
||||||
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
|
|
||||||
(hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
|
|
||||||
rng: &mut R,
|
|
||||||
generators: &Generators,
|
|
||||||
aL: &ScalarVector,
|
|
||||||
aR: &ScalarVector,
|
|
||||||
) -> (Scalar, EdwardsPoint) {
|
|
||||||
let ar = Scalar::random(rng);
|
|
||||||
(ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn LR_statements(
|
|
||||||
a: &ScalarVector,
|
|
||||||
G_i: &[EdwardsPoint],
|
|
||||||
b: &ScalarVector,
|
|
||||||
H_i: &[EdwardsPoint],
|
|
||||||
cL: Scalar,
|
|
||||||
U: EdwardsPoint,
|
|
||||||
) -> Vec<(Scalar, EdwardsPoint)> {
|
|
||||||
let mut res = a
|
|
||||||
.0
|
|
||||||
.iter()
|
|
||||||
.copied()
|
|
||||||
.zip(G_i.iter().copied())
|
|
||||||
.chain(b.0.iter().copied().zip(H_i.iter().copied()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
res.push((cL, U));
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
|
|
||||||
pub(crate) fn TWO_N() -> &'static ScalarVector {
|
|
||||||
TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
|
|
||||||
let mut products = vec![Scalar::ZERO; 1 << w.len()];
|
|
||||||
products[0] = winv[0];
|
|
||||||
products[1] = w[0];
|
|
||||||
for j in 1 .. w.len() {
|
|
||||||
let mut slots = (1 << (j + 1)) - 1;
|
|
||||||
while slots > 0 {
|
|
||||||
products[slots] = products[slots / 2] * w[j];
|
|
||||||
products[slots - 1] = products[slots / 2] * winv[j];
|
|
||||||
slots = slots.saturating_sub(2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanity check as if the above failed to populate, it'd be critical
|
|
||||||
for w in &products {
|
|
||||||
debug_assert!(!bool::from(w.is_zero()));
|
|
||||||
}
|
|
||||||
|
|
||||||
products
|
|
||||||
}
|
|
||||||
@@ -1,179 +0,0 @@
|
|||||||
#![allow(non_snake_case)]
|
|
||||||
|
|
||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
|
|
||||||
use crate::{Commitment, wallet::TransactionError, serialize::*};
|
|
||||||
|
|
||||||
pub(crate) mod scalar_vector;
|
|
||||||
pub(crate) mod core;
|
|
||||||
use self::core::LOG_N;
|
|
||||||
|
|
||||||
pub(crate) mod original;
|
|
||||||
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
|
|
||||||
pub(crate) mod plus;
|
|
||||||
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;
|
|
||||||
|
|
||||||
pub(crate) use self::original::OriginalStruct;
|
|
||||||
pub(crate) use self::plus::PlusStruct;
|
|
||||||
|
|
||||||
// The maximum amount of outputs a single Bulletproof(+) may cover
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
|
|
||||||
|
|
||||||
/// Bulletproofs enum, supporting the original and plus formulations.
|
|
||||||
#[allow(clippy::large_enum_variant)]
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub enum Bulletproofs {
|
|
||||||
Original(OriginalStruct),
|
|
||||||
Plus(PlusStruct),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Bulletproofs {
|
|
||||||
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
|
|
||||||
let fields = if plus { 6 } else { 9 };
|
|
||||||
|
|
||||||
// TODO: Shouldn't this use u32/u64?
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
|
||||||
let padded_outputs = 1 << LR_len;
|
|
||||||
LR_len += LOG_N;
|
|
||||||
|
|
||||||
let len = (fields + (2 * LR_len)) * 32;
|
|
||||||
len +
|
|
||||||
if padded_outputs <= 2 {
|
|
||||||
0
|
|
||||||
} else {
|
|
||||||
let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
|
|
||||||
let size = (fields + (2 * LR_len)) * 32;
|
|
||||||
((base * padded_outputs) - size) * 4 / 5
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Prove the list of commitments are within [0 .. 2^64).
|
|
||||||
pub fn prove<R: RngCore + CryptoRng>(
|
|
||||||
rng: &mut R,
|
|
||||||
outputs: &[Commitment],
|
|
||||||
plus: bool,
|
|
||||||
) -> Result<Self, TransactionError> {
|
|
||||||
if outputs.len() > MAX_OUTPUTS {
|
|
||||||
return Err(TransactionError::TooManyOutputs)?;
|
|
||||||
}
|
|
||||||
Ok(if !plus {
|
|
||||||
Self::Plus(PlusStruct::prove(rng, outputs))
|
|
||||||
} else {
|
|
||||||
Self::Original(OriginalStruct::prove(rng, outputs))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Verify the given Bulletproofs.
|
|
||||||
#[must_use]
|
|
||||||
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Original(bp) => bp.verify(rng, commitments),
|
|
||||||
Self::Plus(bp) => bp.verify(rng, commitments),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
|
|
||||||
/// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
|
|
||||||
/// Returns true if the Bulletproofs are sane, regardless of their validity.
|
|
||||||
#[must_use]
|
|
||||||
pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
|
|
||||||
id: ID,
|
|
||||||
commitments: &[EdwardsPoint],
|
|
||||||
) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
|
||||||
Self::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
|
|
||||||
&self,
|
|
||||||
w: &mut W,
|
|
||||||
specific_write_vec: F,
|
|
||||||
) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
Self::Original(bp) => {
|
|
||||||
write_point(&bp.A, w)?;
|
|
||||||
write_point(&bp.S, w)?;
|
|
||||||
write_point(&bp.T1, w)?;
|
|
||||||
write_point(&bp.T2, w)?;
|
|
||||||
write_scalar(&bp.taux, w)?;
|
|
||||||
write_scalar(&bp.mu, w)?;
|
|
||||||
specific_write_vec(&bp.L, w)?;
|
|
||||||
specific_write_vec(&bp.R, w)?;
|
|
||||||
write_scalar(&bp.a, w)?;
|
|
||||||
write_scalar(&bp.b, w)?;
|
|
||||||
write_scalar(&bp.t, w)
|
|
||||||
}
|
|
||||||
|
|
||||||
Self::Plus(bp) => {
|
|
||||||
write_point(&bp.A, w)?;
|
|
||||||
write_point(&bp.A1, w)?;
|
|
||||||
write_point(&bp.B, w)?;
|
|
||||||
write_scalar(&bp.r1, w)?;
|
|
||||||
write_scalar(&bp.s1, w)?;
|
|
||||||
write_scalar(&bp.d1, w)?;
|
|
||||||
specific_write_vec(&bp.L, w)?;
|
|
||||||
specific_write_vec(&bp.R, w)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
self.write_core(w, |points, w| write_vec(write_point, points, w))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
let mut serialized = vec![];
|
|
||||||
self.write(&mut serialized).unwrap();
|
|
||||||
serialized
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read Bulletproofs.
|
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self::Original(OriginalStruct {
|
|
||||||
A: read_point(r)?,
|
|
||||||
S: read_point(r)?,
|
|
||||||
T1: read_point(r)?,
|
|
||||||
T2: read_point(r)?,
|
|
||||||
taux: read_scalar(r)?,
|
|
||||||
mu: read_scalar(r)?,
|
|
||||||
L: read_vec(read_point, r)?,
|
|
||||||
R: read_vec(read_point, r)?,
|
|
||||||
a: read_scalar(r)?,
|
|
||||||
b: read_scalar(r)?,
|
|
||||||
t: read_scalar(r)?,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Read Bulletproofs+.
|
|
||||||
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self::Plus(PlusStruct {
|
|
||||||
A: read_point(r)?,
|
|
||||||
A1: read_point(r)?,
|
|
||||||
B: read_point(r)?,
|
|
||||||
r1: read_scalar(r)?,
|
|
||||||
s1: read_scalar(r)?,
|
|
||||||
d1: read_scalar(r)?,
|
|
||||||
L: read_vec(read_point, r)?,
|
|
||||||
R: read_vec(read_point, r)?,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,308 +0,0 @@
|
|||||||
use std_shims::{vec::Vec, sync::OnceLock};
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
|
|
||||||
|
|
||||||
use group::{ff::Field, Group};
|
|
||||||
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
|
|
||||||
use crate::{Commitment, ringct::bulletproofs::core::*};
|
|
||||||
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
|
|
||||||
|
|
||||||
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
|
|
||||||
pub(crate) fn IP12() -> Scalar {
|
|
||||||
*IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct OriginalStruct {
|
|
||||||
pub(crate) A: DalekPoint,
|
|
||||||
pub(crate) S: DalekPoint,
|
|
||||||
pub(crate) T1: DalekPoint,
|
|
||||||
pub(crate) T2: DalekPoint,
|
|
||||||
pub(crate) taux: DalekScalar,
|
|
||||||
pub(crate) mu: DalekScalar,
|
|
||||||
pub(crate) L: Vec<DalekPoint>,
|
|
||||||
pub(crate) R: Vec<DalekPoint>,
|
|
||||||
pub(crate) a: DalekScalar,
|
|
||||||
pub(crate) b: DalekScalar,
|
|
||||||
pub(crate) t: DalekScalar,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl OriginalStruct {
|
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
|
|
||||||
let (logMN, M, MN) = MN(commitments.len());
|
|
||||||
|
|
||||||
let (aL, aR) = bit_decompose(commitments);
|
|
||||||
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
|
||||||
let (mut cache, _) = hash_commitments(commitments_points.clone());
|
|
||||||
|
|
||||||
let (sL, sR) =
|
|
||||||
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
|
|
||||||
|
|
||||||
let generators = GENERATORS();
|
|
||||||
let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
|
|
||||||
let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);
|
|
||||||
|
|
||||||
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
|
|
||||||
let mut cache = hash_to_scalar(&y.to_bytes());
|
|
||||||
let z = cache;
|
|
||||||
|
|
||||||
let l0 = &aL - z;
|
|
||||||
let l1 = sL;
|
|
||||||
|
|
||||||
let mut zero_twos = Vec::with_capacity(MN);
|
|
||||||
let zpow = ScalarVector::powers(z, M + 2);
|
|
||||||
for j in 0 .. M {
|
|
||||||
for i in 0 .. N {
|
|
||||||
zero_twos.push(zpow[j + 2] * TWO_N()[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let yMN = ScalarVector::powers(y, MN);
|
|
||||||
let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
|
|
||||||
let r1 = yMN * sR;
|
|
||||||
|
|
||||||
let (T1, T2, x, mut taux) = {
|
|
||||||
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
|
|
||||||
let t2 = inner_product(&l1, &r1);
|
|
||||||
|
|
||||||
let mut tau1 = Scalar::random(&mut *rng);
|
|
||||||
let mut tau2 = Scalar::random(&mut *rng);
|
|
||||||
|
|
||||||
let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
|
|
||||||
let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);
|
|
||||||
|
|
||||||
let x =
|
|
||||||
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
|
|
||||||
|
|
||||||
let taux = (tau2 * (x * x)) + (tau1 * x);
|
|
||||||
|
|
||||||
tau1.zeroize();
|
|
||||||
tau2.zeroize();
|
|
||||||
(T1, T2, x, taux)
|
|
||||||
};
|
|
||||||
|
|
||||||
let mu = (x * rho) + alpha;
|
|
||||||
alpha.zeroize();
|
|
||||||
rho.zeroize();
|
|
||||||
|
|
||||||
for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
|
|
||||||
taux += zpow[i + 2] * gamma;
|
|
||||||
}
|
|
||||||
|
|
||||||
let l = &l0 + &(l1 * x);
|
|
||||||
let r = &r0 + &(r1 * x);
|
|
||||||
|
|
||||||
let t = inner_product(&l, &r);
|
|
||||||
|
|
||||||
let x_ip =
|
|
||||||
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
|
|
||||||
|
|
||||||
let mut a = l;
|
|
||||||
let mut b = r;
|
|
||||||
|
|
||||||
let yinv = y.invert().unwrap();
|
|
||||||
let yinvpow = ScalarVector::powers(yinv, MN);
|
|
||||||
|
|
||||||
let mut G_proof = generators.G[.. a.len()].to_vec();
|
|
||||||
let mut H_proof = generators.H[.. a.len()].to_vec();
|
|
||||||
H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
|
|
||||||
let U = H() * x_ip;
|
|
||||||
|
|
||||||
let mut L = Vec::with_capacity(logMN);
|
|
||||||
let mut R = Vec::with_capacity(logMN);
|
|
||||||
|
|
||||||
while a.len() != 1 {
|
|
||||||
let (aL, aR) = a.split();
|
|
||||||
let (bL, bR) = b.split();
|
|
||||||
|
|
||||||
let cL = inner_product(&aL, &bR);
|
|
||||||
let cR = inner_product(&aR, &bL);
|
|
||||||
|
|
||||||
let (G_L, G_R) = G_proof.split_at(aL.len());
|
|
||||||
let (H_L, H_R) = H_proof.split_at(aL.len());
|
|
||||||
|
|
||||||
let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
|
|
||||||
let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
|
|
||||||
L.push(*L_i);
|
|
||||||
R.push(*R_i);
|
|
||||||
|
|
||||||
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
|
|
||||||
let winv = w.invert().unwrap();
|
|
||||||
|
|
||||||
a = (aL * w) + (aR * winv);
|
|
||||||
b = (bL * winv) + (bR * w);
|
|
||||||
|
|
||||||
if a.len() != 1 {
|
|
||||||
G_proof = hadamard_fold(G_L, G_R, winv, w);
|
|
||||||
H_proof = hadamard_fold(H_L, H_R, w, winv);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let res = Self {
|
|
||||||
A: *A,
|
|
||||||
S: *S,
|
|
||||||
T1: *T1,
|
|
||||||
T2: *T2,
|
|
||||||
taux: *taux,
|
|
||||||
mu: *mu,
|
|
||||||
L,
|
|
||||||
R,
|
|
||||||
a: *a[0],
|
|
||||||
b: *b[0],
|
|
||||||
t: *t,
|
|
||||||
};
|
|
||||||
debug_assert!(res.verify(rng, &commitments_points));
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
#[must_use]
|
|
||||||
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
|
||||||
id: ID,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
// Verify commitments are valid
|
|
||||||
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify L and R are properly sized
|
|
||||||
if self.L.len() != self.R.len() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (logMN, M, MN) = MN(commitments.len());
|
|
||||||
if self.L.len() != logMN {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rebuild all challenges
|
|
||||||
let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
|
|
||||||
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);
|
|
||||||
|
|
||||||
let z = hash_to_scalar(&y.to_bytes());
|
|
||||||
cache = z;
|
|
||||||
|
|
||||||
let x = hash_cache(
|
|
||||||
&mut cache,
|
|
||||||
&[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
|
|
||||||
);
|
|
||||||
|
|
||||||
let x_ip = hash_cache(
|
|
||||||
&mut cache,
|
|
||||||
&[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut w = Vec::with_capacity(logMN);
|
|
||||||
let mut winv = Vec::with_capacity(logMN);
|
|
||||||
for (L, R) in self.L.iter().zip(&self.R) {
|
|
||||||
w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
|
|
||||||
winv.push(cache.invert().unwrap());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert the proof from * INV_EIGHT to its actual form
|
|
||||||
let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());
|
|
||||||
|
|
||||||
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
|
|
||||||
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
|
|
||||||
let T1 = normalize(&self.T1);
|
|
||||||
let T2 = normalize(&self.T2);
|
|
||||||
let A = normalize(&self.A);
|
|
||||||
let S = normalize(&self.S);
|
|
||||||
|
|
||||||
let commitments = commitments.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
|
||||||
|
|
||||||
// Verify it
|
|
||||||
let mut proof = Vec::with_capacity(4 + commitments.len());
|
|
||||||
|
|
||||||
let zpow = ScalarVector::powers(z, M + 3);
|
|
||||||
let ip1y = ScalarVector::powers(y, M * N).sum();
|
|
||||||
let mut k = -(zpow[2] * ip1y);
|
|
||||||
for j in 1 ..= M {
|
|
||||||
k -= zpow[j + 2] * IP12();
|
|
||||||
}
|
|
||||||
let y1 = Scalar(self.t) - ((z * ip1y) + k);
|
|
||||||
proof.push((-y1, H()));
|
|
||||||
|
|
||||||
proof.push((-Scalar(self.taux), G));
|
|
||||||
|
|
||||||
for (j, commitment) in commitments.iter().enumerate() {
|
|
||||||
proof.push((zpow[j + 2], *commitment));
|
|
||||||
}
|
|
||||||
|
|
||||||
proof.push((x, T1));
|
|
||||||
proof.push((x * x, T2));
|
|
||||||
verifier.queue(&mut *rng, id, proof);
|
|
||||||
|
|
||||||
proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
|
|
||||||
let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
|
|
||||||
proof.push((z3, H()));
|
|
||||||
proof.push((-Scalar(self.mu), G));
|
|
||||||
|
|
||||||
proof.push((Scalar::ONE, A));
|
|
||||||
proof.push((x, S));
|
|
||||||
|
|
||||||
{
|
|
||||||
let ypow = ScalarVector::powers(y, MN);
|
|
||||||
let yinv = y.invert().unwrap();
|
|
||||||
let yinvpow = ScalarVector::powers(yinv, MN);
|
|
||||||
|
|
||||||
let w_cache = challenge_products(&w, &winv);
|
|
||||||
|
|
||||||
let generators = GENERATORS();
|
|
||||||
for i in 0 .. MN {
|
|
||||||
let g = (Scalar(self.a) * w_cache[i]) + z;
|
|
||||||
proof.push((-g, generators.G[i]));
|
|
||||||
|
|
||||||
let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
|
|
||||||
h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
|
|
||||||
proof.push((-h, generators.H[i]));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for i in 0 .. logMN {
|
|
||||||
proof.push((w[i] * w[i], L[i]));
|
|
||||||
proof.push((winv[i] * winv[i], R[i]));
|
|
||||||
}
|
|
||||||
verifier.queue(rng, id, proof);
|
|
||||||
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
let mut verifier = BatchVerifier::new(1);
|
|
||||||
if self.verify_core(rng, &mut verifier, (), commitments) {
|
|
||||||
verifier.verify_vartime()
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
|
||||||
id: ID,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
self.verify_core(rng, verifier, id, commitments)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,300 +0,0 @@
|
|||||||
use std_shims::{vec::Vec, sync::OnceLock};
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
|
|
||||||
|
|
||||||
use group::ff::Field;
|
|
||||||
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Commitment, hash,
|
|
||||||
ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
|
|
||||||
};
|
|
||||||
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
|
||||||
|
|
||||||
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
|
|
||||||
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
|
|
||||||
*TRANSCRIPT_CELL.get_or_init(|| {
|
|
||||||
EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
|
|
||||||
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
|
|
||||||
let (cache, commitments) = hash_commitments(commitments);
|
|
||||||
(hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
|
|
||||||
}
|
|
||||||
|
|
||||||
// d[j*N+i] = z**(2*(j+1)) * 2**i
|
|
||||||
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
|
|
||||||
let zpow = ScalarVector::even_powers(z, 2 * M);
|
|
||||||
let mut d = vec![Scalar::ZERO; MN];
|
|
||||||
for j in 0 .. M {
|
|
||||||
for i in 0 .. N {
|
|
||||||
d[(j * N) + i] = zpow[j] * TWO_N()[i];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
(zpow, ScalarVector(d))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct PlusStruct {
|
|
||||||
pub(crate) A: DalekPoint,
|
|
||||||
pub(crate) A1: DalekPoint,
|
|
||||||
pub(crate) B: DalekPoint,
|
|
||||||
pub(crate) r1: DalekScalar,
|
|
||||||
pub(crate) s1: DalekScalar,
|
|
||||||
pub(crate) d1: DalekScalar,
|
|
||||||
pub(crate) L: Vec<DalekPoint>,
|
|
||||||
pub(crate) R: Vec<DalekPoint>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PlusStruct {
|
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
|
|
||||||
let generators = GENERATORS();
|
|
||||||
|
|
||||||
let (logMN, M, MN) = MN(commitments.len());
|
|
||||||
|
|
||||||
let (aL, aR) = bit_decompose(commitments);
|
|
||||||
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
|
||||||
let (mut cache, _) = hash_plus(commitments_points.clone());
|
|
||||||
let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
|
|
||||||
|
|
||||||
let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
|
|
||||||
let mut cache = hash_to_scalar(&y.to_bytes());
|
|
||||||
let z = cache;
|
|
||||||
|
|
||||||
let (zpow, d) = d(z, M, MN);
|
|
||||||
|
|
||||||
let aL1 = aL - z;
|
|
||||||
|
|
||||||
let ypow = ScalarVector::powers(y, MN + 2);
|
|
||||||
let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
|
|
||||||
y_for_d.0.reverse();
|
|
||||||
let aR1 = (aR + z) + (y_for_d * d);
|
|
||||||
|
|
||||||
for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
|
|
||||||
alpha1 += zpow[j] * ypow[MN + 1] * gamma;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut a = aL1;
|
|
||||||
let mut b = aR1;
|
|
||||||
|
|
||||||
let yinv = y.invert().unwrap();
|
|
||||||
let yinvpow = ScalarVector::powers(yinv, MN);
|
|
||||||
|
|
||||||
let mut G_proof = generators.G[.. a.len()].to_vec();
|
|
||||||
let mut H_proof = generators.H[.. a.len()].to_vec();
|
|
||||||
|
|
||||||
let mut L = Vec::with_capacity(logMN);
|
|
||||||
let mut R = Vec::with_capacity(logMN);
|
|
||||||
|
|
||||||
while a.len() != 1 {
|
|
||||||
let (aL, aR) = a.split();
|
|
||||||
let (bL, bR) = b.split();
|
|
||||||
|
|
||||||
let cL = weighted_inner_product(&aL, &bR, y);
|
|
||||||
let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);
|
|
||||||
|
|
||||||
let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));
|
|
||||||
|
|
||||||
let (G_L, G_R) = G_proof.split_at(aL.len());
|
|
||||||
let (H_L, H_R) = H_proof.split_at(aL.len());
|
|
||||||
|
|
||||||
let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
|
|
||||||
L_i.push((dL, G));
|
|
||||||
let L_i = prove_multiexp(&L_i);
|
|
||||||
L.push(*L_i);
|
|
||||||
|
|
||||||
let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
|
|
||||||
R_i.push((dR, G));
|
|
||||||
let R_i = prove_multiexp(&R_i);
|
|
||||||
R.push(*R_i);
|
|
||||||
|
|
||||||
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
|
|
||||||
let winv = w.invert().unwrap();
|
|
||||||
|
|
||||||
G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
|
|
||||||
H_proof = hadamard_fold(H_L, H_R, w, winv);
|
|
||||||
|
|
||||||
a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
|
|
||||||
b = (bL * winv) + (bR * w);
|
|
||||||
|
|
||||||
alpha1 += (dL * (w * w)) + (dR * (winv * winv));
|
|
||||||
|
|
||||||
dL.zeroize();
|
|
||||||
dR.zeroize();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut r = Scalar::random(&mut *rng);
|
|
||||||
let mut s = Scalar::random(&mut *rng);
|
|
||||||
let mut d = Scalar::random(&mut *rng);
|
|
||||||
let mut eta = Scalar::random(&mut *rng);
|
|
||||||
|
|
||||||
let A1 = prove_multiexp(&[
|
|
||||||
(r, G_proof[0]),
|
|
||||||
(s, H_proof[0]),
|
|
||||||
(d, G),
|
|
||||||
((r * y * b[0]) + (s * y * a[0]), H()),
|
|
||||||
]);
|
|
||||||
let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
|
|
||||||
let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);
|
|
||||||
|
|
||||||
let r1 = (a[0] * e) + r;
|
|
||||||
r.zeroize();
|
|
||||||
let s1 = (b[0] * e) + s;
|
|
||||||
s.zeroize();
|
|
||||||
let d1 = ((d * e) + eta) + (alpha1 * (e * e));
|
|
||||||
d.zeroize();
|
|
||||||
eta.zeroize();
|
|
||||||
alpha1.zeroize();
|
|
||||||
|
|
||||||
let res = Self { A: *A, A1: *A1, B: *B, r1: *r1, s1: *s1, d1: *d1, L, R };
|
|
||||||
debug_assert!(res.verify(rng, &commitments_points));
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
#[must_use]
|
|
||||||
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
|
||||||
id: ID,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
// Verify commitments are valid
|
|
||||||
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify L and R are properly sized
|
|
||||||
if self.L.len() != self.R.len() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (logMN, M, MN) = MN(commitments.len());
|
|
||||||
if self.L.len() != logMN {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rebuild all challenges
|
|
||||||
let (mut cache, commitments) = hash_plus(commitments.iter().copied());
|
|
||||||
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
|
|
||||||
let yinv = y.invert().unwrap();
|
|
||||||
let z = hash_to_scalar(&y.to_bytes());
|
|
||||||
cache = z;
|
|
||||||
|
|
||||||
let mut w = Vec::with_capacity(logMN);
|
|
||||||
let mut winv = Vec::with_capacity(logMN);
|
|
||||||
for (L, R) in self.L.iter().zip(&self.R) {
|
|
||||||
w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
|
|
||||||
winv.push(cache.invert().unwrap());
|
|
||||||
}
|
|
||||||
|
|
||||||
let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);
|
|
||||||
|
|
||||||
// Convert the proof from * INV_EIGHT to its actual form
|
|
||||||
let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());
|
|
||||||
|
|
||||||
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
|
|
||||||
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
|
|
||||||
let A = normalize(&self.A);
|
|
||||||
let A1 = normalize(&self.A1);
|
|
||||||
let B = normalize(&self.B);
|
|
||||||
|
|
||||||
// Verify it
|
|
||||||
let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));
|
|
||||||
|
|
||||||
let mut yMN = y;
|
|
||||||
for _ in 0 .. logMN {
|
|
||||||
yMN *= yMN;
|
|
||||||
}
|
|
||||||
let yMNy = yMN * y;
|
|
||||||
|
|
||||||
let (zpow, d) = d(z, M, MN);
|
|
||||||
let zsq = zpow[0];
|
|
||||||
|
|
||||||
let esq = e * e;
|
|
||||||
let minus_esq = -esq;
|
|
||||||
let commitment_weight = minus_esq * yMNy;
|
|
||||||
for (i, commitment) in commitments.iter().map(EdwardsPoint::mul_by_cofactor).enumerate() {
|
|
||||||
proof.push((commitment_weight * zpow[i], commitment));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
|
|
||||||
// to a single addition under vartime for the first BP verified in the batch, which is expected
|
|
||||||
// to be much more significant
|
|
||||||
proof.push((Scalar::ONE, -B));
|
|
||||||
proof.push((-e, A1));
|
|
||||||
proof.push((minus_esq, A));
|
|
||||||
proof.push((Scalar(self.d1), G));
|
|
||||||
|
|
||||||
let d_sum = zpow.sum() * Scalar::from(u64::MAX);
|
|
||||||
let y_sum = weighted_powers(y, MN).sum();
|
|
||||||
proof.push((
|
|
||||||
Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
|
|
||||||
H(),
|
|
||||||
));
|
|
||||||
|
|
||||||
let w_cache = challenge_products(&w, &winv);
|
|
||||||
|
|
||||||
let mut e_r1_y = e * Scalar(self.r1);
|
|
||||||
let e_s1 = e * Scalar(self.s1);
|
|
||||||
let esq_z = esq * z;
|
|
||||||
let minus_esq_z = -esq_z;
|
|
||||||
let mut minus_esq_y = minus_esq * yMN;
|
|
||||||
|
|
||||||
let generators = GENERATORS();
|
|
||||||
for i in 0 .. MN {
|
|
||||||
proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
|
|
||||||
proof.push((
|
|
||||||
(e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
|
|
||||||
generators.H[i],
|
|
||||||
));
|
|
||||||
|
|
||||||
e_r1_y *= yinv;
|
|
||||||
minus_esq_y *= yinv;
|
|
||||||
}
|
|
||||||
|
|
||||||
for i in 0 .. logMN {
|
|
||||||
proof.push((minus_esq * w[i] * w[i], L[i]));
|
|
||||||
proof.push((minus_esq * winv[i] * winv[i], R[i]));
|
|
||||||
}
|
|
||||||
|
|
||||||
verifier.queue(rng, id, proof);
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
let mut verifier = BatchVerifier::new(1);
|
|
||||||
if self.verify_core(rng, &mut verifier, (), commitments) {
|
|
||||||
verifier.verify_vartime()
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[must_use]
|
|
||||||
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
|
|
||||||
&self,
|
|
||||||
rng: &mut R,
|
|
||||||
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
|
|
||||||
id: ID,
|
|
||||||
commitments: &[DalekPoint],
|
|
||||||
) -> bool {
|
|
||||||
self.verify_core(rng, verifier, id, commitments)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,137 +0,0 @@
|
|||||||
use core::ops::{Add, Sub, Mul, Index};
|
|
||||||
use std_shims::vec::Vec;
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
|
||||||
|
|
||||||
use group::ff::Field;
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use multiexp::multiexp;
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
|
||||||
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
|
||||||
macro_rules! math_op {
|
|
||||||
($Op: ident, $op: ident, $f: expr) => {
|
|
||||||
impl $Op<Scalar> for ScalarVector {
|
|
||||||
type Output = Self;
|
|
||||||
fn $op(self, b: Scalar) -> Self {
|
|
||||||
Self(self.0.iter().map(|a| $f((a, &b))).collect())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl $Op<Scalar> for &ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn $op(self, b: Scalar) -> ScalarVector {
|
|
||||||
ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl $Op<ScalarVector> for ScalarVector {
|
|
||||||
type Output = Self;
|
|
||||||
fn $op(self, b: Self) -> Self {
|
|
||||||
debug_assert_eq!(self.len(), b.len());
|
|
||||||
Self(self.0.iter().zip(b.0.iter()).map($f).collect())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl $Op<Self> for &ScalarVector {
|
|
||||||
type Output = ScalarVector;
|
|
||||||
fn $op(self, b: Self) -> ScalarVector {
|
|
||||||
debug_assert_eq!(self.len(), b.len());
|
|
||||||
ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
|
|
||||||
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
|
|
||||||
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);
|
|
||||||
|
|
||||||
impl ScalarVector {
|
|
||||||
pub(crate) fn new(len: usize) -> Self {
|
|
||||||
Self(vec![Scalar::ZERO; len])
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
|
|
||||||
debug_assert!(len != 0);
|
|
||||||
|
|
||||||
let mut res = Vec::with_capacity(len);
|
|
||||||
res.push(Scalar::ONE);
|
|
||||||
for i in 1 .. len {
|
|
||||||
res.push(res[i - 1] * x);
|
|
||||||
}
|
|
||||||
Self(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn even_powers(x: Scalar, pow: usize) -> Self {
|
|
||||||
debug_assert!(pow != 0);
|
|
||||||
// Verify pow is a power of two
|
|
||||||
debug_assert_eq!(((pow - 1) & pow), 0);
|
|
||||||
|
|
||||||
let xsq = x * x;
|
|
||||||
let mut res = Self(Vec::with_capacity(pow / 2));
|
|
||||||
res.0.push(xsq);
|
|
||||||
|
|
||||||
let mut prev = 2;
|
|
||||||
while prev < pow {
|
|
||||||
res.0.push(res[res.len() - 1] * xsq);
|
|
||||||
prev += 2;
|
|
||||||
}
|
|
||||||
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn sum(mut self) -> Scalar {
|
|
||||||
self.0.drain(..).sum()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn len(&self) -> usize {
|
|
||||||
self.0.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn split(self) -> (Self, Self) {
|
|
||||||
let (l, r) = self.0.split_at(self.0.len() / 2);
|
|
||||||
(Self(l.to_vec()), Self(r.to_vec()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Index<usize> for ScalarVector {
|
|
||||||
type Output = Scalar;
|
|
||||||
fn index(&self, index: usize) -> &Scalar {
|
|
||||||
&self.0[index]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
|
|
||||||
(a * b).sum()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
|
|
||||||
ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
|
|
||||||
// y ** 0 is not used as a power
|
|
||||||
(a * b * weighted_powers(y, a.len())).sum()
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Mul<&[EdwardsPoint]> for &ScalarVector {
|
|
||||||
type Output = EdwardsPoint;
|
|
||||||
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
|
|
||||||
debug_assert_eq!(self.len(), b.len());
|
|
||||||
multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn hadamard_fold(
|
|
||||||
l: &[EdwardsPoint],
|
|
||||||
r: &[EdwardsPoint],
|
|
||||||
a: Scalar,
|
|
||||||
b: Scalar,
|
|
||||||
) -> Vec<EdwardsPoint> {
|
|
||||||
let mut res = Vec::with_capacity(l.len() / 2);
|
|
||||||
for i in 0 .. l.len() {
|
|
||||||
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
|
|
||||||
}
|
|
||||||
res
|
|
||||||
}
|
|
||||||
@@ -1,70 +1,65 @@
|
|||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use core::ops::Deref;
|
use lazy_static::lazy_static;
|
||||||
use std_shims::{
|
use thiserror::Error;
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
|
||||||
use subtle::{ConstantTimeEq, Choice, CtOption};
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use curve25519_dalek::{
|
use curve25519_dalek::{
|
||||||
constants::ED25519_BASEPOINT_TABLE,
|
constants::ED25519_BASEPOINT_TABLE,
|
||||||
scalar::Scalar,
|
scalar::Scalar,
|
||||||
traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
|
traits::VartimePrecomputedMultiscalarMul,
|
||||||
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
|
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
INV_EIGHT, Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys,
|
Commitment, random_scalar, hash_to_scalar,
|
||||||
ringct::hash_to_point, serialize::*,
|
transaction::RING_LEN,
|
||||||
|
wallet::decoys::Decoys,
|
||||||
|
ringct::hash_to_point,
|
||||||
|
serialize::*
|
||||||
};
|
};
|
||||||
|
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
mod multisig;
|
mod multisig;
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
|
pub use multisig::{ClsagDetails, ClsagMultisig};
|
||||||
#[cfg(feature = "multisig")]
|
|
||||||
pub(crate) use multisig::add_key_image_share;
|
|
||||||
|
|
||||||
/// Errors returned when CLSAG signing fails.
|
lazy_static! {
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();
|
||||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
|
||||||
pub enum ClsagError {
|
|
||||||
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
|
||||||
InternalError(&'static str),
|
|
||||||
#[cfg_attr(feature = "std", error("invalid ring"))]
|
|
||||||
InvalidRing,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid ring member (member {0}, ring size {1})"))]
|
|
||||||
InvalidRingMember(u8, u8),
|
|
||||||
#[cfg_attr(feature = "std", error("invalid commitment"))]
|
|
||||||
InvalidCommitment,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid key image"))]
|
|
||||||
InvalidImage,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid D"))]
|
|
||||||
InvalidD,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid s"))]
|
|
||||||
InvalidS,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid c1"))]
|
|
||||||
InvalidC1,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Input being signed for.
|
#[derive(Clone, Error, Debug)]
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
pub enum ClsagError {
|
||||||
|
#[error("internal error ({0})")]
|
||||||
|
InternalError(String),
|
||||||
|
#[error("invalid ring member (member {0}, ring size {1})")]
|
||||||
|
InvalidRingMember(u8, u8),
|
||||||
|
#[error("invalid commitment")]
|
||||||
|
InvalidCommitment,
|
||||||
|
#[error("invalid D")]
|
||||||
|
InvalidD,
|
||||||
|
#[error("invalid s")]
|
||||||
|
InvalidS,
|
||||||
|
#[error("invalid c1")]
|
||||||
|
InvalidC1
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub struct ClsagInput {
|
pub struct ClsagInput {
|
||||||
// The actual commitment for the true spend
|
// The actual commitment for the true spend
|
||||||
pub(crate) commitment: Commitment,
|
pub commitment: Commitment,
|
||||||
// True spend index, offsets, and ring
|
// True spend index, offsets, and ring
|
||||||
pub(crate) decoys: Decoys,
|
pub decoys: Decoys
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ClsagInput {
|
impl ClsagInput {
|
||||||
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<Self, ClsagError> {
|
pub fn new(
|
||||||
|
commitment: Commitment,
|
||||||
|
decoys: Decoys
|
||||||
|
) -> Result<ClsagInput, ClsagError> {
|
||||||
let n = decoys.len();
|
let n = decoys.len();
|
||||||
if n > u8::MAX.into() {
|
if n > u8::MAX.into() {
|
||||||
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
|
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
|
||||||
}
|
}
|
||||||
let n = u8::try_from(n).unwrap();
|
let n = u8::try_from(n).unwrap();
|
||||||
if decoys.i >= n {
|
if decoys.i >= n {
|
||||||
@@ -76,14 +71,14 @@ impl ClsagInput {
|
|||||||
Err(ClsagError::InvalidCommitment)?;
|
Err(ClsagError::InvalidCommitment)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Self { commitment, decoys })
|
Ok(ClsagInput { commitment, decoys })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::large_enum_variant)]
|
|
||||||
enum Mode {
|
enum Mode {
|
||||||
Sign(usize, EdwardsPoint, EdwardsPoint),
|
Sign(usize, EdwardsPoint, EdwardsPoint),
|
||||||
Verify(Scalar),
|
#[cfg(feature = "experimental")]
|
||||||
|
Verify(Scalar)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
|
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
|
||||||
@@ -95,26 +90,22 @@ fn core(
|
|||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
D: &EdwardsPoint,
|
D: &EdwardsPoint,
|
||||||
s: &[Scalar],
|
s: &[Scalar],
|
||||||
A_c1: Mode,
|
A_c1: Mode
|
||||||
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
|
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
|
||||||
let n = ring.len();
|
let n = ring.len();
|
||||||
|
|
||||||
let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
|
let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
|
||||||
let D = D * INV_EIGHT();
|
let D = D * *INV_EIGHT;
|
||||||
|
|
||||||
// Generate the transcript
|
// Generate the transcript
|
||||||
// Instead of generating multiple, a single transcript is created and then edited as needed
|
// Instead of generating multiple, a single transcript is created and then edited as needed
|
||||||
const PREFIX: &[u8] = b"CLSAG_";
|
let mut to_hash = vec![];
|
||||||
#[rustfmt::skip]
|
to_hash.reserve_exact(((2 * n) + 5) * 32);
|
||||||
const AGG_0: &[u8] = b"agg_0";
|
const PREFIX: &[u8] = "CLSAG_".as_bytes();
|
||||||
#[rustfmt::skip]
|
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
|
||||||
const ROUND: &[u8] = b"round";
|
const ROUND: &[u8] = "round".as_bytes();
|
||||||
const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();
|
|
||||||
|
|
||||||
let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
|
|
||||||
to_hash.extend(PREFIX);
|
|
||||||
to_hash.extend(AGG_0);
|
to_hash.extend(AGG_0);
|
||||||
to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);
|
to_hash.extend([0; 32 - AGG_0.len()]);
|
||||||
|
|
||||||
let mut P = Vec::with_capacity(n);
|
let mut P = Vec::with_capacity(n);
|
||||||
for member in ring {
|
for member in ring {
|
||||||
@@ -134,7 +125,7 @@ fn core(
|
|||||||
// mu_P with agg_0
|
// mu_P with agg_0
|
||||||
let mu_P = hash_to_scalar(&to_hash);
|
let mu_P = hash_to_scalar(&to_hash);
|
||||||
// mu_C with agg_1
|
// mu_C with agg_1
|
||||||
to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
|
to_hash[AGG_0.len() - 1] = b'1';
|
||||||
let mu_C = hash_to_scalar(&to_hash);
|
let mu_C = hash_to_scalar(&to_hash);
|
||||||
|
|
||||||
// Truncate it for the round transcript, altering the DST as needed
|
// Truncate it for the round transcript, altering the DST as needed
|
||||||
@@ -158,8 +149,9 @@ fn core(
|
|||||||
to_hash.extend(A.compress().to_bytes());
|
to_hash.extend(A.compress().to_bytes());
|
||||||
to_hash.extend(AH.compress().to_bytes());
|
to_hash.extend(AH.compress().to_bytes());
|
||||||
c = hash_to_scalar(&to_hash);
|
c = hash_to_scalar(&to_hash);
|
||||||
}
|
},
|
||||||
|
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
Mode::Verify(c1) => {
|
Mode::Verify(c1) => {
|
||||||
start = 0;
|
start = 0;
|
||||||
end = n;
|
end = n;
|
||||||
@@ -168,12 +160,11 @@ fn core(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Perform the core loop
|
// Perform the core loop
|
||||||
let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
|
let mut c1 = None;
|
||||||
for i in (start .. end).map(|i| i % n) {
|
for i in (start .. end).map(|i| i % n) {
|
||||||
// This will only execute once and shouldn't need to be constant time. Making it constant time
|
if i == 0 {
|
||||||
// removes the risk of branch prediction creating timing differences depending on ring index
|
c1 = Some(c);
|
||||||
// however
|
}
|
||||||
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
|
|
||||||
|
|
||||||
let c_p = mu_P * c;
|
let c_p = mu_P * c;
|
||||||
let c_c = mu_C * c;
|
let c_c = mu_C * c;
|
||||||
@@ -181,7 +172,7 @@ fn core(
|
|||||||
let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
|
let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
|
||||||
let PH = hash_to_point(P[i]);
|
let PH = hash_to_point(P[i]);
|
||||||
// Shouldn't be an issue as all of the variables in this vartime statement are public
|
// Shouldn't be an issue as all of the variables in this vartime statement are public
|
||||||
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);
|
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul(&[c_p, c_c]);
|
||||||
|
|
||||||
to_hash.truncate(((2 * n) + 3) * 32);
|
to_hash.truncate(((2 * n) + 3) * 32);
|
||||||
to_hash.extend(L.compress().to_bytes());
|
to_hash.extend(L.compress().to_bytes());
|
||||||
@@ -193,18 +184,16 @@ fn core(
|
|||||||
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
|
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// CLSAG signature, as used in Monero.
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct Clsag {
|
pub struct Clsag {
|
||||||
pub D: EdwardsPoint,
|
pub D: EdwardsPoint,
|
||||||
pub s: Vec<Scalar>,
|
pub s: Vec<Scalar>,
|
||||||
pub c1: Scalar,
|
pub c1: Scalar
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clsag {
|
impl Clsag {
|
||||||
// Sign core is the extension of core as needed for signing, yet is shared between single signer
|
// Sign core is the extension of core as needed for signing, yet is shared between single signer
|
||||||
// and multisig, hence why it's still core
|
// and multisig, hence why it's still core
|
||||||
#[allow(clippy::many_single_char_names)]
|
|
||||||
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
|
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
I: &EdwardsPoint,
|
I: &EdwardsPoint,
|
||||||
@@ -212,8 +201,8 @@ impl Clsag {
|
|||||||
mask: Scalar,
|
mask: Scalar,
|
||||||
msg: &[u8; 32],
|
msg: &[u8; 32],
|
||||||
A: EdwardsPoint,
|
A: EdwardsPoint,
|
||||||
AH: EdwardsPoint,
|
AH: EdwardsPoint
|
||||||
) -> (Self, EdwardsPoint, Scalar, Scalar) {
|
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
|
||||||
let r: usize = input.decoys.i.into();
|
let r: usize = input.decoys.i.into();
|
||||||
|
|
||||||
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
|
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
|
||||||
@@ -225,51 +214,49 @@ impl Clsag {
|
|||||||
for _ in 0 .. input.decoys.ring.len() {
|
for _ in 0 .. input.decoys.ring.len() {
|
||||||
s.push(random_scalar(rng));
|
s.push(random_scalar(rng));
|
||||||
}
|
}
|
||||||
let ((D, p, c), c1) =
|
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
|
||||||
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
|
|
||||||
|
|
||||||
(Self { D, s, c1 }, pseudo_out, p, c * z)
|
(
|
||||||
|
Clsag { D, s, c1 },
|
||||||
|
pseudo_out,
|
||||||
|
p,
|
||||||
|
c * z
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate CLSAG signatures for the given inputs.
|
// Single signer CLSAG
|
||||||
/// inputs is of the form (private key, key image, input).
|
|
||||||
/// sum_outputs is for the sum of the outputs' commitment masks.
|
|
||||||
pub fn sign<R: RngCore + CryptoRng>(
|
pub fn sign<R: RngCore + CryptoRng>(
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
|
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
|
||||||
sum_outputs: Scalar,
|
sum_outputs: Scalar,
|
||||||
msg: [u8; 32],
|
msg: [u8; 32]
|
||||||
) -> Vec<(Self, EdwardsPoint)> {
|
) -> Vec<(Clsag, EdwardsPoint)> {
|
||||||
|
let nonce = random_scalar(rng);
|
||||||
|
let mut rand_source = [0; 64];
|
||||||
|
rng.fill_bytes(&mut rand_source);
|
||||||
|
|
||||||
let mut res = Vec::with_capacity(inputs.len());
|
let mut res = Vec::with_capacity(inputs.len());
|
||||||
let mut sum_pseudo_outs = Scalar::zero();
|
let mut sum_pseudo_outs = Scalar::zero();
|
||||||
for i in 0 .. inputs.len() {
|
for i in 0 .. inputs.len() {
|
||||||
let mask = if i == (inputs.len() - 1) {
|
let mut mask = random_scalar(rng);
|
||||||
sum_outputs - sum_pseudo_outs
|
if i == (inputs.len() - 1) {
|
||||||
|
mask = sum_outputs - sum_pseudo_outs;
|
||||||
} else {
|
} else {
|
||||||
let mask = random_scalar(rng);
|
|
||||||
sum_pseudo_outs += mask;
|
sum_pseudo_outs += mask;
|
||||||
mask
|
}
|
||||||
};
|
|
||||||
|
|
||||||
let mut nonce = Zeroizing::new(random_scalar(rng));
|
let mut rand_source = [0; 64];
|
||||||
let (mut clsag, pseudo_out, p, c) = Self::sign_core(
|
rng.fill_bytes(&mut rand_source);
|
||||||
|
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
|
||||||
rng,
|
rng,
|
||||||
&inputs[i].1,
|
&inputs[i].1,
|
||||||
&inputs[i].2,
|
&inputs[i].2,
|
||||||
mask,
|
mask,
|
||||||
&msg,
|
&msg,
|
||||||
nonce.deref() * &ED25519_BASEPOINT_TABLE,
|
&nonce * &ED25519_BASEPOINT_TABLE,
|
||||||
nonce.deref() *
|
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
|
||||||
hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
|
|
||||||
);
|
);
|
||||||
clsag.s[usize::from(inputs[i].2.decoys.i)] =
|
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
|
||||||
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
|
|
||||||
inputs[i].0.zeroize();
|
|
||||||
nonce.zeroize();
|
|
||||||
|
|
||||||
debug_assert!(clsag
|
|
||||||
.verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
|
|
||||||
.is_ok());
|
|
||||||
|
|
||||||
res.push((clsag, pseudo_out));
|
res.push((clsag, pseudo_out));
|
||||||
}
|
}
|
||||||
@@ -277,49 +264,97 @@ impl Clsag {
|
|||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Verify the CLSAG signature against the given Transaction data.
|
// Not extensively tested nor guaranteed to have expected parity with Monero
|
||||||
pub fn verify(
|
#[cfg(feature = "experimental")]
|
||||||
|
pub fn rust_verify(
|
||||||
&self,
|
&self,
|
||||||
ring: &[[EdwardsPoint; 2]],
|
ring: &[[EdwardsPoint; 2]],
|
||||||
I: &EdwardsPoint,
|
I: &EdwardsPoint,
|
||||||
pseudo_out: &EdwardsPoint,
|
pseudo_out: &EdwardsPoint,
|
||||||
msg: &[u8; 32],
|
msg: &[u8; 32]
|
||||||
) -> Result<(), ClsagError> {
|
) -> Result<(), ClsagError> {
|
||||||
// Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
|
let (_, c1) = core(
|
||||||
// here
|
ring,
|
||||||
if ring.is_empty() {
|
I,
|
||||||
Err(ClsagError::InvalidRing)?;
|
pseudo_out,
|
||||||
}
|
msg,
|
||||||
if ring.len() != self.s.len() {
|
&self.D.mul_by_cofactor(),
|
||||||
Err(ClsagError::InvalidS)?;
|
&self.s,
|
||||||
}
|
Mode::Verify(self.c1)
|
||||||
if I.is_identity() {
|
);
|
||||||
Err(ClsagError::InvalidImage)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let D = self.D.mul_by_cofactor();
|
|
||||||
if D.is_identity() {
|
|
||||||
Err(ClsagError::InvalidD)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
|
|
||||||
if c1 != self.c1 {
|
if c1 != self.c1 {
|
||||||
Err(ClsagError::InvalidC1)?;
|
Err(ClsagError::InvalidC1)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn fee_weight(ring_len: usize) -> usize {
|
pub(crate) fn fee_weight() -> usize {
|
||||||
(ring_len * 32) + 32 + 32
|
(RING_LEN * 32) + 32 + 32
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
write_raw_vec(write_scalar, &self.s, w)?;
|
write_raw_vec(write_scalar, &self.s, w)?;
|
||||||
w.write_all(&self.c1.to_bytes())?;
|
w.write_all(&self.c1.to_bytes())?;
|
||||||
write_point(&self.D, w)
|
write_point(&self.D, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Self> {
|
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
|
||||||
Ok(Self { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
|
Ok(
|
||||||
|
Clsag {
|
||||||
|
s: read_raw_vec(read_scalar, decoys, r)?,
|
||||||
|
c1: read_scalar(r)?,
|
||||||
|
D: read_point(r)?
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn verify(
|
||||||
|
&self,
|
||||||
|
ring: &[[EdwardsPoint; 2]],
|
||||||
|
I: &EdwardsPoint,
|
||||||
|
pseudo_out: &EdwardsPoint,
|
||||||
|
msg: &[u8; 32]
|
||||||
|
) -> Result<(), ClsagError> {
|
||||||
|
// Serialize it to pass the struct to Monero without extensive FFI
|
||||||
|
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
|
||||||
|
write_varint(&self.s.len().try_into().unwrap(), &mut serialized).unwrap();
|
||||||
|
self.serialize(&mut serialized).unwrap();
|
||||||
|
|
||||||
|
let I_bytes = I.compress().to_bytes();
|
||||||
|
|
||||||
|
let mut ring_bytes = vec![];
|
||||||
|
for member in ring {
|
||||||
|
ring_bytes.extend(&member[0].compress().to_bytes());
|
||||||
|
ring_bytes.extend(&member[1].compress().to_bytes());
|
||||||
|
}
|
||||||
|
|
||||||
|
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
|
||||||
|
|
||||||
|
unsafe {
|
||||||
|
// Uses Monero's C verification function to ensure compatibility with Monero
|
||||||
|
#[link(name = "wrapper")]
|
||||||
|
extern "C" {
|
||||||
|
pub(crate) fn c_verify_clsag(
|
||||||
|
serialized_len: usize,
|
||||||
|
serialized: *const u8,
|
||||||
|
ring_size: u8,
|
||||||
|
ring: *const u8,
|
||||||
|
I: *const u8,
|
||||||
|
pseudo_out: *const u8,
|
||||||
|
msg: *const u8
|
||||||
|
) -> bool;
|
||||||
|
}
|
||||||
|
|
||||||
|
if c_verify_clsag(
|
||||||
|
serialized.len(), serialized.as_ptr(),
|
||||||
|
u8::try_from(ring.len()).map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
|
||||||
|
ring_bytes.as_ptr(),
|
||||||
|
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
|
||||||
|
) {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(ClsagError::InvalidC1)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,58 +1,44 @@
|
|||||||
use core::{ops::Deref, fmt::Debug};
|
use core::fmt::Debug;
|
||||||
use std_shims::{
|
use std::{io::Read, sync::{Arc, RwLock}};
|
||||||
sync::Arc,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
use std::sync::RwLock;
|
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
use rand_core::{RngCore, CryptoRng, SeedableRng};
|
||||||
use rand_chacha::ChaCha20Rng;
|
use rand_chacha::ChaCha12Rng;
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
|
||||||
|
|
||||||
use curve25519_dalek::{
|
use curve25519_dalek::{
|
||||||
|
constants::ED25519_BASEPOINT_TABLE,
|
||||||
traits::{Identity, IsIdentity},
|
traits::{Identity, IsIdentity},
|
||||||
scalar::Scalar,
|
scalar::Scalar,
|
||||||
edwards::EdwardsPoint,
|
edwards::EdwardsPoint
|
||||||
};
|
};
|
||||||
|
|
||||||
use group::{ff::Field, Group, GroupEncoding};
|
use group::Group;
|
||||||
|
|
||||||
use transcript::{Transcript, RecommendedTranscript};
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
use frost::{curve::Ed25519, FrostError, FrostView, algorithm::Algorithm};
|
||||||
use dalek_ff_group as dfg;
|
use dalek_ff_group as dfg;
|
||||||
use dleq::DLEqProof;
|
|
||||||
use frost::{
|
|
||||||
dkg::lagrange,
|
|
||||||
curve::Ed25519,
|
|
||||||
Participant, FrostError, ThresholdKeys, ThresholdView,
|
|
||||||
algorithm::{WriteAddendum, Algorithm},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::ringct::{
|
use crate::{
|
||||||
hash_to_point,
|
frost::{MultisigError, write_dleq, read_dleq},
|
||||||
clsag::{ClsagInput, Clsag},
|
ringct::{hash_to_point, clsag::{ClsagInput, Clsag}}
|
||||||
};
|
};
|
||||||
|
|
||||||
fn dleq_transcript() -> RecommendedTranscript {
|
|
||||||
RecommendedTranscript::new(b"monero_key_image_dleq")
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClsagInput {
|
impl ClsagInput {
|
||||||
fn transcript<T: Transcript>(&self, transcript: &mut T) {
|
fn transcript<T: Transcript>(&self, transcript: &mut T) {
|
||||||
// Doesn't domain separate as this is considered part of the larger CLSAG proof
|
// Doesn't domain separate as this is considered part of the larger CLSAG proof
|
||||||
|
|
||||||
// Ring index
|
// Ring index
|
||||||
transcript.append_message(b"real_spend", [self.decoys.i]);
|
transcript.append_message(b"ring_index", &[self.decoys.i]);
|
||||||
|
|
||||||
// Ring
|
// Ring
|
||||||
for (i, pair) in self.decoys.ring.iter().enumerate() {
|
let mut ring = vec![];
|
||||||
|
for pair in &self.decoys.ring {
|
||||||
// Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
|
// Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
|
||||||
// They're just a unreliable reference to this data which will be included in the message
|
// They're just a unreliable reference to this data which will be included in the message
|
||||||
// if in use
|
// if in use
|
||||||
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
|
ring.extend(&pair[0].compress().to_bytes());
|
||||||
transcript.append_message(b"key", pair[0].compress().to_bytes());
|
ring.extend(&pair[1].compress().to_bytes());
|
||||||
transcript.append_message(b"commitment", pair[1].compress().to_bytes());
|
|
||||||
}
|
}
|
||||||
|
transcript.append_message(b"ring", &ring);
|
||||||
|
|
||||||
// Doesn't include the commitment's parts as the above ring + index includes the commitment
|
// Doesn't include the commitment's parts as the above ring + index includes the commitment
|
||||||
// The only potential malleability would be if the G/H relationship is known breaking the
|
// The only potential malleability would be if the G/H relationship is known breaking the
|
||||||
@@ -60,77 +46,66 @@ impl ClsagInput {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// CLSAG input and the mask to use for it.
|
#[derive(Clone, Debug)]
|
||||||
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
|
||||||
pub struct ClsagDetails {
|
pub struct ClsagDetails {
|
||||||
input: ClsagInput,
|
input: ClsagInput,
|
||||||
mask: Scalar,
|
mask: Scalar
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ClsagDetails {
|
impl ClsagDetails {
|
||||||
pub fn new(input: ClsagInput, mask: Scalar) -> Self {
|
pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
|
||||||
Self { input, mask }
|
ClsagDetails { input, mask }
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Addendum produced during the FROST signing process with relevant data.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
|
|
||||||
pub struct ClsagAddendum {
|
|
||||||
pub(crate) key_image: dfg::EdwardsPoint,
|
|
||||||
dleq: DLEqProof<dfg::EdwardsPoint>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WriteAddendum for ClsagAddendum {
|
|
||||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
|
||||||
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
|
|
||||||
self.dleq.write(writer)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
struct Interim {
|
struct Interim {
|
||||||
p: Scalar,
|
p: Scalar,
|
||||||
c: Scalar,
|
c: Scalar,
|
||||||
|
|
||||||
clsag: Clsag,
|
clsag: Clsag,
|
||||||
pseudo_out: EdwardsPoint,
|
pseudo_out: EdwardsPoint
|
||||||
}
|
}
|
||||||
|
|
||||||
/// FROST algorithm for producing a CLSAG signature.
|
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct ClsagMultisig {
|
pub struct ClsagMultisig {
|
||||||
transcript: RecommendedTranscript,
|
transcript: RecommendedTranscript,
|
||||||
|
|
||||||
pub(crate) H: EdwardsPoint,
|
H: EdwardsPoint,
|
||||||
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
|
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
|
||||||
// an extra round
|
|
||||||
image: EdwardsPoint,
|
image: EdwardsPoint,
|
||||||
|
|
||||||
details: Arc<RwLock<Option<ClsagDetails>>>,
|
details: Arc<RwLock<Option<ClsagDetails>>>,
|
||||||
|
|
||||||
msg: Option<[u8; 32]>,
|
msg: Option<[u8; 32]>,
|
||||||
interim: Option<Interim>,
|
interim: Option<Interim>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ClsagMultisig {
|
impl ClsagMultisig {
|
||||||
pub fn new(
|
pub fn new(
|
||||||
transcript: RecommendedTranscript,
|
transcript: RecommendedTranscript,
|
||||||
output_key: EdwardsPoint,
|
output_key: EdwardsPoint,
|
||||||
details: Arc<RwLock<Option<ClsagDetails>>>,
|
details: Arc<RwLock<Option<ClsagDetails>>>
|
||||||
) -> Self {
|
) -> Result<ClsagMultisig, MultisigError> {
|
||||||
Self {
|
Ok(
|
||||||
transcript,
|
ClsagMultisig {
|
||||||
|
transcript,
|
||||||
|
|
||||||
H: hash_to_point(output_key),
|
H: hash_to_point(output_key),
|
||||||
image: EdwardsPoint::identity(),
|
image: EdwardsPoint::identity(),
|
||||||
|
|
||||||
details,
|
details,
|
||||||
|
|
||||||
msg: None,
|
msg: None,
|
||||||
interim: None,
|
interim: None
|
||||||
}
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub const fn serialized_len() -> usize {
|
||||||
|
32 + (2 * 32)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn input(&self) -> ClsagInput {
|
fn input(&self) -> ClsagInput {
|
||||||
@@ -142,23 +117,8 @@ impl ClsagMultisig {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn add_key_image_share(
|
|
||||||
image: &mut EdwardsPoint,
|
|
||||||
generator: EdwardsPoint,
|
|
||||||
offset: Scalar,
|
|
||||||
included: &[Participant],
|
|
||||||
participant: Participant,
|
|
||||||
share: EdwardsPoint,
|
|
||||||
) {
|
|
||||||
if image.is_identity() {
|
|
||||||
*image = generator * offset;
|
|
||||||
}
|
|
||||||
*image += share * lagrange::<dfg::Scalar>(participant, included).0;
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Algorithm<Ed25519> for ClsagMultisig {
|
impl Algorithm<Ed25519> for ClsagMultisig {
|
||||||
type Transcript = RecommendedTranscript;
|
type Transcript = RecommendedTranscript;
|
||||||
type Addendum = ClsagAddendum;
|
|
||||||
type Signature = (Clsag, EdwardsPoint);
|
type Signature = (Clsag, EdwardsPoint);
|
||||||
|
|
||||||
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
|
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
|
||||||
@@ -168,70 +128,35 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||||
&mut self,
|
&mut self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
keys: &ThresholdKeys<Ed25519>,
|
view: &FrostView<Ed25519>
|
||||||
) -> ClsagAddendum {
|
) -> Vec<u8> {
|
||||||
ClsagAddendum {
|
let mut serialized = Vec::with_capacity(Self::serialized_len());
|
||||||
key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
|
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
|
||||||
dleq: DLEqProof::prove(
|
serialized.extend(write_dleq(rng, self.H, view.secret_share().0));
|
||||||
rng,
|
serialized
|
||||||
// Doesn't take in a larger transcript object due to the usage of this
|
|
||||||
// Every prover would immediately write their own DLEq proof, when they can only do so in
|
|
||||||
// the proper order if they want to reach consensus
|
|
||||||
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
|
|
||||||
// try to merge later in some form, when it should instead just merge xH (as it does)
|
|
||||||
&mut dleq_transcript(),
|
|
||||||
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
|
|
||||||
keys.secret_share(),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
|
fn process_addendum<Re: Read>(
|
||||||
let mut bytes = [0; 32];
|
|
||||||
reader.read_exact(&mut bytes)?;
|
|
||||||
// dfg ensures the point is torsion free
|
|
||||||
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
|
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
|
|
||||||
// Ensure this is a canonical point
|
|
||||||
if xH.to_bytes() != bytes {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
|
|
||||||
}
|
|
||||||
|
|
||||||
fn process_addendum(
|
|
||||||
&mut self,
|
&mut self,
|
||||||
view: &ThresholdView<Ed25519>,
|
view: &FrostView<Ed25519>,
|
||||||
l: Participant,
|
l: u16,
|
||||||
addendum: ClsagAddendum,
|
serialized: &mut Re
|
||||||
) -> Result<(), FrostError> {
|
) -> Result<(), FrostError> {
|
||||||
if self.image.is_identity() {
|
if self.image.is_identity().into() {
|
||||||
self.transcript.domain_separate(b"CLSAG");
|
self.transcript.domain_separate(b"CLSAG");
|
||||||
self.input().transcript(&mut self.transcript);
|
self.input().transcript(&mut self.transcript);
|
||||||
self.transcript.append_message(b"mask", self.mask().to_bytes());
|
self.transcript.append_message(b"mask", &self.mask().to_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
self.transcript.append_message(b"participant", l.to_bytes());
|
self.transcript.append_message(b"participant", &l.to_be_bytes());
|
||||||
|
let image = read_dleq(
|
||||||
addendum
|
serialized,
|
||||||
.dleq
|
|
||||||
.verify(
|
|
||||||
&mut dleq_transcript(),
|
|
||||||
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
|
|
||||||
&[view.original_verification_share(l), addendum.key_image],
|
|
||||||
)
|
|
||||||
.map_err(|_| FrostError::InvalidPreprocess(l))?;
|
|
||||||
|
|
||||||
self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
|
|
||||||
add_key_image_share(
|
|
||||||
&mut self.image,
|
|
||||||
self.H,
|
self.H,
|
||||||
view.offset().0,
|
|
||||||
view.included(),
|
|
||||||
l,
|
l,
|
||||||
addendum.key_image.0,
|
view.verification_share(l)
|
||||||
);
|
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
|
||||||
|
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
|
||||||
|
self.image += image;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -242,17 +167,17 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||||||
|
|
||||||
fn sign_share(
|
fn sign_share(
|
||||||
&mut self,
|
&mut self,
|
||||||
view: &ThresholdView<Ed25519>,
|
view: &FrostView<Ed25519>,
|
||||||
nonce_sums: &[Vec<dfg::EdwardsPoint>],
|
nonce_sums: &[Vec<dfg::EdwardsPoint>],
|
||||||
nonces: Vec<Zeroizing<dfg::Scalar>>,
|
nonces: &[dfg::Scalar],
|
||||||
msg: &[u8],
|
msg: &[u8]
|
||||||
) -> dfg::Scalar {
|
) -> dfg::Scalar {
|
||||||
// Use the transcript to get a seeded random number generator
|
// Use the transcript to get a seeded random number generator
|
||||||
// The transcript contains private data, preventing passive adversaries from recreating this
|
// The transcript contains private data, preventing passive adversaries from recreating this
|
||||||
// process even if they have access to commitments (specifically, the ring index being signed
|
// process even if they have access to commitments (specifically, the ring index being signed
|
||||||
// for, along with the mask which should not only require knowing the shared keys yet also the
|
// for, along with the mask which should not only require knowing the shared keys yet also the
|
||||||
// input commitment masks)
|
// input commitment masks)
|
||||||
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
|
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
|
||||||
|
|
||||||
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
|
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
|
||||||
|
|
||||||
@@ -262,50 +187,49 @@ impl Algorithm<Ed25519> for ClsagMultisig {
|
|||||||
&self.image,
|
&self.image,
|
||||||
&self.input(),
|
&self.input(),
|
||||||
self.mask(),
|
self.mask(),
|
||||||
self.msg.as_ref().unwrap(),
|
&self.msg.as_ref().unwrap(),
|
||||||
nonce_sums[0][0].0,
|
nonce_sums[0][0].0,
|
||||||
nonce_sums[0][1].0,
|
nonce_sums[0][1].0
|
||||||
);
|
);
|
||||||
self.interim = Some(Interim { p, c, clsag, pseudo_out });
|
self.interim = Some(Interim { p, c, clsag, pseudo_out });
|
||||||
|
|
||||||
(-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
|
let share = dfg::Scalar(nonces[0].0 - (p * view.secret_share().0));
|
||||||
|
|
||||||
|
share
|
||||||
}
|
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
fn verify(
|
fn verify(
|
||||||
&self,
|
&self,
|
||||||
|
_: u16,
|
||||||
_: dfg::EdwardsPoint,
|
_: dfg::EdwardsPoint,
|
||||||
_: &[Vec<dfg::EdwardsPoint>],
|
_: &[Vec<dfg::EdwardsPoint>],
|
||||||
sum: dfg::Scalar,
|
sum: dfg::Scalar
|
||||||
) -> Option<Self::Signature> {
|
) -> Option<Self::Signature> {
|
||||||
let interim = self.interim.as_ref().unwrap();
|
let interim = self.interim.as_ref().unwrap();
|
||||||
let mut clsag = interim.clsag.clone();
|
let mut clsag = interim.clsag.clone();
|
||||||
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
|
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
|
||||||
if clsag
|
if clsag.verify(
|
||||||
.verify(
|
&self.input().decoys.ring,
|
||||||
&self.input().decoys.ring,
|
&self.image,
|
||||||
&self.image,
|
&interim.pseudo_out,
|
||||||
&interim.pseudo_out,
|
&self.msg.as_ref().unwrap()
|
||||||
self.msg.as_ref().unwrap(),
|
).is_ok() {
|
||||||
)
|
|
||||||
.is_ok()
|
|
||||||
{
|
|
||||||
return Some((clsag, interim.pseudo_out));
|
return Some((clsag, interim.pseudo_out));
|
||||||
}
|
}
|
||||||
None
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
fn verify_share(
|
fn verify_share(
|
||||||
&self,
|
&self,
|
||||||
verification_share: dfg::EdwardsPoint,
|
verification_share: dfg::EdwardsPoint,
|
||||||
nonces: &[Vec<dfg::EdwardsPoint>],
|
nonces: &[Vec<dfg::EdwardsPoint>],
|
||||||
share: dfg::Scalar,
|
share: dfg::Scalar,
|
||||||
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
|
) -> bool {
|
||||||
let interim = self.interim.as_ref().unwrap();
|
let interim = self.interim.as_ref().unwrap();
|
||||||
Ok(vec![
|
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
|
||||||
(share, dfg::EdwardsPoint::generator()),
|
nonces[0][0].0 - (interim.p * verification_share.0)
|
||||||
(dfg::Scalar(interim.p), verification_share),
|
);
|
||||||
(-dfg::Scalar::ONE, nonces[0][0]),
|
|
||||||
])
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,67 @@
|
|||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
use subtle::ConditionallySelectable;
|
||||||
|
|
||||||
pub use monero_generators::{hash_to_point as raw_hash_to_point};
|
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
|
||||||
|
|
||||||
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
|
use group::ff::{Field, PrimeField};
|
||||||
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
|
use dalek_ff_group::field::FieldElement;
|
||||||
raw_hash_to_point(key.compress().to_bytes())
|
|
||||||
|
use crate::hash;
|
||||||
|
|
||||||
|
pub fn hash_to_point(point: EdwardsPoint) -> EdwardsPoint {
|
||||||
|
let mut bytes = point.compress().to_bytes();
|
||||||
|
unsafe {
|
||||||
|
#[link(name = "wrapper")]
|
||||||
|
extern "C" {
|
||||||
|
fn c_hash_to_point(point: *const u8);
|
||||||
|
}
|
||||||
|
|
||||||
|
c_hash_to_point(bytes.as_mut_ptr());
|
||||||
|
}
|
||||||
|
CompressedEdwardsY::from_slice(&bytes).decompress().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
// This works without issue. It's also 140 times slower (@ 3.5ms), and despite checking it passes
|
||||||
|
// for all branches, there still could be *some* discrepancy somewhere. There's no reason to use it
|
||||||
|
// unless we're trying to purge that section of the C static library, which we aren't right now
|
||||||
|
#[allow(dead_code)]
|
||||||
|
pub(crate) fn rust_hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let A = FieldElement::from(486662u64);
|
||||||
|
|
||||||
|
let v = FieldElement::from_square(hash(&key.compress().to_bytes())).double();
|
||||||
|
let w = v + FieldElement::one();
|
||||||
|
let x = w.square() + (-A.square() * v);
|
||||||
|
|
||||||
|
// This isn't the complete X, yet its initial value
|
||||||
|
// We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
|
||||||
|
// While inefficient, it solves API boundaries and reduces the amount of work done here
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let X = {
|
||||||
|
let u = w;
|
||||||
|
let v = x;
|
||||||
|
let v3 = v * v * v;
|
||||||
|
let uv3 = u * v3;
|
||||||
|
let v7 = v3 * v3 * v;
|
||||||
|
let uv7 = u * v7;
|
||||||
|
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
|
||||||
|
};
|
||||||
|
let x = X.square() * x;
|
||||||
|
|
||||||
|
let y = w - x;
|
||||||
|
let non_zero_0 = !y.is_zero();
|
||||||
|
let y_if_non_zero_0 = w + x;
|
||||||
|
let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());
|
||||||
|
|
||||||
|
let mut z = -A;
|
||||||
|
z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let Z = z + w;
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut Y = z - w;
|
||||||
|
|
||||||
|
Y = Y * Z.invert().unwrap();
|
||||||
|
let mut bytes = Y.to_repr();
|
||||||
|
bytes[31] |= sign.unwrap_u8() << 7;
|
||||||
|
|
||||||
|
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,72 +0,0 @@
|
|||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use curve25519_dalek::scalar::Scalar;
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
use curve25519_dalek::edwards::EdwardsPoint;
|
|
||||||
|
|
||||||
use crate::serialize::*;
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
use crate::{hash_to_scalar, ringct::hash_to_point};
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct Mlsag {
|
|
||||||
pub ss: Vec<[Scalar; 2]>,
|
|
||||||
pub cc: Scalar,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Mlsag {
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
for ss in &self.ss {
|
|
||||||
write_raw_vec(write_scalar, ss, w)?;
|
|
||||||
}
|
|
||||||
write_scalar(&self.cc, w)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(mixins: usize, r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(Self {
|
|
||||||
ss: (0 .. mixins).map(|_| read_array(read_scalar, r)).collect::<Result<_, _>>()?,
|
|
||||||
cc: read_scalar(r)?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
#[must_use]
|
|
||||||
pub fn verify(
|
|
||||||
&self,
|
|
||||||
msg: &[u8; 32],
|
|
||||||
ring: &[[EdwardsPoint; 2]],
|
|
||||||
key_image: &EdwardsPoint,
|
|
||||||
) -> bool {
|
|
||||||
if ring.is_empty() {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut buf = Vec::with_capacity(6 * 32);
|
|
||||||
let mut ci = self.cc;
|
|
||||||
for (i, ring_member) in ring.iter().enumerate() {
|
|
||||||
buf.extend_from_slice(msg);
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let L =
|
|
||||||
|r| EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, &ring_member[r], &self.ss[i][r]);
|
|
||||||
|
|
||||||
buf.extend_from_slice(ring_member[0].compress().as_bytes());
|
|
||||||
buf.extend_from_slice(L(0).compress().as_bytes());
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = (self.ss[i][0] * hash_to_point(ring_member[0])) + (ci * key_image);
|
|
||||||
buf.extend_from_slice(R.compress().as_bytes());
|
|
||||||
|
|
||||||
buf.extend_from_slice(ring_member[1].compress().as_bytes());
|
|
||||||
buf.extend_from_slice(L(1).compress().as_bytes());
|
|
||||||
|
|
||||||
ci = hash_to_scalar(&buf);
|
|
||||||
buf.clear();
|
|
||||||
}
|
|
||||||
|
|
||||||
ci == self.cc
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,121 +1,25 @@
|
|||||||
use core::ops::Deref;
|
|
||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use zeroize::{Zeroize, Zeroizing};
|
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
pub(crate) mod hash_to_point;
|
pub(crate) mod hash_to_point;
|
||||||
pub use hash_to_point::{raw_hash_to_point, hash_to_point};
|
pub use hash_to_point::hash_to_point;
|
||||||
|
|
||||||
/// MLSAG struct, along with verifying functionality.
|
|
||||||
pub mod mlsag;
|
|
||||||
/// CLSAG struct, along with signing and verifying functionality.
|
|
||||||
pub mod clsag;
|
pub mod clsag;
|
||||||
/// BorromeanRange struct, along with verifying functionality.
|
|
||||||
pub mod borromean;
|
|
||||||
/// Bulletproofs(+) structs, along with proving and verifying functionality.
|
|
||||||
pub mod bulletproofs;
|
pub mod bulletproofs;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Protocol,
|
|
||||||
serialize::*,
|
serialize::*,
|
||||||
ringct::{mlsag::Mlsag, clsag::Clsag, borromean::BorromeanRange, bulletproofs::Bulletproofs},
|
ringct::{clsag::Clsag, bulletproofs::Bulletproofs}
|
||||||
};
|
};
|
||||||
|
|
||||||
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
|
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
|
||||||
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
|
secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE)
|
||||||
hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub enum EncryptedAmount {
|
|
||||||
Original { mask: [u8; 32], amount: [u8; 32] },
|
|
||||||
Compact { amount: [u8; 8] },
|
|
||||||
}
|
|
||||||
|
|
||||||
impl EncryptedAmount {
|
|
||||||
pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<Self> {
|
|
||||||
Ok(if compact {
|
|
||||||
Self::Compact { amount: read_bytes(r)? }
|
|
||||||
} else {
|
|
||||||
Self::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
|
||||||
match self {
|
|
||||||
Self::Original { mask, amount } => {
|
|
||||||
w.write_all(mask)?;
|
|
||||||
w.write_all(amount)
|
|
||||||
}
|
|
||||||
Self::Compact { amount } => w.write_all(amount),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub enum RctType {
|
|
||||||
/// No RCT proofs.
|
|
||||||
Null,
|
|
||||||
/// One MLSAG for a single input and a Borromean range proof (RCTTypeFull).
|
|
||||||
MlsagAggregate,
|
|
||||||
// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
|
|
||||||
MlsagIndividual,
|
|
||||||
// One MLSAG for each input and a Bulletproof (RCTTypeBulletproof).
|
|
||||||
Bulletproofs,
|
|
||||||
/// One MLSAG for each input and a Bulletproof, yet starting to use EncryptedAmount::Compact
|
|
||||||
/// (RCTTypeBulletproof2).
|
|
||||||
BulletproofsCompactAmount,
|
|
||||||
/// One CLSAG for each input and a Bulletproof (RCTTypeCLSAG).
|
|
||||||
Clsag,
|
|
||||||
/// One CLSAG for each input and a Bulletproof+ (RCTTypeBulletproofPlus).
|
|
||||||
BulletproofsPlus,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl RctType {
|
|
||||||
pub fn to_byte(self) -> u8 {
|
|
||||||
match self {
|
|
||||||
Self::Null => 0,
|
|
||||||
Self::MlsagAggregate => 1,
|
|
||||||
Self::MlsagIndividual => 2,
|
|
||||||
Self::Bulletproofs => 3,
|
|
||||||
Self::BulletproofsCompactAmount => 4,
|
|
||||||
Self::Clsag => 5,
|
|
||||||
Self::BulletproofsPlus => 6,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_byte(byte: u8) -> Option<Self> {
|
|
||||||
Some(match byte {
|
|
||||||
0 => Self::Null,
|
|
||||||
1 => Self::MlsagAggregate,
|
|
||||||
2 => Self::MlsagIndividual,
|
|
||||||
3 => Self::Bulletproofs,
|
|
||||||
4 => Self::BulletproofsCompactAmount,
|
|
||||||
5 => Self::Clsag,
|
|
||||||
6 => Self::BulletproofsPlus,
|
|
||||||
_ => None?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn compact_encrypted_amounts(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Null | Self::MlsagAggregate | Self::MlsagIndividual | Self::Bulletproofs => false,
|
|
||||||
Self::BulletproofsCompactAmount | Self::Clsag | Self::BulletproofsPlus => true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct RctBase {
|
pub struct RctBase {
|
||||||
pub fee: u64,
|
pub fee: u64,
|
||||||
pub pseudo_outs: Vec<EdwardsPoint>,
|
pub ecdh_info: Vec<[u8; 8]>,
|
||||||
pub encrypted_amounts: Vec<EncryptedAmount>,
|
pub commitments: Vec<EdwardsPoint>
|
||||||
pub commitments: Vec<EdwardsPoint>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RctBase {
|
impl RctBase {
|
||||||
@@ -123,261 +27,119 @@ impl RctBase {
|
|||||||
1 + 8 + (outputs * (8 + 32))
|
1 + 8 + (outputs * (8 + 32))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W, rct_type: u8) -> std::io::Result<()> {
|
||||||
w.write_all(&[rct_type.to_byte()])?;
|
w.write_all(&[rct_type])?;
|
||||||
match rct_type {
|
match rct_type {
|
||||||
RctType::Null => Ok(()),
|
0 => Ok(()),
|
||||||
RctType::MlsagAggregate |
|
5 => {
|
||||||
RctType::MlsagIndividual |
|
|
||||||
RctType::Bulletproofs |
|
|
||||||
RctType::BulletproofsCompactAmount |
|
|
||||||
RctType::Clsag |
|
|
||||||
RctType::BulletproofsPlus => {
|
|
||||||
write_varint(&self.fee, w)?;
|
write_varint(&self.fee, w)?;
|
||||||
if rct_type == RctType::MlsagIndividual {
|
for ecdh in &self.ecdh_info {
|
||||||
write_raw_vec(write_point, &self.pseudo_outs, w)?;
|
w.write_all(ecdh)?;
|
||||||
}
|
|
||||||
for encrypted_amount in &self.encrypted_amounts {
|
|
||||||
encrypted_amount.write(w)?;
|
|
||||||
}
|
}
|
||||||
write_raw_vec(write_point, &self.commitments, w)
|
write_raw_vec(write_point, &self.commitments, w)
|
||||||
}
|
},
|
||||||
|
_ => panic!("Serializing unknown RctType's Base")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(Self, RctType)> {
|
pub fn deserialize<R: std::io::Read>(outputs: usize, r: &mut R) -> std::io::Result<(RctBase, u8)> {
|
||||||
let rct_type = RctType::from_byte(read_byte(r)?)
|
let mut rct_type = [0];
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RCT type"))?;
|
r.read_exact(&mut rct_type)?;
|
||||||
|
|
||||||
match rct_type {
|
|
||||||
RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
|
|
||||||
RctType::Bulletproofs |
|
|
||||||
RctType::BulletproofsCompactAmount |
|
|
||||||
RctType::Clsag |
|
|
||||||
RctType::BulletproofsPlus => {
|
|
||||||
if outputs == 0 {
|
|
||||||
// Because the Bulletproofs(+) layout must be canonical, there must be 1 Bulletproof if
|
|
||||||
// Bulletproofs are in use
|
|
||||||
// If there are Bulletproofs, there must be a matching amount of outputs, implicitly
|
|
||||||
// banning 0 outputs
|
|
||||||
// Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "RCT with Bulletproofs(+) had 0 outputs"))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
if rct_type == RctType::Null {
|
if rct_type[0] == 0 {
|
||||||
Self { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
|
RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] }
|
||||||
} else {
|
} else {
|
||||||
Self {
|
RctBase {
|
||||||
fee: read_varint(r)?,
|
fee: read_varint(r)?,
|
||||||
pseudo_outs: if rct_type == RctType::MlsagIndividual {
|
ecdh_info: (0 .. outputs).map(
|
||||||
read_raw_vec(read_point, inputs, r)?
|
|_| { let mut ecdh = [0; 8]; r.read_exact(&mut ecdh).map(|_| ecdh) }
|
||||||
} else {
|
).collect::<Result<_, _>>()?,
|
||||||
vec![]
|
commitments: read_raw_vec(read_point, outputs, r)?
|
||||||
},
|
|
||||||
encrypted_amounts: (0 .. outputs)
|
|
||||||
.map(|_| EncryptedAmount::read(rct_type.compact_encrypted_amounts(), r))
|
|
||||||
.collect::<Result<_, _>>()?,
|
|
||||||
commitments: read_raw_vec(read_point, outputs, r)?,
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
rct_type,
|
rct_type[0]
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub enum RctPrunable {
|
pub enum RctPrunable {
|
||||||
Null,
|
Null,
|
||||||
MlsagBorromean {
|
|
||||||
borromean: Vec<BorromeanRange>,
|
|
||||||
mlsags: Vec<Mlsag>,
|
|
||||||
},
|
|
||||||
MlsagBulletproofs {
|
|
||||||
bulletproofs: Bulletproofs,
|
|
||||||
mlsags: Vec<Mlsag>,
|
|
||||||
pseudo_outs: Vec<EdwardsPoint>,
|
|
||||||
},
|
|
||||||
Clsag {
|
Clsag {
|
||||||
bulletproofs: Bulletproofs,
|
bulletproofs: Vec<Bulletproofs>,
|
||||||
clsags: Vec<Clsag>,
|
clsags: Vec<Clsag>,
|
||||||
pseudo_outs: Vec<EdwardsPoint>,
|
pseudo_outs: Vec<EdwardsPoint>
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RctPrunable {
|
impl RctPrunable {
|
||||||
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
pub fn rct_type(&self) -> u8 {
|
||||||
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
|
match self {
|
||||||
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
|
RctPrunable::Null => 0,
|
||||||
|
RctPrunable::Clsag { .. } => 5
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
|
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
|
||||||
|
1 + Bulletproofs::fee_weight(outputs) + (inputs * (Clsag::fee_weight() + 32))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Self::Null => Ok(()),
|
RctPrunable::Null => Ok(()),
|
||||||
Self::MlsagBorromean { borromean, mlsags } => {
|
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
||||||
write_raw_vec(BorromeanRange::write, borromean, w)?;
|
write_vec(Bulletproofs::serialize, &bulletproofs, w)?;
|
||||||
write_raw_vec(Mlsag::write, mlsags, w)
|
write_raw_vec(Clsag::serialize, &clsags, w)?;
|
||||||
}
|
write_raw_vec(write_point, &pseudo_outs, w)
|
||||||
Self::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
|
|
||||||
if rct_type == RctType::Bulletproofs {
|
|
||||||
w.write_all(&1u32.to_le_bytes())?;
|
|
||||||
} else {
|
|
||||||
w.write_all(&[1])?;
|
|
||||||
}
|
|
||||||
bulletproofs.write(w)?;
|
|
||||||
|
|
||||||
write_raw_vec(Mlsag::write, mlsags, w)?;
|
|
||||||
write_raw_vec(write_point, pseudo_outs, w)
|
|
||||||
}
|
|
||||||
Self::Clsag { bulletproofs, clsags, pseudo_outs } => {
|
|
||||||
w.write_all(&[1])?;
|
|
||||||
bulletproofs.write(w)?;
|
|
||||||
|
|
||||||
write_raw_vec(Clsag::write, clsags, w)?;
|
|
||||||
write_raw_vec(write_point, pseudo_outs, w)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(
|
||||||
let mut serialized = vec![];
|
rct_type: u8,
|
||||||
self.write(&mut serialized, rct_type).unwrap();
|
|
||||||
serialized
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(
|
|
||||||
rct_type: RctType,
|
|
||||||
decoys: &[usize],
|
decoys: &[usize],
|
||||||
outputs: usize,
|
r: &mut R
|
||||||
r: &mut R,
|
) -> std::io::Result<RctPrunable> {
|
||||||
) -> io::Result<Self> {
|
Ok(
|
||||||
Ok(match rct_type {
|
match rct_type {
|
||||||
RctType::Null => Self::Null,
|
0 => RctPrunable::Null,
|
||||||
RctType::MlsagAggregate | RctType::MlsagIndividual => Self::MlsagBorromean {
|
5 => RctPrunable::Clsag {
|
||||||
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
|
// TODO: Can the amount of outputs be calculated from the BPs for any validly formed TX?
|
||||||
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
|
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
|
||||||
},
|
clsags: (0 .. decoys.len()).map(|o| Clsag::deserialize(decoys[o], r)).collect::<Result<_, _>>()?,
|
||||||
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => Self::MlsagBulletproofs {
|
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?
|
||||||
bulletproofs: {
|
|
||||||
if (if rct_type == RctType::Bulletproofs {
|
|
||||||
u64::from(read_u32(r)?)
|
|
||||||
} else {
|
|
||||||
read_varint(r)?
|
|
||||||
}) != 1
|
|
||||||
{
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
|
|
||||||
}
|
|
||||||
Bulletproofs::read(r)?
|
|
||||||
},
|
},
|
||||||
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
|
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?
|
||||||
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
|
}
|
||||||
},
|
)
|
||||||
RctType::Clsag | RctType::BulletproofsPlus => Self::Clsag {
|
|
||||||
bulletproofs: {
|
|
||||||
if read_varint(r)? != 1 {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
|
|
||||||
}
|
|
||||||
(if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
|
|
||||||
r,
|
|
||||||
)?
|
|
||||||
},
|
|
||||||
clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
|
|
||||||
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Self::Null => panic!("Serializing RctPrunable::Null for a signature"),
|
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
|
||||||
Self::MlsagBorromean { borromean, .. } => borromean.iter().try_for_each(|rs| rs.write(w)),
|
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
|
||||||
Self::MlsagBulletproofs { bulletproofs, .. } => bulletproofs.signature_write(w),
|
|
||||||
Self::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub struct RctSignatures {
|
pub struct RctSignatures {
|
||||||
pub base: RctBase,
|
pub base: RctBase,
|
||||||
pub prunable: RctPrunable,
|
pub prunable: RctPrunable
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RctSignatures {
|
impl RctSignatures {
|
||||||
/// RctType for a given RctSignatures struct.
|
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
|
||||||
pub fn rct_type(&self) -> RctType {
|
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(inputs, outputs)
|
||||||
match &self.prunable {
|
|
||||||
RctPrunable::Null => RctType::Null,
|
|
||||||
RctPrunable::MlsagBorromean { .. } => {
|
|
||||||
/*
|
|
||||||
This type of RctPrunable may have no outputs, yet pseudo_outs are per input
|
|
||||||
This will only be a valid RctSignatures if it's for a TX with inputs
|
|
||||||
That makes this valid for any valid RctSignatures
|
|
||||||
|
|
||||||
While it will be invalid for any invalid RctSignatures, potentially letting an invalid
|
|
||||||
MlsagAggregate be interpreted as a valid MlsagIndividual (or vice versa), they have
|
|
||||||
incompatible deserializations
|
|
||||||
|
|
||||||
This means it's impossible to receive a MlsagAggregate over the wire and interpret it
|
|
||||||
as a MlsagIndividual (or vice versa)
|
|
||||||
|
|
||||||
That only makes manual manipulation unsafe, which will always be true since these fields
|
|
||||||
are all pub
|
|
||||||
|
|
||||||
TODO: Consider making them private with read-only accessors?
|
|
||||||
*/
|
|
||||||
if self.base.pseudo_outs.is_empty() {
|
|
||||||
RctType::MlsagAggregate
|
|
||||||
} else {
|
|
||||||
RctType::MlsagIndividual
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// RctBase ensures there's at least one output, making the following
|
|
||||||
// inferences guaranteed/expects impossible on any valid RctSignatures
|
|
||||||
RctPrunable::MlsagBulletproofs { .. } => {
|
|
||||||
if matches!(
|
|
||||||
self
|
|
||||||
.base
|
|
||||||
.encrypted_amounts
|
|
||||||
.get(0)
|
|
||||||
.expect("MLSAG with Bulletproofs didn't have any outputs"),
|
|
||||||
EncryptedAmount::Original { .. }
|
|
||||||
) {
|
|
||||||
RctType::Bulletproofs
|
|
||||||
} else {
|
|
||||||
RctType::BulletproofsCompactAmount
|
|
||||||
}
|
|
||||||
}
|
|
||||||
RctPrunable::Clsag { bulletproofs, .. } => {
|
|
||||||
if matches!(bulletproofs, Bulletproofs::Original { .. }) {
|
|
||||||
RctType::Clsag
|
|
||||||
} else {
|
|
||||||
RctType::BulletproofsPlus
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
|
self.base.serialize(w, self.prunable.rct_type())?;
|
||||||
|
self.prunable.serialize(w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn deserialize<R: std::io::Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> std::io::Result<RctSignatures> {
|
||||||
let rct_type = self.rct_type();
|
let base = RctBase::deserialize(outputs, r)?;
|
||||||
self.base.write(w, rct_type)?;
|
Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
|
||||||
self.prunable.write(w, rct_type)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
|
||||||
let mut serialized = vec![];
|
|
||||||
self.write(&mut serialized).unwrap();
|
|
||||||
serialized
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<Self> {
|
|
||||||
let base = RctBase::read(decoys.len(), outputs, r)?;
|
|
||||||
Ok(Self { base: base.0, prunable: RctPrunable::read(base.1, &decoys, outputs, r)? })
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
353
coins/monero/src/rpc.rs
Normal file
353
coins/monero/src/rpc.rs
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
use std::fmt::Debug;
|
||||||
|
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
||||||
|
|
||||||
|
use serde::{Serialize, Deserialize, de::DeserializeOwned};
|
||||||
|
use serde_json::json;
|
||||||
|
|
||||||
|
use reqwest;
|
||||||
|
|
||||||
|
use crate::{transaction::{Input, Timelock, Transaction}, block::Block, wallet::Fee};
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct EmptyResponse {}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct JsonRpcResponse<T> {
|
||||||
|
result: T
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Error, Debug)]
|
||||||
|
pub enum RpcError {
|
||||||
|
#[error("internal error ({0})")]
|
||||||
|
InternalError(String),
|
||||||
|
#[error("connection error")]
|
||||||
|
ConnectionError,
|
||||||
|
#[error("transactions not found")]
|
||||||
|
TransactionsNotFound(Vec<[u8; 32]>),
|
||||||
|
#[error("invalid point ({0})")]
|
||||||
|
InvalidPoint(String),
|
||||||
|
#[error("pruned transaction")]
|
||||||
|
PrunedTransaction,
|
||||||
|
#[error("invalid transaction ({0:?})")]
|
||||||
|
InvalidTransaction([u8; 32])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
|
||||||
|
hex::decode(value).map_err(|_| RpcError::InternalError("Monero returned invalid hex".to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
|
||||||
|
CompressedEdwardsY(
|
||||||
|
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?
|
||||||
|
).decompress().ok_or(RpcError::InvalidPoint(point.to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Rpc(String);
|
||||||
|
|
||||||
|
impl Rpc {
|
||||||
|
pub fn new(daemon: String) -> Rpc {
|
||||||
|
Rpc(daemon)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn rpc_call<
|
||||||
|
Params: Serialize + Debug,
|
||||||
|
Response: DeserializeOwned + Debug
|
||||||
|
>(&self, method: &str, params: Option<Params>) -> Result<Response, RpcError> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let mut builder = client.post(&(self.0.clone() + "/" + method));
|
||||||
|
if let Some(params) = params.as_ref() {
|
||||||
|
builder = builder.json(params);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.call_tail(method, builder).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn bin_call<
|
||||||
|
Response: DeserializeOwned + Debug
|
||||||
|
>(&self, method: &str, params: Vec<u8>) -> Result<Response, RpcError> {
|
||||||
|
let client = reqwest::Client::new();
|
||||||
|
let builder = client.post(&(self.0.clone() + "/" + method)).body(params);
|
||||||
|
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn call_tail<
|
||||||
|
Response: DeserializeOwned + Debug
|
||||||
|
>(&self, method: &str, builder: reqwest::RequestBuilder) -> Result<Response, RpcError> {
|
||||||
|
let res = builder
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|_| RpcError::ConnectionError)?;
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
if !method.ends_with(".bin") {
|
||||||
|
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
|
||||||
|
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
|
||||||
|
} else {
|
||||||
|
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
|
||||||
|
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
|
||||||
|
}
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_height(&self) -> Result<usize, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct HeightResponse {
|
||||||
|
height: usize
|
||||||
|
}
|
||||||
|
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_transactions_core(
|
||||||
|
&self,
|
||||||
|
hashes: &[[u8; 32]]
|
||||||
|
) -> Result<(Vec<Result<Transaction, RpcError>>, Vec<[u8; 32]>), RpcError> {
|
||||||
|
if hashes.len() == 0 {
|
||||||
|
return Ok((vec![], vec![]));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionResponse {
|
||||||
|
tx_hash: String,
|
||||||
|
as_hex: String,
|
||||||
|
pruned_as_hex: String
|
||||||
|
}
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct TransactionsResponse {
|
||||||
|
#[serde(default)]
|
||||||
|
missed_tx: Vec<String>,
|
||||||
|
txs: Vec<TransactionResponse>
|
||||||
|
}
|
||||||
|
|
||||||
|
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
|
||||||
|
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
Ok((
|
||||||
|
txs.txs.iter().map(|res| {
|
||||||
|
let tx = Transaction::deserialize(
|
||||||
|
&mut std::io::Cursor::new(
|
||||||
|
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap()
|
||||||
|
)
|
||||||
|
).map_err(|_| RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap()))?;
|
||||||
|
|
||||||
|
// https://github.com/monero-project/monero/issues/8311
|
||||||
|
if res.as_hex.len() == 0 {
|
||||||
|
match tx.prefix.inputs.get(0) {
|
||||||
|
Some(Input::Gen { .. }) => (),
|
||||||
|
_ => Err(RpcError::PrunedTransaction)?
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(tx)
|
||||||
|
}).collect(),
|
||||||
|
|
||||||
|
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect()
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
let (txs, missed) = self.get_transactions_core(hashes).await?;
|
||||||
|
if missed.len() != 0 {
|
||||||
|
Err(RpcError::TransactionsNotFound(missed))?;
|
||||||
|
}
|
||||||
|
// This will clone several KB and is accordingly inefficient
|
||||||
|
// TODO: Optimize
|
||||||
|
txs.iter().cloned().collect::<Result<_, _>>()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_transactions_possible(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
let (txs, _) = self.get_transactions_core(hashes).await?;
|
||||||
|
Ok(txs.iter().cloned().filter_map(|tx| tx.ok()).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct BlockResponse {
|
||||||
|
blob: String
|
||||||
|
}
|
||||||
|
|
||||||
|
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
|
||||||
|
"method": "get_block",
|
||||||
|
"params": {
|
||||||
|
"height": height
|
||||||
|
}
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
Block::deserialize(
|
||||||
|
&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?)
|
||||||
|
).expect("Monero returned a block we couldn't deserialize")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn get_block_transactions_core(
|
||||||
|
&self,
|
||||||
|
height: usize,
|
||||||
|
possible: bool
|
||||||
|
) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
let block = self.get_block(height).await?;
|
||||||
|
let mut res = vec![block.miner_tx];
|
||||||
|
res.extend(
|
||||||
|
if possible {
|
||||||
|
self.get_transactions_possible(&block.txs).await?
|
||||||
|
} else {
|
||||||
|
self.get_transactions(&block.txs).await?
|
||||||
|
}
|
||||||
|
);
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block_transactions(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
self.get_block_transactions_core(height, false).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_block_transactions_possible(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
|
||||||
|
self.get_block_transactions_core(height, true).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
|
||||||
|
#[derive(Serialize, Debug)]
|
||||||
|
struct Request {
|
||||||
|
txid: [u8; 32]
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct OIndexes {
|
||||||
|
o_indexes: Vec<u64>,
|
||||||
|
status: String,
|
||||||
|
untrusted: bool,
|
||||||
|
credits: usize,
|
||||||
|
top_hash: String
|
||||||
|
}
|
||||||
|
|
||||||
|
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
|
||||||
|
&Request {
|
||||||
|
txid: hash
|
||||||
|
}).unwrap()
|
||||||
|
).await?;
|
||||||
|
|
||||||
|
Ok(indexes.o_indexes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// from and to are inclusive
|
||||||
|
pub async fn get_output_distribution(&self, from: usize, to: usize) -> Result<Vec<u64>, RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct Distribution {
|
||||||
|
distribution: Vec<u64>
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Distributions {
|
||||||
|
distributions: Vec<Distribution>
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut distributions: JsonRpcResponse<Distributions> = self.rpc_call("json_rpc", Some(json!({
|
||||||
|
"method": "get_output_distribution",
|
||||||
|
"params": {
|
||||||
|
"binary": false,
|
||||||
|
"amounts": [0],
|
||||||
|
"cumulative": true,
|
||||||
|
"from_height": from,
|
||||||
|
"to_height": to
|
||||||
|
}
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
Ok(distributions.result.distributions.swap_remove(0).distribution)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_outputs(
|
||||||
|
&self,
|
||||||
|
indexes: &[u64],
|
||||||
|
height: usize
|
||||||
|
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
pub struct Out {
|
||||||
|
key: String,
|
||||||
|
mask: String,
|
||||||
|
txid: String
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct Outs {
|
||||||
|
outs: Vec<Out>
|
||||||
|
}
|
||||||
|
|
||||||
|
let outs: Outs = self.rpc_call("get_outs", Some(json!({
|
||||||
|
"get_txid": true,
|
||||||
|
"outputs": indexes.iter().map(|o| json!({
|
||||||
|
"amount": 0,
|
||||||
|
"index": o
|
||||||
|
})).collect::<Vec<_>>()
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
let txs = self.get_transactions(
|
||||||
|
&outs.outs.iter().map(|out|
|
||||||
|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
|
||||||
|
.try_into().expect("Monero returned an invalid sized hash")
|
||||||
|
).collect::<Vec<_>>()
|
||||||
|
).await?;
|
||||||
|
// TODO: Support time based lock times. These shouldn't be needed, and it may be painful to
|
||||||
|
// get the median time for the given height, yet we do need to in order to be complete
|
||||||
|
outs.outs.iter().enumerate().map(
|
||||||
|
|(i, out)| Ok(
|
||||||
|
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
|
||||||
|
match txs[i].prefix.timelock {
|
||||||
|
Timelock::Block(t_height) => (t_height <= height),
|
||||||
|
_ => false
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct FeeResponse {
|
||||||
|
fee: u64,
|
||||||
|
quantization_mask: u64
|
||||||
|
}
|
||||||
|
|
||||||
|
let res: JsonRpcResponse<FeeResponse> = self.rpc_call("json_rpc", Some(json!({
|
||||||
|
"method": "get_fee_estimate"
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
Ok(Fee { per_weight: res.result.fee, mask: res.result.quantization_mask })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
|
||||||
|
#[allow(dead_code)]
|
||||||
|
#[derive(Deserialize, Debug)]
|
||||||
|
struct SendRawResponse {
|
||||||
|
status: String,
|
||||||
|
double_spend: bool,
|
||||||
|
fee_too_low: bool,
|
||||||
|
invalid_input: bool,
|
||||||
|
invalid_output: bool,
|
||||||
|
low_mixin: bool,
|
||||||
|
not_relayed: bool,
|
||||||
|
overspend: bool,
|
||||||
|
too_big: bool,
|
||||||
|
too_few_outputs: bool,
|
||||||
|
reason: String
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut buf = Vec::with_capacity(2048);
|
||||||
|
tx.serialize(&mut buf).unwrap();
|
||||||
|
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
|
||||||
|
"tx_as_hex": hex::encode(&buf)
|
||||||
|
}))).await?;
|
||||||
|
|
||||||
|
if res.status != "OK" {
|
||||||
|
Err(RpcError::InvalidTransaction(tx.hash()))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
use async_trait::async_trait;
|
|
||||||
|
|
||||||
use digest_auth::AuthContext;
|
|
||||||
use reqwest::Client;
|
|
||||||
|
|
||||||
use crate::rpc::{RpcError, RpcConnection, Rpc};
|
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct HttpRpc {
|
|
||||||
client: Client,
|
|
||||||
userpass: Option<(String, String)>,
|
|
||||||
url: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HttpRpc {
|
|
||||||
/// Create a new HTTP(S) RPC connection.
|
|
||||||
///
|
|
||||||
/// A daemon requiring authentication can be used via including the username and password in the
|
|
||||||
/// URL.
|
|
||||||
pub fn new(mut url: String) -> Result<Rpc<Self>, RpcError> {
|
|
||||||
// Parse out the username and password
|
|
||||||
let userpass = if url.contains('@') {
|
|
||||||
let url_clone = url;
|
|
||||||
let split_url = url_clone.split('@').collect::<Vec<_>>();
|
|
||||||
if split_url.len() != 2 {
|
|
||||||
Err(RpcError::InvalidNode)?;
|
|
||||||
}
|
|
||||||
let mut userpass = split_url[0];
|
|
||||||
url = split_url[1].to_string();
|
|
||||||
|
|
||||||
// If there was additionally a protocol string, restore that to the daemon URL
|
|
||||||
if userpass.contains("://") {
|
|
||||||
let split_userpass = userpass.split("://").collect::<Vec<_>>();
|
|
||||||
if split_userpass.len() != 2 {
|
|
||||||
Err(RpcError::InvalidNode)?;
|
|
||||||
}
|
|
||||||
url = split_userpass[0].to_string() + "://" + &url;
|
|
||||||
userpass = split_userpass[1];
|
|
||||||
}
|
|
||||||
|
|
||||||
let split_userpass = userpass.split(':').collect::<Vec<_>>();
|
|
||||||
if split_userpass.len() != 2 {
|
|
||||||
Err(RpcError::InvalidNode)?;
|
|
||||||
}
|
|
||||||
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Rpc(Self { client: Client::new(), userpass, url }))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
impl RpcConnection for HttpRpc {
|
|
||||||
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
|
||||||
let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
|
|
||||||
|
|
||||||
if let Some((user, pass)) = &self.userpass {
|
|
||||||
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
|
|
||||||
// Only provide authentication if this daemon actually expects it
|
|
||||||
if let Some(header) = req.headers().get("www-authenticate") {
|
|
||||||
builder = builder.header(
|
|
||||||
"Authorization",
|
|
||||||
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
|
|
||||||
.map_err(|_| RpcError::InvalidNode)?
|
|
||||||
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
|
|
||||||
user,
|
|
||||||
pass,
|
|
||||||
"/".to_string() + route,
|
|
||||||
None,
|
|
||||||
))
|
|
||||||
.map_err(|_| RpcError::InvalidNode)?
|
|
||||||
.to_header_string(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(
|
|
||||||
builder
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.map_err(|_| RpcError::ConnectionError)?
|
|
||||||
.bytes()
|
|
||||||
.await
|
|
||||||
.map_err(|_| RpcError::ConnectionError)?
|
|
||||||
.slice(..)
|
|
||||||
.to_vec(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,617 +0,0 @@
|
|||||||
use core::fmt::Debug;
|
|
||||||
#[cfg(not(feature = "std"))]
|
|
||||||
use alloc::boxed::Box;
|
|
||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io,
|
|
||||||
string::{String, ToString},
|
|
||||||
};
|
|
||||||
|
|
||||||
use async_trait::async_trait;
|
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
|
||||||
|
|
||||||
use serde::{Serialize, Deserialize, de::DeserializeOwned};
|
|
||||||
use serde_json::{Value, json};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Protocol,
|
|
||||||
serialize::*,
|
|
||||||
transaction::{Input, Timelock, Transaction},
|
|
||||||
block::Block,
|
|
||||||
wallet::Fee,
|
|
||||||
};
|
|
||||||
|
|
||||||
#[cfg(feature = "http_rpc")]
|
|
||||||
mod http;
|
|
||||||
#[cfg(feature = "http_rpc")]
|
|
||||||
pub use http::*;
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
pub struct EmptyResponse;
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
pub struct JsonRpcResponse<T> {
|
|
||||||
result: T,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct TransactionResponse {
|
|
||||||
tx_hash: String,
|
|
||||||
as_hex: String,
|
|
||||||
pruned_as_hex: String,
|
|
||||||
}
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct TransactionsResponse {
|
|
||||||
#[serde(default)]
|
|
||||||
missed_tx: Vec<String>,
|
|
||||||
txs: Vec<TransactionResponse>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
|
||||||
pub enum RpcError {
|
|
||||||
#[cfg_attr(feature = "std", error("internal error ({0})"))]
|
|
||||||
InternalError(&'static str),
|
|
||||||
#[cfg_attr(feature = "std", error("connection error"))]
|
|
||||||
ConnectionError,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid node"))]
|
|
||||||
InvalidNode,
|
|
||||||
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
|
|
||||||
UnsupportedProtocol(usize),
|
|
||||||
#[cfg_attr(feature = "std", error("transactions not found"))]
|
|
||||||
TransactionsNotFound(Vec<[u8; 32]>),
|
|
||||||
#[cfg_attr(feature = "std", error("invalid point ({0})"))]
|
|
||||||
InvalidPoint(String),
|
|
||||||
#[cfg_attr(feature = "std", error("pruned transaction"))]
|
|
||||||
PrunedTransaction,
|
|
||||||
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
|
|
||||||
InvalidTransaction([u8; 32]),
|
|
||||||
}
|
|
||||||
|
|
||||||
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
|
|
||||||
hex::decode(value).map_err(|_| RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
|
|
||||||
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
|
|
||||||
CompressedEdwardsY(
|
|
||||||
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
|
|
||||||
)
|
|
||||||
.decompress()
|
|
||||||
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol
|
|
||||||
fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
|
|
||||||
let vi_start = read_byte(reader)?;
|
|
||||||
let len = match vi_start & 0b11 {
|
|
||||||
0 => 1,
|
|
||||||
1 => 2,
|
|
||||||
2 => 4,
|
|
||||||
3 => 8,
|
|
||||||
_ => unreachable!(),
|
|
||||||
};
|
|
||||||
let mut vi = u64::from(vi_start >> 2);
|
|
||||||
for i in 1 .. len {
|
|
||||||
vi |= u64::from(read_byte(reader)?) << (((i - 1) * 8) + 6);
|
|
||||||
}
|
|
||||||
Ok(vi)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[async_trait]
|
|
||||||
pub trait RpcConnection: Send + Sync + Clone + Debug {
|
|
||||||
/// Perform a POST request to the specified route with the specified body.
|
|
||||||
///
|
|
||||||
/// The implementor is left to handle anything such as authentication.
|
|
||||||
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError>;
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: Make this provided methods for RpcConnection?
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct Rpc<R: RpcConnection>(R);
|
|
||||||
impl<R: RpcConnection> Rpc<R> {
|
|
||||||
/// Perform a RPC call to the specified route with the provided parameters.
|
|
||||||
///
|
|
||||||
/// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via
|
|
||||||
/// `json_rpc_call`.
|
|
||||||
pub async fn rpc_call<Params: Send + Serialize + Debug, Response: DeserializeOwned + Debug>(
|
|
||||||
&self,
|
|
||||||
route: &str,
|
|
||||||
params: Option<Params>,
|
|
||||||
) -> Result<Response, RpcError> {
|
|
||||||
serde_json::from_str(
|
|
||||||
std_shims::str::from_utf8(
|
|
||||||
&self
|
|
||||||
.0
|
|
||||||
.post(
|
|
||||||
route,
|
|
||||||
if let Some(params) = params {
|
|
||||||
serde_json::to_string(¶ms).unwrap().into_bytes()
|
|
||||||
} else {
|
|
||||||
vec![]
|
|
||||||
},
|
|
||||||
)
|
|
||||||
.await?,
|
|
||||||
)
|
|
||||||
.map_err(|_| RpcError::InvalidNode)?,
|
|
||||||
)
|
|
||||||
.map_err(|_| RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Perform a JSON-RPC call with the specified method with the provided parameters
|
|
||||||
pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
|
|
||||||
&self,
|
|
||||||
method: &str,
|
|
||||||
params: Option<Value>,
|
|
||||||
) -> Result<Response, RpcError> {
|
|
||||||
let mut req = json!({ "method": method });
|
|
||||||
if let Some(params) = params {
|
|
||||||
req.as_object_mut().unwrap().insert("params".into(), params);
|
|
||||||
}
|
|
||||||
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Perform a binary call to the specified route with the provided parameters.
|
|
||||||
pub async fn bin_call(&self, route: &str, params: Vec<u8>) -> Result<Vec<u8>, RpcError> {
|
|
||||||
self.0.post(route, params).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the active blockchain protocol version.
|
|
||||||
pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct ProtocolResponse {
|
|
||||||
major_version: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct LastHeaderResponse {
|
|
||||||
block_header: ProtocolResponse,
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(
|
|
||||||
match self
|
|
||||||
.json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
|
|
||||||
.await?
|
|
||||||
.block_header
|
|
||||||
.major_version
|
|
||||||
{
|
|
||||||
13 | 14 => Protocol::v14,
|
|
||||||
15 | 16 => Protocol::v16,
|
|
||||||
protocol => Err(RpcError::UnsupportedProtocol(protocol))?,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_height(&self) -> Result<usize, RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct HeightResponse {
|
|
||||||
height: usize,
|
|
||||||
}
|
|
||||||
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
|
|
||||||
if hashes.is_empty() {
|
|
||||||
return Ok(vec![]);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut hashes_hex = hashes.iter().map(hex::encode).collect::<Vec<_>>();
|
|
||||||
let mut all_txs = Vec::with_capacity(hashes.len());
|
|
||||||
while !hashes_hex.is_empty() {
|
|
||||||
// Monero errors if more than 100 is requested unless using a non-restricted RPC
|
|
||||||
const TXS_PER_REQUEST: usize = 100;
|
|
||||||
let this_count = TXS_PER_REQUEST.min(hashes_hex.len());
|
|
||||||
|
|
||||||
let txs: TransactionsResponse = self
|
|
||||||
.rpc_call(
|
|
||||||
"get_transactions",
|
|
||||||
Some(json!({
|
|
||||||
"txs_hashes": hashes_hex.drain(.. this_count).collect::<Vec<_>>(),
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if !txs.missed_tx.is_empty() {
|
|
||||||
Err(RpcError::TransactionsNotFound(
|
|
||||||
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
|
|
||||||
))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
all_txs.extend(txs.txs);
|
|
||||||
}
|
|
||||||
|
|
||||||
all_txs
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, res)| {
|
|
||||||
let tx = Transaction::read::<&[u8]>(
|
|
||||||
&mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
|
|
||||||
.as_ref(),
|
|
||||||
)
|
|
||||||
.map_err(|_| match hash_hex(&res.tx_hash) {
|
|
||||||
Ok(hash) => RpcError::InvalidTransaction(hash),
|
|
||||||
Err(err) => err,
|
|
||||||
})?;
|
|
||||||
|
|
||||||
// https://github.com/monero-project/monero/issues/8311
|
|
||||||
if res.as_hex.is_empty() {
|
|
||||||
match tx.prefix.inputs.get(0) {
|
|
||||||
Some(Input::Gen { .. }) => (),
|
|
||||||
_ => Err(RpcError::PrunedTransaction)?,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This does run a few keccak256 hashes, which is pointless if the node is trusted
|
|
||||||
// In exchange, this provides resilience against invalid/malicious nodes
|
|
||||||
if tx.hash() != hashes[i] {
|
|
||||||
Err(RpcError::InvalidNode)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(tx)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
|
|
||||||
self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the hash of a block from the node by the block's numbers.
|
|
||||||
/// This function does not verify the returned block hash is actually for the number in question.
|
|
||||||
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct BlockHeaderResponse {
|
|
||||||
hash: String,
|
|
||||||
}
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct BlockHeaderByHeightResponse {
|
|
||||||
block_header: BlockHeaderResponse,
|
|
||||||
}
|
|
||||||
|
|
||||||
let header: BlockHeaderByHeightResponse =
|
|
||||||
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
|
|
||||||
rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get a block from the node by its hash.
|
|
||||||
/// This function does not verify the returned block actually has the hash in question.
|
|
||||||
pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct BlockResponse {
|
|
||||||
blob: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res: BlockResponse =
|
|
||||||
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
|
|
||||||
|
|
||||||
let block =
|
|
||||||
Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)?;
|
|
||||||
if block.hash() != hash {
|
|
||||||
Err(RpcError::InvalidNode)?;
|
|
||||||
}
|
|
||||||
Ok(block)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
|
|
||||||
match self.get_block(self.get_block_hash(number).await?).await {
|
|
||||||
Ok(block) => {
|
|
||||||
// Make sure this is actually the block for this number
|
|
||||||
match block.miner_tx.prefix.inputs.get(0) {
|
|
||||||
Some(Input::Gen(actual)) => {
|
|
||||||
if usize::try_from(*actual).unwrap() == number {
|
|
||||||
Ok(block)
|
|
||||||
} else {
|
|
||||||
Err(RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some(Input::ToKey { .. }) | None => Err(RpcError::InvalidNode),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
e => e,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
|
|
||||||
let block = self.get_block(hash).await?;
|
|
||||||
let mut res = vec![block.miner_tx];
|
|
||||||
res.extend(self.get_transactions(&block.txs).await?);
|
|
||||||
Ok(res)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn get_block_transactions_by_number(
|
|
||||||
&self,
|
|
||||||
number: usize,
|
|
||||||
) -> Result<Vec<Transaction>, RpcError> {
|
|
||||||
self.get_block_transactions(self.get_block_hash(number).await?).await
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the output indexes of the specified transaction.
|
|
||||||
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
|
|
||||||
/*
|
|
||||||
TODO: Use these when a suitable epee serde lib exists
|
|
||||||
|
|
||||||
#[derive(Serialize, Debug)]
|
|
||||||
struct Request {
|
|
||||||
txid: [u8; 32],
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct OIndexes {
|
|
||||||
o_indexes: Vec<u64>,
|
|
||||||
}
|
|
||||||
*/
|
|
||||||
|
|
||||||
// Given the immaturity of Rust epee libraries, this is a homegrown one which is only validated
|
|
||||||
// to work against this specific function
|
|
||||||
|
|
||||||
// Header for EPEE, an 8-byte magic and a version
|
|
||||||
const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01";
|
|
||||||
|
|
||||||
let mut request = EPEE_HEADER.to_vec();
|
|
||||||
// Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata)
|
|
||||||
request.push(1 << 2);
|
|
||||||
// Length of field name
|
|
||||||
request.push(4);
|
|
||||||
// Field name
|
|
||||||
request.extend(b"txid");
|
|
||||||
// Type of field
|
|
||||||
request.push(10);
|
|
||||||
// Length of string, since this byte array is technically a string
|
|
||||||
request.push(32 << 2);
|
|
||||||
// The "string"
|
|
||||||
request.extend(hash);
|
|
||||||
|
|
||||||
let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?;
|
|
||||||
let mut indexes: &[u8] = indexes_buf.as_ref();
|
|
||||||
|
|
||||||
(|| {
|
|
||||||
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let read_object = |reader: &mut &[u8]| {
|
|
||||||
let fields = read_byte(reader)? >> 2;
|
|
||||||
|
|
||||||
for _ in 0 .. fields {
|
|
||||||
let name_len = read_byte(reader)?;
|
|
||||||
let name = read_raw_vec(read_byte, name_len.into(), reader)?;
|
|
||||||
|
|
||||||
let type_with_array_flag = read_byte(reader)?;
|
|
||||||
let kind = type_with_array_flag & (!0x80);
|
|
||||||
|
|
||||||
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
|
|
||||||
|
|
||||||
if (&name == b"o_indexes") && (kind != 5) {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let f = match kind {
|
|
||||||
// i64
|
|
||||||
1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
|
||||||
// i32
|
|
||||||
2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
|
|
||||||
// i16
|
|
||||||
3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
|
|
||||||
// i8
|
|
||||||
4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
|
||||||
// u64
|
|
||||||
5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
|
||||||
// u32
|
|
||||||
6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
|
|
||||||
// u16
|
|
||||||
7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
|
|
||||||
// u8
|
|
||||||
8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
|
||||||
// double
|
|
||||||
9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
|
|
||||||
// string, or any collection of bytes
|
|
||||||
10 => |reader: &mut &[u8]| {
|
|
||||||
let len = read_epee_vi(reader)?;
|
|
||||||
read_raw_vec(
|
|
||||||
read_byte,
|
|
||||||
len
|
|
||||||
.try_into()
|
|
||||||
.map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
|
|
||||||
reader,
|
|
||||||
)
|
|
||||||
},
|
|
||||||
// bool
|
|
||||||
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
|
|
||||||
// object, errors here as it shouldn't be used on this call
|
|
||||||
12 => |_: &mut &[u8]| {
|
|
||||||
Err(io::Error::new(
|
|
||||||
io::ErrorKind::Other,
|
|
||||||
"node used object in reply to get_o_indexes",
|
|
||||||
))
|
|
||||||
},
|
|
||||||
// array, so far unused
|
|
||||||
13 => |_: &mut &[u8]| {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
|
|
||||||
},
|
|
||||||
_ => {
|
|
||||||
|_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut res = vec![];
|
|
||||||
for _ in 0 .. iters {
|
|
||||||
res.push(f(reader)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut actual_res = Vec::with_capacity(res.len());
|
|
||||||
if &name == b"o_indexes" {
|
|
||||||
for o_index in res {
|
|
||||||
actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
|
|
||||||
io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
|
|
||||||
})?));
|
|
||||||
}
|
|
||||||
return Ok(actual_res);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Didn't return a response with o_indexes
|
|
||||||
// TODO: Check if this didn't have o_indexes because it's an error response
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
|
|
||||||
};
|
|
||||||
|
|
||||||
read_object(&mut indexes)
|
|
||||||
})()
|
|
||||||
.map_err(|_| RpcError::InvalidNode)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the output distribution, from the specified height to the specified height (both
|
|
||||||
/// inclusive).
|
|
||||||
pub async fn get_output_distribution(
|
|
||||||
&self,
|
|
||||||
from: usize,
|
|
||||||
to: usize,
|
|
||||||
) -> Result<Vec<u64>, RpcError> {
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Distribution {
|
|
||||||
distribution: Vec<u64>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Distributions {
|
|
||||||
distributions: Vec<Distribution>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut distributions: Distributions = self
|
|
||||||
.json_rpc_call(
|
|
||||||
"get_output_distribution",
|
|
||||||
Some(json!({
|
|
||||||
"binary": false,
|
|
||||||
"amounts": [0],
|
|
||||||
"cumulative": true,
|
|
||||||
"from_height": from,
|
|
||||||
"to_height": to,
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(distributions.distributions.swap_remove(0).distribution)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
|
|
||||||
/// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
|
|
||||||
/// all Monero transactions.
|
|
||||||
pub async fn get_unlocked_outputs(
|
|
||||||
&self,
|
|
||||||
indexes: &[u64],
|
|
||||||
height: usize,
|
|
||||||
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Out {
|
|
||||||
key: String,
|
|
||||||
mask: String,
|
|
||||||
txid: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct Outs {
|
|
||||||
outs: Vec<Out>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let outs: Outs = self
|
|
||||||
.rpc_call(
|
|
||||||
"get_outs",
|
|
||||||
Some(json!({
|
|
||||||
"get_txid": true,
|
|
||||||
"outputs": indexes.iter().map(|o| json!({
|
|
||||||
"amount": 0,
|
|
||||||
"index": o
|
|
||||||
})).collect::<Vec<_>>()
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let txs = self
|
|
||||||
.get_transactions(
|
|
||||||
&outs
|
|
||||||
.outs
|
|
||||||
.iter()
|
|
||||||
.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
|
|
||||||
.collect::<Result<Vec<_>, _>>()?,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
// TODO: https://github.com/serai-dex/serai/issues/104
|
|
||||||
outs
|
|
||||||
.outs
|
|
||||||
.iter()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(i, out)| {
|
|
||||||
Ok(
|
|
||||||
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
|
|
||||||
.filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
|
|
||||||
/// MUST be sanity checked.
|
|
||||||
// TODO: Take a sanity check argument
|
|
||||||
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct FeeResponse {
|
|
||||||
fee: u64,
|
|
||||||
quantization_mask: u64,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
|
|
||||||
Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
|
|
||||||
#[allow(dead_code)]
|
|
||||||
#[derive(Deserialize, Debug)]
|
|
||||||
struct SendRawResponse {
|
|
||||||
status: String,
|
|
||||||
double_spend: bool,
|
|
||||||
fee_too_low: bool,
|
|
||||||
invalid_input: bool,
|
|
||||||
invalid_output: bool,
|
|
||||||
low_mixin: bool,
|
|
||||||
not_relayed: bool,
|
|
||||||
overspend: bool,
|
|
||||||
too_big: bool,
|
|
||||||
too_few_outputs: bool,
|
|
||||||
reason: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
let res: SendRawResponse = self
|
|
||||||
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(tx.serialize()) })))
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if res.status != "OK" {
|
|
||||||
Err(RpcError::InvalidTransaction(tx.hash()))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
|
|
||||||
self
|
|
||||||
.rpc_call::<_, EmptyResponse>(
|
|
||||||
"json_rpc",
|
|
||||||
Some(json!({
|
|
||||||
"method": "generateblocks",
|
|
||||||
"params": {
|
|
||||||
"wallet_address": address,
|
|
||||||
"amount_of_blocks": block_count
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,25 +1,14 @@
|
|||||||
use core::fmt::Debug;
|
use std::io;
|
||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use curve25519_dalek::{
|
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
|
||||||
scalar::Scalar,
|
|
||||||
edwards::{EdwardsPoint, CompressedEdwardsY},
|
|
||||||
};
|
|
||||||
|
|
||||||
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
|
||||||
pub(crate) fn varint_len(varint: usize) -> usize {
|
pub fn varint_len(varint: usize) -> usize {
|
||||||
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
|
pub fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
||||||
w.write_all(&[*byte])
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
|
||||||
let mut varint = *varint;
|
let mut varint = *varint;
|
||||||
while {
|
while {
|
||||||
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
||||||
@@ -27,130 +16,89 @@ pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()>
|
|||||||
if varint != 0 {
|
if varint != 0 {
|
||||||
b |= VARINT_CONTINUATION_MASK;
|
b |= VARINT_CONTINUATION_MASK;
|
||||||
}
|
}
|
||||||
write_byte(&b, w)?;
|
w.write_all(&[b])?;
|
||||||
varint != 0
|
varint != 0
|
||||||
} {}
|
} {}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
|
pub fn write_scalar<W: io::Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
|
||||||
w.write_all(&scalar.to_bytes())
|
w.write_all(&scalar.to_bytes())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
|
pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
|
||||||
w.write_all(&point.compress().to_bytes())
|
w.write_all(&point.compress().to_bytes())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
pub fn write_raw_vec<
|
||||||
f: F,
|
T,
|
||||||
values: &[T],
|
W: io::Write,
|
||||||
w: &mut W,
|
F: Fn(&T, &mut W) -> io::Result<()>
|
||||||
) -> io::Result<()> {
|
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
|
||||||
for value in values {
|
for value in values {
|
||||||
f(value, w)?;
|
f(value, w)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
pub fn write_vec<
|
||||||
f: F,
|
T,
|
||||||
values: &[T],
|
W: io::Write,
|
||||||
w: &mut W,
|
F: Fn(&T, &mut W) -> io::Result<()>
|
||||||
) -> io::Result<()> {
|
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
|
||||||
write_varint(&values.len().try_into().unwrap(), w)?;
|
write_varint(&values.len().try_into().unwrap(), w)?;
|
||||||
write_raw_vec(f, values, w)
|
write_raw_vec(f, &values, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
|
pub fn read_byte<R: io::Read>(r: &mut R) -> io::Result<u8> {
|
||||||
let mut res = [0; N];
|
let mut res = [0; 1];
|
||||||
r.read_exact(&mut res)?;
|
r.read_exact(&mut res)?;
|
||||||
Ok(res)
|
Ok(res[0])
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
|
pub fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
|
||||||
Ok(read_bytes::<_, 1>(r)?[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
|
|
||||||
read_bytes(r).map(u16::from_le_bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
|
|
||||||
read_bytes(r).map(u32::from_le_bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
|
|
||||||
read_bytes(r).map(u64::from_le_bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
|
|
||||||
let mut bits = 0;
|
let mut bits = 0;
|
||||||
let mut res = 0;
|
let mut res = 0;
|
||||||
while {
|
while {
|
||||||
let b = read_byte(r)?;
|
let b = read_byte(r)?;
|
||||||
if (bits != 0) && (b == 0) {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
|
|
||||||
}
|
|
||||||
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
||||||
|
// TODO: Error if bits exceed u64
|
||||||
bits += 7;
|
bits += 7;
|
||||||
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
||||||
} {}
|
} {}
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
|
pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
|
||||||
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
|
let mut res = [0; 32];
|
||||||
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
|
r.read_exact(&mut res)?;
|
||||||
// for now. There's also further edge cases as noted by
|
Ok(res)
|
||||||
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
|
|
||||||
// reduction applied
|
|
||||||
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
|
|
||||||
Scalar::from_canonical_bytes(read_bytes(r)?)
|
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
// TODO: Potentially update to Monero's parsing rules on scalars/points, which should be any arbitrary 32-bytes
|
||||||
let bytes = read_bytes(r)?;
|
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
|
||||||
CompressedEdwardsY(bytes)
|
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
|
||||||
.decompress()
|
Scalar::from_canonical_bytes(
|
||||||
// Ban points which are either unreduced or -0
|
read_32(r)?
|
||||||
.filter(|point| point.compress().to_bytes() == bytes)
|
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
read_point(r)
|
CompressedEdwardsY(
|
||||||
.ok()
|
read_32(r)?
|
||||||
.filter(EdwardsPoint::is_torsion_free)
|
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
|
||||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: usize, r: &mut R) -> io::Result<Vec<T>> {
|
||||||
f: F,
|
let mut res = Vec::with_capacity(
|
||||||
len: usize,
|
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
|
||||||
r: &mut R,
|
);
|
||||||
) -> io::Result<Vec<T>> {
|
|
||||||
let mut res = vec![];
|
|
||||||
for _ in 0 .. len {
|
for _ in 0 .. len {
|
||||||
res.push(f(r)?);
|
res.push(f(r)?);
|
||||||
}
|
}
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
|
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
|
||||||
f: F,
|
|
||||||
r: &mut R,
|
|
||||||
) -> io::Result<[T; N]> {
|
|
||||||
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
|
||||||
f: F,
|
|
||||||
r: &mut R,
|
|
||||||
) -> io::Result<Vec<T>> {
|
|
||||||
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
|
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,176 +1,45 @@
|
|||||||
use hex_literal::hex;
|
use hex_literal::hex;
|
||||||
|
|
||||||
use rand_core::{RngCore, OsRng};
|
use crate::wallet::address::{Network, AddressType, Address};
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
random_scalar,
|
|
||||||
wallet::address::{Network, AddressType, AddressMeta, MoneroAddress},
|
|
||||||
};
|
|
||||||
|
|
||||||
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
|
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
|
||||||
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
|
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
|
||||||
|
|
||||||
const STANDARD: &str =
|
const STANDARD: &'static str = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
|
||||||
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
|
|
||||||
|
|
||||||
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
|
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
|
||||||
const INTEGRATED: &str =
|
const INTEGRATED: &'static str = "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6MnpXSn88oBX35";
|
||||||
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
|
|
||||||
pXSn88oBX35";
|
|
||||||
|
|
||||||
const SUB_SPEND: [u8; 32] =
|
const SUB_SPEND: [u8; 32] = hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
|
||||||
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
|
|
||||||
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
|
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
|
||||||
const SUBADDRESS: &str =
|
const SUBADDRESS: &'static str = "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
|
||||||
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
|
|
||||||
|
|
||||||
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn standard_address() {
|
fn standard_address() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
|
let addr = Address::from_str(STANDARD, Network::Mainnet).unwrap();
|
||||||
assert_eq!(addr.meta.network, Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.meta.kind, AddressType::Standard);
|
assert_eq!(addr.meta.kind, AddressType::Standard);
|
||||||
assert!(!addr.meta.kind.is_subaddress());
|
assert_eq!(addr.meta.guaranteed, false);
|
||||||
assert_eq!(addr.meta.kind.payment_id(), None);
|
|
||||||
assert!(!addr.meta.kind.is_guaranteed());
|
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
||||||
assert_eq!(addr.to_string(), STANDARD);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn integrated_address() {
|
fn integrated_address() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
|
let addr = Address::from_str(INTEGRATED, Network::Mainnet).unwrap();
|
||||||
assert_eq!(addr.meta.network, Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
|
assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
|
||||||
assert!(!addr.meta.kind.is_subaddress());
|
assert_eq!(addr.meta.guaranteed, false);
|
||||||
assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
|
|
||||||
assert!(!addr.meta.kind.is_guaranteed());
|
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), VIEW);
|
||||||
assert_eq!(addr.to_string(), INTEGRATED);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn subaddress() {
|
fn subaddress() {
|
||||||
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
|
let addr = Address::from_str(SUBADDRESS, Network::Mainnet).unwrap();
|
||||||
assert_eq!(addr.meta.network, Network::Mainnet);
|
assert_eq!(addr.meta.network, Network::Mainnet);
|
||||||
assert_eq!(addr.meta.kind, AddressType::Subaddress);
|
assert_eq!(addr.meta.kind, AddressType::Subaddress);
|
||||||
assert!(addr.meta.kind.is_subaddress());
|
assert_eq!(addr.meta.guaranteed, false);
|
||||||
assert_eq!(addr.meta.kind.payment_id(), None);
|
|
||||||
assert!(!addr.meta.kind.is_guaranteed());
|
|
||||||
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
|
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
|
||||||
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
|
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
|
||||||
assert_eq!(addr.to_string(), SUBADDRESS);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn featured() {
|
|
||||||
for (network, first) in
|
|
||||||
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
|
|
||||||
{
|
|
||||||
for _ in 0 .. 100 {
|
|
||||||
let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
|
||||||
let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
|
||||||
|
|
||||||
for features in 0 .. (1 << 3) {
|
|
||||||
const SUBADDRESS_FEATURE_BIT: u8 = 1;
|
|
||||||
const INTEGRATED_FEATURE_BIT: u8 = 1 << 1;
|
|
||||||
const GUARANTEED_FEATURE_BIT: u8 = 1 << 2;
|
|
||||||
|
|
||||||
let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;
|
|
||||||
|
|
||||||
let mut payment_id = [0; 8];
|
|
||||||
OsRng.fill_bytes(&mut payment_id);
|
|
||||||
let payment_id = Some(payment_id)
|
|
||||||
.filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);
|
|
||||||
|
|
||||||
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
|
|
||||||
|
|
||||||
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
|
|
||||||
let meta = AddressMeta::new(network, kind);
|
|
||||||
let addr = MoneroAddress::new(meta, spend, view);
|
|
||||||
|
|
||||||
assert_eq!(addr.to_string().chars().next().unwrap(), first);
|
|
||||||
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
|
|
||||||
|
|
||||||
assert_eq!(addr.spend, spend);
|
|
||||||
assert_eq!(addr.view, view);
|
|
||||||
|
|
||||||
assert_eq!(addr.is_subaddress(), subaddress);
|
|
||||||
assert_eq!(addr.payment_id(), payment_id);
|
|
||||||
assert_eq!(addr.is_guaranteed(), guaranteed);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn featured_vectors() {
|
|
||||||
#[derive(serde::Deserialize)]
|
|
||||||
struct Vector {
|
|
||||||
address: String,
|
|
||||||
|
|
||||||
network: String,
|
|
||||||
spend: String,
|
|
||||||
view: String,
|
|
||||||
|
|
||||||
subaddress: bool,
|
|
||||||
integrated: bool,
|
|
||||||
payment_id: Option<[u8; 8]>,
|
|
||||||
guaranteed: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
let vectors = serde_json::from_str::<Vec<Vector>>(FEATURED_JSON).unwrap();
|
|
||||||
for vector in vectors {
|
|
||||||
let first = vector.address.chars().next().unwrap();
|
|
||||||
let network = match vector.network.as_str() {
|
|
||||||
"Mainnet" => {
|
|
||||||
assert_eq!(first, 'C');
|
|
||||||
Network::Mainnet
|
|
||||||
}
|
|
||||||
"Testnet" => {
|
|
||||||
assert_eq!(first, 'K');
|
|
||||||
Network::Testnet
|
|
||||||
}
|
|
||||||
"Stagenet" => {
|
|
||||||
assert_eq!(first, 'F');
|
|
||||||
Network::Stagenet
|
|
||||||
}
|
|
||||||
_ => panic!("Unknown network"),
|
|
||||||
};
|
|
||||||
let spend =
|
|
||||||
CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
|
|
||||||
let view =
|
|
||||||
CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();
|
|
||||||
|
|
||||||
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
|
|
||||||
assert_eq!(addr.spend, spend);
|
|
||||||
assert_eq!(addr.view, view);
|
|
||||||
|
|
||||||
assert_eq!(addr.is_subaddress(), vector.subaddress);
|
|
||||||
assert_eq!(vector.integrated, vector.payment_id.is_some());
|
|
||||||
assert_eq!(addr.payment_id(), vector.payment_id);
|
|
||||||
assert_eq!(addr.is_guaranteed(), vector.guaranteed);
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
MoneroAddress::new(
|
|
||||||
AddressMeta::new(
|
|
||||||
network,
|
|
||||||
AddressType::Featured {
|
|
||||||
subaddress: vector.subaddress,
|
|
||||||
payment_id: vector.payment_id,
|
|
||||||
guaranteed: vector.guaranteed
|
|
||||||
}
|
|
||||||
),
|
|
||||||
spend,
|
|
||||||
view
|
|
||||||
)
|
|
||||||
.to_string(),
|
|
||||||
vector.address
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
use hex_literal::hex;
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
|
|
||||||
use multiexp::BatchVerifier;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Commitment, random_scalar,
|
|
||||||
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn bulletproofs_vector() {
|
|
||||||
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
|
|
||||||
let point = |point| CompressedEdwardsY(point).decompress().unwrap();
|
|
||||||
|
|
||||||
// Generated from Monero
|
|
||||||
assert!(Bulletproofs::Original(OriginalStruct {
|
|
||||||
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
|
|
||||||
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
|
|
||||||
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
|
|
||||||
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
|
|
||||||
taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
|
|
||||||
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
|
|
||||||
L: vec![
|
|
||||||
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
|
|
||||||
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
|
|
||||||
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
|
|
||||||
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
|
|
||||||
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
|
|
||||||
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
|
|
||||||
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
|
|
||||||
],
|
|
||||||
R: vec![
|
|
||||||
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
|
|
||||||
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
|
|
||||||
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
|
|
||||||
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
|
|
||||||
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
|
|
||||||
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
|
|
||||||
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
|
|
||||||
],
|
|
||||||
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
|
|
||||||
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
|
|
||||||
t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
|
|
||||||
})
|
|
||||||
.verify(
|
|
||||||
&mut OsRng,
|
|
||||||
&[
|
|
||||||
// For some reason, these vectors are * INV_EIGHT
|
|
||||||
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
|
|
||||||
.mul_by_cofactor(),
|
|
||||||
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
|
|
||||||
.mul_by_cofactor(),
|
|
||||||
]
|
|
||||||
));
|
|
||||||
}
|
|
||||||
|
|
||||||
macro_rules! bulletproofs_tests {
|
|
||||||
($name: ident, $max: ident, $plus: literal) => {
|
|
||||||
#[test]
|
|
||||||
fn $name() {
|
|
||||||
// Create Bulletproofs for all possible output quantities
|
|
||||||
let mut verifier = BatchVerifier::new(16);
|
|
||||||
for i in 1 .. 17 {
|
|
||||||
let commitments = (1 ..= i)
|
|
||||||
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();
|
|
||||||
|
|
||||||
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
|
||||||
assert!(bp.verify(&mut OsRng, &commitments));
|
|
||||||
assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
|
|
||||||
}
|
|
||||||
assert!(verifier.verify_vartime());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn $max() {
|
|
||||||
// Check Bulletproofs errors if we try to prove for too many outputs
|
|
||||||
let mut commitments = vec![];
|
|
||||||
for _ in 0 .. 17 {
|
|
||||||
commitments.push(Commitment::new(Scalar::zero(), 0));
|
|
||||||
}
|
|
||||||
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
|
|
||||||
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);
|
|
||||||
@@ -1,11 +1,7 @@
|
|||||||
use core::ops::Deref;
|
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
use std_shims::sync::Arc;
|
use std::sync::{Arc, RwLock};
|
||||||
#[cfg(feature = "multisig")]
|
|
||||||
use std::sync::RwLock;
|
|
||||||
|
|
||||||
use zeroize::Zeroizing;
|
use rand::{RngCore, rngs::OsRng};
|
||||||
use rand_core::{RngCore, OsRng};
|
|
||||||
|
|
||||||
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
|
||||||
|
|
||||||
@@ -15,21 +11,16 @@ use transcript::{Transcript, RecommendedTranscript};
|
|||||||
use frost::curve::Ed25519;
|
use frost::curve::Ed25519;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Commitment, random_scalar,
|
Commitment,
|
||||||
|
random_scalar,
|
||||||
wallet::Decoys,
|
wallet::Decoys,
|
||||||
ringct::{
|
ringct::{generate_key_image, clsag::{ClsagInput, Clsag}}
|
||||||
generate_key_image,
|
|
||||||
clsag::{ClsagInput, Clsag},
|
|
||||||
},
|
|
||||||
};
|
};
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};
|
use crate::{frost::MultisigError, ringct::clsag::{ClsagDetails, ClsagMultisig}};
|
||||||
|
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
use frost::{
|
use frost::tests::{key_gen, algorithm_machines, sign};
|
||||||
Participant,
|
|
||||||
tests::{key_gen, algorithm_machines, sign},
|
|
||||||
};
|
|
||||||
|
|
||||||
const RING_LEN: u64 = 11;
|
const RING_LEN: u64 = 11;
|
||||||
const AMOUNT: u64 = 1337;
|
const AMOUNT: u64 = 1337;
|
||||||
@@ -42,48 +33,48 @@ fn clsag() {
|
|||||||
for real in 0 .. RING_LEN {
|
for real in 0 .. RING_LEN {
|
||||||
let msg = [1; 32];
|
let msg = [1; 32];
|
||||||
|
|
||||||
let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
|
let mut secrets = [Scalar::zero(), Scalar::zero()];
|
||||||
let mut ring = vec![];
|
let mut ring = vec![];
|
||||||
for i in 0 .. RING_LEN {
|
for i in 0 .. RING_LEN {
|
||||||
let dest = Zeroizing::new(random_scalar(&mut OsRng));
|
let dest = random_scalar(&mut OsRng);
|
||||||
let mask = random_scalar(&mut OsRng);
|
let mask = random_scalar(&mut OsRng);
|
||||||
let amount = if i == real {
|
let amount;
|
||||||
secrets = (dest.clone(), mask);
|
if i == u64::from(real) {
|
||||||
AMOUNT
|
secrets = [dest, mask];
|
||||||
|
amount = AMOUNT;
|
||||||
} else {
|
} else {
|
||||||
OsRng.next_u64()
|
amount = OsRng.next_u64();
|
||||||
};
|
}
|
||||||
ring
|
ring.push([&dest * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
|
||||||
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let image = generate_key_image(&secrets.0);
|
let image = generate_key_image(secrets[0]);
|
||||||
let (clsag, pseudo_out) = Clsag::sign(
|
let (clsag, pseudo_out) = Clsag::sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
vec![(
|
&vec![(
|
||||||
secrets.0,
|
secrets[0],
|
||||||
image,
|
image,
|
||||||
ClsagInput::new(
|
ClsagInput::new(
|
||||||
Commitment::new(secrets.1, AMOUNT),
|
Commitment::new(secrets[1], AMOUNT),
|
||||||
Decoys {
|
Decoys {
|
||||||
i: u8::try_from(real).unwrap(),
|
i: u8::try_from(real).unwrap(),
|
||||||
offsets: (1 ..= RING_LEN).collect(),
|
offsets: (1 ..= RING_LEN).into_iter().collect(),
|
||||||
ring: ring.clone(),
|
ring: ring.clone()
|
||||||
},
|
}
|
||||||
)
|
).unwrap()
|
||||||
.unwrap(),
|
|
||||||
)],
|
)],
|
||||||
random_scalar(&mut OsRng),
|
random_scalar(&mut OsRng),
|
||||||
msg,
|
msg
|
||||||
)
|
).swap_remove(0);
|
||||||
.swap_remove(0);
|
|
||||||
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
|
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
|
||||||
|
#[cfg(feature = "experimental")]
|
||||||
|
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "multisig")]
|
#[cfg(feature = "multisig")]
|
||||||
#[test]
|
#[test]
|
||||||
fn clsag_multisig() {
|
fn clsag_multisig() -> Result<(), MultisigError> {
|
||||||
let keys = key_gen::<_, Ed25519>(&mut OsRng);
|
let keys = key_gen::<_, Ed25519>(&mut OsRng);
|
||||||
|
|
||||||
let randomness = random_scalar(&mut OsRng);
|
let randomness = random_scalar(&mut OsRng);
|
||||||
@@ -91,37 +82,45 @@ fn clsag_multisig() {
|
|||||||
for i in 0 .. RING_LEN {
|
for i in 0 .. RING_LEN {
|
||||||
let dest;
|
let dest;
|
||||||
let mask;
|
let mask;
|
||||||
let amount = if i == u64::from(RING_INDEX) {
|
let amount;
|
||||||
dest = keys[&Participant::new(1).unwrap()].group_key().0;
|
if i != u64::from(RING_INDEX) {
|
||||||
mask = randomness;
|
|
||||||
AMOUNT
|
|
||||||
} else {
|
|
||||||
dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
||||||
mask = random_scalar(&mut OsRng);
|
mask = random_scalar(&mut OsRng);
|
||||||
OsRng.next_u64()
|
amount = OsRng.next_u64();
|
||||||
};
|
} else {
|
||||||
|
dest = keys[&1].group_key().0;
|
||||||
|
mask = randomness;
|
||||||
|
amount = AMOUNT;
|
||||||
|
}
|
||||||
ring.push([dest, Commitment::new(mask, amount).calculate()]);
|
ring.push([dest, Commitment::new(mask, amount).calculate()]);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mask_sum = random_scalar(&mut OsRng);
|
let mask_sum = random_scalar(&mut OsRng);
|
||||||
let algorithm = ClsagMultisig::new(
|
|
||||||
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
|
|
||||||
keys[&Participant::new(1).unwrap()].group_key().0,
|
|
||||||
Arc::new(RwLock::new(Some(ClsagDetails::new(
|
|
||||||
ClsagInput::new(
|
|
||||||
Commitment::new(randomness, AMOUNT),
|
|
||||||
Decoys { i: RING_INDEX, offsets: (1 ..= RING_LEN).collect(), ring: ring.clone() },
|
|
||||||
)
|
|
||||||
.unwrap(),
|
|
||||||
mask_sum,
|
|
||||||
)))),
|
|
||||||
);
|
|
||||||
|
|
||||||
sign(
|
sign(
|
||||||
&mut OsRng,
|
&mut OsRng,
|
||||||
algorithm.clone(),
|
algorithm_machines(
|
||||||
keys.clone(),
|
&mut OsRng,
|
||||||
algorithm_machines(&mut OsRng, algorithm, &keys),
|
ClsagMultisig::new(
|
||||||
&[1; 32],
|
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
|
||||||
|
keys[&1].group_key().0,
|
||||||
|
Arc::new(RwLock::new(Some(
|
||||||
|
ClsagDetails::new(
|
||||||
|
ClsagInput::new(
|
||||||
|
Commitment::new(randomness, AMOUNT),
|
||||||
|
Decoys {
|
||||||
|
i: RING_INDEX,
|
||||||
|
offsets: (1 ..= RING_LEN).into_iter().collect(),
|
||||||
|
ring: ring.clone()
|
||||||
|
}
|
||||||
|
).unwrap(),
|
||||||
|
mask_sum
|
||||||
|
)
|
||||||
|
)))
|
||||||
|
).unwrap(),
|
||||||
|
&keys
|
||||||
|
),
|
||||||
|
&[1; 32]
|
||||||
);
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
13
coins/monero/src/tests/hash_to_point.rs
Normal file
13
coins/monero/src/tests/hash_to_point.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
use rand::rngs::OsRng;
|
||||||
|
|
||||||
|
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
|
||||||
|
|
||||||
|
use crate::{random_scalar, ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point}};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hash_to_point() {
|
||||||
|
for _ in 0 .. 50 {
|
||||||
|
let point = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
|
||||||
|
assert_eq!(rust_hash_to_point(point), c_hash_to_point(point));
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
|
mod hash_to_point;
|
||||||
mod clsag;
|
mod clsag;
|
||||||
mod bulletproofs;
|
|
||||||
mod address;
|
mod address;
|
||||||
mod seed;
|
|
||||||
|
|||||||
@@ -1,177 +0,0 @@
|
|||||||
use zeroize::Zeroizing;
|
|
||||||
|
|
||||||
use rand_core::OsRng;
|
|
||||||
|
|
||||||
use curve25519_dalek::scalar::Scalar;
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
hash,
|
|
||||||
wallet::seed::{Seed, Language, classic::trim_by_lang},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_classic_seed() {
|
|
||||||
struct Vector {
|
|
||||||
language: Language,
|
|
||||||
seed: String,
|
|
||||||
spend: String,
|
|
||||||
view: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
let vectors = [
|
|
||||||
Vector {
|
|
||||||
language: Language::Chinese,
|
|
||||||
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
|
|
||||||
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
|
|
||||||
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::English,
|
|
||||||
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
|
|
||||||
abnormal memoir nylon mostly building shrugged online ember northern \
|
|
||||||
ruby woes dauntless boil family illness inroads northern"
|
|
||||||
.into(),
|
|
||||||
spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
|
|
||||||
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Dutch,
|
|
||||||
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
|
|
||||||
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
|
|
||||||
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
|
|
||||||
.into(),
|
|
||||||
spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
|
|
||||||
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::French,
|
|
||||||
seed: "poids vaseux tarte bazar poivre effet entier nuance \
|
|
||||||
sensuel ennui pacte osselet poudre battre alibi mouton \
|
|
||||||
stade paquet pliage gibier type question position projet pliage"
|
|
||||||
.into(),
|
|
||||||
spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
|
|
||||||
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Spanish,
|
|
||||||
seed: "minero ocupar mirar evadir octubre cal logro miope \
|
|
||||||
opaco disco ancla litio clase cuello nasal clase \
|
|
||||||
fiar avance deseo mente grumo negro cordón croqueta clase"
|
|
||||||
.into(),
|
|
||||||
spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
|
|
||||||
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::German,
|
|
||||||
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
|
|
||||||
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
|
|
||||||
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
|
|
||||||
.into(),
|
|
||||||
spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
|
|
||||||
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Italian,
|
|
||||||
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
|
|
||||||
forzare meritare litigare lezione segreto evasione votare buio \
|
|
||||||
licenza cliente dorso natale crescere vento tutelare vetta evasione"
|
|
||||||
.into(),
|
|
||||||
spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
|
|
||||||
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Portuguese,
|
|
||||||
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
|
|
||||||
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
|
|
||||||
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
|
|
||||||
.into(),
|
|
||||||
spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
|
|
||||||
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Japanese,
|
|
||||||
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
|
|
||||||
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
|
|
||||||
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
|
|
||||||
.into(),
|
|
||||||
spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
|
|
||||||
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Russian,
|
|
||||||
seed: "шатер икра нация ехать получать инерция доза реальный \
|
|
||||||
рыжий таможня лопата душа веселый клетка атлас лекция \
|
|
||||||
обгонять паек наивный лыжный дурак стать ежик задача паек"
|
|
||||||
.into(),
|
|
||||||
spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
|
|
||||||
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Esperanto,
|
|
||||||
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
|
|
||||||
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
|
|
||||||
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
|
|
||||||
.into(),
|
|
||||||
spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
|
|
||||||
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::Lojban,
|
|
||||||
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
|
|
||||||
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
|
|
||||||
blabi darno dembi janli blabi fenki bukpu burcu blabi"
|
|
||||||
.into(),
|
|
||||||
spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
|
|
||||||
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
|
|
||||||
},
|
|
||||||
Vector {
|
|
||||||
language: Language::EnglishOld,
|
|
||||||
seed: "glorious especially puff son moment add youth nowhere \
|
|
||||||
throw glide grip wrong rhythm consume very swear \
|
|
||||||
bitter heavy eventually begin reason flirt type unable"
|
|
||||||
.into(),
|
|
||||||
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
|
|
||||||
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
for vector in vectors {
|
|
||||||
let trim_seed = |seed: &str| {
|
|
||||||
seed
|
|
||||||
.split_whitespace()
|
|
||||||
.map(|word| trim_by_lang(word, vector.language))
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(" ")
|
|
||||||
};
|
|
||||||
|
|
||||||
// Test against Monero
|
|
||||||
{
|
|
||||||
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
|
|
||||||
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&vector.seed))).unwrap());
|
|
||||||
|
|
||||||
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
|
|
||||||
// For classical seeds, Monero directly uses the entropy as a spend key
|
|
||||||
assert_eq!(
|
|
||||||
Scalar::from_canonical_bytes(*seed.entropy()),
|
|
||||||
Scalar::from_canonical_bytes(spend)
|
|
||||||
);
|
|
||||||
|
|
||||||
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
|
|
||||||
// Monero then derives the view key as H(spend)
|
|
||||||
assert_eq!(
|
|
||||||
Scalar::from_bytes_mod_order(hash(&spend)),
|
|
||||||
Scalar::from_canonical_bytes(view).unwrap()
|
|
||||||
);
|
|
||||||
|
|
||||||
assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Test against ourself
|
|
||||||
{
|
|
||||||
let seed = Seed::new(&mut OsRng, vector.language);
|
|
||||||
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&seed.to_string()))).unwrap());
|
|
||||||
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
|
|
||||||
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,230 +0,0 @@
|
|||||||
[
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [46, 48, 134, 34, 245, 148, 243, 195],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [153, 176, 98, 204, 151, 27, 197, 168],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [88, 37, 149, 111, 171, 108, 120, 181],
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN",
|
|
||||||
"network": "Mainnet",
|
|
||||||
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
|
|
||||||
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [125, 69, 155, 152, 140, 160, 157, 186],
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [92, 225, 118, 220, 39, 3, 72, 51],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [20, 120, 47, 89, 72, 165, 233, 115],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [193, 149, 123, 214, 180, 205, 195, 91],
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB",
|
|
||||||
"network": "Testnet",
|
|
||||||
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
|
|
||||||
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [205, 170, 65, 0, 51, 175, 251, 184],
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [173, 149, 78, 64, 215, 211, 66, 170],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [254, 159, 186, 162, 1, 8, 156, 108],
|
|
||||||
"guaranteed": false
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": false,
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": false,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [3, 115, 230, 129, 172, 108, 116, 235],
|
|
||||||
"guaranteed": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R",
|
|
||||||
"network": "Stagenet",
|
|
||||||
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
|
|
||||||
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
|
|
||||||
"subaddress": true,
|
|
||||||
"integrated": true,
|
|
||||||
"payment_id": [94, 122, 63, 167, 209, 225, 14, 180],
|
|
||||||
"guaranteed": true
|
|
||||||
}
|
|
||||||
]
|
|
||||||
@@ -1,83 +1,69 @@
|
|||||||
use core::cmp::Ordering;
|
use core::cmp::Ordering;
|
||||||
use std_shims::{
|
|
||||||
vec::Vec,
|
|
||||||
io::{self, Read, Write},
|
|
||||||
};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
use curve25519_dalek::{
|
use crate::{hash, serialize::*, ringct::{RctPrunable, RctSignatures}};
|
||||||
scalar::Scalar,
|
|
||||||
edwards::{EdwardsPoint, CompressedEdwardsY},
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{
|
pub const RING_LEN: usize = 11;
|
||||||
Protocol, hash,
|
|
||||||
serialize::*,
|
|
||||||
ringct::{RctBase, RctPrunable, RctSignatures},
|
|
||||||
};
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub enum Input {
|
pub enum Input {
|
||||||
Gen(u64),
|
Gen(u64),
|
||||||
ToKey { amount: Option<u64>, key_offsets: Vec<u64>, key_image: EdwardsPoint },
|
|
||||||
|
ToKey {
|
||||||
|
amount: u64,
|
||||||
|
key_offsets: Vec<u64>,
|
||||||
|
key_image: EdwardsPoint
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Input {
|
impl Input {
|
||||||
// Worst-case predictive len
|
// Worst-case predictive len
|
||||||
pub(crate) fn fee_weight(ring_len: usize) -> usize {
|
pub(crate) fn fee_weight() -> usize {
|
||||||
// Uses 1 byte for the VarInt amount due to amount being 0
|
// Uses 1 byte for the VarInt amount due to amount being 0
|
||||||
// Uses 1 byte for the VarInt encoding of the length of the ring as well
|
// Uses 1 byte for the VarInt encoding of the length of the ring as well
|
||||||
1 + 1 + 1 + (8 * ring_len) + 32
|
1 + 1 + 1 + (8 * RING_LEN) + 32
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
Self::Gen(height) => {
|
Input::Gen(height) => {
|
||||||
w.write_all(&[255])?;
|
w.write_all(&[255])?;
|
||||||
write_varint(height, w)
|
write_varint(height, w)
|
||||||
}
|
},
|
||||||
|
|
||||||
Self::ToKey { amount, key_offsets, key_image } => {
|
Input::ToKey { amount, key_offsets, key_image } => {
|
||||||
w.write_all(&[2])?;
|
w.write_all(&[2])?;
|
||||||
write_varint(&amount.unwrap_or(0), w)?;
|
write_varint(amount, w)?;
|
||||||
write_vec(write_varint, key_offsets, w)?;
|
write_vec(write_varint, key_offsets, w)?;
|
||||||
write_point(key_image, w)
|
write_point(key_image, w)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
|
||||||
let mut res = vec![];
|
let mut variant = [0];
|
||||||
self.write(&mut res).unwrap();
|
r.read_exact(&mut variant)?;
|
||||||
res
|
Ok(
|
||||||
}
|
match variant[0] {
|
||||||
|
255 => Input::Gen(read_varint(r)?),
|
||||||
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
|
2 => Input::ToKey {
|
||||||
Ok(match read_byte(r)? {
|
amount: read_varint(r)?,
|
||||||
255 => Self::Gen(read_varint(r)?),
|
|
||||||
2 => {
|
|
||||||
let amount = read_varint(r)?;
|
|
||||||
let amount = if (amount == 0) && interpret_as_rct { None } else { Some(amount) };
|
|
||||||
Self::ToKey {
|
|
||||||
amount,
|
|
||||||
key_offsets: read_vec(read_varint, r)?,
|
key_offsets: read_vec(read_varint, r)?,
|
||||||
key_image: read_torsion_free_point(r)?,
|
key_image: read_point(r)?
|
||||||
}
|
},
|
||||||
|
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
|
||||||
}
|
}
|
||||||
_ => {
|
)
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Doesn't bother moving to an enum for the unused Script classes
|
// Doesn't bother moving to an enum for the unused Script classes
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub struct Output {
|
pub struct Output {
|
||||||
pub amount: Option<u64>,
|
pub amount: u64,
|
||||||
pub key: CompressedEdwardsY,
|
pub key: EdwardsPoint,
|
||||||
pub view_tag: Option<u8>,
|
pub tag: Option<u8>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Output {
|
impl Output {
|
||||||
@@ -85,76 +71,64 @@ impl Output {
|
|||||||
1 + 1 + 32 + 1
|
1 + 1 + 32 + 1
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
write_varint(&self.amount.unwrap_or(0), w)?;
|
write_varint(&self.amount, w)?;
|
||||||
w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
|
w.write_all(&[2 + (if self.tag.is_some() { 1 } else { 0 })])?;
|
||||||
w.write_all(&self.key.to_bytes())?;
|
write_point(&self.key, w)?;
|
||||||
if let Some(view_tag) = self.view_tag {
|
if let Some(tag) = self.tag {
|
||||||
w.write_all(&[view_tag])?;
|
w.write_all(&[tag])?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Output> {
|
||||||
let mut res = Vec::with_capacity(8 + 1 + 32);
|
|
||||||
self.write(&mut res).unwrap();
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
|
|
||||||
let amount = read_varint(r)?;
|
let amount = read_varint(r)?;
|
||||||
let amount = if interpret_as_rct {
|
let mut tag = [0];
|
||||||
if amount != 0 {
|
r.read_exact(&mut tag)?;
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "RCT TX output wasn't 0"))?;
|
if (tag[0] != 2) && (tag[0] != 3) {
|
||||||
|
Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused output type"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(
|
||||||
|
Output {
|
||||||
|
amount,
|
||||||
|
key: read_point(r)?,
|
||||||
|
tag: if tag[0] == 3 { r.read_exact(&mut tag)?; Some(tag[0]) } else { None }
|
||||||
}
|
}
|
||||||
None
|
)
|
||||||
} else {
|
|
||||||
Some(amount)
|
|
||||||
};
|
|
||||||
|
|
||||||
let view_tag = match read_byte(r)? {
|
|
||||||
2 => false,
|
|
||||||
3 => true,
|
|
||||||
_ => Err(io::Error::new(
|
|
||||||
io::ErrorKind::Other,
|
|
||||||
"Tried to deserialize unknown/unused output type",
|
|
||||||
))?,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(Self {
|
|
||||||
amount,
|
|
||||||
key: CompressedEdwardsY(read_bytes(r)?),
|
|
||||||
view_tag: if view_tag { Some(read_byte(r)?) } else { None },
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||||
pub enum Timelock {
|
pub enum Timelock {
|
||||||
None,
|
None,
|
||||||
Block(usize),
|
Block(usize),
|
||||||
Time(u64),
|
Time(u64)
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Timelock {
|
impl Timelock {
|
||||||
fn from_raw(raw: u64) -> Self {
|
fn from_raw(raw: u64) -> Timelock {
|
||||||
if raw == 0 {
|
if raw == 0 {
|
||||||
Self::None
|
Timelock::None
|
||||||
} else if raw < 500_000_000 {
|
} else if raw < 500_000_000 {
|
||||||
Self::Block(usize::try_from(raw).unwrap())
|
Timelock::Block(usize::try_from(raw).unwrap())
|
||||||
} else {
|
} else {
|
||||||
Self::Time(raw)
|
Timelock::Time(raw)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub(crate) fn fee_weight() -> usize {
|
||||||
|
8
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
write_varint(
|
write_varint(
|
||||||
&match self {
|
&match self {
|
||||||
Self::None => 0,
|
Timelock::None => 0,
|
||||||
Self::Block(block) => (*block).try_into().unwrap(),
|
Timelock::Block(block) => (*block).try_into().unwrap(),
|
||||||
Self::Time(time) => *time,
|
Timelock::Time(time) => *time
|
||||||
},
|
},
|
||||||
w,
|
w
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -162,216 +136,138 @@ impl Timelock {
|
|||||||
impl PartialOrd for Timelock {
|
impl PartialOrd for Timelock {
|
||||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
match (self, other) {
|
match (self, other) {
|
||||||
(Self::None, _) => Some(Ordering::Less),
|
(Timelock::None, _) => Some(Ordering::Less),
|
||||||
(Self::Block(a), Self::Block(b)) => a.partial_cmp(b),
|
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
|
||||||
(Self::Time(a), Self::Time(b)) => a.partial_cmp(b),
|
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
|
||||||
_ => None,
|
_ => None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
pub struct TransactionPrefix {
|
pub struct TransactionPrefix {
|
||||||
pub version: u64,
|
pub version: u64,
|
||||||
pub timelock: Timelock,
|
pub timelock: Timelock,
|
||||||
pub inputs: Vec<Input>,
|
pub inputs: Vec<Input>,
|
||||||
pub outputs: Vec<Output>,
|
pub outputs: Vec<Output>,
|
||||||
pub extra: Vec<u8>,
|
pub extra: Vec<u8>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TransactionPrefix {
|
impl TransactionPrefix {
|
||||||
pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
|
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
|
||||||
// Assumes Timelock::None since this library won't let you create a TX with a timelock
|
// Assumes Timelock::None since this library won't let you create a TX with a timelock
|
||||||
1 + 1 +
|
1 + 1 +
|
||||||
varint_len(inputs) +
|
varint_len(inputs) + (inputs * Input::fee_weight()) +
|
||||||
(inputs * Input::fee_weight(ring_len)) +
|
// Only 16 outputs are possible under transactions by this lib
|
||||||
1 +
|
1 + (outputs * Output::fee_weight()) +
|
||||||
(outputs * Output::fee_weight()) +
|
varint_len(extra) + extra
|
||||||
varint_len(extra) +
|
|
||||||
extra
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
write_varint(&self.version, w)?;
|
write_varint(&self.version, w)?;
|
||||||
self.timelock.write(w)?;
|
self.timelock.serialize(w)?;
|
||||||
write_vec(Input::write, &self.inputs, w)?;
|
write_vec(Input::serialize, &self.inputs, w)?;
|
||||||
write_vec(Output::write, &self.outputs, w)?;
|
write_vec(Output::serialize, &self.outputs, w)?;
|
||||||
write_varint(&self.extra.len().try_into().unwrap(), w)?;
|
write_varint(&self.extra.len().try_into().unwrap(), w)?;
|
||||||
w.write_all(&self.extra)
|
w.write_all(&self.extra)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<TransactionPrefix> {
|
||||||
let mut res = vec![];
|
let mut prefix = TransactionPrefix {
|
||||||
self.write(&mut res).unwrap();
|
version: read_varint(r)?,
|
||||||
res
|
timelock: Timelock::from_raw(read_varint(r)?),
|
||||||
}
|
inputs: read_vec(Input::deserialize, r)?,
|
||||||
|
outputs: read_vec(Output::deserialize, r)?,
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
extra: vec![]
|
||||||
let version = read_varint(r)?;
|
|
||||||
// TODO: Create an enum out of version
|
|
||||||
if (version == 0) || (version > 2) {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "unrecognized transaction version"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let timelock = Timelock::from_raw(read_varint(r)?);
|
|
||||||
|
|
||||||
let inputs = read_vec(|r| Input::read(version == 2, r), r)?;
|
|
||||||
if inputs.is_empty() {
|
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "transaction had no inputs"))?;
|
|
||||||
}
|
|
||||||
let is_miner_tx = matches!(inputs[0], Input::Gen { .. });
|
|
||||||
|
|
||||||
let mut prefix = Self {
|
|
||||||
version,
|
|
||||||
timelock,
|
|
||||||
inputs,
|
|
||||||
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
|
|
||||||
extra: vec![],
|
|
||||||
};
|
};
|
||||||
prefix.extra = read_vec(read_byte, r)?;
|
|
||||||
Ok(prefix)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn hash(&self) -> [u8; 32] {
|
let len = read_varint(r)?;
|
||||||
hash(&self.serialize())
|
prefix.extra.resize(len.try_into().unwrap(), 0);
|
||||||
|
r.read_exact(&mut prefix.extra)?;
|
||||||
|
|
||||||
|
Ok(prefix)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Monero transaction. For version 1, rct_signatures still contains an accurate fee value.
|
#[derive(Clone, PartialEq, Debug)]
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
pub struct Transaction {
|
pub struct Transaction {
|
||||||
pub prefix: TransactionPrefix,
|
pub prefix: TransactionPrefix,
|
||||||
pub signatures: Vec<Vec<(Scalar, Scalar)>>,
|
pub rct_signatures: RctSignatures
|
||||||
pub rct_signatures: RctSignatures,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Transaction {
|
impl Transaction {
|
||||||
pub(crate) fn fee_weight(
|
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
|
||||||
protocol: Protocol,
|
TransactionPrefix::fee_weight(inputs, outputs, extra) + RctSignatures::fee_weight(inputs, outputs)
|
||||||
inputs: usize,
|
|
||||||
outputs: usize,
|
|
||||||
extra: usize,
|
|
||||||
) -> usize {
|
|
||||||
TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
|
|
||||||
RctSignatures::fee_weight(protocol, inputs, outputs)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||||
self.prefix.write(w)?;
|
self.prefix.serialize(w)?;
|
||||||
if self.prefix.version == 1 {
|
self.rct_signatures.serialize(w)
|
||||||
for sigs in &self.signatures {
|
|
||||||
for sig in sigs {
|
|
||||||
write_scalar(&sig.0, w)?;
|
|
||||||
write_scalar(&sig.1, w)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
} else if self.prefix.version == 2 {
|
|
||||||
self.rct_signatures.write(w)
|
|
||||||
} else {
|
|
||||||
panic!("Serializing a transaction with an unknown version");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn serialize(&self) -> Vec<u8> {
|
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
|
||||||
let mut res = Vec::with_capacity(2048);
|
let prefix = TransactionPrefix::deserialize(r)?;
|
||||||
self.write(&mut res).unwrap();
|
Ok(
|
||||||
res
|
Transaction {
|
||||||
}
|
rct_signatures: RctSignatures::deserialize(
|
||||||
|
prefix.inputs.iter().map(|input| match input {
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
|
|
||||||
let prefix = TransactionPrefix::read(r)?;
|
|
||||||
let mut signatures = vec![];
|
|
||||||
let mut rct_signatures = RctSignatures {
|
|
||||||
base: RctBase { fee: 0, encrypted_amounts: vec![], pseudo_outs: vec![], commitments: vec![] },
|
|
||||||
prunable: RctPrunable::Null,
|
|
||||||
};
|
|
||||||
|
|
||||||
if prefix.version == 1 {
|
|
||||||
signatures = prefix
|
|
||||||
.inputs
|
|
||||||
.iter()
|
|
||||||
.filter_map(|input| match input {
|
|
||||||
Input::ToKey { key_offsets, .. } => Some(
|
|
||||||
key_offsets
|
|
||||||
.iter()
|
|
||||||
.map(|_| Ok((read_scalar(r)?, read_scalar(r)?)))
|
|
||||||
.collect::<Result<_, io::Error>>(),
|
|
||||||
),
|
|
||||||
_ => None,
|
|
||||||
})
|
|
||||||
.collect::<Result<_, _>>()?;
|
|
||||||
|
|
||||||
rct_signatures.base.fee = prefix
|
|
||||||
.inputs
|
|
||||||
.iter()
|
|
||||||
.map(|input| match input {
|
|
||||||
Input::Gen(..) => 0,
|
|
||||||
Input::ToKey { amount, .. } => amount.unwrap(),
|
|
||||||
})
|
|
||||||
.sum::<u64>()
|
|
||||||
.saturating_sub(prefix.outputs.iter().map(|output| output.amount.unwrap()).sum());
|
|
||||||
} else if prefix.version == 2 {
|
|
||||||
rct_signatures = RctSignatures::read(
|
|
||||||
prefix
|
|
||||||
.inputs
|
|
||||||
.iter()
|
|
||||||
.map(|input| match input {
|
|
||||||
Input::Gen(_) => 0,
|
Input::Gen(_) => 0,
|
||||||
Input::ToKey { key_offsets, .. } => key_offsets.len(),
|
Input::ToKey { key_offsets, .. } => key_offsets.len()
|
||||||
})
|
}).collect(),
|
||||||
.collect(),
|
prefix.outputs.len(),
|
||||||
prefix.outputs.len(),
|
r
|
||||||
r,
|
)?,
|
||||||
)?;
|
prefix
|
||||||
} else {
|
}
|
||||||
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
|
)
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self { prefix, signatures, rct_signatures })
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hash(&self) -> [u8; 32] {
|
pub fn hash(&self) -> [u8; 32] {
|
||||||
let mut buf = Vec::with_capacity(2048);
|
let mut serialized = Vec::with_capacity(2048);
|
||||||
if self.prefix.version == 1 {
|
if self.prefix.version == 1 {
|
||||||
self.write(&mut buf).unwrap();
|
self.serialize(&mut serialized).unwrap();
|
||||||
hash(&buf)
|
hash(&serialized)
|
||||||
} else {
|
} else {
|
||||||
let mut hashes = Vec::with_capacity(96);
|
let mut sig_hash = Vec::with_capacity(96);
|
||||||
|
|
||||||
hashes.extend(self.prefix.hash());
|
self.prefix.serialize(&mut serialized).unwrap();
|
||||||
|
sig_hash.extend(hash(&serialized));
|
||||||
|
serialized.clear();
|
||||||
|
|
||||||
self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
|
self.rct_signatures.base.serialize(
|
||||||
hashes.extend(hash(&buf));
|
&mut serialized,
|
||||||
buf.clear();
|
self.rct_signatures.prunable.rct_type()
|
||||||
|
).unwrap();
|
||||||
|
sig_hash.extend(hash(&serialized));
|
||||||
|
serialized.clear();
|
||||||
|
|
||||||
hashes.extend(&match self.rct_signatures.prunable {
|
match self.rct_signatures.prunable {
|
||||||
RctPrunable::Null => [0; 32],
|
RctPrunable::Null => serialized.resize(32, 0),
|
||||||
RctPrunable::MlsagBorromean { .. } |
|
_ => {
|
||||||
RctPrunable::MlsagBulletproofs { .. } |
|
self.rct_signatures.prunable.serialize(&mut serialized).unwrap();
|
||||||
RctPrunable::Clsag { .. } => {
|
serialized = hash(&serialized).to_vec();
|
||||||
self.rct_signatures.prunable.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
|
|
||||||
hash(&buf)
|
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
sig_hash.extend(&serialized);
|
||||||
|
|
||||||
hash(&hashes)
|
hash(&sig_hash)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Calculate the hash of this transaction as needed for signing it.
|
|
||||||
pub fn signature_hash(&self) -> [u8; 32] {
|
pub fn signature_hash(&self) -> [u8; 32] {
|
||||||
let mut buf = Vec::with_capacity(2048);
|
let mut serialized = Vec::with_capacity(2048);
|
||||||
let mut sig_hash = Vec::with_capacity(96);
|
let mut sig_hash = Vec::with_capacity(96);
|
||||||
|
|
||||||
sig_hash.extend(self.prefix.hash());
|
self.prefix.serialize(&mut serialized).unwrap();
|
||||||
|
sig_hash.extend(hash(&serialized));
|
||||||
|
serialized.clear();
|
||||||
|
|
||||||
self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
|
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
|
||||||
sig_hash.extend(hash(&buf));
|
sig_hash.extend(hash(&serialized));
|
||||||
buf.clear();
|
serialized.clear();
|
||||||
|
|
||||||
self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
|
self.rct_signatures.prunable.signature_serialize(&mut serialized).unwrap();
|
||||||
sig_hash.extend(hash(&buf));
|
sig_hash.extend(&hash(&serialized));
|
||||||
|
|
||||||
hash(&sig_hash)
|
hash(&sig_hash)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,311 +1,152 @@
|
|||||||
use core::{marker::PhantomData, fmt::Debug};
|
use std::string::ToString;
|
||||||
use std_shims::string::{String, ToString};
|
|
||||||
|
|
||||||
use zeroize::Zeroize;
|
use thiserror::Error;
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::{EdwardsPoint, CompressedEdwardsY}};
|
||||||
|
|
||||||
use base58_monero::base58::{encode_check, decode_check};
|
use base58_monero::base58::{encode_check, decode_check};
|
||||||
|
|
||||||
/// The network this address is for.
|
use crate::wallet::ViewPair;
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub enum Network {
|
pub enum Network {
|
||||||
Mainnet,
|
Mainnet,
|
||||||
Testnet,
|
Testnet,
|
||||||
Stagenet,
|
Stagenet
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The address type, supporting the officially documented addresses, along with
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub enum AddressType {
|
pub enum AddressType {
|
||||||
Standard,
|
Standard,
|
||||||
Integrated([u8; 8]),
|
Integrated([u8; 8]),
|
||||||
Subaddress,
|
Subaddress
|
||||||
Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub struct SubaddressIndex {
|
|
||||||
pub(crate) account: u32,
|
|
||||||
pub(crate) address: u32,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SubaddressIndex {
|
|
||||||
pub const fn new(account: u32, address: u32) -> Option<Self> {
|
|
||||||
if (account == 0) && (address == 0) {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
Some(Self { account, address })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn account(&self) -> u32 {
|
|
||||||
self.account
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn address(&self) -> u32 {
|
|
||||||
self.address
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Address specification. Used internally to create addresses.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
|
||||||
pub enum AddressSpec {
|
|
||||||
Standard,
|
|
||||||
Integrated([u8; 8]),
|
|
||||||
Subaddress(SubaddressIndex),
|
|
||||||
Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AddressType {
|
impl AddressType {
|
||||||
pub const fn is_subaddress(&self) -> bool {
|
fn network_bytes(network: Network) -> (u8, u8, u8) {
|
||||||
matches!(self, Self::Subaddress) || matches!(self, Self::Featured { subaddress: true, .. })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn payment_id(&self) -> Option<[u8; 8]> {
|
|
||||||
if let Self::Integrated(id) = self {
|
|
||||||
Some(*id)
|
|
||||||
} else if let Self::Featured { payment_id, .. } = self {
|
|
||||||
*payment_id
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_guaranteed(&self) -> bool {
|
|
||||||
matches!(self, Self::Featured { guaranteed: true, .. })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A type which returns the byte for a given address.
|
|
||||||
pub trait AddressBytes: Clone + Copy + PartialEq + Eq + Debug {
|
|
||||||
fn network_bytes(network: Network) -> (u8, u8, u8, u8);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Address bytes for Monero.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
|
||||||
pub struct MoneroAddressBytes;
|
|
||||||
impl AddressBytes for MoneroAddressBytes {
|
|
||||||
fn network_bytes(network: Network) -> (u8, u8, u8, u8) {
|
|
||||||
match network {
|
match network {
|
||||||
Network::Mainnet => (18, 19, 42, 70),
|
Network::Mainnet => (18, 19, 42),
|
||||||
Network::Testnet => (53, 54, 63, 111),
|
Network::Testnet => (53, 54, 63),
|
||||||
Network::Stagenet => (24, 25, 36, 86),
|
Network::Stagenet => (24, 25, 36)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Address metadata.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub struct AddressMeta<B: AddressBytes> {
|
pub struct AddressMeta {
|
||||||
_bytes: PhantomData<B>,
|
|
||||||
pub network: Network,
|
pub network: Network,
|
||||||
pub kind: AddressType,
|
pub kind: AddressType,
|
||||||
|
pub guaranteed: bool
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: AddressBytes> Zeroize for AddressMeta<B> {
|
#[derive(Clone, Error, Debug)]
|
||||||
fn zeroize(&mut self) {
|
|
||||||
self.network.zeroize();
|
|
||||||
self.kind.zeroize();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Error when decoding an address.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
|
||||||
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
|
||||||
pub enum AddressError {
|
pub enum AddressError {
|
||||||
#[cfg_attr(feature = "std", error("invalid address byte"))]
|
#[error("invalid address byte")]
|
||||||
InvalidByte,
|
InvalidByte,
|
||||||
#[cfg_attr(feature = "std", error("invalid address encoding"))]
|
#[error("invalid address encoding")]
|
||||||
InvalidEncoding,
|
InvalidEncoding,
|
||||||
#[cfg_attr(feature = "std", error("invalid length"))]
|
#[error("invalid length")]
|
||||||
InvalidLength,
|
InvalidLength,
|
||||||
#[cfg_attr(feature = "std", error("invalid key"))]
|
#[error("different network than expected")]
|
||||||
InvalidKey,
|
|
||||||
#[cfg_attr(feature = "std", error("unknown features"))]
|
|
||||||
UnknownFeatures,
|
|
||||||
#[cfg_attr(feature = "std", error("different network than expected"))]
|
|
||||||
DifferentNetwork,
|
DifferentNetwork,
|
||||||
|
#[error("invalid key")]
|
||||||
|
InvalidKey
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: AddressBytes> AddressMeta<B> {
|
impl AddressMeta {
|
||||||
#[allow(clippy::wrong_self_convention)]
|
|
||||||
fn to_byte(&self) -> u8 {
|
fn to_byte(&self) -> u8 {
|
||||||
let bytes = B::network_bytes(self.network);
|
let bytes = AddressType::network_bytes(self.network);
|
||||||
match self.kind {
|
let byte = match self.kind {
|
||||||
AddressType::Standard => bytes.0,
|
AddressType::Standard => bytes.0,
|
||||||
AddressType::Integrated(_) => bytes.1,
|
AddressType::Integrated(_) => bytes.1,
|
||||||
AddressType::Subaddress => bytes.2,
|
AddressType::Subaddress => bytes.2
|
||||||
AddressType::Featured { .. } => bytes.3,
|
};
|
||||||
}
|
byte | (if self.guaranteed { 1 << 7 } else { 0 })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create an address's metadata.
|
// Returns an incomplete type in the case of Integrated addresses
|
||||||
pub const fn new(network: Network, kind: AddressType) -> Self {
|
fn from_byte(byte: u8) -> Result<AddressMeta, AddressError> {
|
||||||
Self { _bytes: PhantomData, network, kind }
|
let actual = byte & 0b01111111;
|
||||||
}
|
let guaranteed = (byte >> 7) == 1;
|
||||||
|
|
||||||
// Returns an incomplete instantiation in the case of Integrated/Featured addresses
|
|
||||||
fn from_byte(byte: u8) -> Result<Self, AddressError> {
|
|
||||||
let mut meta = None;
|
let mut meta = None;
|
||||||
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
|
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
|
||||||
let (standard, integrated, subaddress, featured) = B::network_bytes(network);
|
let (standard, integrated, subaddress) = AddressType::network_bytes(network);
|
||||||
if let Some(kind) = match byte {
|
if let Some(kind) = match actual {
|
||||||
_ if byte == standard => Some(AddressType::Standard),
|
_ if actual == standard => Some(AddressType::Standard),
|
||||||
_ if byte == integrated => Some(AddressType::Integrated([0; 8])),
|
_ if actual == integrated => Some(AddressType::Integrated([0; 8])),
|
||||||
_ if byte == subaddress => Some(AddressType::Subaddress),
|
_ if actual == subaddress => Some(AddressType::Subaddress),
|
||||||
_ if byte == featured => {
|
_ => None
|
||||||
Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
|
|
||||||
}
|
|
||||||
_ => None,
|
|
||||||
} {
|
} {
|
||||||
meta = Some(Self::new(network, kind));
|
meta = Some(AddressMeta { network, kind, guaranteed });
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
meta.ok_or(AddressError::InvalidByte)
|
meta.ok_or(AddressError::InvalidByte)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const fn is_subaddress(&self) -> bool {
|
|
||||||
self.kind.is_subaddress()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn payment_id(&self) -> Option<[u8; 8]> {
|
|
||||||
self.kind.payment_id()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_guaranteed(&self) -> bool {
|
|
||||||
self.kind.is_guaranteed()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A Monero address, composed of metadata and a spend/view key.
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub struct Address<B: AddressBytes> {
|
pub struct Address {
|
||||||
pub meta: AddressMeta<B>,
|
pub meta: AddressMeta,
|
||||||
pub spend: EdwardsPoint,
|
pub spend: EdwardsPoint,
|
||||||
pub view: EdwardsPoint,
|
pub view: EdwardsPoint
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: AddressBytes> Zeroize for Address<B> {
|
impl ViewPair {
|
||||||
fn zeroize(&mut self) {
|
pub fn address(&self, network: Network, kind: AddressType, guaranteed: bool) -> Address {
|
||||||
self.meta.zeroize();
|
Address {
|
||||||
self.spend.zeroize();
|
meta: AddressMeta {
|
||||||
self.view.zeroize();
|
network,
|
||||||
|
kind,
|
||||||
|
guaranteed
|
||||||
|
},
|
||||||
|
spend: self.spend,
|
||||||
|
view: &self.view * &ED25519_BASEPOINT_TABLE
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: AddressBytes> ToString for Address<B> {
|
impl ToString for Address {
|
||||||
fn to_string(&self) -> String {
|
fn to_string(&self) -> String {
|
||||||
let mut data = vec![self.meta.to_byte()];
|
let mut data = vec![self.meta.to_byte()];
|
||||||
data.extend(self.spend.compress().to_bytes());
|
data.extend(self.spend.compress().to_bytes());
|
||||||
data.extend(self.view.compress().to_bytes());
|
data.extend(self.view.compress().to_bytes());
|
||||||
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
|
if let AddressType::Integrated(id) = self.meta.kind {
|
||||||
// Technically should be a VarInt, yet we don't have enough features it's needed
|
|
||||||
data.push(
|
|
||||||
u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
if let Some(id) = self.meta.kind.payment_id() {
|
|
||||||
data.extend(id);
|
data.extend(id);
|
||||||
}
|
}
|
||||||
encode_check(&data).unwrap()
|
encode_check(&data).unwrap()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<B: AddressBytes> Address<B> {
|
impl Address {
|
||||||
pub const fn new(meta: AddressMeta<B>, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
|
pub fn from_str(s: &str, network: Network) -> Result<Self, AddressError> {
|
||||||
Self { meta, spend, view }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_str_raw(s: &str) -> Result<Self, AddressError> {
|
|
||||||
let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
|
let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
|
||||||
if raw.len() < (1 + 32 + 32) {
|
if raw.len() == 1 {
|
||||||
Err(AddressError::InvalidLength)?;
|
Err(AddressError::InvalidLength)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut meta = AddressMeta::from_byte(raw[0])?;
|
let mut meta = AddressMeta::from_byte(raw[0])?;
|
||||||
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
|
if meta.network != network {
|
||||||
.decompress()
|
Err(AddressError::DifferentNetwork)?;
|
||||||
.ok_or(AddressError::InvalidKey)?;
|
|
||||||
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
|
|
||||||
.decompress()
|
|
||||||
.ok_or(AddressError::InvalidKey)?;
|
|
||||||
let mut read = 65;
|
|
||||||
|
|
||||||
if matches!(meta.kind, AddressType::Featured { .. }) {
|
|
||||||
if raw[read] >= (2 << 3) {
|
|
||||||
Err(AddressError::UnknownFeatures)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let subaddress = (raw[read] & 1) == 1;
|
|
||||||
let integrated = ((raw[read] >> 1) & 1) == 1;
|
|
||||||
let guaranteed = ((raw[read] >> 2) & 1) == 1;
|
|
||||||
|
|
||||||
meta.kind = AddressType::Featured {
|
|
||||||
subaddress,
|
|
||||||
payment_id: Some([0; 8]).filter(|_| integrated),
|
|
||||||
guaranteed,
|
|
||||||
};
|
|
||||||
read += 1;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update read early so we can verify the length
|
let len = match meta.kind {
|
||||||
if meta.kind.payment_id().is_some() {
|
AddressType::Standard | AddressType::Subaddress => 65,
|
||||||
read += 8;
|
AddressType::Integrated(_) => 73
|
||||||
}
|
};
|
||||||
if raw.len() != read {
|
if raw.len() != len {
|
||||||
Err(AddressError::InvalidLength)?;
|
Err(AddressError::InvalidLength)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let AddressType::Integrated(ref mut id) = meta.kind {
|
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
|
||||||
id.copy_from_slice(&raw[(read - 8) .. read]);
|
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
|
||||||
}
|
|
||||||
if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
|
if let AddressType::Integrated(ref mut payment_id) = meta.kind {
|
||||||
id.copy_from_slice(&raw[(read - 8) .. read]);
|
payment_id.copy_from_slice(&raw[65 .. 73]);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Self { meta, spend, view })
|
Ok(Address { meta, spend, view })
|
||||||
}
|
|
||||||
|
|
||||||
pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
|
|
||||||
Self::from_str_raw(s).and_then(|addr| {
|
|
||||||
if addr.meta.network == network {
|
|
||||||
Ok(addr)
|
|
||||||
} else {
|
|
||||||
Err(AddressError::DifferentNetwork)?
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn network(&self) -> Network {
|
|
||||||
self.meta.network
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_subaddress(&self) -> bool {
|
|
||||||
self.meta.is_subaddress()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn payment_id(&self) -> Option<[u8; 8]> {
|
|
||||||
self.meta.payment_id()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn is_guaranteed(&self) -> bool {
|
|
||||||
self.meta.is_guaranteed()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Instantiation of the Address type with Monero's network bytes.
|
|
||||||
pub type MoneroAddress = Address<MoneroAddressBytes>;
|
|
||||||
// Allow re-interpreting of an arbitrary address as a Monero address so it can be used with the
|
|
||||||
// rest of this library. Doesn't use From as it was conflicting with From<T> for T.
|
|
||||||
impl MoneroAddress {
|
|
||||||
pub const fn from<B: AddressBytes>(address: Address<B>) -> Self {
|
|
||||||
Self::new(
|
|
||||||
AddressMeta::new(address.meta.network, address.meta.kind),
|
|
||||||
address.spend,
|
|
||||||
address.view,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user