mirror of https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00

Compare commits: coordinato...firo
6 Commits
| Author | SHA1 | Date |
|---|---|---|
| | d2e5d9184d | |
| | 9b3985e120 | |
| | c3cc8d51b7 | |
| | e3ff4f7af6 | |
| | a770e29b0c | |
| | 6d9221d56c | |
5  .gitattributes (vendored)
@@ -1,5 +0,0 @@
# Auto detect text files and perform LF normalization
* text=auto
* text eol=lf

*.pdf binary
21  .github/actions/LICENSE (vendored)
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
47  .github/actions/bitcoin/action.yml (vendored)
@@ -1,47 +0,0 @@
name: bitcoin-regtest
description: Spawns a regtest Bitcoin daemon

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: 24.0.1

runs:
  using: "composite"
  steps:
    - name: Bitcoin Daemon Cache
      id: cache-bitcoind
      uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
      with:
        path: bitcoin.tar.gz
        key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Bitcoin Daemon
      if: steps.cache-bitcoind.outputs.cache-hit != 'true'
      shell: bash
      run: |
        RUNNER_OS=linux
        RUNNER_ARCH=x86_64
        FILE=bitcoin-${{ inputs.version }}-$RUNNER_ARCH-$RUNNER_OS-gnu.tar.gz

        wget https://bitcoincore.org/bin/bitcoin-core-${{ inputs.version }}/$FILE
        mv $FILE bitcoin.tar.gz

    - name: Extract the Bitcoin Daemon
      shell: bash
      run: |
        tar xzvf bitcoin.tar.gz
        cd bitcoin-${{ inputs.version }}
        sudo mv bin/* /bin && sudo mv lib/* /lib

    - name: Bitcoin Regtest Daemon
      shell: bash
      run: |
        RPC_USER=serai
        RPC_PASS=seraidex

        bitcoind -txindex -regtest \
          -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
          -rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
          -daemon
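A composite action like this is consumed from a workflow job via a relative uses: path. As a minimal sketch (the job name here is illustrative; the invocation pattern itself appears in test-dependencies and the workflows further down):

jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      # Spawns bitcoind in regtest mode; version is optional and falls
      # back to the 24.0.1 default declared above.
      - uses: ./.github/actions/bitcoin
        with:
          version: 24.0.1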
41  .github/actions/build-dependencies/action.yml (vendored)
@@ -1,41 +0,0 @@
name: build-dependencies
description: Installs build dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    require: true
    default:

runs:
  using: "composite"
  steps:
    - name: Remove unused packages
      shell: bash
      run: |
        sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
        sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
        sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
        sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
        sudo apt autoremove -y
        sudo apt clean
        docker system prune -a --volumes

    - name: Install apt dependencies
      shell: bash
      run: sudo apt install -y ca-certificates

    - name: Install Protobuf
      uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6
      with:
        repo-token: ${{ inputs.github-token }}

    - name: Install solc
      shell: bash
      run: |
        cargo install svm-rs
        svm install 0.8.16
        svm use 0.8.16

    # - name: Cache Rust
    #   uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
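The workflows in this compare hand this action a token that it forwards to arduino/setup-protoc as repo-token (presumably so the release download is not rate-limited as an unauthenticated API call). A sketch of the calling convention, matching the usage in the test workflows below:

steps:
  - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

  # Forwards the workflow's token into the composite action
  - name: Build Dependencies
    uses: ./.github/actions/build-dependencies
    with:
      github-token: ${{ secrets.GITHUB_TOKEN }}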
44  .github/actions/monero-wallet-rpc/action.yml (vendored)
@@ -1,44 +0,0 @@
name: monero-wallet-rpc
description: Spawns a Monero Wallet-RPC.

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: v0.18.2.0

runs:
  using: "composite"
  steps:
    - name: Monero Wallet RPC Cache
      id: cache-monero-wallet-rpc
      uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
      with:
        path: monero-wallet-rpc
        key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Monero Wallet RPC
      if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
      # Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
      # to the contained folder not following the same naming scheme and
      # requiring further expansion not worth doing right now
      shell: bash
      run: |
        RUNNER_OS=${{ runner.os }}
        RUNNER_ARCH=${{ runner.arch }}

        RUNNER_OS=${RUNNER_OS,,}
        RUNNER_ARCH=${RUNNER_ARCH,,}

        RUNNER_OS=linux
        RUNNER_ARCH=x64

        FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
        wget https://downloads.getmonero.org/cli/$FILE
        tar -xvf $FILE

        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc

    - name: Monero Wallet RPC
      shell: bash
      run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach
44  .github/actions/monero/action.yml (vendored)
@@ -1,44 +0,0 @@
name: monero-regtest
description: Spawns a regtest Monero daemon

inputs:
  version:
    description: "Version to download and run"
    required: false
    default: v0.18.2.0

runs:
  using: "composite"
  steps:
    - name: Monero Daemon Cache
      id: cache-monerod
      uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
      with:
        path: monerod
        key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

    - name: Download the Monero Daemon
      if: steps.cache-monerod.outputs.cache-hit != 'true'
      # Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
      # to the contained folder not following the same naming scheme and
      # requiring further expansion not worth doing right now
      shell: bash
      run: |
        RUNNER_OS=${{ runner.os }}
        RUNNER_ARCH=${{ runner.arch }}

        RUNNER_OS=${RUNNER_OS,,}
        RUNNER_ARCH=${RUNNER_ARCH,,}

        RUNNER_OS=linux
        RUNNER_ARCH=x64

        FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
        wget https://downloads.getmonero.org/cli/$FILE
        tar -xvf $FILE

        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod

    - name: Monero Regtest Daemon
      shell: bash
      run: ./monerod --regtest --offline --fixed-difficulty=1 --detach
45  .github/actions/test-dependencies/action.yml (vendored)
@@ -1,45 +0,0 @@
name: test-dependencies
description: Installs test dependencies for Serai

inputs:
  github-token:
    description: "GitHub token to install Protobuf with"
    require: true
    default:

  monero-version:
    description: "Monero version to download and run as a regtest node"
    required: false
    default: v0.18.2.0

  bitcoin-version:
    description: "Bitcoin version to download and run as a regtest node"
    required: false
    default: 24.0.1

runs:
  using: "composite"
  steps:
    - name: Install Build Dependencies
      uses: ./.github/actions/build-dependencies
      with:
        github-token: ${{ inputs.github-token }}

    - name: Install Foundry
      uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
      with:
        version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
        cache: false

    - name: Run a Monero Regtest Node
      uses: ./.github/actions/monero
      with:
        version: ${{ inputs.monero-version }}

    - name: Run a Bitcoin Regtest Node
      uses: ./.github/actions/bitcoin
      with:
        version: ${{ inputs.bitcoin-version }}

    - name: Run a Monero Wallet-RPC
      uses: ./.github/actions/monero-wallet-rpc
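One invocation of this composite action stands up the whole test environment: build dependencies, Foundry, a Monero regtest node, a Bitcoin regtest node, and a Monero wallet-RPC. A sketch of a consuming job (the version override shown is optional; monero-tests.yaml below passes a matrix value the same way):

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          # Optional; defaults to v0.18.2.0 per the inputs above
          monero-version: v0.18.2.0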
1  .github/nightly-version (vendored)
@@ -1 +0,0 @@
nightly-2023-11-01
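This one-line file pins the Rust nightly used across CI. The lint workflow below reads it into a step output and installs exactly that toolchain; condensed, the pattern is:

steps:
  - name: Get nightly version to use
    id: nightly
    run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

  - name: Install nightly rust
    run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt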
37  .github/workflows/coins-tests.yml (vendored)
@@ -1,37 +0,0 @@
name: coins/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"

  workflow_dispatch:

jobs:
  test-coins:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p bitcoin-serai \
            -p ethereum-serai \
            -p monero-generators \
            -p monero-serai
33  .github/workflows/common-tests.yml (vendored)
@@ -1,33 +0,0 @@
name: common/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"

  pull_request:
    paths:
      - "common/**"

  workflow_dispatch:

jobs:
  test-common:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p std-shims \
            -p zalloc \
            -p serai-db \
            -p serai-env
44  .github/workflows/coordinator-tests.yml (vendored)
@@ -1,44 +0,0 @@
name: Coordinator Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "coordinator/**"
      - "orchestration/coordinator/**"
      - "tests/docker/**"
      - "tests/coordinator/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "coordinator/**"
      - "orchestration/coordinator/**"
      - "tests/docker/**"
      - "tests/coordinator/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Run coordinator Docker tests
        run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
42  .github/workflows/crypto-tests.yml (vendored)
@@ -1,42 +0,0 @@
name: crypto/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"

  workflow_dispatch:

jobs:
  test-crypto:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p flexible-transcript \
            -p ff-group-tests \
            -p dalek-ff-group \
            -p minimal-ed448 \
            -p ciphersuite \
            -p multiexp \
            -p schnorr-signatures \
            -p dleq \
            -p dkg \
            -p modular-frost \
            -p frost-schnorrkel
24  .github/workflows/daily-deny.yml (vendored)
@@ -1,24 +0,0 @@
name: Daily Deny Check

on:
  schedule:
    - cron: "0 0 * * *"

jobs:
  deny:
    name: Run cargo deny
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Advisory Cache
        uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
        with:
          path: ~/.cargo/advisory-db
          key: rust-advisory-db

      - name: Install cargo deny
        run: cargo install --locked cargo-deny

      - name: Run cargo deny
        run: cargo deny -L error --all-features check
24  .github/workflows/full-stack-tests.yml (vendored)
@@ -1,24 +0,0 @@
name: Full Stack Tests

on:
  push:
    branches:
      - develop

  pull_request:

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Run Full Stack Docker tests
        run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
69  .github/workflows/lint.yml (vendored)
@@ -1,69 +0,0 @@
name: Lint

on:
  push:
    branches:
      - develop
  pull_request:
  workflow_dispatch:

jobs:
  clippy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Get nightly version to use
        id: nightly
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Install nightly rust
        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy

      - name: Run Clippy
        run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module

  deny:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Advisory Cache
        uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
        with:
          path: ~/.cargo/advisory-db
          key: rust-advisory-db

      - name: Install cargo deny
        run: cargo install --locked cargo-deny

      - name: Run cargo deny
        run: cargo deny -L error --all-features check

  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Get nightly version to use
        id: nightly
        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT

      - name: Install nightly rust
        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt

      - name: Run rustfmt
        run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check

  dockerfiles:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
      - name: Verify Dockerfiles are up to date
        # Runs the file which generates them and checks the diff has no lines
        run: cd orchestration && ./dockerfiles.sh && git diff | wc -l | grep -x "0"
38  .github/workflows/message-queue-tests.yml (vendored)
@@ -1,38 +0,0 @@
name: Message Queue Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "tests/docker/**"
      - "tests/message-queue/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "tests/docker/**"
      - "tests/message-queue/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Run message-queue Docker tests
        run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
28  .github/workflows/mini-tests.yml (vendored)
@@ -1,28 +0,0 @@
name: mini/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "mini/**"

  pull_request:
    paths:
      - "mini/**"

  workflow_dispatch:

jobs:
  test-common:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai
59  .github/workflows/monero-tests.yaml (vendored)
@@ -1,59 +0,0 @@
name: Monero Tests

on:
  push:
    branches:
      - develop
    paths:
      - "coins/monero/**"
      - "processor/**"

  pull_request:
    paths:
      - "coins/monero/**"
      - "processor/**"

  workflow_dispatch:

jobs:
  # Only run these once since they will be consistent regardless of any node
  unit-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Unit Tests Without Features
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib

      # Doesn't run unit tests with features as the tests workflow will

  integration-tests:
    runs-on: ubuntu-latest
    # Test against all supported protocol versions
    strategy:
      matrix:
        version: [v0.17.3.2, v0.18.2.0]

    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          monero-version: ${{ matrix.version }}

      - name: Run Integration Tests Without Features
        # Runs with the binaries feature so the binaries build
        # https://github.com/rust-lang/cargo/issues/8396
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'

      - name: Run Integration Tests
        # Don't run if the tests workflow also will
        if: ${{ matrix.version != 'v0.18.2.0' }}
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
53  .github/workflows/monthly-nightly-update.yml (vendored)
@@ -1,53 +0,0 @@
name: Monthly Nightly Update

on:
  schedule:
    - cron: "0 0 1 * *"

jobs:
  update:
    name: Update nightly
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          submodules: "recursive"

      - name: Write nightly version
        run: echo $(date +"nightly-%Y-%m"-01) > .github/nightly-version

      - name: Create the commit
        run: |
          git config user.name "GitHub Actions"
          git config user.email "<>"

          git checkout -b $(date +"nightly-%Y-%m")

          git add .github/nightly-version
          git commit -m "Update nightly"
          git push -u origin $(date +"nightly-%Y-%m")

      - name: Pull Request
        uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410
        with:
          script: |
            const { repo, owner } = context.repo;

            const result = await github.rest.pulls.create({
              title: (new Date()).toLocaleString(
                false,
                { month: "long", year: "numeric" }
              ) + " - Rust Nightly Update",
              owner,
              repo,
              head: "nightly-" + (new Date()).toISOString().split("-").splice(0, 2).join("-"),
              base: "develop",
              body: "PR auto-generated by a GitHub workflow."
            });

            github.rest.issues.addLabels({
              owner,
              repo,
              issue_number: result.data.number,
              labels: ["improvement"]
            });
37  .github/workflows/no-std.yml (vendored)
@@ -1,37 +0,0 @@
name: no-std build

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "tests/no-std/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "tests/no-std/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Install RISC-V Toolchain
        run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf

      - name: Verify no-std builds
        run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf
44  .github/workflows/processor-tests.yml (vendored)
@@ -1,44 +0,0 @@
name: Processor Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "processor/**"
      - "orchestration/processor/**"
      - "tests/docker/**"
      - "tests/processor/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "orchestration/message-queue/**"
      - "processor/**"
      - "orchestration/processor/**"
      - "tests/docker/**"
      - "tests/processor/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Run processor Docker tests
        run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
38  .github/workflows/reproducible-runtime.yml (vendored)
@@ -1,38 +0,0 @@
name: Reproducible Runtime

on:
  push:
    branches:
      - develop
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  pull_request:
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ inputs.github-token }}

      - name: Run Reproducible Runtime tests
        run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
86  .github/workflows/tests.yml (vendored)
@@ -1,86 +0,0 @@
name: Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "processor/**"
      - "coordinator/**"
      - "substrate/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "coins/**"
      - "message-queue/**"
      - "processor/**"
      - "coordinator/**"
      - "substrate/**"

  workflow_dispatch:

jobs:
  test-infra:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p serai-message-queue \
            -p serai-processor-messages \
            -p serai-processor \
            -p tendermint-machine \
            -p tributary-chain \
            -p serai-coordinator \
            -p serai-docker-tests

  test-substrate:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p serai-primitives \
            -p serai-coins-primitives \
            -p serai-coins-pallet \
            -p serai-dex-pallet \
            -p serai-validator-sets-primitives \
            -p serai-validator-sets-pallet \
            -p serai-in-instructions-primitives \
            -p serai-in-instructions-pallet \
            -p serai-signals-pallet \
            -p serai-runtime \
            -p serai-node

  test-serai-client:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Build Dependencies
        uses: ./.github/actions/build-dependencies
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Run Tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
3  .gitignore (vendored)
@@ -1,3 +1,2 @@
target
.vscode
.test-logs
Cargo.lock
3  .gitmodules (vendored, Normal file)
@@ -0,0 +1,3 @@
[submodule "coins/monero/c/monero"]
  path = coins/monero/c/monero
  url = https://github.com/monero-project/monero
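Since the Monero C code now lives in a submodule, checkouts that need it must be recursive; the monthly-nightly-update workflow above already uses the actions/checkout form for this:

steps:
  - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
    with:
      submodules: "recursive"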
@@ -1,17 +0,0 @@
edition = "2021"
tab_spaces = 2

max_width = 100
# Let the developer decide based on the 100 char line limit
use_small_heuristics = "Max"

error_on_line_overflow = true
error_on_unformatted = true

imports_granularity = "Crate"
reorder_imports = false
reorder_modules = false

unstable_features = true
spaces_around_ranges = true
binop_separator = "Back"
661
AGPL-3.0
661
AGPL-3.0
@@ -1,661 +0,0 @@
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.

  Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

  Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

  9. Acceptance Not Required for Having Copies.

  You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

  10. Automatic Licensing of Downstream Recipients.

  Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.

  An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

  You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

  11. Patents.

  A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".

  A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

  Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

  In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

  If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

  If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

  A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

  Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

  12. No Surrender of Others' Freedom.

  If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

  13. Remote Network Interaction; Use with the GNU General Public License.

  Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.

  Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.

  14. Revised Versions of this License.

  The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

  Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.

  If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

  Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

  15. Disclaimer of Warranty.

  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. Limitation of Liability.

  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

  17. Interpretation of Sections 15 and 16.

  If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year> <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program. If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

  If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.

  You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
@@ -1,37 +0,0 @@
# Contributing

Contributions come in a variety of forms. Developing Serai, helping document it,
using its libraries in another project, using and testing it, and simply sharing
it are all valuable ways of contributing.

This document will specifically focus on contributions to this repository in the
form of code and documentation.

### Rules

- Stable native Rust; nightly for wasm and tools.
- `cargo fmt` must be used.
- `cargo clippy` must pass, except for the ignored rules (`type_complexity` and
  `dead_code`).
- The CI must pass.

- Only use uppercase variable names when relevant to cryptography.

- Use a two-space indent when possible.
- Put a space after comment markers.
- Don't use multiple newlines between sections of code.
- Have a newline before EOF.

### Guidelines

- Sort imports as core, std, third party, and then Serai.
- Comment code reasonably.
- Include tests for new features.
- Sign commits.

### Submission

All submissions should be through GitHub. Contributions to a crate will be
licensed according to the crate's existing license, with the crate's copyright
holders (distinct from authors) having the right to re-license the crate via a
unanimous decision.
Cargo.lock (generated, 10753 lines)
File diff suppressed because it is too large.
Cargo.toml (83 lines)
@@ -1,95 +1,16 @@
[workspace]
resolver = "2"
members = [
  "common/std-shims",
  "common/zalloc",
  "common/db",
  "common/env",
  "common/request",

members = [
  "crypto/transcript",

  "crypto/ff-group-tests",
  "crypto/dalek-ff-group",
  "crypto/ed448",
  "crypto/ciphersuite",

  "crypto/multiexp",

  "crypto/schnorr",
  "crypto/dleq",
  "crypto/dkg",
  "crypto/frost",
  "crypto/schnorrkel",

  "coins/bitcoin",
  "coins/ethereum",
  "coins/monero/generators",
  "coins/monero",
  "coins/firo",

  "message-queue",

  "processor/messages",
  "processor",

  "coordinator/tributary/tendermint",
  "coordinator/tributary",
  "coordinator",

  "substrate/primitives",

  "substrate/coins/primitives",
  "substrate/coins/pallet",

  "substrate/in-instructions/primitives",
  "substrate/in-instructions/pallet",

  "substrate/validator-sets/primitives",
  "substrate/validator-sets/pallet",

  "substrate/signals/pallet",

  "substrate/runtime",
  "substrate/node",

  "substrate/client",

  "mini",

  "tests/no-std",

  "tests/docker",
  "tests/message-queue",
  "tests/processor",
  "tests/coordinator",
  "tests/full-stack",
  "tests/reproducible-runtime",
]

# Always compile Monero (and a variety of dependencies) with optimizations due
# to the extensive operations required for Bulletproofs
[profile.dev.package]
subtle = { opt-level = 3 }
curve25519-dalek = { opt-level = 3 }

ff = { opt-level = 3 }
group = { opt-level = 3 }

crypto-bigint = { opt-level = 3 }
dalek-ff-group = { opt-level = 3 }
minimal-ed448 = { opt-level = 3 }

multiexp = { opt-level = 3 }

monero-serai = { opt-level = 3 }

[profile.release]
panic = "unwind"

[patch.crates-io]
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }

# subxt *can* pull these off crates.io yet there's no benefit to this
sp-core-hashing = { git = "https://github.com/serai-dex/substrate" }
sp-std = { git = "https://github.com/serai-dex/substrate" }
LICENSE (8 lines)
@@ -1,8 +0,0 @@
Serai crates are licensed under one of two licenses, either MIT or AGPL-3.0,
depending on the crate in question. Each crate declares their license in their
`Cargo.toml` and includes a `LICENSE` file detailing its status. Additionally,
a full copy of the AGPL-3.0 License is included in the root of this repository
as a reference text. This copy should be provided with any distribution of a
crate licensed under the AGPL-3.0, as per its terms.

The GitHub actions (`.github/actions`) are licensed under the MIT license.
README.md (59 lines)
@@ -1,63 +1,22 @@
# Serai

Serai is a new DEX, built from the ground up, initially planning on listing
Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig
Bitcoin, Ethereum, Monero, DAI, and USDC, offering a liquidity pool trading
experience. Funds are stored in an economically secured threshold multisig
wallet.

[Getting Started](docs/Getting%20Started.md)

### Layout

- `audits`: Audits for various parts of Serai.
- `docs` - Documentation on the Serai protocol.

- `docs`: Documentation on the Serai protocol.

- `common`: Crates containing utilities common to a variety of areas under
  Serai, none neatly fitting under another category.

- `crypto`: A series of composable cryptographic libraries built around the
  `ff`/`group` APIs, achieving a variety of tasks. These range from generic
  infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
  needed for Bitcoin-Monero atomic swaps.

- `coins`: Various coin libraries intended for usage in Serai yet also by the
- `coins` - Various coin libraries intended for usage in Serai yet also by the
  wider community. This means they will always support the functionality Serai
  needs, yet won't disadvantage other use cases when possible.

- `message-queue`: An ordered message server so services can talk to each other,
  even when the other is offline.
- `crypto` - A series of composable cryptographic libraries built around the
  `ff`/`group` APIs achieving a variety of tasks. These range from generic
  infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
  needed for Bitcoin-Monero atomic swaps.

- `processor`: A generic chain processor to process data for Serai and process
- `processor` - A generic chain processor to process data for Serai and process
  events from Serai, executing transactions as expected and needed.

- `coordinator`: A service to manage processors and communicate over a P2P
  network with other validators.

- `substrate`: Substrate crates used to instantiate the Serai network.

- `orchestration`: Dockerfiles and scripts to deploy a Serai node/test
  environment.

- `tests`: Tests for various crates. Generally, `crate/src/tests` is used, or
  `crate/tests`, yet any tests requiring crates' binaries are placed here.

### Security

Serai hosts a bug bounty program via
[Immunefi](https://immunefi.com/bounty/serai/). For in-scope critical
vulnerabilities, we will reward whitehats with up to $30,000.

Anything not in-scope should still be submitted through Immunefi, with rewards
issued at the discretion of the Immunefi program managers.

### Links

- [Website](https://serai.exchange/): https://serai.exchange/
- [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
- [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org
- [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/
- [Telegram](https://t.me/SeraiDEX): https://t.me/SeraiDEX
Binary file not shown.
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2023 Cypher Stack

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,6 +0,0 @@
# Cypher Stack /coins/bitcoin Audit, August 2023

This audit was over the /coins/bitcoin folder. It encompasses everything up to
commit 5121ca75199dff7bd34230880a1fdd793012068c.

Please see https://github.com/cypherstack/serai-btc-audit for provenance.
Binary file not shown.
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2023 Cypher Stack

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,7 +0,0 @@
# Cypher Stack /crypto Audit, March 2023

This audit was over the /crypto folder, excluding the ed448 crate, the `Ed448`
ciphersuite in the ciphersuite crate, and the `dleq/experimental` feature. It
encompasses everything up to commit 669d2dbffc1dafb82a09d9419ea182667115df06.

Please see https://github.com/cypherstack/serai-audit for provenance.
@@ -1,61 +0,0 @@
[package]
name = "bitcoin-serai"
version = "0.3.0"
description = "A Bitcoin library for FROST-signing transactions"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021"
rust-version = "1.74"

[dependencies]
std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }

thiserror = { version = "1", default-features = false, optional = true }

zeroize = { version = "^1.5", default-features = false }
rand_core = { version = "0.6", default-features = false }

bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }

k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }

transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }

hex = { version = "0.4", default-features = false, optional = true }
serde = { version = "1", default-features = false, features = ["derive"], optional = true }
serde_json = { version = "1", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }

[dev-dependencies]
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }

frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }

tokio = { version = "1", features = ["macros"] }

[features]
std = [
  "std-shims/std",

  "thiserror",

  "zeroize/std",
  "rand_core/std",

  "bitcoin/std",
  "bitcoin/serde",

  "k256/std",

  "transcript/std",
  "frost",

  "hex/std",
  "serde/std",
  "serde_json/std",
  "simple-request",
]
hazmat = []
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2023 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,4 +0,0 @@
# bitcoin-serai

An application of [modular-frost](https://docs.rs/modular-frost) to Bitcoin
transactions, enabling extremely-efficient multisigs.
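The README above is terse, so here is a minimal sketch of the intended end-to-end flow, stitched together from the `wallet` APIs shown in the files below. It is an illustration, not repository code: the transcript label, the omitted FROST signing loop, and referring to modular-frost as `frost` (the repo's own dependency rename) are assumptions.

```rust
use transcript::RecommendedTranscript;
use frost::{curve::Secp256k1, ThresholdKeys};

use bitcoin_serai::{
  bitcoin::{Address, Block},
  wallet::{tweak_keys, Scanner, SignableTransaction},
};

// Plan a transaction spending this key's outputs found in `block`, then obtain
// the FROST machine which would run the threshold signing protocol
fn plan_send(
  keys: ThresholdKeys<Secp256k1>,
  block: &Block,
  payments: &[(Address, u64)],
  fee_per_weight: u64,
) -> Option<()> {
  // Offset the group key until it's even, as Taproot requires
  let keys = tweak_keys(&keys);
  // Scan the block for outputs spendable by this key
  let scanner = Scanner::new(keys.group_key())?;
  let outputs = scanner.scan_block(block);
  // Plan the transaction, here without a change address or OP_RETURN data
  let tx = SignableTransaction::new(outputs, payments, None, None, fee_per_weight).ok()?;
  // The machine would then be driven through modular-frost's preprocess/sign/complete flow
  let _machine = tx.multisig(keys, RecommendedTranscript::new(b"example transcript"))?;
  Some(())
}
```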
@@ -1,166 +0,0 @@
use k256::{
  elliptic_curve::sec1::{Tag, ToEncodedPoint},
  ProjectivePoint,
};

use bitcoin::key::XOnlyPublicKey;

/// Get the x coordinate of a non-infinity, even point. Panics on invalid input.
pub fn x(key: &ProjectivePoint) -> [u8; 32] {
  let encoded = key.to_encoded_point(true);
  assert_eq!(encoded.tag(), Tag::CompressedEvenY, "x coordinate of odd key");
  (*encoded.x().expect("point at infinity")).into()
}

/// Convert a non-infinity even point to a XOnlyPublicKey. Panics on invalid input.
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
  XOnlyPublicKey::from_slice(&x(key)).expect("x_only was passed a point which was infinity or odd")
}

/// Make a point even by adding the generator until it is even.
///
/// Returns the even point and the amount of additions required.
#[cfg(any(feature = "std", feature = "hazmat"))]
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
  let mut c = 0;
  while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
    key += ProjectivePoint::GENERATOR;
    c += 1;
  }
  (key, c)
}

#[cfg(feature = "std")]
mod frost_crypto {
  use core::fmt::Debug;
  use std_shims::{vec::Vec, io};

  use zeroize::Zeroizing;
  use rand_core::{RngCore, CryptoRng};

  use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};

  use transcript::Transcript;

  use k256::{elliptic_curve::ops::Reduce, U256, Scalar};

  use frost::{
    curve::{Ciphersuite, Secp256k1},
    Participant, ThresholdKeys, ThresholdView, FrostError,
    algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
  };

  use super::*;

  /// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
  ///
  /// If passed an odd nonce, it will have the generator added until it is even.
  ///
  /// If the key is odd, this will panic.
  #[derive(Clone, Copy, Debug)]
  pub struct Hram;
  #[allow(non_snake_case)]
  impl HramTrait<Secp256k1> for Hram {
    fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
      // Convert the nonce to be even
      let (R, _) = make_even(*R);

      const TAG_HASH: Sha256 = Sha256::const_hash(b"BIP0340/challenge");

      let mut data = Sha256::engine();
      data.input(TAG_HASH.as_ref());
      data.input(TAG_HASH.as_ref());
      data.input(&x(&R));
      data.input(&x(A));
      data.input(m);

      Scalar::reduce(U256::from_be_slice(Sha256::from_engine(data).as_ref()))
    }
  }

  /// BIP-340 Schnorr signature algorithm.
  ///
  /// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
  #[derive(Clone)]
  pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
  impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
    /// Construct a Schnorr algorithm continuing the specified transcript.
    pub fn new(transcript: T) -> Schnorr<T> {
      Schnorr(FrostSchnorr::new(transcript))
    }
  }

  impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
    type Transcript = T;
    type Addendum = ();
    type Signature = [u8; 64];

    fn transcript(&mut self) -> &mut Self::Transcript {
      self.0.transcript()
    }

    fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
      self.0.nonces()
    }

    fn preprocess_addendum<R: RngCore + CryptoRng>(
      &mut self,
      rng: &mut R,
      keys: &ThresholdKeys<Secp256k1>,
    ) {
      self.0.preprocess_addendum(rng, keys)
    }

    fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
      self.0.read_addendum(reader)
    }

    fn process_addendum(
      &mut self,
      view: &ThresholdView<Secp256k1>,
      i: Participant,
      addendum: (),
    ) -> Result<(), FrostError> {
      self.0.process_addendum(view, i, addendum)
    }

    fn sign_share(
      &mut self,
      params: &ThresholdView<Secp256k1>,
      nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
      nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
      msg: &[u8],
    ) -> <Secp256k1 as Ciphersuite>::F {
      self.0.sign_share(params, nonce_sums, nonces, msg)
    }

    #[must_use]
    fn verify(
      &self,
      group_key: ProjectivePoint,
      nonces: &[Vec<ProjectivePoint>],
      sum: Scalar,
    ) -> Option<Self::Signature> {
      self.0.verify(group_key, nonces, sum).map(|mut sig| {
        // Make the R of the final signature even
        let offset;
        (sig.R, offset) = make_even(sig.R);
        // s = r + cx. Since we added to the r, add to s
        sig.s += Scalar::from(offset);
        // Convert to a Bitcoin signature by dropping the byte for the point's sign bit
        sig.serialize()[1 ..].try_into().unwrap()
      })
    }

    fn verify_share(
      &self,
      verification_share: ProjectivePoint,
      nonces: &[Vec<ProjectivePoint>],
      share: Scalar,
    ) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
      self.0.verify_share(verification_share, nonces, share)
    }
  }
}
#[cfg(feature = "std")]
pub use frost_crypto::*;
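As a quick illustration of the even-point trick above, here is a sketch (not repository code, and assuming the `hazmat` feature, which makes the `crypto` module public): `make_even` returns how many times the generator was added, so a caller can apply that same count as a scalar offset to keep private material consistent with the now-even public key.

```rust
use k256::{elliptic_curve::sec1::ToEncodedPoint, ProjectivePoint, Scalar};
use bitcoin_serai::crypto::make_even;

fn demo() {
  // Any point works here; 7G is just an arbitrary example
  let point = ProjectivePoint::GENERATOR * Scalar::from(7u64);
  let (even, additions) = make_even(point);
  // The result differs from the input by exactly `additions` copies of G...
  assert_eq!(even, point + (ProjectivePoint::GENERATOR * Scalar::from(additions)));
  // ...and its compressed encoding now has the even-Y tag (0x02)
  assert_eq!(even.to_encoded_point(true).as_bytes()[0], 2);
}
```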
@@ -1,24 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

/// The bitcoin Rust library.
pub use bitcoin;

/// Cryptographic helpers.
#[cfg(feature = "hazmat")]
pub mod crypto;
#[cfg(not(feature = "hazmat"))]
pub(crate) mod crypto;

/// Wallet functionality to create transactions.
pub mod wallet;
/// A minimal asynchronous Bitcoin RPC client.
#[cfg(feature = "std")]
pub mod rpc;

#[cfg(test)]
mod tests;
@@ -1,213 +0,0 @@
use core::fmt::Debug;
use std::collections::HashSet;

use thiserror::Error;

use serde::{Deserialize, de::DeserializeOwned};
use serde_json::json;

use simple_request::{hyper, Request, Client};

use bitcoin::{
  hashes::{Hash, hex::FromHex},
  consensus::encode,
  Txid, Transaction, BlockHash, Block,
};

#[derive(Clone, PartialEq, Eq, Debug, Deserialize)]
pub struct Error {
  code: isize,
  message: String,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
enum RpcResponse<T> {
  Ok { result: T },
  Err { error: Error },
}

/// A minimal asynchronous Bitcoin RPC client.
#[derive(Clone, Debug)]
pub struct Rpc {
  client: Client,
  url: String,
}

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
  #[error("couldn't connect to node")]
  ConnectionError,
  #[error("request had an error: {0:?}")]
  RequestError(Error),
  #[error("node replied with invalid JSON")]
  InvalidJson(serde_json::error::Category),
  #[error("node sent an invalid response ({0})")]
  InvalidResponse(&'static str),
  #[error("node was missing expected methods")]
  MissingMethods(HashSet<&'static str>),
}

impl Rpc {
  /// Create a new connection to a Bitcoin RPC.
  ///
  /// An RPC call is performed to ensure the node is reachable (and that an invalid URL wasn't
  /// provided).
  ///
  /// Additionally, a set of expected methods is checked to be offered by the Bitcoin RPC. If these
  /// methods aren't provided, an error with the missing methods is returned. This ensures all RPC
  /// routes explicitly provided by this library are at least possible.
  ///
  /// Each individual RPC route may still fail at time-of-call, regardless of the arguments
  /// provided to this library, if the RPC has an incompatible argument layout. That is not checked
  /// at time of RPC creation.
  pub async fn new(url: String) -> Result<Rpc, RpcError> {
    let rpc = Rpc { client: Client::with_connection_pool(), url };

    // Make an RPC request to verify the node is reachable and sane
    let res: String = rpc.rpc_call("help", json!([])).await?;

    // Verify all methods we expect are present
    // If we had a more expanded RPC, due to differences in RPC versions, it wouldn't make sense to
    // error if all methods weren't present
    // We only provide a very minimal set of methods which have been largely consistent, hence why
    // this is sane
    let mut expected_methods = HashSet::from([
      "help",
      "getblockcount",
      "getblockhash",
      "getblockheader",
      "getblock",
      "sendrawtransaction",
      "getrawtransaction",
    ]);
    for line in res.split('\n') {
      // This doesn't check if the arguments are as expected
      // This is due to Bitcoin supporting a large amount of optional arguments, which
      // occasionally change, with their own mechanism of text documentation, making matching off
      // it a quite involved task
      // Instead, once we've confirmed the methods are present, we assume our arguments are aligned
      // Else we'll error at time of call
      if expected_methods.remove(line.split(' ').next().unwrap_or("")) &&
        expected_methods.is_empty()
      {
        break;
      }
    }
    if !expected_methods.is_empty() {
      Err(RpcError::MissingMethods(expected_methods))?;
    };

    Ok(rpc)
  }

  /// Perform an arbitrary RPC call.
  pub async fn rpc_call<Response: DeserializeOwned + Debug>(
    &self,
    method: &str,
    params: serde_json::Value,
  ) -> Result<Response, RpcError> {
    let mut request = Request::from(
      hyper::Request::post(&self.url)
        .header("Content-Type", "application/json")
        .body(
          serde_json::to_vec(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
            .unwrap()
            .into(),
        )
        .unwrap(),
    );
    request.with_basic_auth();
    let mut res = self
      .client
      .request(request)
      .await
      .map_err(|_| RpcError::ConnectionError)?
      .body()
      .await
      .map_err(|_| RpcError::ConnectionError)?;

    let res: RpcResponse<Response> =
      serde_json::from_reader(&mut res).map_err(|e| RpcError::InvalidJson(e.classify()))?;
    match res {
      RpcResponse::Ok { result } => Ok(result),
      RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
    }
  }

  /// Get the latest block's number.
  ///
  /// The genesis block's 'number' is zero. They increment from there.
  pub async fn get_latest_block_number(&self) -> Result<usize, RpcError> {
    // getblockcount doesn't return the amount of blocks on the current chain, yet the "height"
    // of the current chain. The "height" of the current chain is defined as the "height" of the
    // tip block of the current chain. The "height" of a block is defined as the amount of blocks
    // present when the block was created. Accordingly, the genesis block has height 0, and
    // getblockcount will return 0 when it's the only block, despite there being one block.
    self.rpc_call("getblockcount", json!([])).await
  }

  /// Get the hash of a block by the block's number.
  pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
    let mut hash = *self
      .rpc_call::<BlockHash>("getblockhash", json!([number]))
      .await?
      .as_raw_hash()
      .as_byte_array();
    // bitcoin stores the inner bytes in reverse order.
    hash.reverse();
    Ok(hash)
  }

  /// Get a block's number by its hash.
  pub async fn get_block_number(&self, hash: &[u8; 32]) -> Result<usize, RpcError> {
    #[derive(Deserialize, Debug)]
    struct Number {
      height: usize,
    }
    Ok(self.rpc_call::<Number>("getblockheader", json!([hex::encode(hash)])).await?.height)
  }

  /// Get a block by its hash.
  pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
    let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex)
      .map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the block"))?;
    let block: Block = encode::deserialize(&bytes)
      .map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized block"))?;

    let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
    block_hash.reverse();
    if hash != &block_hash {
      Err(RpcError::InvalidResponse("node replied with a different block"))?;
    }

    Ok(block)
  }

  /// Publish a transaction.
  pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
    let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
    if txid != tx.txid() {
      Err(RpcError::InvalidResponse("returned TX ID doesn't equal calculated TX ID"))?;
    }
    Ok(txid)
  }

  /// Get a transaction by its hash.
  pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
    let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex)
      .map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the transaction"))?;
    let tx: Transaction = encode::deserialize(&bytes)
      .map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;

    let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
    tx_hash.reverse();
    if hash != &tx_hash {
      Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
    }

    Ok(tx)
  }
}
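A hedged usage sketch of the client above, assuming a Tokio runtime and a local node; the URL, credentials, and the basic-auth-from-URL behavior are assumptions, not repository code. `Rpc::new` performs the `help`-based method check before returning.

```rust
use bitcoin_serai::rpc::Rpc;

#[tokio::main]
async fn main() {
  // Placeholder endpoint (port 18443 is bitcoind's regtest default)
  let rpc = Rpc::new("http://user:pass@127.0.0.1:18443".to_string())
    .await
    .expect("couldn't connect to the node");

  // Walk number -> hash -> block for the chain tip
  let tip = rpc.get_latest_block_number().await.unwrap();
  let hash = rpc.get_block_hash(tip).await.unwrap();
  let block = rpc.get_block(&hash).await.unwrap();
  assert_eq!(rpc.get_block_number(&hash).await.unwrap(), tip);
  println!("block {tip} contains {} transactions", block.txdata.len());
}
```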
@@ -1,46 +0,0 @@
use rand_core::OsRng;

use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};

use k256::Scalar;
use transcript::{Transcript, RecommendedTranscript};
use frost::{
  curve::Secp256k1,
  Participant,
  tests::{algorithm_machines, key_gen, sign},
};

use crate::{
  bitcoin::hashes::{Hash as HashTrait, sha256::Hash},
  crypto::{x_only, make_even, Schnorr},
};

#[test]
fn test_algorithm() {
  let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
  const MESSAGE: &[u8] = b"Hello, World!";

  for (_, keys) in keys.iter_mut() {
    let (_, offset) = make_even(keys.group_key());
    *keys = keys.offset(Scalar::from(offset));
  }

  let algo =
    Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    Hash::hash(MESSAGE).as_ref(),
  );

  BContext::new()
    .verify_schnorr(
      &Signature::from_slice(&sig)
        .expect("couldn't convert produced signature to secp256k1::Signature"),
      &Message::from(Hash::hash(MESSAGE)),
      &x_only(&keys[&Participant::new(1).unwrap()].group_key()),
    )
    .unwrap()
}
@@ -1 +0,0 @@
mod crypto;
@@ -1,188 +0,0 @@
use std_shims::{
  vec::Vec,
  collections::HashMap,
  io::{self, Write},
};
#[cfg(feature = "std")]
use std_shims::io::Read;

use k256::{
  elliptic_curve::sec1::{Tag, ToEncodedPoint},
  Scalar, ProjectivePoint,
};

#[cfg(feature = "std")]
use frost::{
  curve::{Ciphersuite, Secp256k1},
  ThresholdKeys,
};

use bitcoin::{
  consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
  TxOut, Transaction, Block,
};
#[cfg(feature = "std")]
use bitcoin::consensus::encode::Decodable;

use crate::crypto::x_only;
#[cfg(feature = "std")]
use crate::crypto::make_even;

#[cfg(feature = "std")]
mod send;
#[cfg(feature = "std")]
pub use send::*;

/// Tweak keys to ensure they're usable with Bitcoin.
///
/// Taproot keys, which these keys are used as, must be even. This offsets the keys until they're
/// even.
#[cfg(feature = "std")]
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
  let (_, offset) = make_even(keys.group_key());
  keys.offset(Scalar::from(offset))
}

/// Return the Taproot address payload for a public key.
///
/// If the key is odd, this will return None.
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
  if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
    return None;
  }

  Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
}

/// A spendable output.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ReceivedOutput {
  // The scalar offset to obtain the key usable to spend this output.
  offset: Scalar,
  // The output to spend.
  output: TxOut,
  // The TX ID and vout of the output to spend.
  outpoint: OutPoint,
}

impl ReceivedOutput {
  /// The offset for this output.
  pub fn offset(&self) -> Scalar {
    self.offset
  }

  /// The Bitcoin output for this output.
  pub fn output(&self) -> &TxOut {
    &self.output
  }

  /// The outpoint for this output.
  pub fn outpoint(&self) -> &OutPoint {
    &self.outpoint
  }

  /// The value of this output.
  pub fn value(&self) -> u64 {
    self.output.value.to_sat()
  }

  /// Read a ReceivedOutput from a generic satisfying Read.
  #[cfg(feature = "std")]
  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
    Ok(ReceivedOutput {
      offset: Secp256k1::read_F(r)?,
      output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
      outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
    })
  }

  /// Write a ReceivedOutput to a generic satisfying Write.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.offset.to_bytes())?;
    w.write_all(&serialize(&self.output))?;
    w.write_all(&serialize(&self.outpoint))
  }

  /// Serialize a ReceivedOutput to a `Vec<u8>`.
  pub fn serialize(&self) -> Vec<u8> {
    let mut res = Vec::new();
    self.write(&mut res).unwrap();
    res
  }
}

/// A transaction scanner capable of being used with HDKD schemes.
#[derive(Clone, Debug)]
pub struct Scanner {
  key: ProjectivePoint,
  scripts: HashMap<ScriptBuf, Scalar>,
}

impl Scanner {
  /// Construct a Scanner for a key.
  ///
  /// Returns None if this key can't be scanned for.
  pub fn new(key: ProjectivePoint) -> Option<Scanner> {
    let mut scripts = HashMap::new();
    scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
    Some(Scanner { key, scripts })
  }

  /// Register an offset to scan for.
  ///
  /// Due to Bitcoin's requirement that points are even, not every offset may be used.
  /// If an offset isn't usable, it will be incremented until it is. If this offset is already
  /// present, None is returned. Else, Some(offset) will be, with the used offset.
  ///
  /// This means offsets are surjective, not bijective, and the order offsets are registered in
  /// may determine the validity of future offsets.
  pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
    // This loop will terminate as soon as an even point is found, with any point having a ~50%
    // chance of being even
    // That means this should terminate within a very small amount of iterations
    loop {
      match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
        Some(address) => {
          let script = address.script_pubkey();
          if self.scripts.contains_key(&script) {
            None?;
          }
          self.scripts.insert(script, offset);
          return Some(offset);
        }
        None => offset += Scalar::ONE,
      }
    }
  }

  /// Scan a transaction.
  pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
    let mut res = Vec::new();
    for (vout, output) in tx.output.iter().enumerate() {
      // If the vout index exceeds 2**32, stop scanning outputs
      let Ok(vout) = u32::try_from(vout) else { break };

      if let Some(offset) = self.scripts.get(&output.script_pubkey) {
        res.push(ReceivedOutput {
          offset: *offset,
          output: output.clone(),
          outpoint: OutPoint::new(tx.txid(), vout),
        });
      }
    }
    res
  }

  /// Scan a block.
  ///
  /// This will also scan the coinbase transaction, which is bound by maturity. If received outputs
  /// must be immediately spendable, a post-processing pass is needed to remove those outputs.
  /// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
  pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
    let mut res = Vec::new();
    for tx in &block.txdata {
      res.extend(self.scan_transaction(tx));
    }
    res
  }
}
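A short sketch (not repository code) of the `Scanner` behavior documented above: the offset actually registered may differ from the one requested, since odd points are skipped, and re-registering an offset which maps to an already-tracked script returns `None`.

```rust
use k256::{ProjectivePoint, Scalar};
use bitcoin_serai::wallet::Scanner;

fn demo(even_key: ProjectivePoint) {
  // `even_key` is assumed even, e.g. a group key after tweak_keys
  let mut scanner = Scanner::new(even_key).expect("key couldn't be scanned for");

  // If key + 1G is odd, the offset is incremented until the point is even,
  // so the returned offset is the one actually in use
  let used = scanner.register_offset(Scalar::ONE).expect("offset already present");

  // Registering the used offset again maps to an already-tracked script
  assert!(scanner.register_offset(used).is_none());
}
```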
@@ -1,438 +0,0 @@
|
||||
use std_shims::{
|
||||
io::{self, Read},
|
||||
collections::HashMap,
|
||||
};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use transcript::{Transcript, RecommendedTranscript};
|
||||
|
||||
use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
|
||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
|
||||
|
||||
use bitcoin::{
|
||||
sighash::{TapSighashType, SighashCache, Prevouts},
|
||||
absolute::LockTime,
|
||||
script::{PushBytesBuf, ScriptBuf},
|
||||
transaction::{Version, Transaction},
|
||||
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
crypto::Schnorr,
|
||||
wallet::{ReceivedOutput, address_payload},
|
||||
};
|
||||
|
||||
#[rustfmt::skip]
|
||||
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.cpp#L26-L63
|
||||
// As the above notes, a lower amount may not be considered dust if contained in a SegWit output
|
||||
// This doesn't bother with delineation due to how marginal these values are, and because it isn't
|
||||
// worth the complexity to implement differentation
|
||||
pub const DUST: u64 = 546;
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||
pub enum TransactionError {
|
||||
#[error("no inputs were specified")]
|
||||
NoInputs,
|
||||
#[error("no outputs were created")]
|
||||
NoOutputs,
|
||||
#[error("a specified payment's amount was less than bitcoin's required minimum")]
|
||||
DustPayment,
|
||||
#[error("too much data was specified")]
|
||||
TooMuchData,
|
||||
#[error("fee was too low to pass the default minimum fee rate")]
|
||||
TooLowFee,
|
||||
#[error("not enough funds for these payments")]
|
||||
NotEnoughFunds,
|
||||
#[error("transaction was too large")]
|
||||
TooLargeTransaction,
|
||||
}
|
||||
|
||||
/// A signable transaction, clone-able across attempts.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct SignableTransaction {
|
||||
tx: Transaction,
|
||||
offsets: Vec<Scalar>,
|
||||
prevouts: Vec<TxOut>,
|
||||
needed_fee: u64,
|
||||
}
|
||||
|
||||
impl SignableTransaction {
|
||||
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
||||
// Expand this a full transaction in order to use the bitcoin library's weight function
|
||||
let mut tx = Transaction {
|
||||
version: Version(2),
|
||||
lock_time: LockTime::ZERO,
|
||||
input: vec![
|
||||
TxIn {
|
||||
// This is a fixed size
|
||||
// See https://developer.bitcoin.org/reference/transactions.html#raw-transaction-format
|
||||
previous_output: OutPoint::default(),
|
||||
// This is empty for a Taproot spend
|
||||
script_sig: ScriptBuf::new(),
|
||||
// This is fixed size, yet we do use Sequence::MAX
|
||||
sequence: Sequence::MAX,
|
||||
// Our witnesses contains a single 64-byte signature
|
||||
witness: Witness::from_slice(&[vec![0; 64]])
|
||||
};
|
||||
inputs
|
||||
],
|
||||
output: payments
|
||||
.iter()
|
||||
// The payment is a fixed size so we don't have to use it here
|
||||
// The script pub key is not of a fixed size and does have to be used here
|
||||
.map(|payment| TxOut {
|
||||
value: Amount::from_sat(payment.1),
|
||||
script_pubkey: payment.0.script_pubkey(),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
if let Some(change) = change {
|
||||
      // Use a 0 value since we're currently unsure what the change amount will be, and since
      // the value is fixed size (so any value could be used here)
      tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
    }
    u64::from(tx.weight())
  }

  /// Returns the fee necessary for this transaction to achieve the fee rate specified at
  /// construction.
  ///
  /// The actual fee this transaction will use is `sum(inputs) - sum(outputs)`.
  pub fn needed_fee(&self) -> u64 {
    self.needed_fee
  }

  /// Returns the fee this transaction will use.
  pub fn fee(&self) -> u64 {
    self.prevouts.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>() -
      self.tx.output.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>()
  }

  /// Create a new SignableTransaction.
  ///
  /// If a change address is specified, any leftover funds will be sent to it if the leftover funds
  /// exceed the minimum output amount. If a change address isn't specified, all leftover funds
  /// will become part of the paid fee.
  ///
  /// If data is specified, an OP_RETURN output will be added with it.
  pub fn new(
    mut inputs: Vec<ReceivedOutput>,
    payments: &[(Address, u64)],
    change: Option<Address>,
    data: Option<Vec<u8>>,
    fee_per_weight: u64,
  ) -> Result<SignableTransaction, TransactionError> {
    if inputs.is_empty() {
      Err(TransactionError::NoInputs)?;
    }

    if payments.is_empty() && change.is_none() && data.is_none() {
      Err(TransactionError::NoOutputs)?;
    }

    for (_, amount) in payments {
      if *amount < DUST {
        Err(TransactionError::DustPayment)?;
      }
    }

    if data.as_ref().map(|data| data.len()).unwrap_or(0) > 80 {
      Err(TransactionError::TooMuchData)?;
    }

    let input_sat = inputs.iter().map(|input| input.output.value.to_sat()).sum::<u64>();
    let offsets = inputs.iter().map(|input| input.offset).collect();
    let tx_ins = inputs
      .iter()
      .map(|input| TxIn {
        previous_output: input.outpoint,
        script_sig: ScriptBuf::new(),
        sequence: Sequence::MAX,
        witness: Witness::new(),
      })
      .collect::<Vec<_>>();

    let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
    let mut tx_outs = payments
      .iter()
      .map(|payment| TxOut {
        value: Amount::from_sat(payment.1),
        script_pubkey: payment.0.script_pubkey(),
      })
      .collect::<Vec<_>>();

    // Add the OP_RETURN output
    if let Some(data) = data {
      tx_outs.push(TxOut {
        value: Amount::ZERO,
        script_pubkey: ScriptBuf::new_op_return(
          PushBytesBuf::try_from(data)
            .expect("data didn't fit into PushBytes despite being checked"),
        ),
      })
    }

    let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
    let mut needed_fee = fee_per_weight * weight;

    // "Virtual transaction size" is weight ceildiv 4 per
    // https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki

    // https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
    // src/policy/policy.cpp#L295-L298
    // implements this as expected

    // Technically, it takes whatever's greater, the weight or the amount of signature operations
    // multiplied by DEFAULT_BYTES_PER_SIGOP (20)
    // We only use 1 signature per input, and our inputs have a weight exceeding 20
    // Accordingly, our inputs' weight will always be greater than the cost of the signature ops
    let vsize = weight.div_ceil(4);
    debug_assert_eq!(
      u64::try_from(bitcoin::policy::get_virtual_tx_size(
        weight.try_into().unwrap(),
        tx_ins.len().try_into().unwrap()
      ))
      .unwrap(),
      vsize
    );
    // Technically, if there isn't change, this TX may still pay enough of a fee to pass the
    // minimum fee. Such edge cases aren't worth programming when they go against intent, as the
    // specified fee rate is too low to be valid
    // bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
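    // As a worked example (illustrative figures, not values read from this codebase): with
    // Bitcoin Core's default minimum of 1000 sats/kilo-vbyte, a transaction of 200 vbytes must
    // pay at least (1000 * 200) / 1000 = 200 sats to pass this check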
    if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
      Err(TransactionError::TooLowFee)?;
    }

    if input_sat < (payment_sat + needed_fee) {
      Err(TransactionError::NotEnoughFunds)?;
    }

    // If there's a change address, check if there's change to give it
    if let Some(change) = change.as_ref() {
      let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
      let fee_with_change = fee_per_weight * weight_with_change;
      if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
        if value >= DUST {
          tx_outs
            .push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
          weight = weight_with_change;
          needed_fee = fee_with_change;
        }
      }
    }

    if tx_outs.is_empty() {
      Err(TransactionError::NoOutputs)?;
    }

    if weight > u64::from(bitcoin::policy::MAX_STANDARD_TX_WEIGHT) {
      Err(TransactionError::TooLargeTransaction)?;
    }

    Ok(SignableTransaction {
      tx: Transaction {
        version: Version(2),
        lock_time: LockTime::ZERO,
        input: tx_ins,
        output: tx_outs,
      },
      offsets,
      prevouts: inputs.drain(..).map(|input| input.output).collect(),
      needed_fee,
    })
  }

  /// Returns the outputs this transaction will create.
  pub fn outputs(&self) -> &[TxOut] {
    &self.tx.output
  }

  /// Create a multisig machine for this transaction.
  ///
  /// Returns None if the wrong keys are used.
  pub fn multisig(
    self,
    keys: ThresholdKeys<Secp256k1>,
    mut transcript: RecommendedTranscript,
  ) -> Option<TransactionMachine> {
    transcript.domain_separate(b"bitcoin_transaction");
    transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());

    // Transcript the inputs and outputs
    let tx = &self.tx;
    for input in &tx.input {
      transcript.append_message(b"input_hash", input.previous_output.txid);
      transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
    }
    for payment in &tx.output {
      transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
      transcript.append_message(b"output_amount", payment.value.to_sat().to_le_bytes());
    }

    let mut sigs = vec![];
    for i in 0 .. tx.input.len() {
      let mut transcript = transcript.clone();
      // This unwrap is safe since any transaction with this many inputs violates the maximum
      // size allowed under standards, which this lib will error on creation of
      transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());

      let offset = keys.clone().offset(self.offsets[i]);
      if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
        None?;
      }

      sigs.push(AlgorithmMachine::new(
        Schnorr::new(transcript),
        keys.clone().offset(self.offsets[i]),
      ));
    }

    Some(TransactionMachine { tx: self, sigs })
  }
}

/// A FROST signing machine to produce a Bitcoin transaction.
///
/// This does not support caching its preprocess. When sign is called, the message must be empty.
/// This will panic if either `cache` is called or the message isn't empty.
pub struct TransactionMachine {
  tx: SignableTransaction,
  sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}

impl PreprocessMachine for TransactionMachine {
  type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
  type Signature = Transaction;
  type SignMachine = TransactionSignMachine;

  fn preprocess<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
  ) -> (Self::SignMachine, Self::Preprocess) {
    let mut preprocesses = Vec::with_capacity(self.sigs.len());
    let sigs = self
      .sigs
      .drain(..)
      .map(|sig| {
        let (sig, preprocess) = sig.preprocess(rng);
        preprocesses.push(preprocess);
        sig
      })
      .collect();

    (TransactionSignMachine { tx: self.tx, sigs }, preprocesses)
  }
}

pub struct TransactionSignMachine {
  tx: SignableTransaction,
  sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}

impl SignMachine<Transaction> for TransactionSignMachine {
  type Params = ();
  type Keys = ThresholdKeys<Secp256k1>;
  type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
  type SignatureShare = Vec<SignatureShare<Secp256k1>>;
  type SignatureMachine = TransactionSignatureMachine;

  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "Bitcoin transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn from_cache(
    _: (),
    _: ThresholdKeys<Secp256k1>,
    _: CachedPreprocess,
  ) -> Result<Self, FrostError> {
    unimplemented!(
      "Bitcoin transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
    self.sigs.iter().map(|sig| sig.read_preprocess(reader)).collect()
  }

  fn sign(
    mut self,
    commitments: HashMap<Participant, Self::Preprocess>,
    msg: &[u8],
  ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
    if !msg.is_empty() {
      panic!("message was passed to the TransactionMachine when it generates its own");
    }

    let commitments = (0 .. self.sigs.len())
      .map(|c| {
        commitments
          .iter()
          .map(|(l, commitments)| (*l, commitments[c].clone()))
          .collect::<HashMap<_, _>>()
      })
      .collect::<Vec<_>>();

    let mut cache = SighashCache::new(&self.tx.tx);
    // Sign committing to all inputs
    let prevouts = Prevouts::All(&self.tx.prevouts);

    let mut shares = Vec::with_capacity(self.sigs.len());
    let sigs = self
      .sigs
      .drain(..)
      .enumerate()
      .map(|(i, sig)| {
        let (sig, share) = sig.sign(
          commitments[i].clone(),
          cache
            .taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
            // This should never happen since the inputs align with the TX the cache was
            // constructed with, and because i is always < prevouts.len()
            .expect("taproot_key_spend_signature_hash failed to return a hash")
            .as_ref(),
        )?;
        shares.push(share);
        Ok(sig)
      })
      .collect::<Result<_, _>>()?;

    Ok((TransactionSignatureMachine { tx: self.tx.tx, sigs }, shares))
  }
}

pub struct TransactionSignatureMachine {
  tx: Transaction,
  sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}

impl SignatureMachine<Transaction> for TransactionSignatureMachine {
  type SignatureShare = Vec<SignatureShare<Secp256k1>>;

  fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
    self.sigs.iter().map(|sig| sig.read_share(reader)).collect()
  }

  fn complete(
    mut self,
    mut shares: HashMap<Participant, Self::SignatureShare>,
  ) -> Result<Transaction, FrostError> {
    for (input, schnorr) in self.tx.input.iter_mut().zip(self.sigs.drain(..)) {
      let sig = schnorr.complete(
        shares.iter_mut().map(|(l, shares)| (*l, shares.remove(0))).collect::<HashMap<_, _>>(),
      )?;

      let mut witness = Witness::new();
      witness.push(sig);
      input.witness = witness;
    }

    Ok(self.tx)
  }
}
@@ -1,25 +0,0 @@
use bitcoin_serai::{bitcoin::hashes::Hash as HashTrait, rpc::RpcError};

mod runner;
use runner::rpc;

async_sequential! {
  async fn test_rpc() {
    let rpc = rpc().await;

    // Test get_latest_block_number and get_block_hash by round tripping them
    let latest = rpc.get_latest_block_number().await.unwrap();
    let hash = rpc.get_block_hash(latest).await.unwrap();
    assert_eq!(rpc.get_block_number(&hash).await.unwrap(), latest);

    // Test this actually is the latest block number by checking that asking for the next block
    // errors
    assert!(matches!(rpc.get_block_hash(latest + 1).await, Err(RpcError::RequestError(_))));

    // Test get_block by checking the received block's hash matches the request
    let block = rpc.get_block(&hash).await.unwrap();
    // Hashes are displayed in reverse, an endianness quirk dating back to Satoshi's original
    // implementation
    let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
    block_hash.reverse();
    assert_eq!(hash, block_hash);
  }
}
@@ -1,48 +0,0 @@
use std::sync::OnceLock;

use bitcoin_serai::rpc::Rpc;

use tokio::sync::Mutex;

static SEQUENTIAL_CELL: OnceLock<Mutex<()>> = OnceLock::new();
#[allow(non_snake_case)]
pub fn SEQUENTIAL() -> &'static Mutex<()> {
  SEQUENTIAL_CELL.get_or_init(|| Mutex::new(()))
}

#[allow(dead_code)]
pub(crate) async fn rpc() -> Rpc {
  let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await.unwrap();

  // If this node has already been interacted with, clear its chain
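  // (a note on Bitcoin Core behavior: invalidating the block at height 1 disconnects every
  // block above genesis, resetting the regtest chain)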
  if rpc.get_latest_block_number().await.unwrap() > 0 {
    rpc
      .rpc_call(
        "invalidateblock",
        serde_json::json!([hex::encode(rpc.get_block_hash(1).await.unwrap())]),
      )
      .await
      .unwrap()
  }

  rpc
}

#[macro_export]
macro_rules! async_sequential {
  ($(async fn $name: ident() $body: block)*) => {
    $(
      #[tokio::test]
      async fn $name() {
        let guard = runner::SEQUENTIAL().lock().await;
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
            drop(guard);
            Err(err).unwrap()
          }
        }).await;
      }
    )*
  }
}
@@ -1,363 +0,0 @@
use std::collections::HashMap;

use rand_core::{RngCore, OsRng};

use transcript::{Transcript, RecommendedTranscript};

use k256::{
  elliptic_curve::{
    group::{ff::Field, Group},
    sec1::{Tag, ToEncodedPoint},
  },
  Scalar, ProjectivePoint,
};
use frost::{
  curve::Secp256k1,
  Participant, ThresholdKeys,
  tests::{THRESHOLD, key_gen, sign_without_caching},
};

use bitcoin_serai::{
  bitcoin::{
    hashes::Hash as HashTrait,
    blockdata::opcodes::all::OP_RETURN,
    script::{PushBytesBuf, Instruction, Instructions, Script},
    address::NetworkChecked,
    OutPoint, Amount, TxOut, Transaction, Network, Address,
  },
  wallet::{
    tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
  },
  rpc::Rpc,
};

mod runner;
use runner::rpc;

const FEE: u64 = 20;

fn is_even(key: ProjectivePoint) -> bool {
  key.to_encoded_point(true).tag() == Tag::CompressedEvenY
}

async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint) -> ReceivedOutput {
  let block_number = rpc.get_latest_block_number().await.unwrap() + 1;

  rpc
    .rpc_call::<Vec<String>>(
      "generatetoaddress",
      serde_json::json!([
        1,
        Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
      ]),
    )
    .await
    .unwrap();

  // Mine until maturity
  rpc
    .rpc_call::<Vec<String>>(
      "generatetoaddress",
      serde_json::json!([100, Address::p2sh(Script::new(), Network::Regtest).unwrap()]),
    )
    .await
    .unwrap();

  let block = rpc.get_block(&rpc.get_block_hash(block_number).await.unwrap()).await.unwrap();

  let mut outputs = scanner.scan_block(&block);
  assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));

  assert_eq!(outputs.len(), 1);
  assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
  assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());

  assert_eq!(
    ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
    outputs[0]
  );

  outputs.swap_remove(0)
}

fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
  let mut keys = key_gen(&mut OsRng);
  for (_, keys) in keys.iter_mut() {
    *keys = tweak_keys(keys);
  }
  let key = keys.values().next().unwrap().group_key();
  (keys, key)
}

fn sign(
  keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
  tx: SignableTransaction,
) -> Transaction {
  let mut machines = HashMap::new();
  for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
    machines.insert(
      i,
      tx.clone()
        .multisig(keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
        .unwrap(),
    );
  }
  sign_without_caching(&mut OsRng, machines, &[])
}

#[test]
fn test_tweak_keys() {
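  // Note: BIP 340 x-only keys implicitly have an even y-coordinate, which is why tweak_keys
  // must map a group key with an odd y-coordinate to an even one via a small scalar offset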
  let mut even = false;
  let mut odd = false;

  // Generate keys until we get an even set and an odd set
  while !(even && odd) {
    let mut keys = key_gen(&mut OsRng).drain().next().unwrap().1;
    if is_even(keys.group_key()) {
      // Tweaking should do nothing
      assert_eq!(tweak_keys(&keys).group_key(), keys.group_key());

      even = true;
    } else {
      let tweaked = tweak_keys(&keys).group_key();
      assert_ne!(tweaked, keys.group_key());
      // Tweaking should produce an even key
      assert!(is_even(tweaked));

      // Verify it uses the smallest possible offset
      while keys.group_key().to_encoded_point(true).tag() == Tag::CompressedOddY {
        keys = keys.offset(Scalar::ONE);
      }
      assert_eq!(tweaked, keys.group_key());

      odd = true;
    }
  }
}

async_sequential! {
  async fn test_scanner() {
    // Test Scanners are only creatable for even keys.
    for _ in 0 .. 128 {
      let key = ProjectivePoint::random(&mut OsRng);
      assert_eq!(Scanner::new(key).is_some(), is_even(key));
    }

    let mut key = ProjectivePoint::random(&mut OsRng);
    while !is_even(key) {
      key += ProjectivePoint::GENERATOR;
    }

    {
      let mut scanner = Scanner::new(key).unwrap();
      for _ in 0 .. 128 {
        let mut offset = Scalar::random(&mut OsRng);
        let registered = scanner.register_offset(offset).unwrap();
        // Registering this again should return None
        assert!(scanner.register_offset(offset).is_none());

        // We can only register offsets resulting in even keys
        // Make this even
        while !is_even(key + (ProjectivePoint::GENERATOR * offset)) {
          offset += Scalar::ONE;
        }
        // Ensure it matches the registered offset
        assert_eq!(registered, offset);
        // Assert registering this again fails
        assert!(scanner.register_offset(offset).is_none());
      }
    }

    let rpc = rpc().await;
    let mut scanner = Scanner::new(key).unwrap();

    assert_eq!(send_and_get_output(&rpc, &scanner, key).await.offset(), Scalar::ZERO);

    // Register an offset and test receiving to it
    let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
    assert_eq!(
      send_and_get_output(&rpc, &scanner, key + (ProjectivePoint::GENERATOR * offset))
        .await
        .offset(),
      offset
    );
  }

  async fn test_transaction_errors() {
    let (_, key) = keys();

    let rpc = rpc().await;
    let scanner = Scanner::new(key).unwrap();

    let output = send_and_get_output(&rpc, &scanner, key).await;
    assert_eq!(output.offset(), Scalar::ZERO);

    let inputs = vec![output];
    let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
    let payments = vec![(addr(), 1000)];

    assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());

    assert_eq!(
      SignableTransaction::new(vec![], &payments, None, None, FEE),
      Err(TransactionError::NoInputs)
    );

    // No change
    assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
    // Consolidation TX
    assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
    // Data
    assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
    // No outputs
    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[], None, None, FEE),
      Err(TransactionError::NoOutputs),
    );

    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[(addr(), 1)], None, None, FEE),
      Err(TransactionError::DustPayment),
    );

    assert!(
      SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 80]), FEE).is_ok()
    );
    assert_eq!(
      SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 81]), FEE),
      Err(TransactionError::TooMuchData),
    );

    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
      Err(TransactionError::TooLowFee),
    );

    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
      Err(TransactionError::NotEnoughFunds),
    );

    assert_eq!(
      SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
      Err(TransactionError::TooLargeTransaction),
    );
  }

  async fn test_send() {
    let (keys, key) = keys();

    let rpc = rpc().await;
    let mut scanner = Scanner::new(key).unwrap();

    // Get inputs, one not offset and one offset
    let output = send_and_get_output(&rpc, &scanner, key).await;
    assert_eq!(output.offset(), Scalar::ZERO);

    let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
    let offset_key = key + (ProjectivePoint::GENERATOR * offset);
    let offset_output = send_and_get_output(&rpc, &scanner, offset_key).await;
    assert_eq!(offset_output.offset(), offset);

    // Declare payments, change, fee
    let payments = [
      (Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
      (Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
    ];

    let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
    let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
    let change_addr =
      Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());

    // Create and sign the TX
    let tx = SignableTransaction::new(
      vec![output.clone(), offset_output.clone()],
      &payments,
      Some(change_addr.clone()),
      None,
      FEE
    ).unwrap();
    let needed_fee = tx.needed_fee();
    let tx = sign(&keys, tx);

    assert_eq!(tx.output.len(), 3);

    // Ensure we can scan it
    let outputs = scanner.scan_transaction(&tx);
    for (o, output) in outputs.iter().enumerate() {
      assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
      assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
    }

    assert_eq!(outputs[0].offset(), Scalar::ZERO);
    assert_eq!(outputs[1].offset(), offset);
    assert_eq!(outputs[2].offset(), change_offset);

    // Make sure the payments were properly created
    for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
      assert_eq!(
        output,
        &TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
      );
      assert_eq!(scanned.value(), payment.1);
    }

    // Make sure the change is correct
    assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
    let input_value = output.value() + offset_output.value();
    let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
    assert_eq!(input_value - output_value, needed_fee);

    let change_amount =
      input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
    assert_eq!(
      tx.output[2],
      TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
    );

    // This also tests send_raw_transaction and get_transaction, which the RPC test can't
    // effectively test
    rpc.send_raw_transaction(&tx).await.unwrap();
    let mut hash = *tx.txid().as_raw_hash().as_byte_array();
    hash.reverse();
    assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
  }

  async fn test_data() {
    let (keys, key) = keys();

    let rpc = rpc().await;
    let scanner = Scanner::new(key).unwrap();

    let output = send_and_get_output(&rpc, &scanner, key).await;
    assert_eq!(output.offset(), Scalar::ZERO);

    let data_len = 60 + usize::try_from(OsRng.next_u64() % 21).unwrap();
    let mut data = vec![0; data_len];
    OsRng.fill_bytes(&mut data);

    let tx = sign(
      &keys,
      SignableTransaction::new(
        vec![output],
        &[],
        Some(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
        Some(data.clone()),
        FEE
      ).unwrap()
    );

    assert!(tx.output[0].script_pubkey.is_op_return());
    let check = |mut instructions: Instructions| {
      assert_eq!(instructions.next().unwrap().unwrap(), Instruction::Op(OP_RETURN));
      assert_eq!(
        instructions.next().unwrap().unwrap(),
        Instruction::PushBytes(&PushBytesBuf::try_from(data.clone()).unwrap()),
      );
      assert!(instructions.next().is_none());
    };
    check(tx.output[0].script_pubkey.instructions());
    check(tx.output[0].script_pubkey.instructions_minimal());
  }
}
3
coins/ethereum/.gitignore
vendored
@@ -1,3 +0,0 @@
# solidity build outputs
cache
artifacts
@@ -1,39 +0,0 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
rust-version = "1.74"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
thiserror = { version = "1", default-features = false }
eyre = { version = "0.6", default-features = false }

sha3 = { version = "0.10", default-features = false, features = ["std"] }

group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }

ethers-core = { version = "2", default-features = false }
ethers-providers = { version = "2", default-features = false }
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }

[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["std"] }

hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["std"] }
serde_json = { version = "1", default-features = false, features = ["std"] }

sha2 = { version = "0.10", default-features = false, features = ["std"] }

tokio = { version = "1", features = ["macros"] }
@@ -1,15 +0,0 @@
AGPL-3.0-only license

Copyright (c) 2022-2023 Luke Parker

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
@@ -1,9 +0,0 @@
# Ethereum

This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.

### Dependencies

- solc
- [Foundry](https://github.com/foundry-rs/foundry)
@@ -1,15 +0,0 @@
fn main() {
  println!("cargo:rerun-if-changed=contracts");
  println!("cargo:rerun-if-changed=artifacts");

  #[rustfmt::skip]
  let args = [
    "--base-path", ".",
    "-o", "./artifacts", "--overwrite",
    "--bin", "--abi",
    "--optimize",
    "./contracts/Schnorr.sol"
  ];

  assert!(std::process::Command::new("solc").args(args).status().unwrap().success());
}
@@ -1,36 +0,0 @@
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;

// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
  // secp256k1 group order
  uint256 constant public Q =
    0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;

  // parity := public key y-coord parity (27 or 28)
  // px := public key x-coord
  // message := 32-byte message
  // s := schnorr signature
  // e := schnorr signature challenge
  function verify(
    uint8 parity,
    bytes32 px,
    bytes32 message,
    bytes32 s,
    bytes32 e
  ) public view returns (bool) {
    // ecrecover = (m, v, r, s);
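    // A sketch of why this works (following noot/schnorr-verify): ecrecover(h, v, r, s)
    // returns the address of r^-1 (sR - hG), where R is the point with x-coordinate r and
    // y-parity v. Calling it with (sp, parity, px, ep), where sp = -s*px and ep = -e*px,
    // therefore yields the address of px^-1 (s*px*G - e*px*P) = sG - eP, i.e. the Schnorr
    // nonce point R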
    bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
    bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));

    require(sp != 0);
    // the ecrecover precompile implementation checks that the `r` and `s`
    // inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
    // check if they're zero
    address R = ecrecover(sp, parity, px, ep);
    require(R != address(0), "ecrecover failed");
    return e == keccak256(
      abi.encodePacked(R, uint8(parity), px, block.chainid, message)
    );
  }
}
@@ -1,36 +0,0 @@
use thiserror::Error;
use eyre::{eyre, Result};

use ethers_providers::{Provider, Http};
use ethers_contract::abigen;

use crate::crypto::ProcessedSignature;

#[derive(Error, Debug)]
pub enum EthereumError {
  #[error("failed to verify Schnorr signature")]
  VerificationError,
}

abigen!(Schnorr, "./artifacts/Schnorr.abi");

pub async fn call_verify(
  contract: &Schnorr<Provider<Http>>,
  params: &ProcessedSignature,
) -> Result<()> {
  if contract
    .verify(
      params.parity + 27,
      params.px.to_bytes().into(),
      params.message,
      params.s.to_bytes().into(),
      params.e.to_bytes().into(),
    )
    .call()
    .await?
  {
    Ok(())
  } else {
    Err(eyre!(EthereumError::VerificationError))
  }
}
@@ -1,107 +0,0 @@
use sha3::{Digest, Keccak256};

use group::Group;
use k256::{
  elliptic_curve::{
    bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
  },
  AffinePoint, ProjectivePoint, Scalar, U256,
};

use frost::{algorithm::Hram, curve::Secp256k1};

pub fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}

pub fn address(point: &ProjectivePoint) -> [u8; 20] {
  let encoded_point = point.to_encoded_point(false);
  keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}

pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
  if r.is_zero().into() || s.is_zero().into() {
    return None;
  }
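  // A descriptive note on standard ECDSA key recovery: with R the point whose x-coordinate is
  // r, the public key is Q = r^-1 (sR - mG), computed below as u1 + u2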
  #[allow(non_snake_case)]
  let R = AffinePoint::decompress(&r.to_bytes(), v.into());
  #[allow(non_snake_case)]
  if let Some(R) = Option::<AffinePoint>::from(R) {
    #[allow(non_snake_case)]
    let R = ProjectivePoint::from(R);

    let r = r.invert().unwrap();
    let u1 = ProjectivePoint::GENERATOR * (-message * r);
    let u2 = R * (s * r);
    let key: ProjectivePoint = u1 + u2;
    if !bool::from(key.is_identity()) {
      return Some(address(&key));
    }
  }

  None
}

#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
  #[allow(non_snake_case)]
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let a_encoded_point = A.to_encoded_point(true);
    let mut a_encoded = a_encoded_point.as_ref().to_owned();
    a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
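    // This appears to mirror the contract's challenge, keccak256(abi.encodePacked(address(R),
    // parity, px, block.chainid, message)), with the chain ID and message expected to be
    // included in m by the caller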
    let mut data = address(R).to_vec();
    data.append(&mut a_encoded);
    data.append(&mut m.to_vec());
    Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
  }
}

pub struct ProcessedSignature {
  pub s: Scalar,
  pub px: Scalar,
  pub parity: u8,
  pub message: [u8; 32],
  pub e: Scalar,
}

#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> (Scalar, Scalar) {
  let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
  let sr = processed_sig.s.mul(&processed_sig.px).negate();
  let er = processed_sig.e.mul(&processed_sig.px).negate();
  (sr, er)
}

#[allow(non_snake_case)]
pub fn process_signature_for_contract(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> ProcessedSignature {
  let encoded_pk = A.to_encoded_point(true);
  let px = &encoded_pk.as_ref()[1 .. 33];
  let px_scalar = Scalar::reduce(U256::from_be_slice(px));
  let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
  ProcessedSignature {
    s,
    px: px_scalar,
    parity: &encoded_pk.as_ref()[0] - 2,
    #[allow(non_snake_case)]
    message: m,
    e,
  }
}
@@ -1,2 +0,0 @@
pub mod contract;
pub mod crypto;
@@ -1,128 +0,0 @@
use std::{convert::TryFrom, sync::Arc, time::Duration, fs::File};

use rand_core::OsRng;

use ::k256::{
  elliptic_curve::{bigint::ArrayEncoding, PrimeField},
  U256,
};

use ethers_core::{
  types::Signature,
  abi::Abi,
  utils::{keccak256, Anvil, AnvilInstance},
};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};

use frost::{
  curve::Secp256k1,
  Participant,
  algorithm::IetfSchnorr,
  tests::{key_gen, algorithm_machines, sign},
};

use ethereum_serai::{
  crypto,
  contract::{Schnorr, call_verify},
};

// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
pub async fn deploy_schnorr_verifier_contract(
  chain_id: u32,
  client: Arc<Provider<Http>>,
  wallet: &k256::ecdsa::SigningKey,
) -> eyre::Result<Schnorr<Provider<Http>>> {
  let abi: Abi = serde_json::from_reader(File::open("./artifacts/Schnorr.abi").unwrap()).unwrap();

  let hex_bin_buf = std::fs::read_to_string("./artifacts/Schnorr.bin").unwrap();
  let hex_bin =
    if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
  let bin = hex::decode(hex_bin).unwrap();
  let factory = ContractFactory::new(abi, bin.into(), client.clone());

  let mut deployment_tx = factory.deploy(())?.tx;
  deployment_tx.set_chain_id(chain_id);
  deployment_tx.set_gas(500_000);
  let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
  deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
  deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);

  let sig_hash = deployment_tx.sighash();
  let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();

  // EIP-155 v
  let mut v = u64::from(rid.to_byte());
  assert!((v == 0) || (v == 1));
  v += u64::from((chain_id * 2) + 35);

  let r = sig.r().to_repr();
  let r_ref: &[u8] = r.as_ref();
  let s = sig.s().to_repr();
  let s_ref: &[u8] = s.as_ref();
  let deployment_tx = deployment_tx.rlp_signed(&Signature { r: r_ref.into(), s: s_ref.into(), v });

  let pending_tx = client.send_raw_transaction(deployment_tx).await?;

  let mut receipt;
  while {
    receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
    receipt.is_none()
  } {
    tokio::time::sleep(Duration::from_secs(6)).await;
  }
  let receipt = receipt.unwrap();
  assert!(receipt.status == Some(1.into()));

  let contract = Schnorr::new(receipt.contract_address.unwrap(), client.clone());
  Ok(contract)
}

async fn deploy_test_contract() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
  let anvil = Anvil::new().spawn();

  let provider =
    Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
  let chain_id = provider.get_chainid().await.unwrap().as_u32();
  let wallet = anvil.keys()[0].clone().into();
  let client = Arc::new(provider);

  (chain_id, anvil, deploy_schnorr_verifier_contract(chain_id, client, &wallet).await.unwrap())
}

#[tokio::test]
async fn test_deploy_contract() {
  deploy_test_contract().await;
}

#[tokio::test]
async fn test_ecrecover_hack() {
  let (chain_id, _anvil, contract) = deploy_test_contract().await;
  let chain_id = U256::from(chain_id);

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&Participant::new(1).unwrap()].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );
  let mut processed_sig =
    crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);

  call_verify(&contract, &processed_sig).await.unwrap();

  // test invalid signature fails
  processed_sig.message[0] = 0;
  assert!(call_verify(&contract, &processed_sig).await.is_err());
}
@@ -1,92 +0,0 @@
use k256::{
  elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
  ProjectivePoint, Scalar, U256,
};
use frost::{curve::Secp256k1, Participant};

use ethereum_serai::crypto::*;

#[test]
fn test_ecrecover() {
  use rand_core::OsRng;
  use sha2::Sha256;
  use sha3::{Digest, Keccak256};
  use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};

  let private = SigningKey::random(&mut OsRng);
  let public = VerifyingKey::from(&private);

  const MESSAGE: &[u8] = b"Hello, World!";
  let (sig, recovery_id) = private
    .as_nonzero_scalar()
    .try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
    .unwrap();
  #[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
  {
    assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
  }

  assert_eq!(
    ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
      .unwrap(),
    address(&ProjectivePoint::from(public.as_affine()))
  );
}

#[test]
fn test_signing() {
  use frost::{
    algorithm::IetfSchnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let _group_key = keys[&Participant::new(1).unwrap()].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";

  let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
  let _sig = sign(
    &mut OsRng,
    algo,
    keys.clone(),
    algorithm_machines(&mut OsRng, IetfSchnorr::<Secp256k1, EthereumHram>::ietf(), &keys),
    MESSAGE,
  );
}

#[test]
fn test_ecrecover_hack() {
  use frost::{
    algorithm::IetfSchnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&Participant::new(1).unwrap()].group_key();
  let group_key_encoded = group_key.to_encoded_point(true);
  let group_key_compressed = group_key_encoded.as_ref();
  let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);
  let chain_id = U256::ONE;

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );

  let (sr, er) =
    preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
  let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
  assert_eq!(q, address(&sig.R));
}
@@ -1,2 +0,0 @@
mod contract;
mod crypto;
30
coins/firo/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
[package]
name = "firo"
version = "0.1.0"
description = "A modern Firo wallet library"
license = "MIT"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[dependencies]
lazy_static = "1"
thiserror = "1"

rand_core = "0.6"
rand_chacha = { version = "0.3", optional = true }

sha2 = "0.10"

ff = "0.12"
group = "0.12"
k256 = { version = "0.11", features = ["arithmetic"] }

blake2 = { version = "0.10", optional = true }
transcript = { path = "../../crypto/transcript", package = "flexible-transcript", features = ["recommended"], optional = true }
frost = { path = "../../crypto/frost", package = "modular-frost", features = ["secp256k1"], optional = true }

[dev-dependencies]
rand = "0.8"

[features]
multisig = ["blake2", "transcript", "frost", "rand_chacha"]
4
coins/firo/src/lib.rs
Normal file
@@ -0,0 +1,4 @@
pub mod spark;

#[cfg(test)]
mod tests;
183
coins/firo/src/spark/chaum/mod.rs
Normal file
@@ -0,0 +1,183 @@
#![allow(non_snake_case)]

use rand_core::{RngCore, CryptoRng};

use sha2::{Digest, Sha512};

use ff::Field;
use group::{Group, GroupEncoding};
use k256::{
  elliptic_curve::{bigint::{ArrayEncoding, U512}, ops::Reduce},
  Scalar, ProjectivePoint
};

use crate::spark::{F, G, H, U, GENERATORS_TRANSCRIPT};

#[cfg(feature = "frost")]
mod multisig;
#[cfg(feature = "frost")]
pub use multisig::ChaumMultisig;
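// A reading of the relation proven here, inferred from the prover and verifier below rather
// than from a formal specification: for each pair (S_i, T_i), the prover knows x_i, z_i, and a
// y shared across all pairs such that S_i = x_i F + y G + z_i H and U = x_i T_i + y G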
#[derive(Clone, Debug)]
pub struct ChaumStatement {
  context: Vec<u8>,
  S_T: Vec<(ProjectivePoint, ProjectivePoint)>,
}

impl ChaumStatement {
  pub fn new(context: Vec<u8>, S_T: Vec<(ProjectivePoint, ProjectivePoint)>) -> ChaumStatement {
    ChaumStatement { context, S_T }
  }

  fn transcript(&self) -> Vec<u8> {
    let mut transcript = self.context.clone();
    for S_T in &self.S_T {
      transcript.extend(S_T.0.to_bytes());
      transcript.extend(S_T.1.to_bytes());
    }
    transcript
  }
}

#[derive(Clone, Debug)]
pub struct ChaumWitness {
  statement: ChaumStatement,
  xz: Vec<(Scalar, Scalar)>
}

impl ChaumWitness {
  pub fn new(statement: ChaumStatement, xz: Vec<(Scalar, Scalar)>) -> ChaumWitness {
    assert!(!statement.S_T.is_empty());
    assert_eq!(statement.S_T.len(), xz.len());
    ChaumWitness { statement, xz }
  }
}

#[derive(Clone, PartialEq, Debug)]
pub(crate) struct ChaumCommitments {
  A1: ProjectivePoint,
  A2: Vec<ProjectivePoint>
}

impl ChaumCommitments {
  fn transcript(&self) -> Vec<u8> {
    let mut transcript = Vec::with_capacity((self.A2.len() + 1) * 33);
    transcript.extend(self.A1.to_bytes());
    for A in &self.A2 {
      transcript.extend(A.to_bytes());
    }
    transcript
  }
}

#[derive(Clone, PartialEq, Debug)]
pub struct ChaumProof {
  commitments: ChaumCommitments,
  t1: Vec<Scalar>,
  t2: Scalar,
  t3: Scalar
}

impl ChaumProof {
  fn r_t_commitments<R: RngCore + CryptoRng>(
    rng: &mut R,
    witness: &ChaumWitness
  ) -> (Vec<Scalar>, Scalar, ChaumCommitments) {
    let len = witness.xz.len();
    let mut rs = Vec::with_capacity(len);
    let mut r_sum = Scalar::zero();

    let mut commitments = ChaumCommitments {
      A1: ProjectivePoint::IDENTITY,
      A2: Vec::with_capacity(len)
    };

    for (_, T) in &witness.statement.S_T {
      let r = Scalar::random(&mut *rng);
      r_sum += r;
      commitments.A2.push(T * &r);
      rs.push(r);
    }

    let t = Scalar::random(&mut *rng);
    commitments.A1 = (*F * r_sum) + (*H * t);

    (rs, t, commitments)
  }

  fn t_prove(
    witness: &ChaumWitness,
    rs: &[Scalar],
    mut t3: Scalar,
    commitments: ChaumCommitments,
    nonces: &[Scalar],
    y: &Scalar
  ) -> (Scalar, ChaumProof) {
    let challenge = ChaumProof::challenge(&witness.statement, &commitments);
    let mut t1 = Vec::with_capacity(rs.len());
    let mut t2 = Scalar::zero();

    let mut accum = challenge;
    for (i, (x, z)) in witness.xz.iter().enumerate() {
      t1.push(rs[i] + (accum * x));
      t2 += nonces[i] + (accum * y);
      t3 += accum * z;
      accum *= challenge;
    }

    (challenge, ChaumProof { commitments, t1, t2, t3 })
  }

  fn challenge(statement: &ChaumStatement, commitments: &ChaumCommitments) -> Scalar {
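    // Reducing a 512-bit hash modulo the 256-bit group order keeps the challenge's bias
    // negligible (a note on why SHA-512 is used with a 256-bit curve)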
    let mut transcript = b"Chaum".to_vec();
    transcript.extend(&*GENERATORS_TRANSCRIPT);
    transcript.extend(&statement.transcript());
    transcript.extend(&commitments.transcript());
    Scalar::from_uint_reduced(U512::from_be_byte_array(Sha512::digest(transcript)))
  }

  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    witness: &ChaumWitness,
    y: &Scalar
  ) -> ChaumProof {
    let len = witness.xz.len();
    let (rs, t3, mut commitments) = Self::r_t_commitments(rng, witness);

    let mut s_sum = Scalar::zero();
    let mut ss = Vec::with_capacity(len);
    for i in 0 .. len {
      let s = Scalar::random(&mut *rng);
      s_sum += s;
      commitments.A2[i] += *G * s;
      ss.push(s);
    }
    commitments.A1 += *G * s_sum;

    let (_, proof) = Self::t_prove(&witness, &rs, t3, commitments, &ss, y);
    proof
  }

  pub fn verify(&self, statement: &ChaumStatement) -> bool {
    let len = statement.S_T.len();
    assert_eq!(len, self.commitments.A2.len());
    assert_eq!(len, self.t1.len());

    let challenge = Self::challenge(&statement, &self.commitments);

    let mut one = self.commitments.A1 - ((*G * self.t2) + (*H * self.t3));
    let mut two = -(*G * self.t2);

    let mut accum = challenge;
    for i in 0 .. len {
      one += statement.S_T[i].0 * accum;
      one -= *F * self.t1[i];

      two += self.commitments.A2[i] + (*U * accum);
      two -= statement.S_T[i].1 * self.t1[i];
      accum *= challenge;
    }

    one.is_identity().into() && two.is_identity().into()
  }
}
132
coins/firo/src/spark/chaum/multisig.rs
Normal file
@@ -0,0 +1,132 @@
use std::io::Read;

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha12Rng;

use ff::Field;
use k256::{Scalar, ProjectivePoint};

use transcript::{Transcript, RecommendedTranscript};
use frost::{curve::Secp256k1, FrostError, FrostView, algorithm::Algorithm};

use crate::spark::{G, GENERATORS_TRANSCRIPT, chaum::{ChaumWitness, ChaumProof}};

#[derive(Clone)]
pub struct ChaumMultisig {
  transcript: RecommendedTranscript,
  len: usize,
  witness: ChaumWitness,

  challenge: Scalar,
  proof: Option<ChaumProof>
}

impl ChaumMultisig {
  pub fn new(mut transcript: RecommendedTranscript, witness: ChaumWitness) -> ChaumMultisig {
    transcript.domain_separate(b"Chaum");
    transcript.append_message(b"generators", &*GENERATORS_TRANSCRIPT);
    transcript.append_message(b"statement", &witness.statement.transcript());
    for (x, z) in &witness.xz {
      transcript.append_message(b"x", &x.to_bytes());
      transcript.append_message(b"z", &z.to_bytes());
    }

    let len = witness.xz.len();
    ChaumMultisig {
      transcript,
      len,
      witness,

      challenge: Scalar::zero(),
      proof: None
    }
  }
}

impl Algorithm<Secp256k1> for ChaumMultisig {
  type Transcript = RecommendedTranscript;
  type Signature = ChaumProof;

  fn transcript(&mut self) -> &mut Self::Transcript {
    &mut self.transcript
  }

  fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
    vec![vec![*G]; self.len]
  }

  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    _: &mut R,
    _: &FrostView<Secp256k1>
  ) -> Vec<u8> {
    vec![]
  }

  fn process_addendum<Re: Read>(
    &mut self,
    _: &FrostView<Secp256k1>,
    _: u16,
    _: &mut Re
  ) -> Result<(), FrostError> {
    Ok(())
  }

  fn sign_share(
    &mut self,
    view: &FrostView<Secp256k1>,
    nonce_sums: &[Vec<ProjectivePoint>],
    nonces: &[Scalar],
    _: &[u8]
  ) -> Scalar {
    let (rs, t3, mut commitments) = ChaumProof::r_t_commitments(
      &mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"r_t")),
      &self.witness
    );

    for i in 0 .. self.len {
      commitments.A2[i] += nonce_sums[i][0];
    }
    commitments.A1 += nonce_sums.iter().map(|sum| sum[0]).sum::<ProjectivePoint>();
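    // A note on the aggregation: the summed nonce commitments stand in for the single-signer
    // s values, so the participants' t2 shares sum to t2 = sum(nonce_i + c^i y)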
    let (challenge, proof) = ChaumProof::t_prove(
      &self.witness,
      &rs,
      t3,
      commitments,
      nonces,
      &view.secret_share()
    );
    self.challenge = challenge;
    let t2 = proof.t2;
    self.proof = Some(proof);
    t2
  }

  fn verify(
    &self,
    _: ProjectivePoint,
    _: &[Vec<ProjectivePoint>],
    sum: Scalar
  ) -> Option<Self::Signature> {
    let mut proof = self.proof.clone().unwrap();
    proof.t2 = sum;
    Some(proof).filter(|proof| proof.verify(&self.witness.statement))
  }

  fn verify_share(
    &self,
    _: u16,
    verification_share: ProjectivePoint,
    nonces: &[Vec<ProjectivePoint>],
    share: Scalar
  ) -> bool {
    let mut t2 = ProjectivePoint::IDENTITY;
    let mut accum = self.challenge;
    for i in 0 .. self.len {
      t2 += nonces[i][0] + (verification_share * accum);
      accum *= self.challenge;
    }
    (*G * share) == t2
  }
}
42
coins/firo/src/spark/mod.rs
Normal file
@@ -0,0 +1,42 @@
use lazy_static::lazy_static;

use sha2::{Digest, Sha256};

use group::GroupEncoding;
use k256::{ProjectivePoint, CompressedPoint};

pub mod chaum;

// Extremely basic hash to curve, which should not be used, yet which offers the needed generators
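// It repeatedly appends the generator's letter to a "Generator " prefix until the SHA-256 hash
// of the string is a valid x-coordinate for an even-parity compressed point (try-and-increment)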
fn generator(letter: u8) -> ProjectivePoint {
  if letter == b'G' {
    return ProjectivePoint::GENERATOR;
  }

  let mut point = [2; 33];
  let mut g = b"Generator ".to_vec();

  let mut res;
  while {
    g.push(letter);
    point[1..].copy_from_slice(&Sha256::digest(&g));
    res = ProjectivePoint::from_bytes(&CompressedPoint::from(point));
    res.is_none().into()
  } {}
  res.unwrap()
}

lazy_static! {
  pub static ref F: ProjectivePoint = generator(b'F');
  pub static ref G: ProjectivePoint = generator(b'G');
  pub static ref H: ProjectivePoint = generator(b'H');
  pub static ref U: ProjectivePoint = generator(b'U');
  pub static ref GENERATORS_TRANSCRIPT: Vec<u8> = {
    let mut transcript = Vec::with_capacity(4 * 33);
    transcript.extend(&F.to_bytes());
    transcript.extend(&G.to_bytes());
    transcript.extend(&H.to_bytes());
    transcript.extend(&U.to_bytes());
    transcript
  };
}
72
coins/firo/src/tests/mod.rs
Normal file
@@ -0,0 +1,72 @@
use rand::rngs::OsRng;

use ff::Field;
use k256::Scalar;

#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::{curve::Secp256k1, tests::{key_gen, algorithm_machines, sign}};

use crate::spark::{F, G, H, U, chaum::*};

#[test]
fn chaum() {
  #[allow(non_snake_case)]
  let mut S_T = vec![];
  let mut xz = vec![];
  let y = Scalar::random(&mut OsRng);
  for _ in 0 .. 2 {
    let x = Scalar::random(&mut OsRng);
    let z = Scalar::random(&mut OsRng);

    S_T.push((
      (*F * x) + (*G * y) + (*H * z),
      // U = (x * T) + (y * G)
      // T = (U - (y * G)) * x^-1
      (*U - (*G * y)) * x.invert().unwrap()
    ));

    xz.push((x, z));
  }

  let statement = ChaumStatement::new(b"Hello, World!".to_vec(), S_T);
  let witness = ChaumWitness::new(statement.clone(), xz);
  assert!(ChaumProof::prove(&mut OsRng, &witness, &y).verify(&statement));
}

#[cfg(feature = "multisig")]
#[test]
fn chaum_multisig() {
  let keys = key_gen::<_, Secp256k1>(&mut OsRng);

  #[allow(non_snake_case)]
  let mut S_T = vec![];
  let mut xz = vec![];
  for _ in 0 .. 5 {
    let x = Scalar::random(&mut OsRng);
    let z = Scalar::random(&mut OsRng);

    S_T.push((
      (*F * x) + keys[&1].group_key() + (*H * z),
      (*U - keys[&1].group_key()) * x.invert().unwrap()
    ));

    xz.push((x, z));
  }

  let statement = ChaumStatement::new(b"Hello, Multisig World!".to_vec(), S_T);
  let witness = ChaumWitness::new(statement.clone(), xz);

  assert!(
    sign(
      &mut OsRng,
      algorithm_machines(
        &mut OsRng,
        ChaumMultisig::new(RecommendedTranscript::new(b"Firo Serai Chaum Test"), witness),
        &keys
      ),
      &[]
    ).verify(&statement)
  );
}
1
coins/monero/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
c/.build
@@ -1,110 +1,52 @@
[package]
name = "monero-serai"
version = "0.1.4-alpha"
description = "A modern Monero transaction library"
version = "0.1.0"
description = "A modern Monero wallet library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.74"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }

async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }

rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }

sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }

curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }

# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }

# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }

monero-generators = { path = "generators", version = "0.4", default-features = false }

futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }

hex-literal = "0.4"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }

base58-monero = { version = "2", default-features = false, features = ["check"] }

# Used for the provided HTTP RPC
digest_auth = { version = "0.3", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
tokio = { version = "1", default-features = false, optional = true }

[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }
cc = "1.0"

[dev-dependencies]
tokio = { version = "1", features = ["sync", "macros"] }
[dependencies]
hex-literal = "0.3"
lazy_static = "1"
thiserror = "1"

frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
rand_core = "0.6"
rand_chacha = { version = "0.3", optional = true }
rand = "0.8"
rand_distr = "0.4"

subtle = "2.4"

tiny-keccak = { version = "2", features = ["keccak"] }
blake2 = { version = "0.10", optional = true }

curve25519-dalek = { version = "3", features = ["std"] }

group = { version = "0.12" }
dalek-ff-group = { path = "../../crypto/dalek-ff-group" }

transcript = { package = "flexible-transcript", path = "../../crypto/transcript", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["ed25519"], optional = true }
dleq = { package = "dleq-serai", path = "../../crypto/dleq", features = ["serialize"], optional = true }

hex = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

base58-monero = "1"
monero-epee-bin-serde = "1.0"
monero = "0.16"

reqwest = { version = "0.11", features = ["json"] }

[features]
std = [
  "std-shims/std",

  "thiserror",

  "zeroize/std",
  "subtle/std",

  "rand_core/std",
  "rand/std",
  "rand_chacha/std",
  "rand_distr/std",

  "sha3/std",
  "pbkdf2/std",

  "multiexp/std",

  "transcript/std",
  "dleq/std",

  "monero-generators/std",

  "futures?/std",

  "hex/std",
  "serde/std",
  "serde_json/std",

  "base58-monero/std",
]

cache-distribution = ["futures"]
http-rpc = ["digest_auth", "simple-request", "tokio"]
multisig = ["transcript", "frost", "dleq", "std"]
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
experimental = []
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]

default = ["std", "http-rpc"]
[dev-dependencies]
sha2 = "0.10"
tokio = { version = "1", features = ["full"] }
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2022-2023 Luke Parker
Copyright (c) 2022 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -4,46 +4,4 @@ A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pitfalls developers may
face.

monero-serai also offers the following features:

- Featured Addresses
- A FROST-based multisig orders of magnitude more performant than Monero's

### Purpose and support

monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet will not deprive functionality from other users.

Various legacy transaction formats are not currently implemented, yet we are
willing to add support for them. There aren't active development efforts around
them however.

### Caveats

This library DOES attempt to do the following:

- Create on-chain transactions identical to how wallet2 would (unless told not
to)
- Not be detectable as monero-serai when scanning outputs
- Not reveal spent outputs to the connected RPC node

This library DOES NOT attempt to do the following:

- Have identical RPC behavior when creating transactions
- Be a wallet

This means that monero-serai shouldn't be fingerprintable on-chain. It also
shouldn't be fingerprintable if a targeted attack occurs to detect if the
receiving wallet is monero-serai or wallet2. It also should be generally safe
for usage with remote nodes.

It won't hide from remote nodes that it's monero-serai however, potentially
allowing a remote node to profile you. The implications of this are left to the
user to consider.

It also won't act as a wallet, just as a transaction library. wallet2 has
several *non-transaction-level* policies, such as always attempting to use two
inputs to create transactions. These are considered out of scope for
monero-serai.
Threshold multisignature support is available via the `multisig` feature.
@@ -1,67 +1,72 @@
use std::{
  io::Write,
  env,
  path::Path,
  fs::{File, remove_file},
};

use dalek_ff_group::EdwardsPoint;

use monero_generators::bulletproofs_generators;

fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
  for generator in points {
    generators_string.extend(
      format!(
        "
          dalek_ff_group::EdwardsPoint(
            curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
          ),
        ",
        generator.compress().to_bytes()
      )
      .chars(),
    );
  }
}

fn generators(prefix: &'static str, path: &str) {
  let generators = bulletproofs_generators(prefix.as_bytes());
  #[allow(non_snake_case)]
  let mut G_str = "".to_string();
  serialize(&mut G_str, &generators.G);
  #[allow(non_snake_case)]
  let mut H_str = "".to_string();
  serialize(&mut H_str, &generators.H);

  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        "
          pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
          pub fn GENERATORS() -> &'static Generators {{
            GENERATORS_CELL.get_or_init(|| Generators {{
              G: vec![
                {G_str}
              ],
              H: vec![
                {H_str}
              ],
            }})
          }}
        ",
      )
      .as_bytes(),
    )
    .unwrap();
}
use std::{env, path::Path, process::Command};

fn main() {
  println!("cargo:rerun-if-changed=build.rs");
  if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
    panic!("git failed to init submodules");
  }

  generators("bulletproof", "generators.rs");
  generators("bulletproof_plus", "generators_plus.rs");
  if !Command::new("mkdir").args(&["-p", ".build"])
    .current_dir(&Path::new("c")).status().unwrap().success() {
    panic!("failed to create a directory to track build progress");
  }

  let out_dir = &env::var("OUT_DIR").unwrap();

  // Use a file to signal if Monero was already built, as that should never be rebuilt
  // If the signaling file was deleted, run this script again to rebuild Monero though
  println!("cargo:rerun-if-changed=c/.build/monero");
  if !Path::new("c/.build/monero").exists() {
    if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
      .current_dir(&Path::new("c/monero")).status().unwrap().success() {
      panic!("make failed to build Monero. Please check your dependencies");
    }

    if !Command::new("touch").arg("monero")
      .current_dir(&Path::new("c/.build")).status().unwrap().success() {
      panic!("failed to create a file to label Monero as built");
    }
  }

  println!("cargo:rerun-if-changed=c/wrapper.cpp");
  cc::Build::new()
    .static_flag(true)
    .warnings(false)
    .extra_warnings(false)
    .flag("-Wno-deprecated-declarations")

    .include("c/monero/external/supercop/include")
    .include("c/monero/contrib/epee/include")
    .include("c/monero/src")
    .include("c/monero/build/release/generated_include")

    .define("AUTO_INITIALIZE_EASYLOGGINGPP", None)
    .include("c/monero/external/easylogging++")
    .file("c/monero/external/easylogging++/easylogging++.cc")

    .file("c/monero/src/common/aligned.c")
    .file("c/monero/src/common/perf_timer.cpp")

    .include("c/monero/src/crypto")
    .file("c/monero/src/crypto/crypto-ops-data.c")
    .file("c/monero/src/crypto/crypto-ops.c")
    .file("c/monero/src/crypto/keccak.c")
    .file("c/monero/src/crypto/hash.c")

    .include("c/monero/src/device")
    .file("c/monero/src/device/device_default.cpp")

    .include("c/monero/src/ringct")
    .file("c/monero/src/ringct/rctCryptoOps.c")
    .file("c/monero/src/ringct/rctTypes.cpp")
    .file("c/monero/src/ringct/rctOps.cpp")
    .file("c/monero/src/ringct/multiexp.cc")
    .file("c/monero/src/ringct/bulletproofs.cc")
    .file("c/monero/src/ringct/rctSigs.cpp")

    .file("c/wrapper.cpp")
    .compile("wrapper");

  println!("cargo:rustc-link-search={}", out_dir);
  println!("cargo:rustc-link-lib=wrapper");
  println!("cargo:rustc-link-lib=stdc++");
}
1	coins/monero/c/monero	Submodule
Submodule coins/monero/c/monero added at 424e4de16b
158	coins/monero/c/wrapper.cpp	Normal file
@@ -0,0 +1,158 @@
#include <mutex>

#include "device/device_default.hpp"

#include "ringct/bulletproofs.h"
#include "ringct/rctSigs.h"

typedef std::lock_guard<std::mutex> lock;

std::mutex rng_mutex;
uint8_t rng_entropy[64];

extern "C" {
  void rng(uint8_t* seed) {
    // Set the first half to the seed
    memcpy(rng_entropy, seed, 32);
    // Set the second half to the hash of a DST to ensure a lack of collisions
    crypto::cn_fast_hash("RNG_entropy_seed", 16, (char*) &rng_entropy[32]);
  }
}

extern "C" void monero_wide_reduce(uint8_t* value);
namespace crypto {
  void generate_random_bytes_not_thread_safe(size_t n, void* value) {
    size_t written = 0;
    while (written != n) {
      uint8_t hash[32];
      crypto::cn_fast_hash(rng_entropy, 64, (char*) hash);
      // Step the RNG by setting the latter half to the most recent result
      // Does not leak the RNG, even if the values are leaked (which they are
      // expected to be) due to the first half remaining constant and
      // undisclosed
      memcpy(&rng_entropy[32], hash, 32);

      size_t next = n - written;
      if (next > 32) {
        next = 32;
      }
      memcpy(&((uint8_t*) value)[written], hash, next);
      written += next;
    }
  }

  void random32_unbiased(unsigned char *bytes) {
    uint8_t value[64];
    generate_random_bytes_not_thread_safe(64, value);
    monero_wide_reduce(value);
    memcpy(bytes, value, 32);
  }
}

extern "C" {
  void c_hash_to_point(uint8_t* point) {
    rct::key key_point;
    ge_p3 e_p3;
    memcpy(key_point.bytes, point, 32);
    rct::hash_to_p3(e_p3, key_point);
    ge_p3_tobytes(point, &e_p3);
  }

  uint8_t* c_generate_bp(uint8_t* seed, uint8_t len, uint64_t* a, uint8_t* m) {
    lock guard(rng_mutex);
    rng(seed);

    rct::keyV masks;
    std::vector<uint64_t> amounts;
    masks.resize(len);
    amounts.resize(len);
    for (uint8_t i = 0; i < len; i++) {
      memcpy(masks[i].bytes, m + (i * 32), 32);
      amounts[i] = a[i];
    }

    rct::Bulletproof bp = rct::bulletproof_PROVE(amounts, masks);

    std::stringstream ss;
    binary_archive<true> ba(ss);
    ::serialization::serialize(ba, bp);
    uint8_t* res = (uint8_t*) calloc(ss.str().size(), 1);
    memcpy(res, ss.str().data(), ss.str().size());
    return res;
  }

  bool c_verify_bp(
    uint8_t* seed,
    uint s_len,
    uint8_t* s,
    uint8_t c_len,
    uint8_t* c
  ) {
    // BPs are batch verified which use RNG based weights to ensure individual
    // integrity
    // That's why this must also have control over RNG, to prevent interrupting
    // multisig signing while not using known seeds. Considering this doesn't
    // actually define a batch, and it's only verifying a single BP,
    // it'd probably be fine, but...
    lock guard(rng_mutex);
    rng(seed);

    rct::Bulletproof bp;
    std::stringstream ss;
    std::string str;
    str.assign((char*) s, (size_t) s_len);
    ss << str;
    binary_archive<false> ba(ss);
    ::serialization::serialize(ba, bp);
    if (!ss.good()) {
      return false;
    }

    bp.V.resize(c_len);
    for (uint8_t i = 0; i < c_len; i++) {
      memcpy(bp.V[i].bytes, &c[i * 32], 32);
    }

    try { return rct::bulletproof_VERIFY(bp); } catch(...) { return false; }
  }

  bool c_verify_clsag(
    uint s_len,
    uint8_t* s,
    uint8_t k_len,
    uint8_t* k,
    uint8_t* I,
    uint8_t* p,
    uint8_t* m
  ) {
    rct::clsag clsag;
    std::stringstream ss;
    std::string str;
    str.assign((char*) s, (size_t) s_len);
    ss << str;
    binary_archive<false> ba(ss);
    ::serialization::serialize(ba, clsag);
    if (!ss.good()) {
      return false;
    }

    rct::ctkeyV keys;
    keys.resize(k_len);
    for (uint8_t i = 0; i < k_len; i++) {
      memcpy(keys[i].dest.bytes, &k[(i * 2) * 32], 32);
      memcpy(keys[i].mask.bytes, &k[((i * 2) + 1) * 32], 32);
    }

    memcpy(clsag.I.bytes, I, 32);

    rct::key pseudo_out;
    memcpy(pseudo_out.bytes, p, 32);

    rct::key msg;
    memcpy(msg.bytes, m, 32);

    try {
      return verRctCLSAGSimple(msg, clsag, keys, pseudo_out);
    } catch(...) { return false; }
  }
}
@@ -1,28 +0,0 @@
[package]
name = "monero-generators"
version = "0.4.0"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }

subtle = { version = "^2.4", default-features = false }

sha3 = { version = "0.10", default-features = false }

curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }

group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }

[features]
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
default = ["std"]
@@ -1,21 +0,0 @@
MIT License

Copyright (c) 2022-2023 Luke Parker
@@ -1,7 +0,0 @@
# Monero Generators

Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.

This library is usable under no_std when the `alloc` feature is enabled.
@@ -1,51 +0,0 @@
use subtle::ConditionallySelectable;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use group::ff::{Field, PrimeField};
use dalek_ff_group::FieldElement;

use crate::hash;

/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
  #[allow(non_snake_case)]
  let A = FieldElement::from(486662u64);

  let v = FieldElement::from_square(hash(&bytes)).double();
  let w = v + FieldElement::ONE;
  let x = w.square() + (-A.square() * v);

  // This isn't the complete X, yet its initial value
  // We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
  // While inefficient, it solves API boundaries and reduces the amount of work done here
  #[allow(non_snake_case)]
  let X = {
    let u = w;
    let v = x;
    let v3 = v * v * v;
    let uv3 = u * v3;
    let v7 = v3 * v3 * v;
    let uv7 = u * v7;
    uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
  };
  let x = X.square() * x;

  let y = w - x;
  let non_zero_0 = !y.is_zero();
  let y_if_non_zero_0 = w + x;
  let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());

  let mut z = -A;
  z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
  #[allow(non_snake_case)]
  let Z = z + w;
  #[allow(non_snake_case)]
  let mut Y = z - w;

  Y *= Z.invert().unwrap();
  let mut bytes = Y.to_repr();
  bytes[31] |= sign.unwrap_u8() << 7;

  CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}
@@ -1,79 +0,0 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//!
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.

#![cfg_attr(not(feature = "std"), no_std)]

use std_shims::{sync::OnceLock, vec::Vec};

use sha3::{Digest, Keccak256};

use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};

use group::{Group, GroupEncoding};
use dalek_ff_group::EdwardsPoint;

mod varint;
use varint::write_varint;

mod hash_to_point;
pub use hash_to_point::hash_to_point;

fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
pub fn H() -> DalekPoint {
  *H_CELL.get_or_init(|| {
    CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
      .decompress()
      .unwrap()
      .mul_by_cofactor()
  })
}

static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
#[allow(non_snake_case)]
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
  H_POW_2_CELL.get_or_init(|| {
    let mut res = [H(); 64];
    for i in 1 .. 64 {
      res[i] = res[i - 1] + res[i - 1];
    }
    res
  })
}

const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;

/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
  pub G: Vec<EdwardsPoint>,
  pub H: Vec<EdwardsPoint>,
}

/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
  let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
  for i in 0 .. MAX_MN {
    let i = 2 * i;

    let mut even = H().compress().to_bytes().to_vec();
    even.extend(dst);
    let mut odd = even.clone();

    write_varint(&i.try_into().unwrap(), &mut even).unwrap();
    write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
    res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
    res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
  }
  res
}
@@ -1,16 +0,0 @@
use std_shims::io::{self, Write};

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut varint = *varint;
  while {
    let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    varint >>= 7;
    if varint != 0 {
      b |= VARINT_CONTINUATION_MASK;
    }
    w.write_all(&[b])?;
    varint != 0
  } {}
  Ok(())
}
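The inverse direction is a useful mental model for this encoding. The crate's actual reader lives in serialize.rs and isn't shown in this diff, so the following is a minimal sketch assuming the same format (7-bit groups, least significant first, top bit as continuation flag):

use std_shims::io::{self, Read};

// Minimal sketch of the inverse of write_varint (no overflow checks)
pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut res = 0;
  let mut shift = 0;
  loop {
    let mut b = [0; 1];
    r.read_exact(&mut b)?;
    // The low seven bits are payload; the high bit flags a following byte
    res |= u64::from(b[0] & !VARINT_CONTINUATION_MASK) << shift;
    if (b[0] & VARINT_CONTINUATION_MASK) == 0 {
      break;
    }
    shift += 7;
  }
  Ok(res)
}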
@@ -1,323 +0,0 @@
#[cfg(feature = "binaries")]
mod binaries {
  pub(crate) use std::sync::Arc;

  pub(crate) use curve25519_dalek::{
    scalar::Scalar,
    edwards::{CompressedEdwardsY, EdwardsPoint},
  };

  pub(crate) use multiexp::BatchVerifier;

  pub(crate) use serde::Deserialize;
  pub(crate) use serde_json::json;

  pub(crate) use monero_serai::{
    Commitment,
    ringct::RctPrunable,
    transaction::{Input, Transaction},
    block::Block,
    rpc::{RpcError, Rpc, HttpRpc},
  };

  pub(crate) use tokio::task::JoinHandle;

  pub(crate) async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
    let hash = loop {
      match rpc.get_block_hash(block_i).await {
        Ok(hash) => break hash,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block_hash ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
      }
    };

    // TODO: Grab the JSON to also check it was deserialized correctly
    #[derive(Deserialize, Debug)]
    struct BlockResponse {
      blob: String,
    }
    let res: BlockResponse = loop {
      match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
        Ok(res) => break res,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
      }
    };

    let blob = hex::decode(res.blob).expect("node returned non-hex block");
    let block = Block::read(&mut blob.as_slice())
      .unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
    assert_eq!(block.hash(), hash, "hash differs");
    assert_eq!(block.serialize(), blob, "serialization differs");

    let txs_len = 1 + block.txs.len();

    if !block.txs.is_empty() {
      #[derive(Deserialize, Debug)]
      struct TransactionResponse {
        tx_hash: String,
        as_hex: String,
      }
      #[derive(Deserialize, Debug)]
      struct TransactionsResponse {
        #[serde(default)]
        missed_tx: Vec<String>,
        txs: Vec<TransactionResponse>,
      }

      let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
      let mut all_txs = vec![];
      while !hashes_hex.is_empty() {
        let txs: TransactionsResponse = loop {
          match rpc
            .rpc_call(
              "get_transactions",
              Some(json!({
                "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
              })),
            )
            .await
          {
            Ok(txs) => break txs,
            Err(RpcError::ConnectionError(e)) => {
              println!("get_transactions ConnectionError: {e}");
              continue;
            }
            Err(e) => panic!("couldn't call get_transactions: {e:?}"),
          }
        };
        assert!(txs.missed_tx.is_empty());
        all_txs.extend(txs.txs);
      }

      let mut batch = BatchVerifier::new(block.txs.len());
      for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
        assert_eq!(
          tx_res.tx_hash,
          hex::encode(tx_hash),
          "node returned a transaction with different hash"
        );

        let tx = Transaction::read(
          &mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
        )
        .expect("couldn't deserialize transaction");

        assert_eq!(
          hex::encode(tx.serialize()),
          tx_res.as_hex,
          "Transaction serialization was different"
        );
        assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");

        if matches!(tx.rct_signatures.prunable, RctPrunable::Null) {
          assert_eq!(tx.prefix.version, 1);
          assert!(!tx.signatures.is_empty());
          continue;
        }

        let sig_hash = tx.signature_hash();
        // Verify all proofs we support proving for
        // This is due to having debug_asserts calling verify within their proving, and CLSAG
        // multisig explicitly calling verify as part of its signing process
        // Accordingly, making sure our signature_hash algorithm is correct is great, and further
        // making sure the verification functions are valid is appreciated
        match tx.rct_signatures.prunable {
          RctPrunable::Null |
          RctPrunable::AggregateMlsagBorromean { .. } |
          RctPrunable::MlsagBorromean { .. } => {}
          RctPrunable::MlsagBulletproofs { bulletproofs, .. } => {
            assert!(bulletproofs.batch_verify(
              &mut rand_core::OsRng,
              &mut batch,
              (),
              &tx.rct_signatures.base.commitments
            ));
          }
          RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
            assert!(bulletproofs.batch_verify(
              &mut rand_core::OsRng,
              &mut batch,
              (),
              &tx.rct_signatures.base.commitments
            ));

            for (i, clsag) in clsags.into_iter().enumerate() {
              let (amount, key_offsets, image) = match &tx.prefix.inputs[i] {
                Input::Gen(_) => panic!("Input::Gen"),
                Input::ToKey { amount, key_offsets, key_image } => (amount, key_offsets, key_image),
              };

              let mut running_sum = 0;
              let mut actual_indexes = vec![];
              for offset in key_offsets {
                running_sum += offset;
                actual_indexes.push(running_sum);
              }

              async fn get_outs(
                rpc: &Rpc<HttpRpc>,
                amount: u64,
                indexes: &[u64],
              ) -> Vec<[EdwardsPoint; 2]> {
                #[derive(Deserialize, Debug)]
                struct Out {
                  key: String,
                  mask: String,
                }

                #[derive(Deserialize, Debug)]
                struct Outs {
                  outs: Vec<Out>,
                }

                let outs: Outs = loop {
                  match rpc
                    .rpc_call(
                      "get_outs",
                      Some(json!({
                        "get_txid": true,
                        "outputs": indexes.iter().map(|o| json!({
                          "amount": amount,
                          "index": o
                        })).collect::<Vec<_>>()
                      })),
                    )
                    .await
                  {
                    Ok(outs) => break outs,
                    Err(RpcError::ConnectionError(e)) => {
                      println!("get_outs ConnectionError: {e}");
                      continue;
                    }
                    Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
                  }
                };

                let rpc_point = |point: &str| {
                  CompressedEdwardsY(
                    hex::decode(point)
                      .expect("invalid hex for ring member")
                      .try_into()
                      .expect("invalid point len for ring member"),
                  )
                  .decompress()
                  .expect("invalid point for ring member")
                };

                outs
                  .outs
                  .iter()
                  .map(|out| {
                    let mask = rpc_point(&out.mask);
                    if amount != 0 {
                      assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
                    }
                    [rpc_point(&out.key), mask]
                  })
                  .collect()
              }

              clsag
                .verify(
                  &get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
                  image,
                  &pseudo_outs[i],
                  &sig_hash,
                )
                .unwrap();
            }
          }
        }
      }
      assert!(batch.verify_vartime());
    }

    println!("Deserialized, hashed, and reserialized {block_i} with {} TXs", txs_len);
  }
}

#[cfg(feature = "binaries")]
#[tokio::main]
async fn main() {
  use binaries::*;

  let args = std::env::args().collect::<Vec<String>>();

  // Read start block as the first arg
  let mut block_i = args[1].parse::<usize>().expect("invalid start block");

  // How many blocks to work on at once
  let async_parallelism: usize =
    args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");

  // Read further args as RPC URLs
  let default_nodes = vec![
    "http://xmr-node.cakewallet.com:18081".to_string(),
    "https://node.sethforprivacy.com".to_string(),
  ];
  let mut specified_nodes = vec![];
  {
    let mut i = 0;
    loop {
      let Some(node) = args.get(3 + i) else { break };
      specified_nodes.push(node.clone());
      i += 1;
    }
  }
  let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };

  let rpc = |url: String| async move {
    HttpRpc::new(url.clone())
      .await
      .unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
  };
  let main_rpc = rpc(nodes[0].clone()).await;
  let mut rpcs = vec![];
  for i in 0 .. async_parallelism {
    rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone()).await));
  }

  let mut rpc_i = 0;
  let mut handles: Vec<JoinHandle<()>> = vec![];
  let mut height = 0;
  loop {
    let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
    if new_height == height {
      break;
    }
    height = new_height;

    while block_i < height {
      if handles.len() >= async_parallelism {
        // Guarantee one handle is complete
        handles.swap_remove(0).await.unwrap();

        // Remove all of the finished handles
        let mut i = 0;
        while i < handles.len() {
          if handles[i].is_finished() {
            handles.swap_remove(i).await.unwrap();
            continue;
          }
          i += 1;
        }
      }

      handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
      rpc_i = (rpc_i + 1) % rpcs.len();
      block_i += 1;
    }
  }
}

#[cfg(not(feature = "binaries"))]
fn main() {
  panic!("To run binaries, please build with `--feature binaries`.");
}
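For orientation (the exact binary name isn't visible in this diff, so this is an assumed invocation shape): judging from the argument parsing above, the scanner takes a start block, an optional parallelism count (default 8), then any number of node URLs, along the lines of `cargo run --release --features binaries -- 3000000 8 http://xmr-node.cakewallet.com:18081`.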
@@ -1,31 +1,19 @@
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use crate::{
  hash,
  merkle::merkle_root,
  serialize::*,
  transaction::{Input, Transaction},
  transaction::Transaction
};

const CORRECT_BLOCK_HASH_202612: [u8; 32] =
  hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a");
const EXISTING_BLOCK_HASH_202612: [u8; 32] =
  hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698");

#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct BlockHeader {
  pub major_version: u8,
  pub minor_version: u8,
  pub major_version: u64,
  pub minor_version: u64,
  pub timestamp: u64,
  pub previous: [u8; 32],
  pub nonce: u32,
  pub nonce: u32
}

impl BlockHeader {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
  pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
    write_varint(&self.major_version, w)?;
    write_varint(&self.minor_version, w)?;
    write_varint(&self.timestamp, w)?;
@@ -33,91 +21,46 @@ impl BlockHeader {
    w.write_all(&self.nonce.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
    Ok(BlockHeader {
      major_version: read_varint(r)?,
      minor_version: read_varint(r)?,
      timestamp: read_varint(r)?,
      previous: read_bytes(r)?,
      nonce: read_bytes(r).map(u32::from_le_bytes)?,
    })
  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
    Ok(
      BlockHeader {
        major_version: read_varint(r)?,
        minor_version: read_varint(r)?,
        timestamp: read_varint(r)?,
        previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
        nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
      }
    )
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Block {
  pub header: BlockHeader,
  pub miner_tx: Transaction,
  pub txs: Vec<[u8; 32]>,
  pub txs: Vec<[u8; 32]>
}

impl Block {
  pub fn number(&self) -> usize {
    match self.miner_tx.prefix.inputs.first() {
      Some(Input::Gen(number)) => (*number).try_into().unwrap(),
      _ => panic!("invalid block, miner TX didn't have an Input::Gen"),
    }
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.header.write(w)?;
    self.miner_tx.write(w)?;
    write_varint(&self.txs.len(), w)?;
  pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
    self.header.serialize(w)?;
    self.miner_tx.serialize(w)?;
    write_varint(&self.txs.len().try_into().unwrap(), w)?;
    for tx in &self.txs {
      w.write_all(tx)?;
    }
    Ok(())
  }

  fn tx_merkle_root(&self) -> [u8; 32] {
    merkle_root(self.miner_tx.hash(), &self.txs)
  }

  /// Serialize the block as required for the proof of work hash.
  ///
  /// This is distinct from the serialization required for the block hash. To get the block hash,
  /// use the [`Block::hash`] function.
  pub fn serialize_hashable(&self) -> Vec<u8> {
    let mut blob = self.header.serialize();
    blob.extend_from_slice(&self.tx_merkle_root());
    write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();

    blob
  }

  pub fn hash(&self) -> [u8; 32] {
    let mut hashable = self.serialize_hashable();
    // Monero prepends a VarInt of the block hashing blob's length before getting the block hash,
    // but doesn't do this when getting the proof of work hash :)
    let mut hashing_blob = Vec::with_capacity(8 + hashable.len());
    write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
    hashing_blob.append(&mut hashable);

    let hash = hash(&hashing_blob);
    if hash == CORRECT_BLOCK_HASH_202612 {
      return EXISTING_BLOCK_HASH_202612;
    };

    hash
  }
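Put differently (an informal restatement of the code above, not additional behavior): serialize_hashable produces header || tx_merkle_root || varint(1 + txs.len()), the proof of work hash consumes that blob directly, and the block hash is keccak256(varint(blob.len()) || blob), with the lone hard-coded exception for block 202612.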

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
    Ok(Block {
      header: BlockHeader::read(r)?,
      miner_tx: Transaction::read(r)?,
      txs: (0_usize .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
    })
  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
    Ok(
      Block {
        header: BlockHeader::deserialize(r)?,
        miner_tx: Transaction::deserialize(r)?,
        txs: (0 .. read_varint(r)?).map(
          |_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
        ).collect::<Result<_, _>>()?
      }
    )
  }
}
76	coins/monero/src/frost.rs	Normal file
@@ -0,0 +1,76 @@
use std::io::Read;

use thiserror::Error;
use rand_core::{RngCore, CryptoRng};

use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};

use group::{Group, GroupEncoding};

use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::{Generators, DLEqProof};

#[derive(Clone, Error, Debug)]
pub enum MultisigError {
  #[error("internal error ({0})")]
  InternalError(String),
  #[error("invalid discrete log equality proof")]
  InvalidDLEqProof(u16),
  #[error("invalid key image {0}")]
  InvalidKeyImage(u16)
}

fn transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"monero_key_image_dleq")
}

#[allow(non_snake_case)]
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
  rng: &mut R,
  H: EdwardsPoint,
  x: Scalar
) -> Vec<u8> {
  let mut res = Vec::with_capacity(64);
  DLEqProof::prove(
    rng,
    // Doesn't take in a larger transcript object due to the usage of this
    // Every prover would immediately write their own DLEq proof, when they can only do so in
    // the proper order if they want to reach consensus
    // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
    // merge later in some form, when it should instead just merge xH (as it does)
    &mut transcript(),
    Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
    dfg::Scalar(x)
  ).serialize(&mut res).unwrap();
  res
}

#[allow(non_snake_case)]
pub(crate) fn read_dleq<Re: Read>(
  serialized: &mut Re,
  H: EdwardsPoint,
  l: u16,
  xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
  let mut bytes = [0; 32];
  serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
  // dfg ensures the point is torsion free
  let xH = Option::<dfg::EdwardsPoint>::from(
    dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
  )?;
  // Ensure this is a canonical point
  if xH.to_bytes() != bytes {
    Err(MultisigError::InvalidDLEqProof(l))?;
  }

  DLEqProof::<dfg::EdwardsPoint>::deserialize(
    serialized
  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
    &mut transcript(),
    Generators::new(dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)),
    (xG, xH)
  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?;

  Ok(xH)
}
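In terms of the relation proved (read off the code, so an informal summary): for the basepoint G and the provided H, write_dleq proves knowledge of x such that xG and xH share the same discrete logarithm x, binding a signer's public key share to its key-image component. read_dleq parses the claimed xH, rejects non-canonical or torsioned encodings via dalek-ff-group, and verifies the proof against the pair (xG, xH).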
@@ -1,225 +1,100 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;

use std_shims::{sync::OnceLock, io};
use std::slice;

use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};
use subtle::ConstantTimeEq;

use sha3::{Digest, Keccak256};
use tiny_keccak::{Hasher, Keccak};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
};

pub use monero_generators::H;

mod merkle;
#[cfg(feature = "multisig")]
pub mod frost;

mod serialize;
use serialize::{read_byte, read_u16};

/// UnreducedScalar struct with functionality for recovering incorrectly reduced scalars.
mod unreduced_scalar;

/// Ring Signature structs and functionality.
pub mod ring_signatures;

/// RingCT structs and functionality.
pub mod ringct;
use ringct::RctType;

/// Transaction structs.
pub mod transaction;
/// Block structs.
pub mod block;

/// Monero daemon RPC interface.
pub mod rpc;
/// Wallet functionality, enabling scanning and sending transactions.
pub mod wallet;

#[cfg(test)]
mod tests;

static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
lazy_static! {
  static ref H: EdwardsPoint = CompressedEdwardsY(
    hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
  ).decompress().unwrap();
  static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}

// Function from libsodium our subsection of Monero relies on. Implementing it here means we don't
// need to link against libsodium
#[no_mangle]
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
  isize::from(
    slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
  ) - 1
}

// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
// generation function, and in order to not use Monero code (which would require propagating its
// license), the function was rewritten. It was rewritten with wide reduction, instead of rejection
// sampling however, hence the need for this function
#[no_mangle]
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
  let res = Scalar::from_bytes_mod_order_wide(
    std::slice::from_raw_parts(value, 64).try_into().unwrap()
  );
  for (i, b) in res.to_bytes().iter().enumerate() {
    value.add(i).write(*b);
  }
}

#[allow(non_snake_case)]
pub(crate) fn INV_EIGHT() -> Scalar {
  *INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}

/// Monero protocol version.
///
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
/// transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
  v14,
  v16,
  Custom {
    ring_len: usize,
    bp_plus: bool,
    optimal_rct_type: RctType,
    view_tags: bool,
    v16_fee: bool,
  },
}

impl Protocol {
  /// Amount of ring members under this protocol version.
  pub fn ring_len(&self) -> usize {
    match self {
      Protocol::v14 => 11,
      Protocol::v16 => 16,
      Protocol::Custom { ring_len, .. } => *ring_len,
    }
  }

  /// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
  ///
  /// This method will likely be reworked when versions not using Bulletproofs at all are added.
  pub fn bp_plus(&self) -> bool {
    match self {
      Protocol::v14 => false,
      Protocol::v16 => true,
      Protocol::Custom { bp_plus, .. } => *bp_plus,
    }
  }

  // TODO: Make this an Option when we support pre-RCT protocols
  pub fn optimal_rct_type(&self) -> RctType {
    match self {
      Protocol::v14 => RctType::Clsag,
      Protocol::v16 => RctType::BulletproofsPlus,
      Protocol::Custom { optimal_rct_type, .. } => *optimal_rct_type,
    }
  }

  /// Whether or not the specified version uses view tags.
  pub fn view_tags(&self) -> bool {
    match self {
      Protocol::v14 => false,
      Protocol::v16 => true,
      Protocol::Custom { view_tags, .. } => *view_tags,
    }
  }

  /// Whether or not the specified version uses the fee algorithm from Monero
  /// hard fork version 16 (released in v18 binaries).
  pub fn v16_fee(&self) -> bool {
    match self {
      Protocol::v14 => false,
      Protocol::v16 => true,
      Protocol::Custom { v16_fee, .. } => *v16_fee,
    }
  }

  pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Protocol::v14 => w.write_all(&[0, 14]),
      Protocol::v16 => w.write_all(&[0, 16]),
      Protocol::Custom { ring_len, bp_plus, optimal_rct_type, view_tags, v16_fee } => {
        // Custom, version 0
        w.write_all(&[1, 0])?;
        w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
        w.write_all(&[u8::from(*bp_plus)])?;
        w.write_all(&[optimal_rct_type.to_byte()])?;
        w.write_all(&[u8::from(*view_tags)])?;
        w.write_all(&[u8::from(*v16_fee)])
      }
    }
  }

  pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Protocol> {
    Ok(match read_byte(r)? {
      // Monero protocol
      0 => match read_byte(r)? {
        14 => Protocol::v14,
        16 => Protocol::v16,
        _ => Err(io::Error::other("unrecognized monero protocol"))?,
      },
      // Custom
      1 => match read_byte(r)? {
        0 => Protocol::Custom {
          ring_len: read_u16(r)?.into(),
          bp_plus: match read_byte(r)? {
            0 => false,
            1 => true,
            _ => Err(io::Error::other("invalid bool serialization"))?,
          },
          optimal_rct_type: RctType::from_byte(read_byte(r)?)
            .ok_or_else(|| io::Error::other("invalid RctType serialization"))?,
          view_tags: match read_byte(r)? {
            0 => false,
            1 => true,
            _ => Err(io::Error::other("invalid bool serialization"))?,
          },
          v16_fee: match read_byte(r)? {
            0 => false,
            1 => true,
            _ => Err(io::Error::other("invalid bool serialization"))?,
          },
        },
        _ => Err(io::Error::other("unrecognized custom protocol serialization"))?,
      },
      _ => Err(io::Error::other("unrecognized protocol serialization"))?,
    })
  }
}

/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Commitment {
  pub mask: Scalar,
  pub amount: u64,
}

impl core::fmt::Debug for Commitment {
  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
  }
  pub amount: u64
}

impl Commitment {
  /// A commitment to zero, defined with a mask of 1 (as to not be the identity).
  pub fn zero() -> Commitment {
    Commitment { mask: Scalar::ONE, amount: 0 }
    Commitment { mask: Scalar::one(), amount: 0 }
  }

  pub fn new(mask: Scalar, amount: u64) -> Commitment {
    Commitment { mask, amount }
  }

  /// Calculate a Pedersen commitment, as a point, from the transparent structure.
  pub fn calculate(&self) -> EdwardsPoint {
    (&self.mask * ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
    (&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
  }
}
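As a quick sketch of how this API composes (illustrative only; assumes the monero-serai API as shown above, with rand_core pulled in just for the example), commitments of the form mask*G + amount*H are additively homomorphic:

use rand_core::OsRng;
use monero_serai::{Commitment, random_scalar};

fn main() {
  // Commit to two amounts under random masks
  let c1 = Commitment::new(random_scalar(&mut OsRng), 5);
  let c2 = Commitment::new(random_scalar(&mut OsRng), 7);

  // (m1 G + a1 H) + (m2 G + a2 H) == (m1 + m2) G + (a1 + a2) H
  let sum = Commitment::new(c1.mask + c2.mask, c1.amount + c2.amount);
  assert_eq!(c1.calculate() + c2.calculate(), sum.calculate());
}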

/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
// Allows using a modern rand as dalek's is notoriously dated
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
  let mut r = [0; 64];
  rng.fill_bytes(&mut r);
  Scalar::from_bytes_mod_order_wide(&r)
}

pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
pub fn hash(data: &[u8]) -> [u8; 32] {
  let mut keccak = Keccak::v256();
  keccak.update(data);
  let mut res = [0; 32];
  keccak.finalize(&mut res);
  res
}

/// Hash the provided data to a scalar via keccak256(data) % l.
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  let scalar = Scalar::from_bytes_mod_order(hash(data));
  // Monero will explicitly error in this case
  // This library acknowledges the practical impossibility of it occurring, and doesn't bother to
  // code in logic to handle it. That said, if it ever occurs, something must happen in order to
  // not generate/verify a proof we believe to be valid when it isn't
  assert!(scalar != Scalar::ZERO, "ZERO HASH: {data:?}");
  scalar
  Scalar::from_bytes_mod_order(hash(&data))
}
@@ -1,55 +0,0 @@
use std_shims::vec::Vec;

use crate::hash;

pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
  match leafs.len() {
    0 => root,
    1 => hash(&[root, leafs[0]].concat()),
    _ => {
      let mut hashes = Vec::with_capacity(1 + leafs.len());
      hashes.push(root);
      hashes.extend(leafs);

      // Monero preprocesses this so the length is a power of 2
      let mut high_pow_2 = 4; // 4 is the lowest value this can be
      while high_pow_2 < hashes.len() {
        high_pow_2 *= 2;
      }
      let low_pow_2 = high_pow_2 / 2;

      // Merge right-most hashes until we're at the low_pow_2
      {
        let overage = hashes.len() - low_pow_2;
        let mut rightmost = hashes.drain((low_pow_2 - overage) ..);
        // This is true since we took overage from beneath and above low_pow_2, taking twice as
        // many elements as overage
        debug_assert_eq!(rightmost.len() % 2, 0);

        let mut paired_hashes = Vec::with_capacity(overage);
        while let Some(left) = rightmost.next() {
          let right = rightmost.next().unwrap();
          paired_hashes.push(hash(&[left.as_ref(), &right].concat()));
        }
        drop(rightmost);

        hashes.extend(paired_hashes);
        assert_eq!(hashes.len(), low_pow_2);
      }

      // Do a traditional pairing off
      let mut new_hashes = Vec::with_capacity(hashes.len() / 2);
      while hashes.len() > 1 {
        let mut i = 0;
        while i < hashes.len() {
          new_hashes.push(hash(&[hashes[i], hashes[i + 1]].concat()));
          i += 2;
        }

        hashes = new_hashes;
        new_hashes = Vec::with_capacity(hashes.len() / 2);
      }
      hashes[0]
    }
  }
}
@@ -1,72 +0,0 @@
use std_shims::{
  io::{self, *},
  vec::Vec,
};

use zeroize::Zeroize;

use curve25519_dalek::{EdwardsPoint, Scalar};

use monero_generators::hash_to_point;

use crate::{serialize::*, hash_to_scalar};

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Signature {
  c: Scalar,
  r: Scalar,
}

impl Signature {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_scalar(&self.c, w)?;
    write_scalar(&self.r, w)?;
    Ok(())
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Signature> {
    Ok(Signature { c: read_scalar(r)?, r: read_scalar(r)? })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct RingSignature {
  sigs: Vec<Signature>,
}

impl RingSignature {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for sig in &self.sigs {
      sig.write(w)?;
    }
    Ok(())
  }

  pub fn read<R: Read>(members: usize, r: &mut R) -> io::Result<RingSignature> {
    Ok(RingSignature { sigs: read_raw_vec(Signature::read, members, r)? })
  }

  pub fn verify(&self, msg: &[u8; 32], ring: &[EdwardsPoint], key_image: &EdwardsPoint) -> bool {
    if ring.len() != self.sigs.len() {
      return false;
    }

    let mut buf = Vec::with_capacity(32 + (32 * 2 * ring.len()));
    buf.extend_from_slice(msg);

    let mut sum = Scalar::ZERO;

    for (ring_member, sig) in ring.iter().zip(&self.sigs) {
      #[allow(non_snake_case)]
      let Li = EdwardsPoint::vartime_double_scalar_mul_basepoint(&sig.c, ring_member, &sig.r);
      buf.extend_from_slice(Li.compress().as_bytes());
      #[allow(non_snake_case)]
      let Ri = (sig.r * hash_to_point(ring_member.compress().to_bytes())) + (sig.c * key_image);
      buf.extend_from_slice(Ri.compress().as_bytes());

      sum += sig.c;
    }

    sum == hash_to_scalar(&buf)
  }
}
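For context on how this type is consumed, a hedged usage sketch (a hypothetical function, not part of this diff; assumes std is enabled so `&[u8]` provides the `Read` used here). It parses `ring.len()` signatures and runs the ring equation check: the sum of every `sig.c` must equal `hash_to_scalar(msg || L_0 || R_0 || ...)`.

use curve25519_dalek::EdwardsPoint;

fn check_ring_sig(
  bytes: &[u8],
  msg: &[u8; 32],
  ring: &[EdwardsPoint],
  key_image: &EdwardsPoint,
) -> bool {
  let mut reader = bytes;
  // A member count mismatch or malformed scalar fails cleanly
  match RingSignature::read(ring.len(), &mut reader) {
    Ok(sig) => sig.verify(msg, ring, key_image),
    Err(_) => false,
  }
}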
@@ -1,97 +0,0 @@
use core::fmt::Debug;
use std_shims::io::{self, Read, Write};

use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};

use monero_generators::H_pow_2;

use crate::{hash_to_scalar, unreduced_scalar::UnreducedScalar, serialize::*};

/// 64 Borromean ring signatures.
///
/// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they be reduced.
/// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
/// algorithm which was in use.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanSignatures {
  pub s0: [UnreducedScalar; 64],
  pub s1: [UnreducedScalar; 64],
  pub ee: Scalar,
}

impl BorromeanSignatures {
  pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
    Ok(BorromeanSignatures {
      s0: read_array(UnreducedScalar::read, r)?,
      s1: read_array(UnreducedScalar::read, r)?,
      ee: read_scalar(r)?,
    })
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for s0 in &self.s0 {
      s0.write(w)?;
    }
    for s1 in &self.s1 {
      s1.write(w)?;
    }
    write_scalar(&self.ee, w)
  }

  fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
    let mut transcript = [0; 2048];

    for i in 0 .. 64 {
      #[allow(non_snake_case)]
      let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
        &self.ee,
        &keys_a[i],
        &self.s0[i].recover_monero_slide_scalar(),
      );
      #[allow(non_snake_case)]
      let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
        &hash_to_scalar(LL.compress().as_bytes()),
        &keys_b[i],
        &self.s1[i].recover_monero_slide_scalar(),
      );
      transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
    }

    hash_to_scalar(&transcript) == self.ee
  }
}

/// A range proof premised on Borromean ring signatures.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanRange {
  pub sigs: BorromeanSignatures,
  pub bit_commitments: [EdwardsPoint; 64],
}

impl BorromeanRange {
  pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
    Ok(BorromeanRange {
      sigs: BorromeanSignatures::read(r)?,
      bit_commitments: read_array(read_point, r)?,
    })
  }
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.sigs.write(w)?;
    write_raw_vec(write_point, &self.bit_commitments, w)
  }

  pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
    if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
      return false;
    }

    #[allow(non_snake_case)]
    let H_pow_2 = H_pow_2();
    let mut commitments_sub_one = [EdwardsPoint::identity(); 64];
    for i in 0 .. 64 {
      commitments_sub_one[i] = self.bit_commitments[i] - H_pow_2[i];
    }

    self.sigs.verify(&self.bit_commitments, &commitments_sub_one)
  }
}
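The verify path relies on each bit commitment committing to either 0 or 2^i (hence ringing over C_i and C_i - H_pow_2[i]); the sum of committed values across all 64 bits then reconstructs the amount inside the aggregate commitment. A toy of the amount side only, with plain integers and masks omitted (a sketch, not part of this diff):

fn amount_from_bits(bits: &[bool; 64]) -> u64 {
  // If bit i is set, C_i commits to 2^i; otherwise to 0
  bits.iter().enumerate().map(|(i, bit)| u64::from(*bit) << i).sum()
}

fn main() {
  let mut bits = [false; 64];
  bits[0] = true;
  bits[3] = true;
  assert_eq!(amount_from_bits(&bits), 9); // 2^0 + 2^3
}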
coins/monero/src/ringct/bulletproofs.rs (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||
|
||||
use crate::{Commitment, wallet::TransactionError, serialize::*};
|
||||
|
||||
pub(crate) const MAX_OUTPUTS: usize = 16;
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct Bulletproofs {
|
||||
pub A: EdwardsPoint,
|
||||
pub S: EdwardsPoint,
|
||||
pub T1: EdwardsPoint,
|
||||
pub T2: EdwardsPoint,
|
||||
pub taux: Scalar,
|
||||
pub mu: Scalar,
|
||||
pub L: Vec<EdwardsPoint>,
|
||||
pub R: Vec<EdwardsPoint>,
|
||||
pub a: Scalar,
|
||||
pub b: Scalar,
|
||||
pub t: Scalar
|
||||
}
|
||||
|
||||
impl Bulletproofs {
|
||||
pub(crate) fn fee_weight(outputs: usize) -> usize {
|
||||
let proofs = 6 + usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
|
||||
let len = (9 + (2 * proofs)) * 32;
|
||||
|
||||
let mut clawback = 0;
|
||||
let padded = 1 << (proofs - 6);
|
||||
if padded > 2 {
|
||||
const BP_BASE: usize = 368;
|
||||
clawback = ((BP_BASE * padded) - len) * 4 / 5;
|
||||
}
|
||||
|
||||
len + clawback
|
||||
}
|
||||
|
||||
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
|
||||
if outputs.len() > MAX_OUTPUTS {
|
||||
return Err(TransactionError::TooManyOutputs)?;
|
||||
}
|
||||
|
||||
let mut seed = [0; 32];
|
||||
rng.fill_bytes(&mut seed);
|
||||
|
||||
let masks = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect::<Vec<_>>();
|
||||
let amounts = outputs.iter().map(|commitment| commitment.amount).collect::<Vec<_>>();
|
||||
|
||||
let res;
|
||||
unsafe {
|
||||
#[link(name = "wrapper")]
|
||||
extern "C" {
|
||||
fn free(ptr: *const u8);
|
||||
fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
|
||||
}
|
||||
|
||||
let ptr = c_generate_bp(
|
||||
seed.as_ptr(),
|
||||
u8::try_from(outputs.len()).unwrap(),
|
||||
amounts.as_ptr(),
|
||||
masks.as_ptr()
|
||||
);
|
||||
|
||||
let mut len = 6 * 32;
|
||||
len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
|
||||
res = Bulletproofs::deserialize(
|
||||
// Wrap in a cursor to provide a mutable Reader
|
||||
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
|
||||
).expect("Couldn't deserialize Bulletproofs from Monero");
|
||||
free(ptr);
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
|
||||
if commitments.len() > 16 {
|
||||
return false;
|
||||
}
|
||||
|
||||
let mut seed = [0; 32];
|
||||
rng.fill_bytes(&mut seed);
|
||||
|
||||
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
|
||||
self.serialize(&mut serialized).unwrap();
|
||||
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|
||||
|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
|
||||
).collect();
|
||||
|
||||
unsafe {
|
||||
#[link(name = "wrapper")]
|
||||
extern "C" {
|
||||
fn c_verify_bp(
|
||||
seed: *const u8,
|
||||
serialized_len: usize,
|
||||
serialized: *const u8,
|
||||
commitments_len: u8,
|
||||
commitments: *const [u8; 32]
|
||||
) -> bool;
|
||||
}
|
||||
|
||||
c_verify_bp(
|
||||
seed.as_ptr(),
|
||||
serialized.len(),
|
||||
serialized.as_ptr(),
|
||||
u8::try_from(commitments.len()).unwrap(),
|
||||
commitments.as_ptr()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_core<
|
||||
W: std::io::Write,
|
||||
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
|
||||
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
|
||||
write_point(&self.A, w)?;
|
||||
write_point(&self.S, w)?;
|
||||
write_point(&self.T1, w)?;
|
||||
write_point(&self.T2, w)?;
|
||||
write_scalar(&self.taux, w)?;
|
||||
write_scalar(&self.mu, w)?;
|
||||
specific_write_vec(&self.L, w)?;
|
||||
specific_write_vec(&self.R, w)?;
|
||||
write_scalar(&self.a, w)?;
|
||||
write_scalar(&self.b, w)?;
|
||||
write_scalar(&self.t, w)
|
||||
}
|
||||
|
||||
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
|
||||
}
|
||||
|
||||
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
|
||||
self.serialize_core(w, |points, w| write_vec(write_point, points, w))
|
||||
}
|
||||
|
||||
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
|
||||
let bp = Bulletproofs {
|
||||
A: read_point(r)?,
|
||||
S: read_point(r)?,
|
||||
T1: read_point(r)?,
|
||||
T2: read_point(r)?,
|
||||
taux: read_scalar(r)?,
|
||||
mu: read_scalar(r)?,
|
||||
L: read_vec(read_point, r)?,
|
||||
R: read_vec(read_point, r)?,
|
||||
a: read_scalar(r)?,
|
||||
b: read_scalar(r)?,
|
||||
t: read_scalar(r)?
|
||||
};
|
||||
|
||||
if bp.L.len() != bp.R.len() {
|
||||
Err(std::io::Error::new(std::io::ErrorKind::Other, "mismatched L/R len"))?;
|
||||
}
|
||||
Ok(bp)
|
||||
}
|
||||
}
|
||||
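A hedged round-trip sketch of the FFI API above (a hypothetical helper, not part of this diff; assumes the `wrapper` library links, `rand_core`'s `OsRng` is available, and `Commitment::calculate` yields the corresponding Pedersen commitment point, as it does elsewhere in this crate):

use rand_core::OsRng;

// Proves a range proof for the given commitments, then verifies it against
// their calculated points.
fn roundtrip(outputs: &[Commitment]) -> bool {
  let Ok(bp) = Bulletproofs::new(&mut OsRng, outputs) else { return false };
  let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
  bp.verify(&mut OsRng, &commitments)
}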
@@ -1,153 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use subtle::{Choice, ConditionallySelectable};

use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;

use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp as multiexp_const;

pub(crate) use monero_generators::Generators;

use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;

#[inline]
pub(crate) fn INV_EIGHT() -> Scalar {
  Scalar(DALEK_INV_EIGHT())
}

#[inline]
pub(crate) fn H() -> EdwardsPoint {
  EdwardsPoint(DALEK_H())
}

pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}

// Components common between variants
pub(crate) const MAX_M: usize = 16;
pub(crate) const LOG_N: usize = 6; // 1 << 6 == N
pub(crate) const N: usize = 64;

pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  multiexp_const(pairs) * INV_EIGHT()
}

pub(crate) fn vector_exponent(
  generators: &Generators,
  a: &ScalarVector,
  b: &ScalarVector,
) -> EdwardsPoint {
  debug_assert_eq!(a.len(), b.len());
  (a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}

pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
  let slice =
    &[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
      .concat();
  *cache = hash_to_scalar(slice);
  *cache
}

pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
  let mut logM = 0;
  let mut M;
  while {
    M = 1 << logM;
    (M <= MAX_M) && (M < outputs)
  } {
    logM += 1;
  }

  (logM + LOG_N, M, M * N)
}

pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
  let (_, M, MN) = MN(commitments.len());

  let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
  let mut aL = ScalarVector::new(MN);
  let mut aR = ScalarVector::new(MN);

  for j in 0 .. M {
    for i in (0 .. N).rev() {
      let mut bit = Choice::from(0);
      if j < sv.len() {
        bit = Choice::from((sv[j][i / 8] >> (i % 8)) & 1);
      }
      aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
      aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
    }
  }

  (aL, aR)
}

pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
  commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
  let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
  (hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}

pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
  rng: &mut R,
  generators: &Generators,
  aL: &ScalarVector,
  aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
  let ar = Scalar::random(rng);
  (ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
}

pub(crate) fn LR_statements(
  a: &ScalarVector,
  G_i: &[EdwardsPoint],
  b: &ScalarVector,
  H_i: &[EdwardsPoint],
  cL: Scalar,
  U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
  let mut res = a
    .0
    .iter()
    .copied()
    .zip(G_i.iter().copied())
    .chain(b.0.iter().copied().zip(H_i.iter().copied()))
    .collect::<Vec<_>>();
  res.push((cL, U));
  res
}

static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
pub(crate) fn TWO_N() -> &'static ScalarVector {
  TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
}

pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
  let mut products = vec![Scalar::ZERO; 1 << w.len()];
  products[0] = winv[0];
  products[1] = w[0];
  for j in 1 .. w.len() {
    let mut slots = (1 << (j + 1)) - 1;
    while slots > 0 {
      products[slots] = products[slots / 2] * w[j];
      products[slots - 1] = products[slots / 2] * winv[j];
      slots = slots.saturating_sub(2);
    }
  }

  // Sanity check as if the above failed to populate, it'd be critical
  for w in &products {
    debug_assert!(!bool::from(w.is_zero()));
  }

  products
}
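To make MN's padding concrete, a dependency-free sketch over plain integers (mirrors the loop above, not part of this diff; M is the output count rounded up to a power of 2, capped at MAX_M = 16, and each output proves N = 64 bits):

fn mn(outputs: usize) -> (usize, usize, usize) {
  const MAX_M: usize = 16;
  const LOG_N: usize = 6;
  const N: usize = 64;
  let mut log_m = 0;
  let mut m;
  while {
    m = 1 << log_m;
    (m <= MAX_M) && (m < outputs)
  } {
    log_m += 1;
  }
  (log_m + LOG_N, m, m * N)
}

fn main() {
  assert_eq!(mn(1), (6, 1, 64));     // a single output: vectors of 64 scalars
  assert_eq!(mn(3), (8, 4, 256));    // 3 outputs pad to M = 4
  assert_eq!(mn(16), (10, 16, 1024)); // the maximum aggregation
}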
@@ -1,229 +0,0 @@
#![allow(non_snake_case)]

use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;

use crate::{Commitment, wallet::TransactionError, serialize::*};

pub(crate) mod scalar_vector;
pub(crate) mod core;
use self::core::LOG_N;

pub(crate) mod original;
use self::original::OriginalStruct;

pub(crate) mod plus;
use self::plus::*;

pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;

/// Bulletproofs enum, supporting the original and plus formulations.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
  Original(OriginalStruct),
  Plus(AggregateRangeProof),
}

impl Bulletproofs {
  fn bp_fields(plus: bool) -> usize {
    if plus {
      6
    } else {
      9
    }
  }

  // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
  //   src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
  pub(crate) fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
    #[allow(non_snake_case)]
    let mut LR_len = 0;
    let mut n_padded_outputs = 1;
    while n_padded_outputs < n_outputs {
      LR_len += 1;
      n_padded_outputs = 1 << LR_len;
    }
    LR_len += LOG_N;

    let mut bp_clawback = 0;
    if n_padded_outputs > 2 {
      let fields = Bulletproofs::bp_fields(plus);
      let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
      let size = (fields + (2 * LR_len)) * 32;
      bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
    }

    (bp_clawback, LR_len)
  }

  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
    #[allow(non_snake_case)]
    let (bp_clawback, LR_len) = Bulletproofs::calculate_bp_clawback(plus, outputs);
    32 * (Bulletproofs::bp_fields(plus) + (2 * LR_len)) + 2 + bp_clawback
  }

  /// Prove the listed commitments are within [0 .. 2^64).
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: &[Commitment],
    plus: bool,
  ) -> Result<Bulletproofs, TransactionError> {
    if outputs.is_empty() {
      Err(TransactionError::NoOutputs)?;
    }
    if outputs.len() > MAX_OUTPUTS {
      Err(TransactionError::TooManyOutputs)?;
    }
    Ok(if !plus {
      Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
    } else {
      use dalek_ff_group::EdwardsPoint as DfgPoint;
      Bulletproofs::Plus(
        AggregateRangeStatement::new(outputs.iter().map(|com| DfgPoint(com.calculate())).collect())
          .unwrap()
          .prove(rng, AggregateRangeWitness::new(outputs).unwrap())
          .unwrap(),
      )
    })
  }

  /// Verify the given Bulletproofs.
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.verify(rng, commitments),
      Bulletproofs::Plus(bp) => {
        let mut verifier = BatchVerifier::new(1);
        // If this commitment is torsioned (which is allowed), this won't be a well-formed
        // dfg::EdwardsPoint (expected to be of prime order)
        // The actual BP+ impl will perform a torsion clear though, making this safe
        // TODO: Have AggregateRangeStatement take in dalek EdwardsPoints for clarity on this
        let Some(statement) = AggregateRangeStatement::new(
          commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
        ) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, (), bp.clone()) {
          return false;
        }
        verifier.verify_vartime()
      }
    }
  }

  /// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
  /// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
  /// Returns true if the Bulletproofs are sane, regardless of their validity.
  #[must_use]
  pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
    id: ID,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
      Bulletproofs::Plus(bp) => {
        let Some(statement) = AggregateRangeStatement::new(
          commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
        ) else {
          return false;
        };
        statement.verify(rng, verifier, id, bp.clone())
      }
    }
  }

  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Bulletproofs::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.taux, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)?;
        write_scalar(&bp.a, w)?;
        write_scalar(&bp.b, w)?;
        write_scalar(&bp.t, w)
      }

      Bulletproofs::Plus(bp) => {
        write_point(&bp.A.0, w)?;
        write_point(&bp.wip.A.0, w)?;
        write_point(&bp.wip.B.0, w)?;
        write_scalar(&bp.wip.r_answer.0, w)?;
        write_scalar(&bp.wip.s_answer.0, w)?;
        write_scalar(&bp.wip.delta_answer.0, w)?;
        specific_write_vec(&bp.wip.L.iter().cloned().map(|L| L.0).collect::<Vec<_>>(), w)?;
        specific_write_vec(&bp.wip.R.iter().cloned().map(|R| R.0).collect::<Vec<_>>(), w)
      }
    }
  }

  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read Bulletproofs.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    Ok(Bulletproofs::Original(OriginalStruct {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      taux: read_scalar(r)?,
      mu: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
      a: read_scalar(r)?,
      b: read_scalar(r)?,
      t: read_scalar(r)?,
    }))
  }

  /// Read Bulletproofs+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    use dalek_ff_group::{Scalar as DfgScalar, EdwardsPoint as DfgPoint};

    Ok(Bulletproofs::Plus(AggregateRangeProof {
      A: DfgPoint(read_point(r)?),
      wip: WipProof {
        A: DfgPoint(read_point(r)?),
        B: DfgPoint(read_point(r)?),
        r_answer: DfgScalar(read_scalar(r)?),
        s_answer: DfgScalar(read_scalar(r)?),
        delta_answer: DfgScalar(read_scalar(r)?),
        L: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
        R: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
      },
    }))
  }
}
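A worked sketch of calculate_bp_clawback for the original variant with 4 outputs (standalone integers, not part of this diff; it mirrors the arithmetic above, including the truncating division, and note `base` evaluates to 368 for the original variant, matching BP_BASE in the FFI file earlier):

fn clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
  const LOG_N: usize = 6;
  let fields = if plus { 6 } else { 9 };
  let mut lr_len = 0;
  let mut n_padded = 1;
  while n_padded < n_outputs {
    lr_len += 1;
    n_padded = 1 << lr_len;
  }
  lr_len += LOG_N;
  let mut bp_clawback = 0;
  if n_padded > 2 {
    let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2; // 368 when plus == false
    let size = (fields + (2 * lr_len)) * 32;
    bp_clawback = ((base * n_padded) - size) * 4 / 5;
  }
  (bp_clawback, lr_len)
}

fn main() {
  // 4 outputs pad to 4; the shared proof is smaller than 4 singles, so 80% of
  // the saved size is "clawed back" into the weight: ((368 * 4) - 800) * 4 / 5 = 537
  assert_eq!(clawback(false, 4), (537, 8));
}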
@@ -1,309 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{Commitment, ringct::bulletproofs::core::*};

include!(concat!(env!("OUT_DIR"), "/generators.rs"));

static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
pub(crate) fn IP12() -> Scalar {
  *IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
  pub(crate) A: DalekPoint,
  pub(crate) S: DalekPoint,
  pub(crate) T1: DalekPoint,
  pub(crate) T2: DalekPoint,
  pub(crate) taux: DalekScalar,
  pub(crate) mu: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
  pub(crate) a: DalekScalar,
  pub(crate) b: DalekScalar,
  pub(crate) t: DalekScalar,
}

impl OriginalStruct {
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> OriginalStruct {
    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_commitments(commitments_points.clone());

    let (sL, sR) =
      ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();

    let generators = GENERATORS();
    let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
    let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let l0 = &aL - z;
    let l1 = sL;

    let mut zero_twos = Vec::with_capacity(MN);
    let zpow = ScalarVector::powers(z, M + 2);
    for j in 0 .. M {
      for i in 0 .. N {
        zero_twos.push(zpow[j + 2] * TWO_N()[i]);
      }
    }

    let yMN = ScalarVector::powers(y, MN);
    let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
    let r1 = yMN * sR;

    let (T1, T2, x, mut taux) = {
      let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
      let t2 = inner_product(&l1, &r1);

      let mut tau1 = Scalar::random(&mut *rng);
      let mut tau2 = Scalar::random(&mut *rng);

      let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
      let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);

      let x =
        hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);

      let taux = (tau2 * (x * x)) + (tau1 * x);

      tau1.zeroize();
      tau2.zeroize();
      (T1, T2, x, taux)
    };

    let mu = (x * rho) + alpha;
    alpha.zeroize();
    rho.zeroize();

    for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      taux += zpow[i + 2] * gamma;
    }

    let l = &l0 + &(l1 * x);
    let r = &r0 + &(r1 * x);

    let t = inner_product(&l, &r);

    let x_ip =
      hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);

    let mut a = l;
    let mut b = r;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = generators.G[.. a.len()].to_vec();
    let mut H_proof = generators.H[.. a.len()].to_vec();
    H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
    let U = H() * x_ip;

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = inner_product(&aL, &bR);
      let cR = inner_product(&aR, &bL);

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
      let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
      L.push(L_i);
      R.push(R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      a = (aL * w) + (aR * winv);
      b = (bL * winv) + (bR * w);

      if a.len() != 1 {
        G_proof = hadamard_fold(G_L, G_R, winv, w);
        H_proof = hadamard_fold(H_L, H_R, w, winv);
      }
    }

    let res = OriginalStruct {
      A: *A,
      S: *S,
      T1: *T1,
      T2: *T2,
      taux: *taux,
      mu: *mu,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
      a: *a[0],
      b: *b[0],
      t: *t,
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);

    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let x = hash_cache(
      &mut cache,
      &[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
    );

    let x_ip = hash_cache(
      &mut cache,
      &[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
    );

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let T1 = normalize(&self.T1);
    let T2 = normalize(&self.T2);
    let A = normalize(&self.A);
    let S = normalize(&self.S);

    let commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(4 + commitments.len());

    let zpow = ScalarVector::powers(z, M + 3);
    let ip1y = ScalarVector::powers(y, M * N).sum();
    let mut k = -(zpow[2] * ip1y);
    for j in 1 ..= M {
      k -= zpow[j + 2] * IP12();
    }
    let y1 = Scalar(self.t) - ((z * ip1y) + k);
    proof.push((-y1, H()));

    proof.push((-Scalar(self.taux), G));

    for (j, commitment) in commitments.iter().enumerate() {
      proof.push((zpow[j + 2], *commitment));
    }

    proof.push((x, T1));
    proof.push((x * x, T2));
    verifier.queue(&mut *rng, id, proof);

    proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
    let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
    proof.push((z3, H()));
    proof.push((-Scalar(self.mu), G));

    proof.push((Scalar::ONE, A));
    proof.push((x, S));

    {
      let ypow = ScalarVector::powers(y, MN);
      let yinv = y.invert().unwrap();
      let yinvpow = ScalarVector::powers(yinv, MN);

      let w_cache = challenge_products(&w, &winv);

      let generators = GENERATORS();
      for i in 0 .. MN {
        let g = (Scalar(self.a) * w_cache[i]) + z;
        proof.push((-g, generators.G[i]));

        let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
        h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
        proof.push((-h, generators.H[i]));
      }
    }

    for i in 0 .. logMN {
      proof.push((w[i] * w[i], L[i]));
      proof.push((winv[i] * winv[i], R[i]));
    }
    verifier.queue(rng, id, proof);

    true
  }

  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
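The while loop in prove halves a and b each round with a' = aL*w + aR*w^-1, so logMN rounds reach length 1, and L/R gain one entry per round. A toy of that recursion over integers mod a small prime (a stand-in for the real scalar field, not part of this diff):

const P: i128 = 101;

// Fermat inversion: x^(P - 2) mod P
fn inv(x: i128) -> i128 {
  let (mut res, mut base, mut e) = (1, x % P, P - 2);
  while e > 0 {
    if e & 1 == 1 { res = res * base % P; }
    base = base * base % P;
    e >>= 1;
  }
  res
}

// One folding round: combine the two halves under challenge w
fn fold(v: &[i128], w: i128) -> Vec<i128> {
  let winv = inv(w);
  let half = v.len() / 2;
  (0 .. half).map(|i| (v[i] * w + v[half + i] * winv) % P).collect()
}

fn main() {
  let mut a = vec![3, 7, 9, 2];
  let mut rounds = 0;
  while a.len() > 1 {
    a = fold(&a, 5); // a fixed stand-in challenge
    rounds += 1;
  }
  assert_eq!(rounds, 2); // log2(4) rounds, hence 2 L/R pairs for length-4 vectors
}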
@@ -1,249 +0,0 @@
use std_shims::vec::Vec;

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
use group::{
  ff::{Field, PrimeField},
  Group, GroupEncoding,
};
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::{
  Commitment,
  ringct::{
    bulletproofs::core::{MAX_M, N},
    bulletproofs::plus::{
      ScalarVector, PointVector, GeneratorsList, Generators,
      transcript::*,
      weighted_inner_product::{WipStatement, WipWitness, WipProof},
      padded_pow_of_2, u64_decompose,
    },
  },
};

// Figure 3
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement {
  generators: Generators,
  V: Vec<EdwardsPoint>,
}

impl Zeroize for AggregateRangeStatement {
  fn zeroize(&mut self) {
    self.V.zeroize();
  }
}

#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AggregateRangeWitness {
  values: Vec<u64>,
  gammas: Vec<Scalar>,
}

impl AggregateRangeWitness {
  pub(crate) fn new(commitments: &[Commitment]) -> Option<Self> {
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return None;
    }

    let mut values = Vec::with_capacity(commitments.len());
    let mut gammas = Vec::with_capacity(commitments.len());
    for commitment in commitments {
      values.push(commitment.amount);
      gammas.push(Scalar(commitment.mask));
    }
    Some(AggregateRangeWitness { values, gammas })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
  pub(crate) A: EdwardsPoint,
  pub(crate) wip: WipProof,
}

impl AggregateRangeStatement {
  pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
    if V.is_empty() || (V.len() > MAX_M) {
      return None;
    }

    Some(Self { generators: Generators::new(), V })
  }

  fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
    let y = hash_to_scalar(&[transcript.to_repr().as_ref(), A.to_bytes().as_ref()].concat());
    let z = hash_to_scalar(y.to_bytes().as_ref());
    *transcript = z;
    (y, z)
  }

  fn d_j(j: usize, m: usize) -> ScalarVector {
    let mut d_j = Vec::with_capacity(m * N);
    for _ in 0 .. (j - 1) * N {
      d_j.push(Scalar::ZERO);
    }
    d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), N).0);
    for _ in 0 .. (m - j) * N {
      d_j.push(Scalar::ZERO);
    }
    ScalarVector(d_j)
  }

  fn compute_A_hat(
    mut V: PointVector,
    generators: &Generators,
    transcript: &mut Scalar,
    mut A: EdwardsPoint,
  ) -> (Scalar, ScalarVector, Scalar, Scalar, ScalarVector, EdwardsPoint) {
    let (y, z) = Self::transcript_A(transcript, A);
    A = A.mul_by_cofactor();

    while V.len() < padded_pow_of_2(V.len()) {
      V.0.push(EdwardsPoint::identity());
    }
    let mn = V.len() * N;

    let mut z_pow = Vec::with_capacity(V.len());

    let mut d = ScalarVector::new(mn);
    for j in 1 ..= V.len() {
      z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
      d = d.add_vec(&Self::d_j(j, V.len()).mul(z_pow[j - 1]));
    }

    let mut ascending_y = ScalarVector(vec![y]);
    for i in 1 .. d.len() {
      ascending_y.0.push(ascending_y[i - 1] * y);
    }
    let y_pows = ascending_y.clone().sum();

    let mut descending_y = ascending_y.clone();
    descending_y.0.reverse();

    let d_descending_y = d.mul_vec(&descending_y);

    let y_mn_plus_one = descending_y[0] * y;

    let mut commitment_accum = EdwardsPoint::identity();
    for (j, commitment) in V.0.iter().enumerate() {
      commitment_accum += *commitment * z_pow[j];
    }

    let neg_z = -z;
    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
    for (i, d_y_z) in d_descending_y.add(z).0.drain(..).enumerate() {
      A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
      A_terms.push((d_y_z, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((y_mn_plus_one, commitment_accum));
    A_terms.push((
      ((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * z.square())),
      generators.g(),
    ));

    (y, d_descending_y, y_mn_plus_one, z, ScalarVector(z_pow), A + multiexp_vartime(&A_terms))
  }

  pub(crate) fn prove<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    witness: AggregateRangeWitness,
  ) -> Option<AggregateRangeProof> {
    // Check for consistency with the witness
    if self.V.len() != witness.values.len() {
      return None;
    }
    for (commitment, (value, gamma)) in
      self.V.iter().zip(witness.values.iter().zip(witness.gammas.iter()))
    {
      if Commitment::new(**gamma, *value).calculate() != **commitment {
        return None;
      }
    }

    let Self { generators, V } = self;
    // Monero expects all of these points to be torsion-free
    // Generally, for Bulletproofs, it sends points * INV_EIGHT and then performs a torsion clear
    // by multiplying by 8
    // This also restores the original value due to the preprocessing
    // Commitments aren't transmitted * INV_EIGHT though, so this multiplies by INV_EIGHT to
    // enable clearing their cofactor without mutating the value
    // For some reason, these values are transcripted * INV_EIGHT, not as transmitted
    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    // Pad V
    while V.len() < padded_pow_of_2(V.len()) {
      V.push(EdwardsPoint::identity());
    }

    let generators = generators.reduce(V.len() * N);

    let mut d_js = Vec::with_capacity(V.len());
    let mut a_l = ScalarVector(Vec::with_capacity(V.len() * N));
    for j in 1 ..= V.len() {
      d_js.push(Self::d_j(j, V.len()));
      a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
    }

    let a_r = a_l.sub(Scalar::ONE);

    let alpha = Scalar::random(&mut *rng);

    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
    for (i, a_l) in a_l.0.iter().enumerate() {
      A_terms.push((*a_l, generators.generator(GeneratorsList::GBold1, i)));
    }
    for (i, a_r) in a_r.0.iter().enumerate() {
      A_terms.push((*a_r, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((alpha, generators.h()));
    let mut A = multiexp(&A_terms);
    A_terms.zeroize();

    // Multiply by INV_EIGHT per earlier commentary
    A.0 *= crate::INV_EIGHT();

    let (y, d_descending_y, y_mn_plus_one, z, z_pow, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);

    let a_l = a_l.sub(z);
    let a_r = a_r.add_vec(&d_descending_y).add(z);
    let mut alpha = alpha;
    for j in 1 ..= witness.gammas.len() {
      alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;
    }

    Some(AggregateRangeProof {
      A,
      wip: WipStatement::new(generators, A_hat, y)
        .prove(rng, transcript, WipWitness::new(a_l, a_r, alpha).unwrap())
        .unwrap(),
    })
  }

  pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    verifier: &mut BatchVerifier<Id, EdwardsPoint>,
    id: Id,
    proof: AggregateRangeProof,
  ) -> bool {
    let Self { generators, V } = self;

    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    let generators = generators.reduce(V.len() * N);

    let (y, _, _, _, _, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
    WipStatement::new(generators, A_hat, y).verify(rng, verifier, id, transcript, proof.wip)
  }
}
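d_j's shape is easier to see with plain integers: of m length-N blocks, block j (1-indexed) holds the powers of two and everything else is zero. A hypothetical sketch, with N shrunk to 4 for readability (not part of this diff):

fn d_j(j: usize, m: usize, n: usize) -> Vec<u64> {
  let mut v = vec![0u64; m * n];
  for i in 0 .. n {
    // Block j holds 1, 2, 4, ..., 2^(n - 1)
    v[((j - 1) * n) + i] = 1u64 << i;
  }
  v
}

fn main() {
  let v = d_j(2, 2, 4); // second block of two
  assert_eq!(v, vec![0, 0, 0, 0, 1, 2, 4, 8]);
}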
@@ -1,92 +0,0 @@
#![allow(non_snake_case)]

use group::Group;
use dalek_ff_group::{Scalar, EdwardsPoint};

mod scalar_vector;
pub(crate) use scalar_vector::{ScalarVector, weighted_inner_product};
mod point_vector;
pub(crate) use point_vector::PointVector;

pub(crate) mod transcript;
pub(crate) mod weighted_inner_product;
pub(crate) use weighted_inner_product::*;
pub(crate) mod aggregate_range_proof;
pub(crate) use aggregate_range_proof::*;

pub(crate) fn padded_pow_of_2(i: usize) -> usize {
  let mut next_pow_of_2 = 1;
  while next_pow_of_2 < i {
    next_pow_of_2 <<= 1;
  }
  next_pow_of_2
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub(crate) enum GeneratorsList {
  GBold1,
  HBold1,
}

// TODO: Table these
#[derive(Clone, Debug)]
pub(crate) struct Generators {
  g: EdwardsPoint,

  g_bold1: &'static [EdwardsPoint],
  h_bold1: &'static [EdwardsPoint],
}

mod generators {
  use std_shims::sync::OnceLock;
  use monero_generators::Generators;
  include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
}

impl Generators {
  #[allow(clippy::new_without_default)]
  pub(crate) fn new() -> Self {
    let gens = generators::GENERATORS();
    Generators { g: dalek_ff_group::EdwardsPoint(crate::H()), g_bold1: &gens.G, h_bold1: &gens.H }
  }

  pub(crate) fn len(&self) -> usize {
    self.g_bold1.len()
  }

  pub(crate) fn g(&self) -> EdwardsPoint {
    self.g
  }

  pub(crate) fn h(&self) -> EdwardsPoint {
    EdwardsPoint::generator()
  }

  pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
    match list {
      GeneratorsList::GBold1 => self.g_bold1[i],
      GeneratorsList::HBold1 => self.h_bold1[i],
    }
  }

  pub(crate) fn reduce(&self, generators: usize) -> Self {
    // Round up to the next power of 2
    let generators = padded_pow_of_2(generators);
    assert!(generators <= self.g_bold1.len());

    Generators {
      g: self.g,
      g_bold1: &self.g_bold1[.. generators],
      h_bold1: &self.h_bold1[.. generators],
    }
  }
}

// Returns the little-endian decomposition.
fn u64_decompose(value: u64) -> ScalarVector {
  let mut bits = ScalarVector::new(64);
  for bit in 0 .. 64 {
    bits[bit] = Scalar::from((value >> bit) & 1);
  }
  bits
}
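u64_decompose above just spreads the value's bits out, least-significant first. The same over plain u64, as a sketch (not part of this diff):

fn bits(value: u64) -> Vec<u64> {
  (0 .. 64).map(|bit| (value >> bit) & 1).collect()
}

fn main() {
  let b = bits(0b1011);
  assert_eq!(b[.. 4], [1, 1, 0, 1]); // little-endian: LSB first
}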
@@ -1,50 +0,0 @@
use core::ops::{Index, IndexMut};
use std_shims::vec::Vec;

use zeroize::{Zeroize, ZeroizeOnDrop};

use dalek_ff_group::EdwardsPoint;

#[cfg(test)]
use multiexp::multiexp;
#[cfg(test)]
use crate::ringct::bulletproofs::plus::ScalarVector;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);

impl Index<usize> for PointVector {
  type Output = EdwardsPoint;
  fn index(&self, index: usize) -> &EdwardsPoint {
    &self.0[index]
  }
}

impl IndexMut<usize> for PointVector {
  fn index_mut(&mut self, index: usize) -> &mut EdwardsPoint {
    &mut self.0[index]
  }
}

impl PointVector {
  #[cfg(test)]
  pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = Vec::with_capacity(self.len());
    for (point, scalar) in self.0.iter().copied().zip(vector.0.iter().copied()) {
      res.push((scalar, point));
    }
    multiexp(&res)
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(mut self) -> (Self, Self) {
    debug_assert!(self.len() > 1);
    let r = self.0.split_off(self.0.len() / 2);
    debug_assert_eq!(self.len(), r.len());
    (self, PointVector(r))
  }
}
@@ -1,114 +0,0 @@
use core::{
  borrow::Borrow,
  ops::{Index, IndexMut},
};
use std_shims::vec::Vec;

use zeroize::Zeroize;

use group::ff::Field;
use dalek_ff_group::Scalar;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);

impl Index<usize> for ScalarVector {
  type Output = Scalar;
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}

impl IndexMut<usize> for ScalarVector {
  fn index_mut(&mut self, index: usize) -> &mut Scalar {
    &mut self.0[index]
  }
}

impl ScalarVector {
  pub(crate) fn new(len: usize) -> Self {
    ScalarVector(vec![Scalar::ZERO; len])
  }

  pub(crate) fn add(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val += scalar.borrow();
    }
    res
  }

  pub(crate) fn sub(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val -= scalar.borrow();
    }
    res
  }

  pub(crate) fn mul(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val *= scalar.borrow();
    }
    res
  }

  pub(crate) fn add_vec(&self, vector: &Self) -> Self {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = self.clone();
    for (i, val) in res.0.iter_mut().enumerate() {
      *val += vector.0[i];
    }
    res
  }

  pub(crate) fn mul_vec(&self, vector: &Self) -> Self {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = self.clone();
    for (i, val) in res.0.iter_mut().enumerate() {
      *val *= vector.0[i];
    }
    res
  }

  pub(crate) fn inner_product(&self, vector: &Self) -> Scalar {
    self.mul_vec(vector).sum()
  }

  pub(crate) fn powers(x: Scalar, len: usize) -> Self {
    debug_assert!(len != 0);

    let mut res = Vec::with_capacity(len);
    res.push(Scalar::ONE);
    res.push(x);
    for i in 2 .. len {
      res.push(res[i - 1] * x);
    }
    res.truncate(len);
    ScalarVector(res)
  }

  pub(crate) fn sum(mut self) -> Scalar {
    self.0.drain(..).sum()
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(mut self) -> (Self, Self) {
    debug_assert!(self.len() > 1);
    let r = self.0.split_off(self.0.len() / 2);
    debug_assert_eq!(self.len(), r.len());
    (self, ScalarVector(r))
  }
}

pub(crate) fn weighted_inner_product(
  a: &ScalarVector,
  b: &ScalarVector,
  y: &ScalarVector,
) -> Scalar {
  a.inner_product(&b.mul_vec(y))
}
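weighted_inner_product computes the inner product of a with b weighted elementwise by y: sum over i of a_i * b_i * y_i. The same over plain integers, as a quick sanity check (a hypothetical sketch, not part of this diff):

fn wip(a: &[i64], b: &[i64], y: &[i64]) -> i64 {
  a.iter().zip(b).zip(y).map(|((a, b), y)| a * b * y).sum()
}

fn main() {
  // (1 * 3 * 1) + (2 * 4 * 10) = 83
  assert_eq!(wip(&[1, 2], &[3, 4], &[1, 10]), 83);
}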
@@ -1,24 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec};

use dalek_ff_group::{Scalar, EdwardsPoint};

use monero_generators::hash_to_point as raw_hash_to_point;
use crate::{hash, hash_to_scalar as dalek_hash};

// Monero starts BP+ transcripts with the following constant.
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
  // Why this uses a hash_to_point is completely unknown.
  *TRANSCRIPT_CELL
    .get_or_init(|| raw_hash_to_point(hash(b"bulletproof_plus_transcript")).compress().to_bytes())
}

pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}

pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
  let commitments_hash =
    hash_to_scalar(&commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
  hash_to_scalar(&[TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
}
@@ -1,447 +0,0 @@
use std_shims::vec::Vec;

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
use group::{
  ff::{Field, PrimeField},
  GroupEncoding,
};
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::ringct::bulletproofs::plus::{
  ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, weighted_inner_product,
  transcript::*,
};

// Figure 1
#[derive(Clone, Debug)]
pub(crate) struct WipStatement {
  generators: Generators,
  P: EdwardsPoint,
  y: ScalarVector,
}

impl Zeroize for WipStatement {
  fn zeroize(&mut self) {
    self.P.zeroize();
    self.y.zeroize();
  }
}

#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct WipWitness {
  a: ScalarVector,
  b: ScalarVector,
  alpha: Scalar,
}

impl WipWitness {
  pub(crate) fn new(mut a: ScalarVector, mut b: ScalarVector, alpha: Scalar) -> Option<Self> {
    if a.0.is_empty() || (a.len() != b.len()) {
      return None;
    }

    // Pad to the nearest power of 2
    let missing = padded_pow_of_2(a.len()) - a.len();
    a.0.reserve(missing);
    b.0.reserve(missing);
    for _ in 0 .. missing {
      a.0.push(Scalar::ZERO);
      b.0.push(Scalar::ZERO);
    }

    Some(Self { a, b, alpha })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct WipProof {
  pub(crate) L: Vec<EdwardsPoint>,
  pub(crate) R: Vec<EdwardsPoint>,
  pub(crate) A: EdwardsPoint,
  pub(crate) B: EdwardsPoint,
  pub(crate) r_answer: Scalar,
  pub(crate) s_answer: Scalar,
  pub(crate) delta_answer: Scalar,
}

impl WipStatement {
  pub(crate) fn new(generators: Generators, P: EdwardsPoint, y: Scalar) -> Self {
    debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));

    // y ** n
    let mut y_vec = ScalarVector::new(generators.len());
    y_vec[0] = y;
    for i in 1 .. y_vec.len() {
      y_vec[i] = y_vec[i - 1] * y;
    }

    Self { generators, P, y: y_vec }
  }

  fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
    let e = hash_to_scalar(
      &[transcript.to_repr().as_ref(), L.to_bytes().as_ref(), R.to_bytes().as_ref()].concat(),
    );
    *transcript = e;
    e
  }

  fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
    let e = hash_to_scalar(
      &[transcript.to_repr().as_ref(), A.to_bytes().as_ref(), B.to_bytes().as_ref()].concat(),
    );
    *transcript = e;
    e
  }

  // Prover's variant of the shared code block to calculate G/H/P when n > 1
  // Returns each permutation of G/H since the prover needs to do operations on each permutation
  // P is dropped as it's unused in the prover's path
  // TODO: It'd still probably be faster to keep in terms of the original generators, both between
  // the reduced amount of group operations and the potential tabling of the generators under
  // multiexp
  #[allow(clippy::too_many_arguments)]
  fn next_G_H(
    transcript: &mut Scalar,
    mut g_bold1: PointVector,
    mut g_bold2: PointVector,
    mut h_bold1: PointVector,
    mut h_bold2: PointVector,
    L: EdwardsPoint,
    R: EdwardsPoint,
    y_inv_n_hat: Scalar,
  ) -> (Scalar, Scalar, Scalar, Scalar, PointVector, PointVector) {
    debug_assert_eq!(g_bold1.len(), g_bold2.len());
    debug_assert_eq!(h_bold1.len(), h_bold2.len());
    debug_assert_eq!(g_bold1.len(), h_bold1.len());

    let e = Self::transcript_L_R(transcript, L, R);
    let inv_e = e.invert().unwrap();

    // This vartime is safe as all of these arguments are public
    let mut new_g_bold = Vec::with_capacity(g_bold1.len());
    let e_y_inv = e * y_inv_n_hat;
    for g_bold in g_bold1.0.drain(..).zip(g_bold2.0.drain(..)) {
      new_g_bold.push(multiexp_vartime(&[(inv_e, g_bold.0), (e_y_inv, g_bold.1)]));
    }

    let mut new_h_bold = Vec::with_capacity(h_bold1.len());
    for h_bold in h_bold1.0.drain(..).zip(h_bold2.0.drain(..)) {
      new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
    }

    let e_square = e.square();
    let inv_e_square = inv_e.square();

    (e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
  }

  /*
    This has room for optimization worth investigating further. It currently takes
    an iterative approach. It can be optimized further via divide and conquer.

    Assume there are 4 challenges.

    Iterative approach (current):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across that result and column 2.
      3. Do the optimal multiplications across that result and column 3.

    Divide and conquer (worth investigating further):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across challenge column 2 and 3.
      3. Multiply both results together.

    When there are 4 challenges (n = 16), the iterative approach does 28 multiplications
    versus divide and conquer's 24.
  */
  fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
    let mut products = vec![Scalar::ONE; 1 << challenges.len()];

    if !challenges.is_empty() {
      products[0] = challenges[0].1;
      products[1] = challenges[0].0;

      for (j, challenge) in challenges.iter().enumerate().skip(1) {
        let mut slots = (1 << (j + 1)) - 1;
        while slots > 0 {
          products[slots] = products[slots / 2] * challenge.0;
          products[slots - 1] = products[slots / 2] * challenge.1;

          slots = slots.saturating_sub(2);
        }
      }

      // Sanity check since if the above failed to populate, it'd be critical
      for product in &products {
        debug_assert!(!bool::from(product.is_zero()));
      }
    }

    products
  }
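challenge_products builds, for every index, the product of each round challenge or its inverse, selected by that index's bits; the comment above weighs iterative versus divide-and-conquer orderings for populating that table. A toy run over integers mod 101 (a stand-in for the scalar field, not part of this diff; values checked by hand):

const P: i128 = 101;

// Fermat inversion: x^(P - 2) mod P
fn inv(x: i128) -> i128 {
  let (mut res, mut base, mut e) = (1, x % P, P - 2);
  while e > 0 {
    if e & 1 == 1 { res = res * base % P; }
    base = base * base % P;
    e >>= 1;
  }
  res
}

// Same population order as the function above, just over toy integers
fn challenge_products(challenges: &[(i128, i128)]) -> Vec<i128> {
  let mut products = vec![1; 1 << challenges.len()];
  if !challenges.is_empty() {
    products[0] = challenges[0].1;
    products[1] = challenges[0].0;
    for (j, challenge) in challenges.iter().enumerate().skip(1) {
      let mut slots = (1 << (j + 1)) - 1;
      while slots > 0 {
        products[slots] = products[slots / 2] * challenge.0 % P;
        products[slots - 1] = products[slots / 2] * challenge.1 % P;
        slots = slots.saturating_sub(2);
      }
    }
  }
  products
}

fn main() {
  let (w0, w1) = (2, 3);
  let products = challenge_products(&[(w0, inv(w0)), (w1, inv(w1))]);
  // [ (w0 * w1)^-1, w1 / w0, w0 / w1, w0 * w1 ] = [inv(6), 3/2, 2/3, 6] mod 101
  assert_eq!(products, vec![17, 52, 68, 6]);
}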

  pub(crate) fn prove<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    mut transcript: Scalar,
    witness: WipWitness,
  ) -> Option<WipProof> {
    let WipStatement { generators, P, mut y } = self;
    #[cfg(not(debug_assertions))]
    let _ = P;

    if generators.len() != witness.a.len() {
      return None;
    }
    let (g, h) = (generators.g(), generators.h());
    let mut g_bold = vec![];
    let mut h_bold = vec![];
    for i in 0 .. generators.len() {
      g_bold.push(generators.generator(GeneratorsList::GBold1, i));
      h_bold.push(generators.generator(GeneratorsList::HBold1, i));
    }
    let mut g_bold = PointVector(g_bold);
    let mut h_bold = PointVector(h_bold);

    // Check P has the expected relationship
    #[cfg(debug_assertions)]
    {
      let mut P_terms = witness
        .a
        .0
        .iter()
        .copied()
        .zip(g_bold.0.iter().copied())
        .chain(witness.b.0.iter().copied().zip(h_bold.0.iter().copied()))
        .collect::<Vec<_>>();
      P_terms.push((weighted_inner_product(&witness.a, &witness.b, &y), g));
      P_terms.push((witness.alpha, h));
      debug_assert_eq!(multiexp(&P_terms), P);
      P_terms.zeroize();
    }

    let mut a = witness.a.clone();
    let mut b = witness.b.clone();
    let mut alpha = witness.alpha;

    // From here on, g_bold.len() is used as n
    debug_assert_eq!(g_bold.len(), a.len());

    let mut L_vec = vec![];
    let mut R_vec = vec![];

    // else n > 1 case from figure 1
    while g_bold.len() > 1 {
      let (a1, a2) = a.clone().split();
      let (b1, b2) = b.clone().split();
      let (g_bold1, g_bold2) = g_bold.split();
      let (h_bold1, h_bold2) = h_bold.split();

      let n_hat = g_bold1.len();
      debug_assert_eq!(a1.len(), n_hat);
      debug_assert_eq!(a2.len(), n_hat);
      debug_assert_eq!(b1.len(), n_hat);
      debug_assert_eq!(b2.len(), n_hat);
      debug_assert_eq!(g_bold1.len(), n_hat);
      debug_assert_eq!(g_bold2.len(), n_hat);
      debug_assert_eq!(h_bold1.len(), n_hat);
      debug_assert_eq!(h_bold2.len(), n_hat);

      let y_n_hat = y[n_hat - 1];
      y.0.truncate(n_hat);

      let d_l = Scalar::random(&mut *rng);
      let d_r = Scalar::random(&mut *rng);

      let c_l = weighted_inner_product(&a1, &b2, &y);
      let c_r = weighted_inner_product(&(a2.mul(y_n_hat)), &b1, &y);

      // TODO: Calculate these with a batch inversion
      let y_inv_n_hat = y_n_hat.invert().unwrap();

      let mut L_terms = a1
        .mul(y_inv_n_hat)
        .0
        .drain(..)
        .zip(g_bold2.0.iter().copied())
        .chain(b2.0.iter().copied().zip(h_bold1.0.iter().copied()))
        .collect::<Vec<_>>();
      L_terms.push((c_l, g));
      L_terms.push((d_l, h));
      let L = multiexp(&L_terms) * Scalar(crate::INV_EIGHT());
      L_vec.push(L);
      L_terms.zeroize();

      let mut R_terms = a2
        .mul(y_n_hat)
        .0
        .drain(..)
        .zip(g_bold1.0.iter().copied())
        .chain(b1.0.iter().copied().zip(h_bold2.0.iter().copied()))
        .collect::<Vec<_>>();
      R_terms.push((c_r, g));
      R_terms.push((d_r, h));
      let R = multiexp(&R_terms) * Scalar(crate::INV_EIGHT());
      R_vec.push(R);
      R_terms.zeroize();

      let (e, inv_e, e_square, inv_e_square);
      (e, inv_e, e_square, inv_e_square, g_bold, h_bold) =
        Self::next_G_H(&mut transcript, g_bold1, g_bold2, h_bold1, h_bold2, L, R, y_inv_n_hat);

      a = a1.mul(e).add_vec(&a2.mul(y_n_hat * inv_e));
      b = b1.mul(inv_e).add_vec(&b2.mul(e));
      alpha += (d_l * e_square) + (d_r * inv_e_square);

      debug_assert_eq!(g_bold.len(), a.len());
      debug_assert_eq!(g_bold.len(), h_bold.len());
      debug_assert_eq!(g_bold.len(), b.len());
    }

    // n == 1 case from figure 1
    debug_assert_eq!(g_bold.len(), 1);
    debug_assert_eq!(h_bold.len(), 1);

    debug_assert_eq!(a.len(), 1);
    debug_assert_eq!(b.len(), 1);

    let r = Scalar::random(&mut *rng);
    let s = Scalar::random(&mut *rng);
    let delta = Scalar::random(&mut *rng);
    let eta = Scalar::random(&mut *rng);

    let ry = r * y[0];

    let mut A_terms =
      vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
||||
let A = multiexp(&A_terms) * Scalar(crate::INV_EIGHT());
|
||||
A_terms.zeroize();
|
||||
|
||||
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
||||
let B = multiexp(&B_terms) * Scalar(crate::INV_EIGHT());
|
||||
B_terms.zeroize();
|
||||
|
||||
let e = Self::transcript_A_B(&mut transcript, A, B);
|
||||
|
||||
let r_answer = r + (a[0] * e);
|
||||
let s_answer = s + (b[0] * e);
|
||||
let delta_answer = eta + (delta * e) + (alpha * e.square());
|
||||
|
||||
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
||||
}
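
  // Round invariant: with challenge e, the verifier folds the commitment as
  // P' = (e^2) L + P + (1/e^2) R (the terms queued in verify below), while the
  // prover folds a' = e a1 + (y_n_hat / e) a2, b' = (1/e) b1 + e b2, and
  // alpha' = alpha + (d_l e^2) + (d_r / e^2), halving the vector length while
  // preserving the weighted inner-product relation.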

  pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    verifier: &mut BatchVerifier<Id, EdwardsPoint>,
    id: Id,
    mut transcript: Scalar,
    mut proof: WipProof,
  ) -> bool {
    let WipStatement { generators, P, y } = self;

    let (g, h) = (generators.g(), generators.h());

    // Verify the L/R lengths
    {
      let mut lr_len = 0;
      while (1 << lr_len) < generators.len() {
        lr_len += 1;
      }
      if (proof.L.len() != lr_len) ||
        (proof.R.len() != lr_len) ||
        (generators.len() != (1 << lr_len))
      {
        return false;
      }
    }

    let inv_y = {
      let inv_y = y[0].invert().unwrap();
      let mut res = Vec::with_capacity(y.len());
      res.push(inv_y);
      while res.len() < y.len() {
        res.push(inv_y * res.last().unwrap());
      }
      res
    };
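    // inv_y now holds [1/y, 1/y^2, .., 1/y^n]; it's applied below to divide
    // the g_bold scalar at index i by y^i, undoing the y-weighting on those
    // terms.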

    let mut P_terms = vec![(Scalar::ONE, P)];
    P_terms.reserve(6 + (2 * generators.len()) + proof.L.len());

    let mut challenges = Vec::with_capacity(proof.L.len());
    let product_cache = {
      let mut es = Vec::with_capacity(proof.L.len());
      for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
        es.push(Self::transcript_L_R(&mut transcript, *L, *R));
        *L = L.mul_by_cofactor();
        *R = R.mul_by_cofactor();
      }

      let mut inv_es = es.clone();
      let mut scratch = vec![Scalar::ZERO; es.len()];
      group::ff::BatchInverter::invert_with_external_scratch(&mut inv_es, &mut scratch);
      drop(scratch);

      debug_assert_eq!(es.len(), inv_es.len());
      debug_assert_eq!(es.len(), proof.L.len());
      debug_assert_eq!(es.len(), proof.R.len());
      for ((e, inv_e), (L, R)) in
        es.drain(..).zip(inv_es.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
      {
        debug_assert_eq!(e.invert().unwrap(), inv_e);

        challenges.push((e, inv_e));

        let e_square = e.square();
        let inv_e_square = inv_e.square();
        P_terms.push((e_square, *L));
        P_terms.push((inv_e_square, *R));
      }

      Self::challenge_products(&challenges)
    };

    let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
    proof.A = proof.A.mul_by_cofactor();
    proof.B = proof.B.mul_by_cofactor();
    let neg_e_square = -e.square();

    let mut multiexp = P_terms;
    multiexp.reserve(4 + (2 * generators.len()));
    for (scalar, _) in multiexp.iter_mut() {
      *scalar *= neg_e_square;
    }

    let re = proof.r_answer * e;
    for i in 0 .. generators.len() {
      let mut scalar = product_cache[i] * re;
      if i > 0 {
        scalar *= inv_y[i - 1];
      }
      multiexp.push((scalar, generators.generator(GeneratorsList::GBold1, i)));
    }

    let se = proof.s_answer * e;
    for i in 0 .. generators.len() {
      multiexp.push((
        se * product_cache[product_cache.len() - 1 - i],
        generators.generator(GeneratorsList::HBold1, i),
      ));
    }

    multiexp.push((-e, proof.A));
    multiexp.push((proof.r_answer * y[0] * proof.s_answer, g));
    multiexp.push((proof.delta_answer, h));
    multiexp.push((-Scalar::ONE, proof.B));

    verifier.queue(rng, id, multiexp);

    true
  }
}
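
// A hedged usage sketch (not part of this diff). It assumes a constructor
// along the lines of `WipStatement::new(generators, P, y)` and multiexp's
// `BatchVerifier::new` / `verify_vartime`; none of those appear in this hunk,
// so treat the names as assumptions:
//
//   let statement = WipStatement::new(generators, P, y);
//   let proof = statement.clone().prove(&mut rng, transcript, witness).unwrap();
//   let mut verifier = BatchVerifier::new(1);
//   assert!(statement.verify(&mut rng, &mut verifier, 0, transcript, proof));
//   assert!(verifier.verify_vartime());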

@@ -1,114 +0,0 @@
use core::ops::{Add, Sub, Mul, Index};
use std_shims::vec::Vec;

use zeroize::{Zeroize, ZeroizeOnDrop};

use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
  ($Op: ident, $op: ident, $f: expr) => {
    #[allow(clippy::redundant_closure_call)]
    impl $Op<Scalar> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    #[allow(clippy::redundant_closure_call)]
    impl $Op<Scalar> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    #[allow(clippy::redundant_closure_call)]
    impl $Op<ScalarVector> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }

    #[allow(clippy::redundant_closure_call)]
    impl $Op<&ScalarVector> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: &ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }
  };
}
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);

impl ScalarVector {
  pub(crate) fn new(len: usize) -> ScalarVector {
    ScalarVector(vec![Scalar::ZERO; len])
  }

  pub(crate) fn powers(x: Scalar, len: usize) -> ScalarVector {
    debug_assert!(len != 0);

    let mut res = Vec::with_capacity(len);
    res.push(Scalar::ONE);
    for i in 1 .. len {
      res.push(res[i - 1] * x);
    }
    ScalarVector(res)
  }
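  // For instance, powers(x, 4) yields [1, x, x^2, x^3].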

  pub(crate) fn sum(mut self) -> Scalar {
    self.0.drain(..).sum()
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(self) -> (ScalarVector, ScalarVector) {
    let (l, r) = self.0.split_at(self.0.len() / 2);
    (ScalarVector(l.to_vec()), ScalarVector(r.to_vec()))
  }
}

impl Index<usize> for ScalarVector {
  type Output = Scalar;
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}

pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
  (a * b).sum()
}
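
// The WIP proof above additionally uses a y-weighted variant,
// weighted_inner_product, defined outside this hunk; given the powers-of-y
// vector it receives, it presumably computes the sum over i of a_i * b_i * y_i.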

impl Mul<&[EdwardsPoint]> for &ScalarVector {
  type Output = EdwardsPoint;
  fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
    debug_assert_eq!(self.len(), b.len());
    multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
  }
}

pub(crate) fn hadamard_fold(
  l: &[EdwardsPoint],
  r: &[EdwardsPoint],
  a: Scalar,
  b: Scalar,
) -> Vec<EdwardsPoint> {
  let mut res = Vec::with_capacity(l.len() / 2);
  for i in 0 .. l.len() {
    res.push(multiexp(&[(a, l[i]), (b, r[i])]));
  }
  res
}
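
// hadamard_fold applies the same two-term fold as next_G_H, producing
// a*l[i] + b*r[i] for each pair of points; with (a, b) set to a challenge and
// its inverse, this is the generator fold used by inner-product arguments.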