57 Commits

Author SHA1 Message Date
Luke Parker
9f7dbf2132 Handle Monero fee logic properly in the processor 2024-07-06 18:46:41 -04:00
Luke Parker
6357bc0ed4 Remove unused dep from processor 2024-07-06 04:27:30 -04:00
Luke Parker
2334725ec8 Correct the accidental swap of stagenet/testnet address bytes 2024-07-06 04:26:44 -04:00
Luke Parker
0631607b8f Tidy inlined epee code in the RPC 2024-07-06 04:21:06 -04:00
Luke Parker
d847ec5efb Reject torsioned spend keys to ensure we can spend the outputs we scan 2024-07-06 03:48:45 -04:00
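(A minimal sketch of the torsion check this commit describes, using curve25519-dalek's `is_torsion_free`; the surrounding parsing flow is illustrative, not monero-wallet's actual API.)

```rust
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};

// Hedged sketch: when parsing a spend key, reject any point carrying a
// torsion component, since outputs to such keys can't necessarily be spent.
fn parse_spend_key(bytes: &[u8; 32]) -> Option<EdwardsPoint> {
  let point = CompressedEdwardsY(*bytes).decompress()?;
  // A point is torsion-free iff multiplying it by the group order yields
  // the identity; curve25519-dalek performs this check efficiently.
  if !point.is_torsion_free() {
    return None;
  }
  Some(point)
}

fn main() {
  // The canonical generator is torsion-free and parses fine.
  let g = curve25519_dalek::constants::ED25519_BASEPOINT_COMPRESSED;
  assert!(parse_spend_key(&g.to_bytes()).is_some());
}
```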
Luke Parker
b2c962cd3e Fix remaining bugs in monero-wallet tests 2024-07-06 03:24:38 -04:00
Luke Parker
788c4fc0a7 Fix a pair of bugs in the decoy tests
Unfortunately, this test is still failing.
2024-07-06 01:58:51 -04:00
Luke Parker
04df229df1 Have Monero test runner evaluate an Eventuality for all signed TXs 2024-07-06 01:58:41 -04:00
Luke Parker
1f5e5fc7ac Correct misc TODOs in monero-serai 2024-07-05 23:30:02 -04:00
Luke Parker
90880cc9c8 fmt, machete, GH CI 2024-07-05 13:55:15 -04:00
Luke Parker
e94a04d47c Restore the reserialize chain binary 2024-07-04 03:17:35 -04:00
Luke Parker
0f9a5afa07 Remove possible panic in monero-serai on systems < 32 bits
This was done by requiring that the system's usize can represent a certain number.
2024-07-04 02:27:16 -04:00
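(A minimal sketch of the pattern described, assuming the bound is that usize can hold a u32; the exact number monero-serai requires isn't shown here.)

```rust
// Illustrative only: reject targets whose usize is under 32 bits at compile
// time. The actual bound monero-serai checks is an assumption here; this
// just shows the "require usize can represent N" pattern.
const _: () = assert!(
  core::mem::size_of::<usize>() >= core::mem::size_of::<u32>(),
  "this crate requires usize to be at least 32 bits",
);

fn main() {
  // On a compliant target, u32 lengths convert to usize infallibly.
  let len: u32 = 1_000_000;
  let _as_usize: usize = len.try_into().expect("usize >= 32 bits");
}
```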
Luke Parker
3de89c717d Correct imports for no-std 2024-07-04 02:18:38 -04:00
Luke Parker
08169e29bb Finish documenting monero-serai 2024-07-04 02:18:37 -04:00
Luke Parker
b56c6fb39e Remove non-small-order view key bound
Guaranteed addresses are in fact guaranteed even with this, as the prefixing of key
images means zeroing the ECDH doesn't zero the shared key.
2024-07-04 02:18:37 -04:00
Luke Parker
daa0f8f7d5 Diversify ViewPair/Scanner into ViewPair/GuaranteedViewPair and Scanner/GuaranteedScanner
Also cleans the Scanner impl.
2024-07-04 02:18:37 -04:00
Luke Parker
64e74c52ec Represent height/block number as a u32 2024-07-04 02:18:37 -04:00
Luke Parker
06246618ab Sync rest of repo with monero-serai changes 2024-07-04 02:18:37 -04:00
Luke Parker
69e077bf7a Clean and document monero-address 2024-07-04 02:18:37 -04:00
Luke Parker
8319d219d7 Update monero-wallet tests to compile
Some are _consistently_ failing due to the inputs we attempt to spend being too
young. I'm unsure what's up with that. Most seem to pass _consistently_,
implying it's not a random issue but rather some configuration/env aspect.
2024-07-04 02:18:37 -04:00
Luke Parker
891362a710 Correct invalid RPC type def in monero-rpc 2024-07-04 02:18:37 -04:00
Luke Parker
08d604fcb3 Restore Monero multisig TX code 2024-07-04 02:18:37 -04:00
Luke Parker
abd48e9206 Add SignableTransaction Read/Write 2024-07-04 02:18:37 -04:00
Luke Parker
70c36ed06c Rewrite monero-wallet's send code
I have yet to redo the multisig code and the builder. This should be much
cleaner, albeit slower due to redoing work.

This compiles with clippy --all-features. I have to finish the multisig/builder
for --all-targets to work (and start updating the rest of Serai).
2024-07-04 02:18:37 -04:00
Luke Parker
b3b0edb82f Fix deserializing v2 miner transactions 2024-07-04 02:18:37 -04:00
Luke Parker
0f477537a0 Document cargo features
Credit to @hinto-janai for adding such sections to their work on documenting
monero-serai in #568.
2024-07-04 02:18:37 -04:00
Luke Parker
eb0c19bfff Smash out Monero addresses 2024-07-04 02:18:37 -04:00
Luke Parker
0b20004ba1 Get the repo to compile again 2024-07-04 02:18:37 -04:00
Luke Parker
11dba9173f Smash out seed 2024-07-04 02:18:37 -04:00
Luke Parker
1e2e3bd5ce Smash out polyseed 2024-07-04 02:18:37 -04:00
Luke Parker
df095f027f Only read transactions with one Input::Gen or all Input::ToKey
Also adds a helper to fetch a transaction's prefix.
2024-07-04 02:18:37 -04:00
Luke Parker
6fc8b30df2 Remove TODO on reading pseudo_outs for AggregateMlsagBorromean 2024-07-04 02:18:37 -04:00
Luke Parker
74aaac46ef Finish documenting monero-serai 2024-07-04 02:18:37 -04:00
Luke Parker
1db40914eb Incomplete work on using Option to remove panic cases 2024-07-04 02:18:37 -04:00
Luke Parker
b5b9d4a871 Move Protocol to monero-wallet 2024-07-04 02:18:37 -04:00
Luke Parker
6f61861d4b Improve docs a bit 2024-07-04 02:18:37 -04:00
Luke Parker
08b95abdd8 Document the RPC 2024-07-04 02:18:37 -04:00
Luke Parker
d740bd2924 Smash out RPC, wallet 2024-07-04 02:18:37 -04:00
Luke Parker
3a1c6c7247 Tidy up monero-serai as a meta crate 2024-07-04 02:18:37 -04:00
Luke Parker
3e82ee60b3 Smash out Borromean 2024-07-04 02:18:37 -04:00
Luke Parker
303e72c844 Smash out MLSAG 2024-07-04 02:18:37 -04:00
Luke Parker
60d5c06ac3 Error if missing documentation 2024-07-04 02:18:36 -04:00
Luke Parker
77a2496ade Tidy and document monero-bulletproofs
I still don't like the impl of the original Bulletproofs...
2024-07-04 02:18:36 -04:00
Luke Parker
d9107b53a6 Correct no-std builds for monero-clsag and monero-bulletproofs 2024-07-04 02:18:36 -04:00
Luke Parker
f7c13fd1ca Smash out monero-bulletproofs
Removes usage of dalek-ff-group/multiexp for curve25519-dalek.

Makes compiling in the generators an optional feature.

Adds a structured batch verifier which should be notably more performant.

Documentation and clean up still necessary.
2024-07-04 02:18:36 -04:00
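(The structured batch verifier itself isn't shown in this log; below is a hedged sketch of the general technique, a random linear combination of verification statements checked with one multiscalar multiplication, using curve25519-dalek as the commit does. Names and structure are illustrative, not monero-bulletproofs' actual API.)

```rust
use curve25519_dalek::{
  edwards::EdwardsPoint,
  scalar::Scalar,
  traits::{Identity, VartimeMultiscalarMul},
};
use rand_core::{OsRng, RngCore};

// Illustrative batch verifier, not monero-bulletproofs' actual API.
#[derive(Default)]
struct BatchVerifier {
  scalars: Vec<Scalar>,
  points: Vec<EdwardsPoint>,
}

impl BatchVerifier {
  // Queue one proof's verification statement, sum(s_i * P_i) == identity,
  // scaled by a fresh random weight so distinct proofs can't cancel out.
  fn queue(&mut self, statement: impl IntoIterator<Item = (Scalar, EdwardsPoint)>) {
    let mut wide = [0; 64];
    OsRng.fill_bytes(&mut wide);
    let weight = Scalar::from_bytes_mod_order_wide(&wide);
    for (scalar, point) in statement {
      self.scalars.push(weight * scalar);
      self.points.push(point);
    }
  }

  // All queued statements hold (with overwhelming probability) iff the
  // single combined multiexponentiation is the identity.
  fn verify(self) -> bool {
    EdwardsPoint::vartime_multiscalar_mul(&self.scalars, &self.points) ==
      EdwardsPoint::identity()
  }
}

fn main() {
  let mut batch = BatchVerifier::default();
  // Queue a trivially true statement: 1 * identity == identity.
  batch.queue([(Scalar::ONE, EdwardsPoint::identity())]);
  assert!(batch.verify());
}
```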
Luke Parker
798ffc9b28 Add a dedicated send/recv CLSAG mask struct
Abstracts the types used internally.

Also moves the tests from monero-serai to monero-clsag.
2024-07-04 02:18:36 -04:00
Luke Parker
865dee80e5 Document and clean clsag 2024-07-04 02:18:36 -04:00
Luke Parker
9c217913e6 Further documentation, start shoring up API boundaries of existing crates 2024-07-04 02:18:36 -04:00
Luke Parker
784a273747 Begin crate smashing 2024-07-04 02:18:36 -04:00
Luke Parker
5cdae6eeb8 Various RingCT doc comments 2024-07-04 02:18:36 -04:00
Luke Parker
a1d1de0c9c Move amount_decryption into EncryptedAmount::decrypt 2024-07-04 02:18:36 -04:00
Luke Parker
d2a27dc1e5 Remove experimental feature from monero-serai 2024-07-04 02:18:36 -04:00
Luke Parker
c165c36777 Tidy Borromean/MLSAG a tad 2024-07-04 02:18:36 -04:00
Luke Parker
f1ad768859 Remove the distribution cache
It's a notable bandwidth/performance improvement, yet it's not ready. We need a
dedicated Distribution struct which is managed by the wallet and passed in.
While we can do that now, it's not currently worth the effort.
2024-07-04 02:18:36 -04:00
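(A hypothetical sketch of such a wallet-managed Distribution struct; field and method names are assumptions, not a committed design.)

```rust
// Hypothetical sketch: the wallet, not the library, owns the cached output
// distribution and passes it in, requesting only the missing tail from the
// RPC. Field and method names are assumptions.
pub struct Distribution {
  // Cumulative count of RingCT outputs as of each block.
  cumulative: Vec<u64>,
}

impl Distribution {
  pub fn new() -> Self {
    Distribution { cumulative: vec![] }
  }
  // How many blocks the cache covers; the wallet fetches entries from here onward.
  pub fn cached_to(&self) -> usize {
    self.cumulative.len()
  }
  // Append newly fetched entries for the blocks past `cached_to`.
  pub fn extend(&mut self, new_entries: impl IntoIterator<Item = u64>) {
    self.cumulative.extend(new_entries);
  }
}

fn main() {
  let mut dist = Distribution::new();
  dist.extend([10, 25, 40]);
  assert_eq!(dist.cached_to(), 3);
}
```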
Luke Parker
cd8b0544f4 Make CLSAG signing private
Also adds a bit more documentation and does a bit more tidying.
2024-07-04 02:18:36 -04:00
Luke Parker
f5d9d03658 Rename Bulletproofs to Bulletproof, since they are a single Bulletproof
Also bifurcates prove into prove/prove_plus, and adds a few documentation items.
2024-07-04 02:18:36 -04:00
Luke Parker
98b08eaa38 Remove unsafe creation of dalek_ff_group::EdwardsPoint in BP+ 2024-07-04 02:18:36 -04:00
765 changed files with 27297 additions and 46354 deletions

View File

@@ -37,4 +37,4 @@ runs:
   - name: Bitcoin Regtest Daemon
     shell: bash
-    run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/bitcoin/run.sh -txindex -daemon
+    run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/bitcoin/run.sh -daemon

View File

@@ -42,8 +42,8 @@ runs:
     shell: bash
     run: |
       cargo install svm-rs
-      svm install 0.8.26
-      svm use 0.8.26
+      svm install 0.8.25
+      svm use 0.8.25
 # - name: Cache Rust
 #   uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43

View File

@@ -5,7 +5,7 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: v0.18.3.4
+    default: v0.18.3.1
 runs:
   using: "composite"

View File

@@ -5,7 +5,7 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: v0.18.3.4
+    default: v0.18.3.1
 runs:
   using: "composite"
@@ -43,4 +43,4 @@ runs:
   - name: Monero Regtest Daemon
     shell: bash
-    run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/monero/run.sh --detach
+    run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/monero/run.sh --detach

View File

@@ -5,12 +5,12 @@ inputs:
   monero-version:
     description: "Monero version to download and run as a regtest node"
     required: false
-    default: v0.18.3.4
+    default: v0.18.3.1
   bitcoin-version:
     description: "Bitcoin version to download and run as a regtest node"
     required: false
-    default: "27.1"
+    default: "27.0"
 runs:
   using: "composite"

View File

@@ -1,4 +1,4 @@
-name: networks/ Tests
+name: coins/ Tests
 on:
   push:
@@ -7,18 +7,18 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
   pull_request:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
   workflow_dispatch:
 jobs:
-  test-networks:
+  test-coins:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
@@ -30,9 +30,8 @@ jobs:
       run: |
         GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
           -p bitcoin-serai \
-          -p build-solidity-contracts \
-          -p ethereum-schnorr-contract \
           -p alloy-simple-request-transport \
+          -p ethereum-serai \
           -p serai-ethereum-relayer \
           -p monero-io \
           -p monero-generators \

View File

@@ -27,8 +27,6 @@ jobs:
        GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
          -p std-shims \
          -p zalloc \
-         -p patchable-async-sleep \
          -p serai-db \
          -p serai-env \
-         -p serai-task \
          -p simple-request

View File

@@ -7,7 +7,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "coordinator/**"
       - "orchestration/**"
@@ -18,7 +18,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "coordinator/**"
       - "orchestration/**"

View File

@@ -35,10 +35,6 @@ jobs:
          -p multiexp \
          -p schnorr-signatures \
          -p dleq \
-         -p generalized-bulletproofs \
-         -p generalized-bulletproofs-circuit-abstraction \
-         -p ec-divisors \
-         -p generalized-bulletproofs-ec-gadgets \
          -p dkg \
          -p modular-frost \
          -p frost-schnorrkel

View File

@@ -73,15 +73,6 @@ jobs:
       - name: Run rustfmt
         run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
-      - name: Install foundry
-        uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
-        with:
-          version: nightly-41d4e5437107f6f42c7711123890147bc736a609
-          cache: false
-      - name: Run forge fmt
-        run: FOUNDRY_FMT_SORT_INPUTS=false FOUNDRY_FMT_LINE_LENGTH=100 FOUNDRY_FMT_TAB_WIDTH=2 FOUNDRY_FMT_BRACKET_SPACING=true FOUNDRY_FMT_INT_TYPES=preserve forge fmt --check $(find . -iname "*.sol")
   machete:
     runs-on: ubuntu-latest
     steps:
@@ -90,25 +81,3 @@ jobs:
       run: |
         cargo install cargo-machete
         cargo machete
-  slither:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
-      - name: Slither
-        run: |
-          python3 -m pip install solc-select
-          solc-select install 0.8.26
-          solc-select use 0.8.26
-          python3 -m pip install slither-analyzer
-          slither --include-paths ./networks/ethereum/schnorr/contracts/Schnorr.sol
-          slither --include-paths ./networks/ethereum/schnorr/contracts ./networks/ethereum/schnorr/contracts/tests/Schnorr.sol
-          slither processor/ethereum/deployer/contracts/Deployer.sol
-          slither processor/ethereum/erc20/contracts/IERC20.sol
-          cp networks/ethereum/schnorr/contracts/Schnorr.sol processor/ethereum/router/contracts/
-          cp processor/ethereum/erc20/contracts/IERC20.sol processor/ethereum/router/contracts/
-          cd processor/ethereum/router/contracts
-          slither Router.sol

View File

@@ -5,12 +5,12 @@ on:
     branches:
       - develop
     paths:
-      - "networks/monero/**"
+      - "coins/monero/**"
      - "processor/**"
   pull_request:
     paths:
-      - "networks/monero/**"
+      - "coins/monero/**"
      - "processor/**"
   workflow_dispatch:
@@ -42,6 +42,7 @@ jobs:
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib
+         GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai-verify-chain --lib
      # Doesn't run unit tests with features as the tests workflow will
@@ -50,7 +51,7 @@ jobs:
     # Test against all supported protocol versions
     strategy:
       matrix:
-        version: [v0.17.3.2, v0.18.3.4]
+        version: [v0.17.3.2, v0.18.2.0]
     steps:
       - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
@@ -66,12 +67,14 @@ jobs:
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'
+         GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai-verify-chain --test '*'
      - name: Run Integration Tests
        # Don't run if the the tests workflow also will
-        if: ${{ matrix.version != 'v0.18.3.4' }}
+        if: ${{ matrix.version != 'v0.18.2.0' }}
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'
+         GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai-verify-chain --test '*'

View File

@@ -1,259 +0,0 @@
name: Weekly MSRV Check
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
msrv-common:
name: Run cargo msrv on common
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on common
run: |
cargo msrv verify --manifest-path common/zalloc/Cargo.toml
cargo msrv verify --manifest-path common/std-shims/Cargo.toml
cargo msrv verify --manifest-path common/env/Cargo.toml
cargo msrv verify --manifest-path common/db/Cargo.toml
cargo msrv verify --manifest-path common/task/Cargo.toml
cargo msrv verify --manifest-path common/request/Cargo.toml
cargo msrv verify --manifest-path common/patchable-async-sleep/Cargo.toml
msrv-crypto:
name: Run cargo msrv on crypto
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on crypto
run: |
cargo msrv verify --manifest-path crypto/transcript/Cargo.toml
cargo msrv verify --manifest-path crypto/ff-group-tests/Cargo.toml
cargo msrv verify --manifest-path crypto/dalek-ff-group/Cargo.toml
cargo msrv verify --manifest-path crypto/ed448/Cargo.toml
cargo msrv verify --manifest-path crypto/multiexp/Cargo.toml
cargo msrv verify --manifest-path crypto/dleq/Cargo.toml
cargo msrv verify --manifest-path crypto/ciphersuite/Cargo.toml
cargo msrv verify --manifest-path crypto/schnorr/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/generalized-bulletproofs/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/circuit-abstraction/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/divisors/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/ec-gadgets/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/embedwards25519/Cargo.toml
cargo msrv verify --manifest-path crypto/evrf/secq256k1/Cargo.toml
cargo msrv verify --manifest-path crypto/dkg/Cargo.toml
cargo msrv verify --manifest-path crypto/frost/Cargo.toml
cargo msrv verify --manifest-path crypto/schnorrkel/Cargo.toml
msrv-networks:
name: Run cargo msrv on networks
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on networks
run: |
cargo msrv verify --manifest-path networks/bitcoin/Cargo.toml
cargo msrv verify --manifest-path networks/ethereum/build-contracts/Cargo.toml
cargo msrv verify --manifest-path networks/ethereum/schnorr/Cargo.toml
cargo msrv verify --manifest-path networks/ethereum/alloy-simple-request-transport/Cargo.toml
cargo msrv verify --manifest-path networks/ethereum/relayer/Cargo.toml --features parity-db
cargo msrv verify --manifest-path networks/monero/io/Cargo.toml
cargo msrv verify --manifest-path networks/monero/generators/Cargo.toml
cargo msrv verify --manifest-path networks/monero/primitives/Cargo.toml
cargo msrv verify --manifest-path networks/monero/ringct/mlsag/Cargo.toml
cargo msrv verify --manifest-path networks/monero/ringct/clsag/Cargo.toml
cargo msrv verify --manifest-path networks/monero/ringct/borromean/Cargo.toml
cargo msrv verify --manifest-path networks/monero/ringct/bulletproofs/Cargo.toml
cargo msrv verify --manifest-path networks/monero/Cargo.toml
cargo msrv verify --manifest-path networks/monero/rpc/Cargo.toml
cargo msrv verify --manifest-path networks/monero/rpc/simple-request/Cargo.toml
cargo msrv verify --manifest-path networks/monero/wallet/address/Cargo.toml
cargo msrv verify --manifest-path networks/monero/wallet/Cargo.toml
cargo msrv verify --manifest-path networks/monero/verify-chain/Cargo.toml
msrv-message-queue:
name: Run cargo msrv on message-queue
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on message-queue
run: |
cargo msrv verify --manifest-path message-queue/Cargo.toml --features parity-db
msrv-processor:
name: Run cargo msrv on processor
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on processor
run: |
cargo msrv verify --manifest-path processor/view-keys/Cargo.toml
cargo msrv verify --manifest-path processor/primitives/Cargo.toml
cargo msrv verify --manifest-path processor/messages/Cargo.toml
cargo msrv verify --manifest-path processor/scanner/Cargo.toml
cargo msrv verify --manifest-path processor/scheduler/primitives/Cargo.toml
cargo msrv verify --manifest-path processor/scheduler/smart-contract/Cargo.toml
cargo msrv verify --manifest-path processor/scheduler/utxo/primitives/Cargo.toml
cargo msrv verify --manifest-path processor/scheduler/utxo/standard/Cargo.toml
cargo msrv verify --manifest-path processor/scheduler/utxo/transaction-chaining/Cargo.toml
cargo msrv verify --manifest-path processor/key-gen/Cargo.toml
cargo msrv verify --manifest-path processor/frost-attempt-manager/Cargo.toml
cargo msrv verify --manifest-path processor/signers/Cargo.toml
cargo msrv verify --manifest-path processor/bin/Cargo.toml --features parity-db
cargo msrv verify --manifest-path processor/bitcoin/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/primitives/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/test-primitives/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/erc20/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/deployer/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/router/Cargo.toml
cargo msrv verify --manifest-path processor/ethereum/Cargo.toml
cargo msrv verify --manifest-path processor/monero/Cargo.toml
msrv-coordinator:
name: Run cargo msrv on coordinator
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on coordinator
run: |
cargo msrv verify --manifest-path coordinator/tributary-sdk/tendermint/Cargo.toml
cargo msrv verify --manifest-path coordinator/tributary-sdk/Cargo.toml
cargo msrv verify --manifest-path coordinator/cosign/Cargo.toml
cargo msrv verify --manifest-path coordinator/substrate/Cargo.toml
cargo msrv verify --manifest-path coordinator/tributary/Cargo.toml
cargo msrv verify --manifest-path coordinator/p2p/Cargo.toml
cargo msrv verify --manifest-path coordinator/p2p/libp2p/Cargo.toml
cargo msrv verify --manifest-path coordinator/Cargo.toml
msrv-substrate:
name: Run cargo msrv on substrate
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on substrate
run: |
cargo msrv verify --manifest-path substrate/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/coins/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/coins/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/dex/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/economic-security/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/genesis-liquidity/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/genesis-liquidity/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/in-instructions/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/in-instructions/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/validator-sets/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/validator-sets/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/emissions/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/emissions/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/signals/primitives/Cargo.toml
cargo msrv verify --manifest-path substrate/signals/pallet/Cargo.toml
cargo msrv verify --manifest-path substrate/abi/Cargo.toml
cargo msrv verify --manifest-path substrate/client/Cargo.toml
cargo msrv verify --manifest-path substrate/runtime/Cargo.toml
cargo msrv verify --manifest-path substrate/node/Cargo.toml
msrv-orchestration:
name: Run cargo msrv on orchestration
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on message-queue
run: |
cargo msrv verify --manifest-path orchestration/Cargo.toml
msrv-mini:
name: Run cargo msrv on mini
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
- name: Install cargo msrv
run: cargo install --locked cargo-msrv
- name: Run cargo msrv on mini
run: |
cargo msrv verify --manifest-path mini/Cargo.toml

View File

@@ -7,14 +7,14 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "tests/no-std/**"
   pull_request:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "tests/no-std/**"
   workflow_dispatch:

View File

@@ -7,7 +7,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "processor/**"
       - "orchestration/**"
@@ -18,7 +18,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "processor/**"
       - "orchestration/**"

View File

@@ -7,7 +7,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "processor/**"
       - "coordinator/**"
@@ -17,7 +17,7 @@ on:
     paths:
       - "common/**"
       - "crypto/**"
-      - "networks/**"
+      - "coins/**"
       - "message-queue/**"
       - "processor/**"
       - "coordinator/**"
@@ -39,33 +39,9 @@ jobs:
        GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
          -p serai-message-queue \
          -p serai-processor-messages \
-         -p serai-processor-key-gen \
-         -p serai-processor-view-keys \
-         -p serai-processor-frost-attempt-manager \
-         -p serai-processor-primitives \
-         -p serai-processor-scanner \
-         -p serai-processor-scheduler-primitives \
-         -p serai-processor-utxo-scheduler-primitives \
-         -p serai-processor-utxo-scheduler \
-         -p serai-processor-transaction-chaining-scheduler \
-         -p serai-processor-smart-contract-scheduler \
-         -p serai-processor-signers \
-         -p serai-processor-bin \
-         -p serai-bitcoin-processor \
-         -p serai-processor-ethereum-primitives \
-         -p serai-processor-ethereum-test-primitives \
-         -p serai-processor-ethereum-deployer \
-         -p serai-processor-ethereum-router \
-         -p serai-processor-ethereum-erc20 \
-         -p serai-ethereum-processor \
-         -p serai-monero-processor \
+         -p serai-processor \
          -p tendermint-machine \
-         -p tributary-sdk \
-         -p serai-cosign \
-         -p serai-coordinator-substrate \
-         -p serai-coordinator-tributary \
-         -p serai-coordinator-p2p \
-         -p serai-coordinator-libp2p-p2p \
+         -p tributary-chain \
          -p serai-coordinator \
          -p serai-orchestrator \
          -p serai-docker-tests
@@ -87,11 +63,6 @@ jobs:
          -p serai-dex-pallet \
          -p serai-validator-sets-primitives \
          -p serai-validator-sets-pallet \
-         -p serai-genesis-liquidity-primitives \
-         -p serai-genesis-liquidity-pallet \
-         -p serai-emissions-primitives \
-         -p serai-emissions-pallet \
-         -p serai-economic-security-pallet \
          -p serai-in-instructions-primitives \
          -p serai-in-instructions-pallet \
          -p serai-signals-primitives \

Cargo.lock (generated): 2906 lines changed. File diff suppressed because it is too large.

View File

@@ -6,6 +6,7 @@ members = [
   "patches/parking_lot",
   "patches/zstd",
   "patches/rocksdb",
+  "patches/proc-macro-crate",

   # std patches
   "patches/matches",
@@ -17,10 +18,8 @@ members = [
   "common/std-shims",
   "common/zalloc",
-  "common/patchable-async-sleep",
   "common/db",
   "common/env",
-  "common/task",
   "common/request",

   "crypto/transcript",
@@ -31,78 +30,43 @@ members = [
   "crypto/ciphersuite",
   "crypto/multiexp",
   "crypto/schnorr",
   "crypto/dleq",
-  "crypto/evrf/secq256k1",
-  "crypto/evrf/embedwards25519",
-  "crypto/evrf/generalized-bulletproofs",
-  "crypto/evrf/circuit-abstraction",
-  "crypto/evrf/divisors",
-  "crypto/evrf/ec-gadgets",
   "crypto/dkg",
   "crypto/frost",
   "crypto/schnorrkel",

-  "networks/bitcoin",
-  "networks/ethereum/build-contracts",
-  "networks/ethereum/schnorr",
-  "networks/ethereum/alloy-simple-request-transport",
-  "networks/ethereum/relayer",
-  "networks/monero/io",
-  "networks/monero/generators",
-  "networks/monero/primitives",
-  "networks/monero/ringct/mlsag",
-  "networks/monero/ringct/clsag",
-  "networks/monero/ringct/borromean",
-  "networks/monero/ringct/bulletproofs",
-  "networks/monero",
-  "networks/monero/rpc",
-  "networks/monero/rpc/simple-request",
-  "networks/monero/wallet/address",
-  "networks/monero/wallet",
-  "networks/monero/wallet/seed",
-  "networks/monero/wallet/polyseed",
-  "networks/monero/wallet/util",
-  "networks/monero/verify-chain",
+  "coins/bitcoin",
+  "coins/ethereum/alloy-simple-request-transport",
+  "coins/ethereum",
+  "coins/ethereum/relayer",
+  "coins/monero/io",
+  "coins/monero/generators",
+  "coins/monero/primitives",
+  "coins/monero/ringct/mlsag",
+  "coins/monero/ringct/clsag",
+  "coins/monero/ringct/borromean",
+  "coins/monero/ringct/bulletproofs",
+  "coins/monero",
+  "coins/monero/rpc",
+  "coins/monero/rpc/simple-request",
+  "coins/monero/wallet/address",
+  "coins/monero/wallet",
+  "coins/monero/wallet/seed",
+  "coins/monero/wallet/polyseed",
+  "coins/monero/wallet/util",
+  "coins/monero/verify-chain",

   "message-queue",

   "processor/messages",
-  "processor/key-gen",
-  "processor/view-keys",
-  "processor/frost-attempt-manager",
-  "processor/primitives",
-  "processor/scanner",
-  "processor/scheduler/primitives",
-  "processor/scheduler/utxo/primitives",
-  "processor/scheduler/utxo/standard",
-  "processor/scheduler/utxo/transaction-chaining",
-  "processor/scheduler/smart-contract",
-  "processor/signers",
-  "processor/bin",
-  "processor/bitcoin",
-  "processor/ethereum/primitives",
-  "processor/ethereum/test-primitives",
-  "processor/ethereum/deployer",
-  "processor/ethereum/router",
-  "processor/ethereum/erc20",
-  "processor/ethereum",
-  "processor/monero",
+  "processor",

-  "coordinator/tributary-sdk/tendermint",
-  "coordinator/tributary-sdk",
-  "coordinator/cosign",
-  "coordinator/substrate",
+  "coordinator/tributary/tendermint",
   "coordinator/tributary",
-  "coordinator/p2p",
-  "coordinator/p2p/libp2p",
   "coordinator",

   "substrate/primitives",
@@ -115,14 +79,6 @@ members = [
   "substrate/validator-sets/primitives",
   "substrate/validator-sets/pallet",

-  "substrate/genesis-liquidity/primitives",
-  "substrate/genesis-liquidity/pallet",
-
-  "substrate/emissions/primitives",
-  "substrate/emissions/pallet",
-
-  "substrate/economic-security/pallet",

   "substrate/in-instructions/primitives",
   "substrate/in-instructions/pallet",
@@ -154,32 +110,18 @@ members = [
 # to the extensive operations required for Bulletproofs
 [profile.dev.package]
 subtle = { opt-level = 3 }
+curve25519-dalek = { opt-level = 3 }
 ff = { opt-level = 3 }
 group = { opt-level = 3 }

 crypto-bigint = { opt-level = 3 }
-secp256k1 = { opt-level = 3 }
-curve25519-dalek = { opt-level = 3 }
 dalek-ff-group = { opt-level = 3 }
 minimal-ed448 = { opt-level = 3 }

 multiexp = { opt-level = 3 }
-secq256k1 = { opt-level = 3 }
-embedwards25519 = { opt-level = 3 }
-generalized-bulletproofs = { opt-level = 3 }
-generalized-bulletproofs-circuit-abstraction = { opt-level = 3 }
-ec-divisors = { opt-level = 3 }
-generalized-bulletproofs-ec-gadgets = { opt-level = 3 }
-dkg = { opt-level = 3 }
-monero-generators = { opt-level = 3 }
-monero-borromean = { opt-level = 3 }
-monero-bulletproofs = { opt-level = 3 }
-monero-mlsag = { opt-level = 3 }
-monero-clsag = { opt-level = 3 }
+monero-serai = { opt-level = 3 }

 [profile.release]
 panic = "unwind"
@@ -188,12 +130,17 @@ panic = "unwind"
 # https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
 lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }

+# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
+dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" }
+
 parking_lot_core = { path = "patches/parking_lot_core" }
 parking_lot = { path = "patches/parking_lot" }

 # wasmtime pulls in an old version for this
 zstd = { path = "patches/zstd" }

 # Needed for WAL compression
 rocksdb = { path = "patches/rocksdb" }

+# proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3
+proc-macro-crate = { path = "patches/proc-macro-crate" }
+
 # is-terminal now has an std-based solution with an equivalent API
 is-terminal = { path = "patches/is-terminal" }
@@ -208,12 +155,8 @@ matches = { path = "patches/matches" }
 option-ext = { path = "patches/option-ext" }
 directories-next = { path = "patches/directories-next" }

-# The official pasta_curves repo doesn't support Zeroize
-pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }
-
 [workspace.lints.clippy]
 unwrap_or_default = "allow"
-map_unwrap_or = "allow"
 borrow_as_ptr = "deny"
 cast_lossless = "deny"
 cast_possible_truncation = "deny"
@@ -241,6 +184,7 @@ manual_instant_elapsed = "deny"
 manual_let_else = "deny"
 manual_ok_or = "deny"
 manual_string_new = "deny"
+map_unwrap_or = "deny"
 match_bool = "deny"
 match_same_arms = "deny"
 missing_fields_in_debug = "deny"
@@ -252,7 +196,6 @@ range_plus_one = "deny"
 redundant_closure_for_method_calls = "deny"
 redundant_else = "deny"
 string_add_assign = "deny"
-string_slice = "deny"
 unchecked_duration_subtraction = "deny"
 uninlined_format_args = "deny"
 unnecessary_box_returns = "deny"

View File

@@ -24,7 +24,7 @@ wallet.
   infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
   needed for Bitcoin-Monero atomic swaps.

-- `networks`: Various libraries intended for usage in Serai yet also by the
+- `coins`: Various coin libraries intended for usage in Serai yet also by the
   wider community. This means they will always support the functionality Serai
   needs, yet won't disadvantage other use cases when possible.

View File

@@ -0,0 +1,6 @@
# Cypher Stack /coins/bitcoin Audit, August 2023
This audit was over the /coins/bitcoin folder. It is encompassing up to commit
5121ca75199dff7bd34230880a1fdd793012068c.
Please see https://github.com/cypherstack/serai-btc-audit for provenance.

View File

@@ -1,7 +0,0 @@
# Cypher Stack /networks/bitcoin Audit, August 2023
This audit was over the `/networks/bitcoin` folder (at the time located at
`/coins/bitcoin`). It is encompassing up to commit
5121ca75199dff7bd34230880a1fdd793012068c.
Please see https://github.com/cypherstack/serai-btc-audit for provenance.

View File

@@ -3,10 +3,10 @@ name = "bitcoin-serai"
 version = "0.3.0"
 description = "A Bitcoin library for FROST-signing transactions"
 license = "MIT"
-repository = "https://github.com/serai-dex/serai/tree/develop/networks/bitcoin"
+repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
 authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
 edition = "2021"
-rust-version = "1.80"
+rust-version = "1.79"

 [package.metadata.docs.rs]
 all-features = true
@@ -18,7 +18,7 @@ workspace = true
 [dependencies]
 std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }

-thiserror = { version = "2", default-features = false }
+thiserror = { version = "1", default-features = false, optional = true }

 zeroize = { version = "^1.5", default-features = false }
 rand_core = { version = "0.6", default-features = false }
@@ -26,6 +26,8 @@ rand_core = { version = "0.6", default-features = false }
 bitcoin = { version = "0.32", default-features = false }

 k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }

+transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
 frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }

 hex = { version = "0.4", default-features = false, optional = true }
@@ -44,7 +46,7 @@ tokio = { version = "1", features = ["macros"] }
 std = [
   "std-shims/std",

-  "thiserror/std",
+  "thiserror",

   "zeroize/std",
   "rand_core/std",
@@ -53,6 +55,8 @@ std = [
   "bitcoin/serde",

   "k256/std",

+  "transcript/std",
   "frost",

   "hex/std",

View File

@@ -40,12 +40,14 @@ mod frost_crypto {
   use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};

+  use transcript::Transcript;
+
   use k256::{elliptic_curve::ops::Reduce, U256, Scalar};

   use frost::{
     curve::{Ciphersuite, Secp256k1},
     Participant, ThresholdKeys, ThresholdView, FrostError,
-    algorithm::{Hram as HramTrait, Algorithm, IetfSchnorr as FrostSchnorr},
+    algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
   };

   use super::*;
@@ -80,17 +82,16 @@ mod frost_crypto {
   ///
   /// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
   #[derive(Clone)]
-  pub struct Schnorr(FrostSchnorr<Secp256k1, Hram>);
-  impl Schnorr {
+  pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
+  impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
     /// Construct a Schnorr algorithm continuing the specified transcript.
-    #[allow(clippy::new_without_default)]
-    pub fn new() -> Schnorr {
-      Schnorr(FrostSchnorr::ietf())
+    pub fn new(transcript: T) -> Schnorr<T> {
+      Schnorr(FrostSchnorr::new(transcript))
     }
   }

-  impl Algorithm<Secp256k1> for Schnorr {
-    type Transcript = <FrostSchnorr<Secp256k1, Hram> as Algorithm<Secp256k1>>::Transcript;
+  impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
+    type Transcript = T;
     type Addendum = ();
     type Signature = [u8; 64];

View File

@@ -3,6 +3,7 @@ use rand_core::OsRng;
 use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};

 use k256::Scalar;
+use transcript::{Transcript, RecommendedTranscript};
 use frost::{
   curve::Secp256k1,
   Participant,
@@ -24,7 +25,8 @@ fn test_algorithm() {
     *keys = keys.offset(Scalar::from(offset));
   }

-  let algo = Schnorr::new();
+  let algo =
+    Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));

   let sig = sign(
     &mut OsRng,
     &algo,

View File

@@ -22,7 +22,7 @@ use bitcoin::{
   Block,
 };
 #[cfg(feature = "std")]
-use bitcoin::{hashes::Hash, consensus::encode::Decodable, TapTweakHash};
+use bitcoin::consensus::encode::Decodable;

 use crate::crypto::x_only;
 #[cfg(feature = "std")]
@@ -33,40 +33,12 @@ mod send;
 #[cfg(feature = "std")]
 pub use send::*;

-/// Tweak keys to ensure they're usable with Bitcoin's Taproot upgrade.
+/// Tweak keys to ensure they're usable with Bitcoin.
 ///
-/// This adds an unspendable script path to the key, preventing any outputs received to this key
-/// from being spent via a script. To have keys which have spendable script paths, further offsets
-/// from this position must be used.
-///
-/// After adding an unspendable script path, the key is incremented until its even. This means the
-/// existence of the unspendable script path may not provable, without an understanding of the
-/// algorithm used here.
+/// Taproot keys, which these keys are used as, must be even. This offsets the keys until they're
+/// even.
 #[cfg(feature = "std")]
 pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
-  // Adds the unspendable script path per
-  // https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki#cite_note-23
-  let keys = {
-    use k256::elliptic_curve::{
-      bigint::{Encoding, U256},
-      ops::Reduce,
-      group::GroupEncoding,
-    };
-    let tweak_hash = TapTweakHash::hash(&keys.group_key().to_bytes().as_slice()[1 ..]);
-    /*
-      https://github.com/bitcoin/bips/blob/master/bip-0340.mediawiki#cite_ref-13-0 states how the
-      bias is negligible. This reduction shouldn't ever occur, yet if it did, the script path
-      would be unusable due to a check the script path hash is less than the order. That doesn't
-      impact us as we don't want the script path to be usable.
-    */
-    keys.offset(<Secp256k1 as Ciphersuite>::F::reduce(U256::from_be_bytes(
-      *tweak_hash.to_raw_hash().as_ref(),
-    )))
-  };
-
-  // This doesn't risk re-introducing a script path as you'd have to find a preimage for the tweak
-  // hash with whatever increment, or manipulate the key so that the tweak hash and increment
-  // equals the desired offset, yet manipulating the key would change the tweak hash
   let (_, offset) = make_even(keys.group_key());
   keys.offset(Scalar::from(offset))
 }
@@ -170,10 +142,6 @@ impl Scanner {
   ///
   /// This means offsets are surjective, not bijective, and the order offsets are registered in
   /// may determine the validity of future offsets.
-  ///
-  /// The offsets registered must be securely generated. Arbitrary offsets may introduce a script
-  /// path into the output, allowing the output to be spent by satisfaction of an arbitrary script
-  /// (not by the signature of the key).
   pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
     // This loop will terminate as soon as an even point is found, with any point having a ~50%
     // chance of being even
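(Both the old and new bodies of tweak_keys rely on make_even, which this diff doesn't show; the following is a self-contained sketch of the idea under k256, with structure that's illustrative rather than bitcoin-serai's exact code.)

```rust
use k256::{elliptic_curve::group::GroupEncoding, ProjectivePoint};

// Sketch of the "offset until even" idea tweak_keys relies on: add G to the
// key until its compressed SEC1 encoding has an even y coordinate (tag byte
// 0x02), returning the resulting key and how many increments were needed.
fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
  let mut offset = 0;
  while key.to_bytes()[0] != 2 {
    key += ProjectivePoint::GENERATOR;
    offset += 1;
  }
  (key, offset)
}

fn main() {
  let (key, offset) = make_even(ProjectivePoint::GENERATOR);
  assert_eq!(key.to_bytes()[0], 2);
  println!("offset: {offset}");
}
```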

View File

@@ -7,7 +7,9 @@ use thiserror::Error;

 use rand_core::{RngCore, CryptoRng};

-use k256::Scalar;
+use transcript::{Transcript, RecommendedTranscript};
+
+use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};

 use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};

 use bitcoin::{
@@ -44,7 +46,7 @@ pub enum TransactionError {
   #[error("fee was too low to pass the default minimum fee rate")]
   TooLowFee,
   #[error("not enough funds for these payments")]
-  NotEnoughFunds { inputs: u64, payments: u64, fee: u64 },
+  NotEnoughFunds,
   #[error("transaction was too large")]
   TooLargeTransaction,
 }
@@ -59,11 +61,11 @@ pub struct SignableTransaction {
 }

 impl SignableTransaction {
-  fn calculate_weight_vbytes(
+  fn calculate_weight(
     inputs: usize,
     payments: &[(ScriptBuf, u64)],
     change: Option<&ScriptBuf>,
-  ) -> (u64, u64) {
+  ) -> u64 {
     // Expand this a full transaction in order to use the bitcoin library's weight function
     let mut tx = Transaction {
       version: Version(2),
@@ -97,33 +99,7 @@ impl SignableTransaction {
       // the value is fixed size (so any value could be used here)
       tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() });
     }
-
-    let weight = tx.weight();
-    // Now calculate the size in vbytes
-    /*
-      "Virtual transaction size" is weight ceildiv 4 per
-      https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
-
-      https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04
-        /src/policy/policy.cpp#L295-L298
-      implements this almost as expected, with an additional consideration to signature operations
-
-      Signature operations (the second argument of the following call) do not count Taproot
-      signatures per https://github.com/bitcoin/bips/blob/master/bip-0342.mediawiki#cite_ref-11-0
-
-      We don't risk running afoul of the Taproot signature limit as it allows at least one per
-      input, which is all we use
-    */
-    (
-      weight.to_wu(),
-      u64::try_from(bitcoin::policy::get_virtual_tx_size(
-        i64::try_from(weight.to_wu()).unwrap(),
-        0i64,
-      ))
-      .unwrap(),
-    )
+    u64::from(tx.weight())
   }

   /// Returns the fee necessary for this transaction to achieve the fee rate specified at
@@ -152,7 +128,7 @@ impl SignableTransaction {
     payments: &[(ScriptBuf, u64)],
     change: Option<ScriptBuf>,
     data: Option<Vec<u8>>,
-    fee_per_vbyte: u64,
+    fee_per_weight: u64,
   ) -> Result<SignableTransaction, TransactionError> {
     if inputs.is_empty() {
       Err(TransactionError::NoInputs)?;
@@ -201,30 +177,45 @@ impl SignableTransaction {
       })
     }

-    let (mut weight, vbytes) = Self::calculate_weight_vbytes(tx_ins.len(), payments, None);
-    let mut needed_fee = fee_per_vbyte * vbytes;
+    let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
+    let mut needed_fee = fee_per_weight * weight;
+
+    // "Virtual transaction size" is weight ceildiv 4 per
+    // https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
+    // https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
+    //   src/policy/policy.cpp#L295-L298
+    // implements this as expected
+    // Technically, it takes whatever's greater, the weight or the amount of signature operations
+    // multiplied by DEFAULT_BYTES_PER_SIGOP (20)
+    // We only use 1 signature per input, and our inputs have a weight exceeding 20
+    // Accordingly, our inputs' weight will always be greater than the cost of the signature ops
+    let vsize = weight.div_ceil(4);
+    debug_assert_eq!(
+      u64::try_from(bitcoin::policy::get_virtual_tx_size(
+        weight.try_into().unwrap(),
+        tx_ins.len().try_into().unwrap()
+      ))
+      .unwrap(),
+      vsize
+    );

     // Technically, if there isn't change, this TX may still pay enough of a fee to pass the
     // minimum fee. Such edge cases aren't worth programming when they go against intent, as the
     // specified fee rate is too low to be valid
     // bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
-    if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vbytes) / 1000) {
+    if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
       Err(TransactionError::TooLowFee)?;
     }

     if input_sat < (payment_sat + needed_fee) {
-      Err(TransactionError::NotEnoughFunds {
-        inputs: input_sat,
-        payments: payment_sat,
-        fee: needed_fee,
-      })?;
+      Err(TransactionError::NotEnoughFunds)?;
     }

     // If there's a change address, check if there's change to give it
     if let Some(change) = change {
-      let (weight_with_change, vbytes_with_change) =
-        Self::calculate_weight_vbytes(tx_ins.len(), payments, Some(&change));
-      let fee_with_change = fee_per_vbyte * vbytes_with_change;
+      let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(&change));
+      let fee_with_change = fee_per_weight * weight_with_change;
       if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
         if value >= DUST {
           tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change });
@@ -262,23 +253,49 @@ impl SignableTransaction {
     res
   }

-  /// Returns the transaction, sans witness, this will create if signed.
-  pub fn transaction(&self) -> &Transaction {
-    &self.tx
+  /// Returns the outputs this transaction will create.
+  pub fn outputs(&self) -> &[TxOut] {
+    &self.tx.output
   }

   /// Create a multisig machine for this transaction.
   ///
   /// Returns None if the wrong keys are used.
-  pub fn multisig(self, keys: &ThresholdKeys<Secp256k1>) -> Option<TransactionMachine> {
+  pub fn multisig(
+    self,
+    keys: &ThresholdKeys<Secp256k1>,
+    mut transcript: RecommendedTranscript,
+  ) -> Option<TransactionMachine> {
+    transcript.domain_separate(b"bitcoin_transaction");
+    transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());
+
+    // Transcript the inputs and outputs
+    let tx = &self.tx;
+    for input in &tx.input {
+      transcript.append_message(b"input_hash", input.previous_output.txid);
+      transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
+    }
+    for payment in &tx.output {
+      transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
+      transcript.append_message(b"output_amount", payment.value.to_sat().to_le_bytes());
+    }
+
     let mut sigs = vec![];
-    for i in 0 .. self.tx.input.len() {
+    for i in 0 .. tx.input.len() {
+      let mut transcript = transcript.clone();
+      // This unwrap is safe since any transaction with this many inputs violates the maximum
+      // size allowed under standards, which this lib will error on creation of
+      transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
+
       let offset = keys.clone().offset(self.offsets[i]);
       if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey {
         None?;
       }
-      sigs.push(AlgorithmMachine::new(Schnorr::new(), keys.clone().offset(self.offsets[i])));
+      sigs.push(AlgorithmMachine::new(
+        Schnorr::new(transcript),
+        keys.clone().offset(self.offsets[i]),
+      ));
     }

     Some(TransactionMachine { tx: self, sigs })
@@ -291,7 +308,7 @@ impl SignableTransaction {
 /// This will panic if either `cache` is called or the message isn't empty.
 pub struct TransactionMachine {
   tx: SignableTransaction,
-  sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr>>,
+  sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
 }

 impl PreprocessMachine for TransactionMachine {
@@ -320,7 +337,7 @@ impl PreprocessMachine for TransactionMachine {
 pub struct TransactionSignMachine {
   tx: SignableTransaction,
-  sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr>>,
+  sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
 }

 impl SignMachine<Transaction> for TransactionSignMachine {
@@ -400,7 +417,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
 pub struct TransactionSignatureMachine {
   tx: Transaction,
-  sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr>>,
+  sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
 }

 impl SignatureMachine<Transaction> for TransactionSignatureMachine {
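(A worked example of the vsize arithmetic described in the hunk above; the weight value is illustrative, and bitcoin's DEFAULT_MIN_RELAY_TX_FEE is 1000 sats per kilo-vbyte.)

```rust
// Worked example of the fee arithmetic the hunk above describes.
// The weight here is made up, not from a real transaction.
fn main() {
  let weight: u64 = 565; // weight units
  // "Virtual transaction size" is weight ceildiv 4 per BIP-141.
  let vsize = weight.div_ceil(4);
  assert_eq!(vsize, 142);

  // The fee needed to hit a chosen rate scales with the weight.
  let fee_per_weight: u64 = 3;
  let needed_fee = fee_per_weight * weight; // 1695 sats

  // DEFAULT_MIN_RELAY_TX_FEE is denominated per kilo-vbyte (1000 sats),
  // so this transaction's floor is (1000 * vsize) / 1000 sats.
  let min_relay_fee = (1000 * vsize) / 1000; // 142 sats
  assert!(needed_fee >= min_relay_fee, "would be TooLowFee");
  println!("vsize = {vsize}, fee = {needed_fee}, floor = {min_relay_fee}");
}
```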

coins/bitcoin/tests/runner.rs

@@ -1,11 +1,14 @@
-use std::sync::LazyLock;
+use std::sync::OnceLock;

 use bitcoin_serai::rpc::Rpc;

 use tokio::sync::Mutex;

-#[allow(dead_code)]
-pub(crate) static SEQUENTIAL: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
+static SEQUENTIAL_CELL: OnceLock<Mutex<()>> = OnceLock::new();
+#[allow(non_snake_case)]
+pub fn SEQUENTIAL() -> &'static Mutex<()> {
+  SEQUENTIAL_CELL.get_or_init(|| Mutex::new(()))
+}

 #[allow(dead_code)]
 pub(crate) async fn rpc() -> Rpc {
@@ -31,7 +34,7 @@ macro_rules! async_sequential {
     $(
       #[tokio::test]
       async fn $name() {
-        let guard = runner::SEQUENTIAL.lock().await;
+        let guard = runner::SEQUENTIAL().lock().await;
         let local = tokio::task::LocalSet::new();
         local.run_until(async move {
           if let Err(err) = tokio::task::spawn_local(async move { $body }).await {

coins/bitcoin/tests/wallet.rs

@@ -2,6 +2,8 @@ use std::collections::HashMap;
 use rand_core::{RngCore, OsRng};

+use transcript::{Transcript, RecommendedTranscript};
+
 use k256::{
   elliptic_curve::{
     group::{ff::Field, Group},
@@ -92,11 +94,46 @@ fn sign(
 ) -> Transaction {
   let mut machines = HashMap::new();
   for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
-    machines.insert(i, tx.clone().multisig(&keys[&i].clone()).unwrap());
+    machines.insert(
+      i,
+      tx.clone()
+        .multisig(&keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
+        .unwrap(),
+    );
   }

   sign_without_caching(&mut OsRng, machines, &[])
 }

+#[test]
+fn test_tweak_keys() {
+  let mut even = false;
+  let mut odd = false;
+
+  // Generate keys until we get an even set and an odd set
+  while !(even && odd) {
+    let mut keys = key_gen(&mut OsRng).drain().next().unwrap().1;
+    if is_even(keys.group_key()) {
+      // Tweaking should do nothing
+      assert_eq!(tweak_keys(&keys).group_key(), keys.group_key());
+      even = true;
+    } else {
+      let tweaked = tweak_keys(&keys).group_key();
+      assert_ne!(tweaked, keys.group_key());
+      // Tweaking should produce an even key
+      assert!(is_even(tweaked));
+      // Verify it uses the smallest possible offset
+      while keys.group_key().to_encoded_point(true).tag() == Tag::CompressedOddY {
+        keys = keys.offset(Scalar::ONE);
+      }
+      assert_eq!(tweaked, keys.group_key());
+      odd = true;
+    }
+  }
+}
+
 async_sequential! {
   async fn test_scanner() {
     // Test Scanners are creatable for even keys.
@@ -195,10 +232,10 @@ async_sequential!
       Err(TransactionError::TooLowFee),
     );

-    assert!(matches!(
-      SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
-      Err(TransactionError::NotEnoughFunds { .. }),
-    ));
+    assert_eq!(
+      SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
+      Err(TransactionError::NotEnoughFunds),
+    );

     assert_eq!(
       SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
@@ -266,7 +303,7 @@ async_sequential!
     }

     // Make sure the change is correct
-    assert_eq!(needed_fee, u64::try_from(tx.vsize()).unwrap() * FEE);
+    assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
     let input_value = output.value() + offset_output.value();
     let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
     assert_eq!(input_value - output_value, needed_fee);

coins/ethereum/.gitignore (new, vendored)

@@ -0,0 +1,3 @@
# Solidity build outputs
cache
artifacts

coins/ethereum/Cargo.toml (new file)

@@ -0,0 +1,49 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "1", default-features = false }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["recommended"] }
group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] }
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["secp256k1"] }
alloy-core = { version = "0.7", default-features = false }
alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] }
alloy-consensus = { version = "0.1", default-features = false, features = ["k256"] }
alloy-network = { version = "0.1", default-features = false }
alloy-rpc-types-eth = { version = "0.1", default-features = false }
alloy-rpc-client = { version = "0.1", default-features = false }
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
alloy-provider = { version = "0.1", default-features = false }
alloy-node-bindings = { version = "0.1", default-features = false, optional = true }
[dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }
tokio = { version = "1", features = ["macros"] }
alloy-node-bindings = { version = "0.1", default-features = false }
[features]
tests = ["alloy-node-bindings", "frost/tests"]

coins/ethereum/LICENSE

@@ -1,6 +1,6 @@
 AGPL-3.0-only license

-Copyright (c) 2023-2025 Luke Parker
+Copyright (c) 2022-2023 Luke Parker

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License Version 3 as

coins/ethereum/README.md (new file)

@@ -0,0 +1,15 @@
# Ethereum
This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.
While `monero-serai` and `bitcoin-serai` are general purpose libraries,
`ethereum-serai` is Serai specific. If any of the utilities are generally
desired, please fork and maintain your own copy to ensure the desired
functionality is preserved, or open an issue to request we make this library
general purpose.
### Dependencies
- solc
- [Foundry](https://github.com/foundry-rs/foundry)

coins/ethereum/alloy-simple-request-transport/Cargo.toml

@@ -1,12 +1,12 @@
 [package]
 name = "alloy-simple-request-transport"
-version = "0.1.1"
+version = "0.1.0"
 description = "A transport for alloy based off simple-request"
 license = "MIT"
-repository = "https://github.com/serai-dex/serai/tree/develop/networks/ethereum/alloy-simple-request-transport"
+repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/alloy-simple-request-transport"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 edition = "2021"
-rust-version = "1.81"
+rust-version = "1.74"

 [package.metadata.docs.rs]
 all-features = true
@@ -16,13 +16,13 @@ rustdoc-args = ["--cfg", "docsrs"]
 workspace = true

 [dependencies]
-tower = "0.5"
+tower = "0.4"

 serde_json = { version = "1", default-features = false }
-simple-request = { path = "../../../common/request", version = "0.1", default-features = false }
+simple-request = { path = "../../../common/request", default-features = false }

-alloy-json-rpc = { version = "0.9", default-features = false }
-alloy-transport = { version = "0.9", default-features = false }
+alloy-json-rpc = { version = "0.1", default-features = false }
+alloy-transport = { version = "0.1", default-features = false }

 [features]
 default = ["tls"]

coins/ethereum/build.rs (new file)

@@ -0,0 +1,41 @@
use std::process::Command;
fn main() {
println!("cargo:rerun-if-changed=contracts/*");
println!("cargo:rerun-if-changed=artifacts/*");
for line in String::from_utf8(Command::new("solc").args(["--version"]).output().unwrap().stdout)
.unwrap()
.lines()
{
if let Some(version) = line.strip_prefix("Version: ") {
let version = version.split('+').next().unwrap();
assert_eq!(version, "0.8.25");
}
}
#[rustfmt::skip]
let args = [
"--base-path", ".",
"-o", "./artifacts", "--overwrite",
"--bin", "--abi",
"--via-ir", "--optimize",
"./contracts/IERC20.sol",
"./contracts/Schnorr.sol",
"./contracts/Deployer.sol",
"./contracts/Sandbox.sol",
"./contracts/Router.sol",
"./src/tests/contracts/Schnorr.sol",
"./src/tests/contracts/ERC20.sol",
"--no-color",
];
let solc = Command::new("solc").args(args).output().unwrap();
assert!(solc.status.success());
for line in String::from_utf8(solc.stderr).unwrap().lines() {
assert!(!line.starts_with("Error:"));
}
}

coins/ethereum/contracts/Deployer.sol (new file)

@@ -0,0 +1,52 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
/*
The expected deployment process of the Router is as follows:
1) A transaction deploying Deployer is made. Then, a deterministic signature is
created such that an account with an unknown private key is the creator of
the contract. Anyone can fund this address, and once anyone does, the
transaction deploying Deployer can be published by anyone. No other
transaction may be made from that account.
2) Anyone deploys the Router through the Deployer. This uses a sequential nonce
such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible.
While such attacks would still be feasible if the Deployer's address was
controllable, the usage of a deterministic signature with a NUMS method
prevents that.
This doesn't have any denial-of-service risks and will resolve once anyone steps
forward as deployer. This does fail to guarantee an identical address across
every chain, though it enables letting anyone efficiently ask the Deployer for
the address (with the Deployer having an identical address on every chain).
Unfortunately, guaranteeing identical addresses isn't feasible. We'd need the
Deployer contract to use a consistent salt for the Router, yet the Router must
be deployed with a specific public key for Serai. Since Ethereum isn't able to
determine a valid public key (one the result of a Serai DKG) from a dishonest
public key, we have to allow multiple deployments with Serai being the one to
determine which to use.
The alternative would be to have a council publish the Serai key on-Ethereum,
with Serai verifying the published result. This would introduce a DoS risk in
the council not publishing the correct key/not publishing any key.
*/
contract Deployer {
event Deployment(bytes32 indexed init_code_hash, address created);
error DeploymentFailed();
function deploy(bytes memory init_code) external {
address created;
assembly {
created := create(0, add(init_code, 0x20), mload(init_code))
}
if (created == address(0)) {
revert DeploymentFailed();
}
// These may be emitted out of order upon re-entrancy
emit Deployment(keccak256(init_code), created);
}
}
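As a sketch of this deployment flow from the Rust side, using the `Deployer::deployment_tx` and `Deployer::address` helpers from `src/deployer.rs` below (the funding step and the printed strings are assumptions of the example, not code from this commit):

use ethereum_serai::deployer::Deployer;

fn main() {
  // The deterministically signed deployment transaction, which anyone can publish
  let tx = Deployer::deployment_tx();
  // The NUMS account which must be funded with the gas for the deployment
  let signer = tx.recover_signer().expect("deterministic signature wasn't valid");
  // The deterministic address the Deployer will be created at (CREATE with nonce 0)
  let deployer = Deployer::address();
  println!("fund {signer}, publish the TX, and the Deployer will live at {deployer:?}");
}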

coins/ethereum/contracts/IERC20.sol

@@ -1,5 +1,5 @@
 // SPDX-License-Identifier: CC0
-pragma solidity ^0.8.26;
+pragma solidity ^0.8.0;

 interface IERC20 {
   event Transfer(address indexed from, address indexed to, uint256 value);

coins/ethereum/contracts/Router.sol (new file)

@@ -0,0 +1,222 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "./IERC20.sol";
import "./Schnorr.sol";
import "./Sandbox.sol";
contract Router {
// Nonce is incremented for each batch of transactions executed/key update
uint256 public nonce;
// Current public key's x-coordinate
// This key must always have the parity defined within the Schnorr contract
bytes32 public seraiKey;
struct OutInstruction {
address to;
Call[] calls;
uint256 value;
}
struct Signature {
bytes32 c;
bytes32 s;
}
event SeraiKeyUpdated(
uint256 indexed nonce,
bytes32 indexed key,
Signature signature
);
event InInstruction(
address indexed from,
address indexed coin,
uint256 amount,
bytes instruction
);
// success is a uint256 representing a bitfield of transaction successes
event Executed(
uint256 indexed nonce,
bytes32 indexed batch,
uint256 success,
Signature signature
);
// error types
error InvalidKey();
error InvalidSignature();
error InvalidAmount();
error FailedTransfer();
error TooManyTransactions();
modifier _updateSeraiKeyAtEndOfFn(
uint256 _nonce,
bytes32 key,
Signature memory sig
) {
if (
(key == bytes32(0)) ||
((bytes32(uint256(key) % Schnorr.Q)) != key)
) {
revert InvalidKey();
}
_;
seraiKey = key;
emit SeraiKeyUpdated(_nonce, key, sig);
}
constructor(bytes32 _seraiKey) _updateSeraiKeyAtEndOfFn(
0,
_seraiKey,
Signature({ c: bytes32(0), s: bytes32(0) })
) {
nonce = 1;
}
// updateSeraiKey validates the given Schnorr signature against the current
// public key, and if successful, updates the contract's public key to the
// given one.
function updateSeraiKey(
bytes32 _seraiKey,
Signature calldata sig
) external _updateSeraiKeyAtEndOfFn(nonce, _seraiKey, sig) {
bytes memory message =
abi.encodePacked("updateSeraiKey", block.chainid, nonce, _seraiKey);
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
}
function inInstruction(
address coin,
uint256 amount,
bytes memory instruction
) external payable {
if (coin == address(0)) {
if (amount != msg.value) {
revert InvalidAmount();
}
} else {
(bool success, bytes memory res) =
address(coin).call(
abi.encodeWithSelector(
IERC20.transferFrom.selector,
msg.sender,
address(this),
amount
)
);
// Require there was nothing returned, which is done by some non-standard
// tokens, or that the ERC20 contract did in fact return true
bool nonStandardResOrTrue =
(res.length == 0) || abi.decode(res, (bool));
if (!(success && nonStandardResOrTrue)) {
revert FailedTransfer();
}
}
/*
Due to fee-on-transfer tokens, emitting the amount directly is frowned upon.
The amount instructed to transfer may not actually be the amount
transferred.
If we add nonReentrant to every single function which can affect the
balance, we can check the amount exactly matches. This prevents transfers of
less value than expected occurring, at least, not without an additional
transfer to top up the difference (which isn't routed through this contract
and accordingly isn't trying to artificially create events).
If we don't add nonReentrant, a transfer can be started, and then a new
transfer for the difference can follow it up (again and again until a
rounding error is reached). This contract would believe all transfers were
done in full, despite each only being done in part (except for the last
one).
Given that fee-on-transfer tokens aren't intended to be supported (the only
token planned to be supported is Dai, which doesn't have any fee-on-transfer
logic), fee-on-transfer tokens aren't able to be supported at this time. We
simply classify this entire class of tokens as non-standard implementations
which induce undefined behavior. It is the Serai network's role not to add
support for any non-standard implementations.
*/
emit InInstruction(msg.sender, coin, amount, instruction);
}
// execute accepts a list of transactions to execute as well as a signature.
// if signature verification passes, the given transactions are executed.
// if signature verification fails, this function will revert.
function execute(
OutInstruction[] calldata transactions,
Signature calldata sig
) external {
if (transactions.length > 256) {
revert TooManyTransactions();
}
bytes memory message =
abi.encode("execute", block.chainid, nonce, transactions);
uint256 executed_with_nonce = nonce;
// This prevents re-entrancy from causing double spends yet does allow
// out-of-order execution via re-entrancy
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
uint256 successes;
for (uint256 i = 0; i < transactions.length; i++) {
bool success;
// If there are no calls, send to `to` the value
if (transactions[i].calls.length == 0) {
(success, ) = transactions[i].to.call{
value: transactions[i].value,
gas: 5_000
}("");
} else {
// If there are calls, ignore `to`. Deploy a new Sandbox and proxy the
// calls through that
//
// We could use a single sandbox in order to reduce gas costs, yet that
// risks one person creating an approval that's hooked before another
// user's intended action executes, in order to drain their coins
//
// While technically, that would be a flaw in the sandboxed flow, this
// is robust and prevents such flaws from being possible
//
// We also don't want people to set state via the Sandbox and expect it to
// be available in the future, when anyone else could set a distinct value
Sandbox sandbox = new Sandbox();
(success, ) = address(sandbox).call{
value: transactions[i].value,
// TODO: Have the Call specify the gas up front
gas: 350_000
}(
abi.encodeWithSelector(
Sandbox.sandbox.selector,
transactions[i].calls
)
);
}
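// Set the i-th bit of the success bitfield if this transaction succeeded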
assembly {
successes := or(successes, shl(i, success))
}
}
emit Executed(
executed_with_nonce,
keccak256(message),
successes,
sig
);
}
}

coins/ethereum/contracts/Sandbox.sol (new file)

@@ -0,0 +1,48 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.24;
struct Call {
address to;
uint256 value;
bytes data;
}
// A minimal sandbox focused on gas efficiency.
//
// The first call is executed if any of the calls fail, making it a fallback.
// All other calls are executed sequentially.
contract Sandbox {
error AlreadyCalled();
error CallsFailed();
function sandbox(Call[] calldata calls) external payable {
// Prevent re-entrancy due to this executing arbitrary calls from anyone
// and anywhere
bool called;
assembly { called := tload(0) }
if (called) {
revert AlreadyCalled();
}
assembly { tstore(0, 1) }
// Execute the calls, starting from 1
for (uint256 i = 1; i < calls.length; i++) {
(bool success, ) =
calls[i].to.call{ value: calls[i].value }(calls[i].data);
// If this call failed, execute the fallback (call 0)
if (!success) {
(success, ) =
calls[0].to.call{ value: address(this).balance }(calls[0].data);
// If this call also failed, revert entirely
if (!success) {
revert CallsFailed();
}
return;
}
}
// We don't clear the re-entrancy guard as this contract should never be
// called again, so there's no reason to spend the effort
}
}

coins/ethereum/contracts/Schnorr.sol (new file)

@@ -0,0 +1,44 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
// see https://github.com/noot/schnorr-verify for implementation details
library Schnorr {
// secp256k1 group order
uint256 constant public Q =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
// Fixed parity for the public keys used in this contract
// This avoids spending a word passing the parity in a similar style to
// Bitcoin's Taproot
uint8 constant public KEY_PARITY = 27;
error InvalidSOrA();
error MalformedSignature();
// px := public key x-coord, where the public key has a parity of KEY_PARITY
// message := 32-byte hash of the message
// c := schnorr signature challenge
// s := schnorr signature
function verify(
bytes32 px,
bytes memory message,
bytes32 c,
bytes32 s
) internal pure returns (bool) {
// ecrecover = (m, v, r, s) -> key
// We instead pass the following to obtain the nonce (not the key)
// Then we hash it and verify it matches the challenge
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
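// ecrecover here returns the address of px^-1 (ca * A - sa * G), where A is
// the point with x-coordinate px, i.e. px^-1 ((-c * px) * A - (-s * px) * G)
// = (s * G) - (c * A), which is the Schnorr nonce R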
// For safety, we want each input to ecrecover to be non-zero (sa, px, ca)
// The ecrecover precompile checks `r` and `s` (`px` and `ca`) are non-zero
// That leaves us to check `sa` is non-zero
if (sa == 0) revert InvalidSOrA();
address R = ecrecover(sa, KEY_PARITY, px, ca);
if (R == address(0)) revert MalformedSignature();
// Check the signature is correct by rebuilding the challenge
return c == keccak256(abi.encodePacked(R, px, message));
}
}

coins/ethereum/relayer/Cargo.toml

@@ -3,12 +3,11 @@ name = "serai-ethereum-relayer"
 version = "0.1.0"
 description = "A relayer for Serai's Ethereum transactions"
 license = "AGPL-3.0-only"
-repository = "https://github.com/serai-dex/serai/tree/develop/networks/ethereum/relayer"
+repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/relayer"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = []
 edition = "2021"
 publish = false
-rust-version = "1.72"

 [package.metadata.docs.rs]
 all-features = true

coins/ethereum/relayer/README.md (new file)

@@ -0,0 +1,4 @@
# Ethereum Transaction Relayer
This server collects Ethereum router commands to be published, offering an RPC
to fetch them.
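As a minimal sketch of the fetch protocol, matching the framing in `main.rs` below (a 4-byte little-endian nonce request, answered with a 4-byte little-endian length and then the command itself; the localhost address is an assumption of the example):

use tokio::{io::{AsyncReadExt, AsyncWriteExt}, net::TcpStream};

// Fetch the command for `nonce`, returning an empty Vec if none is known
async fn fetch_command(nonce: u32) -> std::io::Result<Vec<u8>> {
  let mut socket = TcpStream::connect("127.0.0.1:20831").await?;
  socket.write_all(&nonce.to_le_bytes()).await?;
  let mut len = [0; 4];
  socket.read_exact(&mut len).await?;
  let mut command = vec![0; usize::try_from(u32::from_le_bytes(len)).unwrap()];
  socket.read_exact(&mut command).await?;
  Ok(command)
}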

coins/ethereum/relayer/src/main.rs

@@ -40,8 +40,8 @@ async fn main() {
     db
   };

-  // Start transaction recipience server
-  // This MUST NOT be publicly exposed
+  // Start command recipience server
+  // This should not be publicly exposed
   // TODO: Add auth
   tokio::spawn({
     let db = db.clone();
@@ -58,27 +58,25 @@ async fn main() {
           let mut buf = vec![0; usize::try_from(msg_len).unwrap()];
           let Ok(_) = socket.read_exact(&mut buf).await else { break };

-          if buf.len() < (4 + 1) {
+          if buf.len() < 5 {
             break;
           }
           let nonce = u32::from_le_bytes(buf[.. 4].try_into().unwrap());
           let mut txn = db.txn();
-          // Save the transaction
           txn.put(nonce.to_le_bytes(), &buf[4 ..]);
           txn.commit();

           let Ok(()) = socket.write_all(&[1]).await else { break };
-          log::info!("received transaction to publish (nonce {nonce})");
+          log::info!("received signed command #{nonce}");
         }
       });
     }
   });

-  // Start transaction fetch server
+  // Start command fetch server
   // 5132 ^ ((b'E' << 8) | b'R') + 1
-  // TODO: JSON-RPC server which returns this as JSON?
   let server = TcpListener::bind("0.0.0.0:20831").await.unwrap();
   loop {
     let (mut socket, _) = server.accept().await.unwrap();
@@ -86,17 +84,16 @@ async fn main() {
     tokio::spawn(async move {
       let db = db.clone();
       loop {
-        // Nonce to get the unsigned transaction for
+        // Nonce to get the router command for
         let mut buf = vec![0; 4];
         let Ok(_) = socket.read_exact(&mut buf).await else { break };

-        let transaction = db.get(&buf[.. 4]).unwrap_or(vec![]);
-        let Ok(()) =
-          socket.write_all(&u32::try_from(transaction.len()).unwrap().to_le_bytes()).await
+        let command = db.get(&buf[.. 4]).unwrap_or(vec![]);
+        let Ok(()) = socket.write_all(&u32::try_from(command.len()).unwrap().to_le_bytes()).await
         else {
           break;
         };
-        let Ok(()) = socket.write_all(&transaction).await else { break };
+        let Ok(()) = socket.write_all(&command).await else { break };
       }
     });
   }

coins/ethereum/src/abi.rs (new file)

@@ -0,0 +1,37 @@
use alloy_sol_types::sol;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod erc20_container {
use super::*;
sol!("contracts/IERC20.sol");
}
pub use erc20_container::IERC20 as erc20;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod deployer_container {
use super::*;
sol!("contracts/Deployer.sol");
}
pub use deployer_container::Deployer as deployer;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod router_container {
use super::*;
sol!(Router, "artifacts/Router.abi");
}
pub use router_container::Router as router;

coins/ethereum/src/crypto.rs (new file)

@@ -0,0 +1,188 @@
use group::ff::PrimeField;
use k256::{
elliptic_curve::{ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256 as KU256,
};
#[cfg(test)]
use k256::{elliptic_curve::point::DecompressPoint, AffinePoint};
use frost::{
algorithm::{Hram, SchnorrSignature},
curve::{Ciphersuite, Secp256k1},
};
use alloy_core::primitives::{Parity, Signature as AlloySignature};
use alloy_consensus::{SignableTransaction, Signed, TxLegacy};
use crate::abi::router::{Signature as AbiSignature};
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
alloy_core::primitives::keccak256(data).into()
}
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(data).into())
}
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
let encoded_point = point.to_encoded_point(false);
// Last 20 bytes of the hash of the concatenated x and y coordinates
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
}
/// Deterministically sign a transaction.
///
/// This function panics if passed a transaction with a non-None chain ID.
pub fn deterministically_sign(tx: &TxLegacy) -> Signed<TxLegacy> {
assert!(
tx.chain_id.is_none(),
"chain ID was Some when deterministically signing a TX (causing a non-deterministic signer)"
);
let sig_hash = tx.signature_hash().0;
let mut r = hash_to_scalar(&[sig_hash.as_slice(), b"r"].concat());
let mut s = hash_to_scalar(&[sig_hash.as_slice(), b"s"].concat());
loop {
let r_bytes: [u8; 32] = r.to_repr().into();
let s_bytes: [u8; 32] = s.to_repr().into();
let v = Parity::NonEip155(false);
let signature =
AlloySignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), v).unwrap();
let tx = tx.clone().into_signed(signature);
if tx.recover_signer().is_ok() {
return tx;
}
// Re-hash until valid
r = hash_to_scalar(r_bytes.as_ref());
s = hash_to_scalar(s_bytes.as_ref());
}
}
/// The public key for a Schnorr-signing account.
#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct PublicKey {
pub(crate) A: ProjectivePoint,
pub(crate) px: Scalar,
}
impl PublicKey {
/// Construct a new `PublicKey`.
///
/// This will return None if the provided point isn't eligible to be a public key (due to
/// bounds such as parity).
#[allow(non_snake_case)]
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
let affine = A.to_affine();
// Only allow even keys to save a word within Ethereum
let is_odd = bool::from(affine.y_is_odd());
if is_odd {
None?;
}
let x_coord = affine.x();
let x_coord_scalar = <Scalar as Reduce<KU256>>::reduce_bytes(&x_coord);
// Return None if a reduction would occur
// Reductions would be incredibly unlikely and shouldn't be an issue, yet it's one less
// headache/concern to have
// This does ban a trivial amount of public keys
if x_coord_scalar.to_repr() != x_coord {
None?;
}
Some(PublicKey { A, px: x_coord_scalar })
}
pub fn point(&self) -> ProjectivePoint {
self.A
}
pub(crate) fn eth_repr(&self) -> [u8; 32] {
self.px.to_repr().into()
}
#[cfg(test)]
pub(crate) fn from_eth_repr(repr: [u8; 32]) -> Option<Self> {
#[allow(non_snake_case)]
let A = Option::<AffinePoint>::from(AffinePoint::decompress(&repr.into(), 0.into()))?.into();
Option::from(Scalar::from_repr(repr.into())).map(|px| PublicKey { A, px })
}
}
/// The HRAm to use for the Schnorr contract.
#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
#[allow(non_snake_case)]
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
let x_coord = A.to_affine().x();
let mut data = address(R).to_vec();
data.extend(x_coord.as_slice());
data.extend(m);
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(&data).into())
}
}
/// A signature for the Schnorr contract.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Signature {
pub(crate) c: Scalar,
pub(crate) s: Scalar,
}
impl Signature {
pub fn verify(&self, public_key: &PublicKey, message: &[u8]) -> bool {
#[allow(non_snake_case)]
let R = (Secp256k1::generator() * self.s) - (public_key.A * self.c);
EthereumHram::hram(&R, &public_key.A, message) == self.c
}
/// Construct a new `Signature`.
///
/// This will return None if the signature is invalid.
pub fn new(
public_key: &PublicKey,
message: &[u8],
signature: SchnorrSignature<Secp256k1>,
) -> Option<Signature> {
let c = EthereumHram::hram(&signature.R, &public_key.A, message);
if !signature.verify(public_key.A, c) {
None?;
}
let res = Signature { c, s: signature.s };
assert!(res.verify(public_key, message));
Some(res)
}
pub fn c(&self) -> Scalar {
self.c
}
pub fn s(&self) -> Scalar {
self.s
}
pub fn to_bytes(&self) -> [u8; 64] {
let mut res = [0; 64];
res[.. 32].copy_from_slice(self.c.to_repr().as_ref());
res[32 ..].copy_from_slice(self.s.to_repr().as_ref());
res
}
pub fn from_bytes(bytes: [u8; 64]) -> std::io::Result<Self> {
let mut reader = bytes.as_slice();
let c = Secp256k1::read_F(&mut reader)?;
let s = Secp256k1::read_F(&mut reader)?;
Ok(Signature { c, s })
}
}
impl From<&Signature> for AbiSignature {
fn from(sig: &Signature) -> AbiSignature {
let c: [u8; 32] = sig.c.to_repr().into();
let s: [u8; 32] = sig.s.to_repr().into();
AbiSignature { c: c.into(), s: s.into() }
}
}

coins/ethereum/src/deployer.rs (new file)

@@ -0,0 +1,113 @@
use std::sync::Arc;
use alloy_core::primitives::{hex::FromHex, Address, B256, U256, Bytes, TxKind};
use alloy_consensus::{Signed, TxLegacy};
use alloy_sol_types::{SolCall, SolEvent};
use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::{
Error,
crypto::{self, keccak256, PublicKey},
router::Router,
};
pub use crate::abi::deployer as abi;
/// The Deployer contract for the Router contract.
///
/// This Deployer has a deterministic address, letting it be immediately identified on any
/// compatible chain. It then supports retrieving the Router contract's address (which isn't
/// deterministic) using a single log query.
#[derive(Clone, Debug)]
pub struct Deployer;
impl Deployer {
/// Obtain the transaction to deploy this contract, already signed.
///
/// The account this transaction is sent from (which is populated in `from`) must be sufficiently
/// funded for this transaction to be submitted. This account has no known private key to anyone,
/// so ETH sent can be neither misappropriated nor returned.
pub fn deployment_tx() -> Signed<TxLegacy> {
let bytecode = include_str!("../artifacts/Deployer.bin");
let bytecode =
Bytes::from_hex(bytecode).expect("compiled-in Deployer bytecode wasn't valid hex");
let tx = TxLegacy {
chain_id: None,
nonce: 0,
gas_price: 100_000_000_000u128,
// TODO: Use a more accurate gas limit
gas_limit: 1_000_000u128,
to: TxKind::Create,
value: U256::ZERO,
input: bytecode,
};
crypto::deterministically_sign(&tx)
}
/// Obtain the deterministic address for this contract.
pub fn address() -> [u8; 20] {
let deployer_deployer =
Self::deployment_tx().recover_signer().expect("deployment_tx didn't have a valid signature");
**Address::create(&deployer_deployer, 0)
}
/// Construct a new view of the `Deployer`.
pub async fn new(provider: Arc<RootProvider<SimpleRequest>>) -> Result<Option<Self>, Error> {
let address = Self::address();
let code = provider.get_code_at(address.into()).await.map_err(|_| Error::ConnectionError)?;
// Contract has yet to be deployed
if code.is_empty() {
return Ok(None);
}
Ok(Some(Self))
}
/// Yield the `ContractCall` necessary to deploy the Router.
pub fn deploy_router(&self, key: &PublicKey) -> TxLegacy {
TxLegacy {
to: TxKind::Call(Self::address().into()),
input: abi::deployCall::new((Router::init_code(key).into(),)).abi_encode().into(),
gas_limit: 1_000_000,
..Default::default()
}
}
/// Find the first Router deployed with the specified key as its first key.
///
/// This is the Router Serai will use, and is the only way to construct a `Router`.
pub async fn find_router(
&self,
provider: Arc<RootProvider<SimpleRequest>>,
key: &PublicKey,
) -> Result<Option<Router>, Error> {
let init_code = Router::init_code(key);
let init_code_hash = keccak256(&init_code);
#[cfg(not(test))]
let to_block = BlockNumberOrTag::Finalized;
#[cfg(test)]
let to_block = BlockNumberOrTag::Latest;
// Find the first log using this init code (where the init code is binding to the key)
// TODO: Make an abstraction for event filtering (de-duplicating common code)
let filter =
Filter::new().from_block(0).to_block(to_block).address(Address::from(Self::address()));
let filter = filter.event_signature(abi::Deployment::SIGNATURE_HASH);
let filter = filter.topic1(B256::from(init_code_hash));
let logs = provider.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let Some(first_log) = logs.first() else { return Ok(None) };
let router = first_log
.log_decode::<abi::Deployment>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.created;
Ok(Some(Router::new(provider, router)))
}
}

coins/ethereum/src/erc20.rs (new file)

@@ -0,0 +1,105 @@
use std::{sync::Arc, collections::HashSet};
use alloy_core::primitives::{Address, B256, U256};
use alloy_sol_types::{SolInterface, SolEvent};
use alloy_rpc_types_eth::Filter;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::Error;
pub use crate::abi::erc20 as abi;
use abi::{IERC20Calls, Transfer, transferCall, transferFromCall};
#[derive(Clone, Debug)]
pub struct TopLevelErc20Transfer {
pub id: [u8; 32],
pub from: [u8; 20],
pub amount: U256,
pub data: Vec<u8>,
}
/// A view for an ERC20 contract.
#[derive(Clone, Debug)]
pub struct Erc20(Arc<RootProvider<SimpleRequest>>, Address);
impl Erc20 {
/// Construct a new view of the specified ERC20 contract.
pub fn new(provider: Arc<RootProvider<SimpleRequest>>, address: [u8; 20]) -> Self {
Self(provider, Address::from(&address))
}
pub async fn top_level_transfers(
&self,
block: u64,
to: [u8; 20],
) -> Result<Vec<TopLevelErc20Transfer>, Error> {
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(Transfer::SIGNATURE_HASH);
let mut to_topic = [0; 32];
to_topic[12 ..].copy_from_slice(&to);
let filter = filter.topic2(B256::from(to_topic));
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let mut handled = HashSet::new();
let mut top_level_transfers = vec![];
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?;
let tx =
self.0.get_transaction_by_hash(tx_id).await.ok().flatten().ok_or(Error::ConnectionError)?;
// If this is a top-level call...
if tx.to == Some(self.1) {
// And we recognize the call...
// Don't validate the encoding as this can't be re-encoded to an identical bytestring due
// to the InInstruction appended
if let Ok(call) = IERC20Calls::abi_decode(&tx.input, false) {
// Extract the top-level call's from/to/value
let (from, call_to, value) = match call {
IERC20Calls::transfer(transferCall { to: call_to, value }) => (tx.from, call_to, value),
IERC20Calls::transferFrom(transferFromCall { from, to: call_to, value }) => {
(from, call_to, value)
}
// Treat any other function selectors as unrecognized
_ => continue,
};
let log = log.log_decode::<Transfer>().map_err(|_| Error::ConnectionError)?.inner.data;
// Ensure the top-level transfer is equivalent, and this presumably isn't a log for an
// internal transfer
if (log.from != from) || (call_to != to) || (value != log.value) {
continue;
}
// Now that the top-level transfer is confirmed to be equivalent to the log, ensure it's
// the only log we handle
if handled.contains(&tx_id) {
continue;
}
handled.insert(tx_id);
// Read the data appended after
let encoded = call.abi_encode();
let data = tx.input.as_ref()[encoded.len() ..].to_vec();
// Push the transfer
top_level_transfers.push(TopLevelErc20Transfer {
// Since we'll only handle one log for this TX, set the ID to the TX ID
id: *tx_id,
from: *log.from.0,
amount: log.value,
data,
});
}
}
}
Ok(top_level_transfers)
}
}

coins/ethereum/src/lib.rs (new file)

@@ -0,0 +1,35 @@
use thiserror::Error;
pub mod alloy {
pub use alloy_core::primitives;
pub use alloy_core as core;
pub use alloy_sol_types as sol_types;
pub use alloy_consensus as consensus;
pub use alloy_network as network;
pub use alloy_rpc_types_eth as rpc_types;
pub use alloy_simple_request_transport as simple_request_transport;
pub use alloy_rpc_client as rpc_client;
pub use alloy_provider as provider;
}
pub mod crypto;
pub(crate) mod abi;
pub mod erc20;
pub mod deployer;
pub mod router;
pub mod machine;
#[cfg(any(test, feature = "tests"))]
pub mod tests;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum Error {
#[error("failed to verify Schnorr signature")]
InvalidSignature,
#[error("couldn't make call/send TX")]
ConnectionError,
}

coins/ethereum/src/machine.rs (new file)

@@ -0,0 +1,414 @@
use std::{
io::{self, Read},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use transcript::{Transcript, RecommendedTranscript};
use group::GroupEncoding;
use frost::{
curve::{Ciphersuite, Secp256k1},
Participant, ThresholdKeys, FrostError,
algorithm::Schnorr,
sign::*,
};
use alloy_core::primitives::U256;
use crate::{
crypto::{PublicKey, EthereumHram, Signature},
router::{
abi::{Call as AbiCall, OutInstruction as AbiOutInstruction},
Router,
},
};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Call {
pub to: [u8; 20],
pub value: U256,
pub data: Vec<u8>,
}
impl Call {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut to = [0; 20];
reader.read_exact(&mut to)?;
let value = {
let mut value_bytes = [0; 32];
reader.read_exact(&mut value_bytes)?;
U256::from_le_slice(&value_bytes)
};
let mut data_len = {
let mut data_len = [0; 4];
reader.read_exact(&mut data_len)?;
usize::try_from(u32::from_le_bytes(data_len)).expect("u32 couldn't fit within a usize")
};
// A valid DoS would be to claim 4 GB of data is present when only 4 bytes are
// We read this in 1 KB chunks to only read data actually present (with a max DoS of 1 KB)
let mut data = vec![];
while data_len > 0 {
let chunk_len = data_len.min(1024);
let mut chunk = vec![0; chunk_len];
reader.read_exact(&mut chunk)?;
data.extend(&chunk);
data_len -= chunk_len;
}
Ok(Call { to, value, data })
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.to)?;
writer.write_all(&self.value.as_le_bytes())?;
let data_len = u32::try_from(self.data.len())
.map_err(|_| io::Error::other("call data length exceeded 2**32"))?;
writer.write_all(&data_len.to_le_bytes())?;
writer.write_all(&self.data)
}
}
impl From<Call> for AbiCall {
fn from(call: Call) -> AbiCall {
AbiCall { to: call.to.into(), value: call.value, data: call.data.into() }
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum OutInstructionTarget {
Direct([u8; 20]),
Calls(Vec<Call>),
}
impl OutInstructionTarget {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
match kind[0] {
0 => {
let mut addr = [0; 20];
reader.read_exact(&mut addr)?;
Ok(OutInstructionTarget::Direct(addr))
}
1 => {
let mut calls_len = [0; 4];
reader.read_exact(&mut calls_len)?;
let calls_len = u32::from_le_bytes(calls_len);
let mut calls = vec![];
for _ in 0 .. calls_len {
calls.push(Call::read(reader)?);
}
Ok(OutInstructionTarget::Calls(calls))
}
_ => Err(io::Error::other("unrecognized OutInstructionTarget"))?,
}
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
OutInstructionTarget::Direct(addr) => {
writer.write_all(&[0])?;
writer.write_all(addr)?;
}
OutInstructionTarget::Calls(calls) => {
writer.write_all(&[1])?;
let call_len = u32::try_from(calls.len())
.map_err(|_| io::Error::other("amount of calls exceeded 2**32"))?;
writer.write_all(&call_len.to_le_bytes())?;
for call in calls {
call.write(writer)?;
}
}
}
Ok(())
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OutInstruction {
pub target: OutInstructionTarget,
pub value: U256,
}
impl OutInstruction {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let target = OutInstructionTarget::read(reader)?;
let value = {
let mut value_bytes = [0; 32];
reader.read_exact(&mut value_bytes)?;
U256::from_le_slice(&value_bytes)
};
Ok(OutInstruction { target, value })
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
self.target.write(writer)?;
writer.write_all(&self.value.as_le_bytes())
}
}
impl From<OutInstruction> for AbiOutInstruction {
fn from(instruction: OutInstruction) -> AbiOutInstruction {
match instruction.target {
OutInstructionTarget::Direct(addr) => {
AbiOutInstruction { to: addr.into(), calls: vec![], value: instruction.value }
}
OutInstructionTarget::Calls(calls) => AbiOutInstruction {
to: [0; 20].into(),
calls: calls.into_iter().map(Into::into).collect(),
value: instruction.value,
},
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum RouterCommand {
UpdateSeraiKey { chain_id: U256, nonce: U256, key: PublicKey },
Execute { chain_id: U256, nonce: U256, outs: Vec<OutInstruction> },
}
impl RouterCommand {
pub fn msg(&self) -> Vec<u8> {
match self {
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
Router::update_serai_key_message(*chain_id, *nonce, key)
}
RouterCommand::Execute { chain_id, nonce, outs } => Router::execute_message(
*chain_id,
*nonce,
outs.iter().map(|out| out.clone().into()).collect(),
),
}
}
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
match kind[0] {
0 => {
let mut chain_id = [0; 32];
reader.read_exact(&mut chain_id)?;
let mut nonce = [0; 32];
reader.read_exact(&mut nonce)?;
let key = PublicKey::new(Secp256k1::read_G(reader)?)
.ok_or(io::Error::other("key for RouterCommand doesn't have an eth representation"))?;
Ok(RouterCommand::UpdateSeraiKey {
chain_id: U256::from_le_slice(&chain_id),
nonce: U256::from_le_slice(&nonce),
key,
})
}
1 => {
let mut chain_id = [0; 32];
reader.read_exact(&mut chain_id)?;
let chain_id = U256::from_le_slice(&chain_id);
let mut nonce = [0; 32];
reader.read_exact(&mut nonce)?;
let nonce = U256::from_le_slice(&nonce);
let mut outs_len = [0; 4];
reader.read_exact(&mut outs_len)?;
let outs_len = u32::from_le_bytes(outs_len);
let mut outs = vec![];
for _ in 0 .. outs_len {
outs.push(OutInstruction::read(reader)?);
}
Ok(RouterCommand::Execute { chain_id, nonce, outs })
}
_ => Err(io::Error::other("reading unknown type of RouterCommand"))?,
}
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
writer.write_all(&[0])?;
writer.write_all(&chain_id.as_le_bytes())?;
writer.write_all(&nonce.as_le_bytes())?;
writer.write_all(&key.A.to_bytes())
}
RouterCommand::Execute { chain_id, nonce, outs } => {
writer.write_all(&[1])?;
writer.write_all(&chain_id.as_le_bytes())?;
writer.write_all(&nonce.as_le_bytes())?;
writer.write_all(&u32::try_from(outs.len()).unwrap().to_le_bytes())?;
for out in outs {
out.write(writer)?;
}
Ok(())
}
}
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SignedRouterCommand {
command: RouterCommand,
signature: Signature,
}
impl SignedRouterCommand {
pub fn new(key: &PublicKey, command: RouterCommand, signature: &[u8; 64]) -> Option<Self> {
let c = Secp256k1::read_F(&mut &signature[.. 32]).ok()?;
let s = Secp256k1::read_F(&mut &signature[32 ..]).ok()?;
let signature = Signature { c, s };
if !signature.verify(key, &command.msg()) {
None?
}
Some(SignedRouterCommand { command, signature })
}
pub fn command(&self) -> &RouterCommand {
&self.command
}
pub fn signature(&self) -> &Signature {
&self.signature
}
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let command = RouterCommand::read(reader)?;
let mut sig = [0; 64];
reader.read_exact(&mut sig)?;
let signature = Signature::from_bytes(sig)?;
Ok(SignedRouterCommand { command, signature })
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
self.command.write(writer)?;
writer.write_all(&self.signature.to_bytes())
}
}
pub struct RouterCommandMachine {
key: PublicKey,
command: RouterCommand,
machine: AlgorithmMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl RouterCommandMachine {
pub fn new(keys: ThresholdKeys<Secp256k1>, command: RouterCommand) -> Option<Self> {
// While the Schnorr algorithm should be fine without this, even when using the IETF variant,
// this transcripting is more comprehensive, so it's done even if not strictly necessary
let mut transcript = RecommendedTranscript::new(b"ethereum-serai RouterCommandMachine v0.1");
let key = keys.group_key();
transcript.append_message(b"key", key.to_bytes());
transcript.append_message(b"command", command.serialize());
Some(Self {
key: PublicKey::new(key)?,
command,
machine: AlgorithmMachine::new(Schnorr::new(transcript), keys),
})
}
}
impl PreprocessMachine for RouterCommandMachine {
type Preprocess = Preprocess<Secp256k1, ()>;
type Signature = SignedRouterCommand;
type SignMachine = RouterCommandSignMachine;
fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R,
) -> (Self::SignMachine, Self::Preprocess) {
let (machine, preprocess) = self.machine.preprocess(rng);
(RouterCommandSignMachine { key: self.key, command: self.command, machine }, preprocess)
}
}
pub struct RouterCommandSignMachine {
key: PublicKey,
command: RouterCommand,
machine: AlgorithmSignMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignMachine<SignedRouterCommand> for RouterCommandSignMachine {
type Params = ();
type Keys = ThresholdKeys<Secp256k1>;
type Preprocess = Preprocess<Secp256k1, ()>;
type SignatureShare = SignatureShare<Secp256k1>;
type SignatureMachine = RouterCommandSignatureMachine;
fn cache(self) -> CachedPreprocess {
unimplemented!(
"RouterCommand machines don't support caching their preprocesses due to {}",
"being already bound to a specific command"
);
}
fn from_cache(
(): (),
_: ThresholdKeys<Secp256k1>,
_: CachedPreprocess,
) -> (Self, Self::Preprocess) {
unimplemented!(
"RouterCommand machines don't support caching their preprocesses due to {}",
"being already bound to a specific command"
);
}
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.machine.read_preprocess(reader)
}
fn sign(
self,
commitments: HashMap<Participant, Self::Preprocess>,
msg: &[u8],
) -> Result<(RouterCommandSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
panic!("message was passed to a RouterCommand machine when it generates its own");
}
let (machine, share) = self.machine.sign(commitments, &self.command.msg())?;
Ok((RouterCommandSignatureMachine { key: self.key, command: self.command, machine }, share))
}
}
pub struct RouterCommandSignatureMachine {
key: PublicKey,
command: RouterCommand,
machine:
AlgorithmSignatureMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignatureMachine<SignedRouterCommand> for RouterCommandSignatureMachine {
type SignatureShare = SignatureShare<Secp256k1>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.machine.read_share(reader)
}
fn complete(
self,
shares: HashMap<Participant, Self::SignatureShare>,
) -> Result<SignedRouterCommand, FrostError> {
let sig = self.machine.complete(shares)?;
let signature = Signature::new(&self.key, &self.command.msg(), sig)
.expect("machine produced an invalid signature");
Ok(SignedRouterCommand { command: self.command, signature })
}
}
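As a sketch of how these machines compose (the keys are assumed to come from an existing FROST keygen; this mirrors the test helper for bitcoin-serai earlier in this diff and isn't code from this commit):

use std::collections::HashMap;

use rand_core::OsRng;

use frost::{curve::Secp256k1, Participant, ThresholdKeys, sign::*};

use ethereum_serai::machine::{RouterCommand, RouterCommandMachine, SignedRouterCommand};

// Run the FROST rounds for a RouterCommand among the given key shares
fn sign_command(
  mut keys: HashMap<Participant, ThresholdKeys<Secp256k1>>,
  command: &RouterCommand,
) -> SignedRouterCommand {
  // Round 1: create a machine per participant and generate preprocesses
  let mut machines = HashMap::new();
  let mut preprocesses = HashMap::new();
  for (i, keys) in keys.drain() {
    let machine = RouterCommandMachine::new(keys, command.clone()).unwrap();
    let (machine, preprocess) = machine.preprocess(&mut OsRng);
    machines.insert(i, machine);
    preprocesses.insert(i, preprocess);
  }

  // Round 2: exchange preprocesses and produce signature shares, passing an
  // empty message since the machine derives the message from the command itself
  let mut signing = HashMap::new();
  let mut shares = HashMap::new();
  for (i, machine) in machines.drain() {
    let mut others = preprocesses.clone();
    others.remove(&i);
    let (machine, share) = machine.sign(others, &[]).unwrap();
    signing.insert(i, machine);
    shares.insert(i, share);
  }

  // Completion: any one participant combines the others' shares
  let (i, machine) = signing.drain().next().unwrap();
  shares.remove(&i);
  machine.complete(shares).unwrap()
}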

coins/ethereum/src/router.rs (new file)

@@ -0,0 +1,443 @@
use std::{sync::Arc, io, collections::HashSet};
use k256::{
elliptic_curve::{group::GroupEncoding, sec1},
ProjectivePoint,
};
use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
#[cfg(test)]
use alloy_core::primitives::B256;
use alloy_consensus::TxLegacy;
use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
use alloy_rpc_types_eth::Filter;
#[cfg(test)]
use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
pub use crate::{
Error,
crypto::{PublicKey, Signature},
abi::{erc20::Transfer, router as abi},
};
use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
Ether,
Erc20([u8; 20]),
}
impl Coin {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
Ok(match kind[0] {
0 => Coin::Ether,
1 => {
let mut address = [0; 20];
reader.read_exact(&mut address)?;
Coin::Erc20(address)
}
_ => Err(io::Error::other("unrecognized Coin type"))?,
})
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
Coin::Ether => writer.write_all(&[0]),
Coin::Erc20(token) => {
writer.write_all(&[1])?;
writer.write_all(token)
}
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InInstruction {
pub id: ([u8; 32], u64),
pub from: [u8; 20],
pub coin: Coin,
pub amount: U256,
pub data: Vec<u8>,
pub key_at_end_of_block: ProjectivePoint,
}
impl InInstruction {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let id = {
let mut id_hash = [0; 32];
reader.read_exact(&mut id_hash)?;
let mut id_pos = [0; 8];
reader.read_exact(&mut id_pos)?;
let id_pos = u64::from_le_bytes(id_pos);
(id_hash, id_pos)
};
let mut from = [0; 20];
reader.read_exact(&mut from)?;
let coin = Coin::read(reader)?;
let mut amount = [0; 32];
reader.read_exact(&mut amount)?;
let amount = U256::from_le_slice(&amount);
let mut data_len = [0; 4];
reader.read_exact(&mut data_len)?;
let data_len = usize::try_from(u32::from_le_bytes(data_len))
.map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
let mut data = vec![0; data_len];
reader.read_exact(&mut data)?;
let mut key_at_end_of_block = <ProjectivePoint as GroupEncoding>::Repr::default();
reader.read_exact(&mut key_at_end_of_block)?;
let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block))
.ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?;
Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block })
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.id.0)?;
writer.write_all(&self.id.1.to_le_bytes())?;
writer.write_all(&self.from)?;
self.coin.write(writer)?;
writer.write_all(&self.amount.as_le_bytes())?;
writer.write_all(
&u32::try_from(self.data.len())
.map_err(|_| {
io::Error::other("InInstruction being written had data exceeding 2**32 in length")
})?
.to_le_bytes(),
)?;
writer.write_all(&self.data)?;
writer.write_all(&self.key_at_end_of_block.to_bytes())
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Executed {
pub tx_id: [u8; 32],
pub nonce: u64,
pub signature: [u8; 64],
}
/// The contract Serai uses to manage its state.
#[derive(Clone, Debug)]
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
impl Router {
pub(crate) fn code() -> Vec<u8> {
let bytecode = include_str!("../artifacts/Router.bin");
Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
}
pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
let mut bytecode = Self::code();
// Append the constructor arguments
bytecode.extend((abi::constructorCall { _seraiKey: key.eth_repr().into() }).abi_encode());
bytecode
}
// This isn't pub in order to force users to use `Deployer::find_router`.
pub(crate) fn new(provider: Arc<RootProvider<SimpleRequest>>, address: Address) -> Self {
Self(provider, address)
}
pub fn address(&self) -> [u8; 20] {
**self.1
}
/// Get the key for Serai at the specified block.
#[cfg(test)]
pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError)
}
/// Get the message to be signed in order to update the key for Serai.
pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
let mut buffer = b"updateSeraiKey".to_vec();
buffer.extend(&chain_id.to_be_bytes::<32>());
buffer.extend(&nonce.to_be_bytes::<32>());
buffer.extend(&key.eth_repr());
buffer
}
/// Update the key representing Serai.
pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
// TODO: Set a more accurate gas
TxLegacy {
to: TxKind::Call(self.1),
input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
.abi_encode()
.into(),
gas_limit: 100_000,
..Default::default()
}
}
/// Get the current nonce for the published batches.
#[cfg(test)]
pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
Ok(res._0)
}
/// Get the message to be signed in order to update the key for Serai.
pub(crate) fn execute_message(
chain_id: U256,
nonce: U256,
outs: Vec<abi::OutInstruction>,
) -> Vec<u8> {
("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
}
/// Execute a batch of `OutInstruction`s.
pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy {
TxLegacy {
to: TxKind::Call(self.1),
input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
// TODO
gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
..Default::default()
}
}
pub async fn key_at_end_of_block(&self, block: u64) -> Result<Option<ProjectivePoint>, Error> {
let filter = Filter::new().from_block(0).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
if all_keys.is_empty() {
return Ok(None);
};
let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?;
let last_key_x_coordinate = last_key_x_coordinate_log
.log_decode::<SeraiKeyUpdated>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.key;
let mut compressed_point = <ProjectivePoint as GroupEncoding>::Repr::default();
compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY);
compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice());
let key =
Option::from(ProjectivePoint::from_bytes(&compressed_point)).ok_or(Error::ConnectionError)?;
Ok(Some(key))
}
pub async fn in_instructions(
&self,
block: u64,
allowed_tokens: &HashSet<[u8; 20]>,
) -> Result<Vec<InInstruction>, Error> {
let Some(key_at_end_of_block) = self.key_at_end_of_block(block).await? else {
return Ok(vec![]);
};
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let mut transfer_check = HashSet::new();
let mut in_instructions = vec![];
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let id = (
log.block_hash.ok_or(Error::ConnectionError)?.into(),
log.log_index.ok_or(Error::ConnectionError)?,
);
let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?;
let tx = self
.0
.get_transaction_by_hash(tx_hash)
.await
.ok()
.flatten()
.ok_or(Error::ConnectionError)?;
let log =
log.log_decode::<InInstructionEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let coin = if log.coin.0 == [0; 20] {
Coin::Ether
} else {
let token = *log.coin.0;
if !allowed_tokens.contains(&token) {
continue;
}
// If this also counts as a top-level transfer via the token, drop it
//
// Necessary in order to handle a potential edge case with some theoretical token
// implementations
//
// This will either let it be handled by the top-level transfer hook or will drop it
// entirely on the side of caution
if tx.to == Some(token.into()) {
continue;
}
// Get all logs for this TX
let receipt = self
.0
.get_transaction_receipt(tx_hash)
.await
.map_err(|_| Error::ConnectionError)?
.ok_or(Error::ConnectionError)?;
let tx_logs = receipt.inner.logs();
// Find a matching transfer log
let mut found_transfer = false;
for tx_log in tx_logs {
let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?;
// Ensure we didn't already use this transfer to check a distinct InInstruction event
if transfer_check.contains(&log_index) {
continue;
}
// Check if this log is from the token we expected to be transferred
if tx_log.address().0 != token {
continue;
}
// Check if this is a transfer log
// https://github.com/alloy-rs/core/issues/589
if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
continue;
}
let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
// Check if this is a transfer to us for the expected amount
if (transfer.to == self.1) && (transfer.value == log.amount) {
transfer_check.insert(log_index);
found_transfer = true;
break;
}
}
if !found_transfer {
// This shouldn't be a ConnectionError
// This is an exploit, a non-conforming ERC20, or an invalid connection
// This should halt the process which is sufficient, yet this is sub-optimal
// TODO
Err(Error::ConnectionError)?;
}
Coin::Erc20(token)
};
in_instructions.push(InInstruction {
id,
from: *log.from.0,
coin,
amount: log.amount,
data: log.instruction.as_ref().to_vec(),
key_at_end_of_block,
});
}
Ok(in_instructions)
}
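/// Get the commands executed within the specified block, whether key updates or batch executions.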
pub async fn executed_commands(&self, block: u64) -> Result<Vec<Executed>, Error> {
let mut res = vec![];
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log =
log.log_decode::<SeraiKeyUpdated>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log = log.log_decode::<ExecutedEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
Ok(res)
}
#[cfg(feature = "tests")]
pub fn key_updated_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
}
#[cfg(feature = "tests")]
pub fn executed_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
}
}

View File

@@ -0,0 +1,13 @@
use alloy_sol_types::sol;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod schnorr_container {
use super::*;
sol!("src/tests/contracts/Schnorr.sol");
}
pub(crate) use schnorr_container::TestSchnorr as schnorr;

View File

@@ -1,5 +1,5 @@
-// SPDX-License-Identifier: AGPL-3.0-only
-pragma solidity ^0.8.26;
+// SPDX-License-Identifier: AGPLv3
+pragma solidity ^0.8.0;
 
 contract TestERC20 {
   event Transfer(address indexed from, address indexed to, uint256 value);
@@ -8,11 +8,9 @@ contract TestERC20 {
   function name() public pure returns (string memory) {
     return "Test ERC20";
   }
-
   function symbol() public pure returns (string memory) {
     return "TEST";
   }
-
   function decimals() public pure returns (uint8) {
     return 18;
   }
@@ -31,13 +29,11 @@ contract TestERC20 {
   function balanceOf(address owner) public view returns (uint256) {
     return balances[owner];
   }
-
   function transfer(address to, uint256 value) public returns (bool) {
     balances[msg.sender] -= value;
     balances[to] += value;
     return true;
   }
-
   function transferFrom(address from, address to, uint256 value) public returns (bool) {
     allowances[from][msg.sender] -= value;
     balances[from] -= value;
@@ -49,7 +45,6 @@ contract TestERC20 {
     allowances[msg.sender][spender] = value;
     return true;
   }
-
   function allowance(address owner, address spender) public view returns (uint256) {
     return allowances[owner][spender];
   }

View File

@@ -0,0 +1,15 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "../../../contracts/Schnorr.sol";
contract TestSchnorr {
function verify(
bytes32 px,
bytes calldata message,
bytes32 c,
bytes32 s
) external pure returns (bool) {
return Schnorr.verify(px, message, c, s);
}
}

View File

@@ -0,0 +1,105 @@
use rand_core::OsRng;
use group::ff::{Field, PrimeField};
use k256::{
ecdsa::{
self, hazmat::SignPrimitive, signature::hazmat::PrehashVerifier, SigningKey, VerifyingKey,
},
Scalar, ProjectivePoint,
};
use frost::{
curve::{Ciphersuite, Secp256k1},
algorithm::{Hram, IetfSchnorr},
tests::{algorithm_machines, sign},
};
use crate::{crypto::*, tests::key_gen};
// The ecrecover precompile, yet with parity replacing v
pub(crate) fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
let sig = ecdsa::Signature::from_scalars(r, s).ok()?;
let message: [u8; 32] = message.to_repr().into();
alloy_core::primitives::Signature::from_signature_and_parity(
sig,
alloy_core::primitives::Parity::Parity(odd_y),
)
.ok()?
.recover_address_from_prehash(&alloy_core::primitives::B256::from(message))
.ok()
.map(Into::into)
}
#[test]
fn test_ecrecover() {
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
// Sign the message
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed(
<Secp256k1 as Ciphersuite>::F::random(&mut OsRng),
&keccak256(MESSAGE).into(),
)
.unwrap();
// Sanity check the signature verifies
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_prehash(&keccak256(MESSAGE), &sig).unwrap(), ());
}
// Perform the ecrecover
assert_eq!(
ecrecover(
hash_to_scalar(MESSAGE),
u8::from(recovery_id.unwrap().is_y_odd()) == 1,
*sig.r(),
*sig.s()
)
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
// Run the sign test with the EthereumHram
#[test]
fn test_signing() {
let (keys, _) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
}
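// How the following "ecrecover hack" works: ecrecover(m, v, r, s) returns the address of
// r^-1 (s R' - m G), where R' is the point with x-coordinate r and y-parity v. Passing
// m = -s px, r = px (the x-coordinate of the public key A, which has an even y-coordinate),
// and the s-parameter = -c px recovers the address of px^-1 (-c px A + s px G) = s G - c A,
// which equals address(R) exactly when the Schnorr signature (c, s) for A is valid.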
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
R: ProjectivePoint,
public_key: &PublicKey,
m: &[u8],
s: Scalar,
) -> (Scalar, Scalar) {
let c = EthereumHram::hram(&R, &public_key.A, m);
let sa = -(s * public_key.px);
let ca = -(c * public_key.px);
(sa, ca)
}
#[test]
fn test_ecrecover_hack() {
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let (sa, ca) = preprocess_signature_for_ecrecover(sig.R, &public_key, MESSAGE, sig.s);
let q = ecrecover(sa, false, public_key.px, ca).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,5 +1,3 @@
-// TODO
-
 use std::{sync::Arc, collections::HashMap};
 
 use rand_core::OsRng;
@@ -8,7 +6,7 @@ use k256::{Scalar, ProjectivePoint};
 use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
 
 use alloy_core::{
-  primitives::{Address, U256, Bytes, Signature, TxKind},
+  primitives::{Address, U256, Bytes, TxKind},
   hex::FromHex,
 };
 use alloy_consensus::{SignableTransaction, TxLegacy};
@@ -23,7 +21,9 @@ use crate::crypto::{address, deterministically_sign, PublicKey};
 mod crypto;
 
 #[cfg(test)]
-use contracts::tests as abi;
+mod abi;
+#[cfg(test)]
+mod schnorr;
 
 #[cfg(test)]
 mod router;
@@ -69,7 +69,7 @@ pub async fn send(
   );
 
   let mut bytes = vec![];
-  tx.encode_with_signature_fields(&Signature::from(sig), &mut bytes);
+  tx.encode_with_signature_fields(&sig.into(), &mut bytes);
 
   let pending_tx = provider.send_raw_transaction(&bytes).await.ok()?;
   pending_tx.get_receipt().await.ok()
 }

View File

@@ -1,5 +1,3 @@
-// TODO
-
 use std::{convert::TryFrom, sync::Arc, collections::HashMap};
 
 use rand_core::OsRng;
@@ -93,6 +91,7 @@ async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
     .unwrap()
     .header
     .hash
+    .unwrap()
     .0
 }

View File

@@ -0,0 +1,93 @@
use std::sync::Arc;
use rand_core::OsRng;
use group::ff::PrimeField;
use k256::Scalar;
use frost::{
curve::Secp256k1,
algorithm::IetfSchnorr,
tests::{algorithm_machines, sign},
};
use alloy_core::primitives::Address;
use alloy_sol_types::SolCall;
use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::{Anvil, AnvilInstance};
use crate::{
Error,
crypto::*,
tests::{key_gen, deploy_contract, abi::schnorr as abi},
};
async fn setup_test() -> (AnvilInstance, Arc<RootProvider<SimpleRequest>>, Address) {
let anvil = Anvil::new().spawn();
let provider = RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
);
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
let address = deploy_contract(client.clone(), &wallet, "TestSchnorr").await.unwrap();
(anvil, client, address)
}
#[tokio::test]
async fn test_deploy_contract() {
setup_test().await;
}
pub async fn call_verify(
provider: &RootProvider<SimpleRequest>,
contract: Address,
public_key: &PublicKey,
message: &[u8],
signature: &Signature,
) -> Result<(), Error> {
let px: [u8; 32] = public_key.px.to_repr().into();
let c_bytes: [u8; 32] = signature.c.to_repr().into();
let s_bytes: [u8; 32] = signature.s.to_repr().into();
let call = TransactionRequest::default().to(contract).input(TransactionInput::new(
abi::verifyCall::new((px.into(), message.to_vec().into(), c_bytes.into(), s_bytes.into()))
.abi_encode()
.into(),
));
let bytes = provider.call(&call).await.map_err(|_| Error::ConnectionError)?;
let res =
abi::verifyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
if res._0 {
Ok(())
} else {
Err(Error::InvalidSignature)
}
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (_anvil, client, contract) = setup_test().await;
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let sig = Signature::new(&public_key, MESSAGE, sig).unwrap();
call_verify(&client, contract, &public_key, MESSAGE, &sig).await.unwrap();
// Test an invalid signature fails
let mut sig = sig;
sig.s += Scalar::ONE;
assert!(call_verify(&client, contract, &public_key, MESSAGE, &sig).await.is_err());
}

View File

@@ -3,10 +3,10 @@ name = "monero-serai"
version = "0.1.4-alpha" version = "0.1.4-alpha"
description = "A modern Monero transaction library" description = "A modern Monero transaction library"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -32,11 +32,6 @@ monero-bulletproofs = { path = "ringct/bulletproofs", version = "0.1", default-f
hex-literal = "0.4" hex-literal = "0.4"
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["std", "derive"] }
serde_json = { version = "1", default-features = false, features = ["std"] }
[features] [features]
std = [ std = [
"std-shims/std", "std-shims/std",
@@ -53,4 +48,5 @@ std = [
] ]
compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"] compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"]
multisig = ["monero-clsag/multisig", "std"]
default = ["std", "compile-time-generators"] default = ["std", "compile-time-generators"]

View File

@@ -3,10 +3,9 @@ name = "monero-generators"
version = "0.4.0" version = "0.4.0"
description = "Monero's hash to point function and generators" description = "Monero's hash to point function and generators"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/generators" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true

View File

@@ -3,7 +3,7 @@
 #![deny(missing_docs)]
 #![cfg_attr(not(feature = "std"), no_std)]
 
-use std_shims::{sync::LazyLock, vec::Vec};
+use std_shims::{sync::OnceLock, vec::Vec};
 
 use sha3::{Digest, Keccak256};
@@ -21,30 +21,33 @@
   Keccak256::digest(data).into()
 }
 
+static H_CELL: OnceLock<EdwardsPoint> = OnceLock::new();
 /// Monero's `H` generator.
 ///
 /// Contrary to convention (`G` for values, `H` for randomness), `H` is used by Monero for amounts
 /// within Pedersen commitments.
 #[allow(non_snake_case)]
-pub static H: LazyLock<EdwardsPoint> = LazyLock::new(|| {
-  decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
-    .unwrap()
-    .mul_by_cofactor()
-});
+pub fn H() -> EdwardsPoint {
+  *H_CELL.get_or_init(|| {
+    decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
+      .unwrap()
+      .mul_by_cofactor()
+  })
+}
 
-static H_POW_2_CELL: LazyLock<[EdwardsPoint; 64]> = LazyLock::new(|| {
-  let mut res = [*H; 64];
-  for i in 1 .. 64 {
-    res[i] = res[i - 1] + res[i - 1];
-  }
-  res
-});
+static H_POW_2_CELL: OnceLock<[EdwardsPoint; 64]> = OnceLock::new();
 /// Monero's `H` generator, multiplied by 2**i for i in 1 ..= 64.
 ///
 /// This table is useful when working with amounts, which are u64s.
 #[allow(non_snake_case)]
 pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
-  &H_POW_2_CELL
+  H_POW_2_CELL.get_or_init(|| {
+    let mut res = [H(); 64];
+    for i in 1 .. 64 {
+      res[i] = res[i - 1] + res[i - 1];
+    }
+    res
+  })
 }
@@ -71,7 +74,7 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
   // The maximum amount of bits used within a single range proof.
   const MAX_MN: usize = MAX_COMMITMENTS * COMMITMENT_BITS;
 
-  let mut preimage = H.compress().to_bytes().to_vec();
+  let mut preimage = H().compress().to_bytes().to_vec();
   preimage.extend(dst);
 
   let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };

View File

@@ -3,10 +3,9 @@ name = "monero-io"
version = "0.1.0" version = "0.1.0"
description = "Serialization functions, as within the Monero protocol" description = "Serialization functions, as within the Monero protocol"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/io" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/io"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true

View File

@@ -3,10 +3,10 @@ name = "monero-primitives"
version = "0.1.0" version = "0.1.0"
description = "Primitives for the Monero protocol" description = "Primitives for the Monero protocol"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/primitives" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/primitives"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true

View File

@@ -5,7 +5,7 @@
 use std_shims::{io, vec::Vec};
 #[cfg(feature = "std")]
-use std_shims::sync::LazyLock;
+use std_shims::sync::OnceLock;
 
 use zeroize::{Zeroize, ZeroizeOnDrop};
@@ -28,15 +28,15 @@ mod tests;
 // On std, we cache some variables in statics.
 #[cfg(feature = "std")]
-static INV_EIGHT_CELL: LazyLock<Scalar> = LazyLock::new(|| Scalar::from(8u8).invert());
-/// The inverse of 8 over l, the prime factor of the order of Ed25519.
+static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
+/// The inverse of 8 over l.
 #[cfg(feature = "std")]
 #[allow(non_snake_case)]
 pub fn INV_EIGHT() -> Scalar {
-  *INV_EIGHT_CELL
+  *INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
 }
 // In no-std environments, we prefer the reduced memory use and calculate it ad-hoc.
-/// The inverse of 8 over l, the prime factor of the order of Ed25519.
+/// The inverse of 8 over l.
 #[cfg(not(feature = "std"))]
 #[allow(non_snake_case)]
 pub fn INV_EIGHT() -> Scalar {
@@ -44,13 +44,12 @@ pub fn INV_EIGHT() -> Scalar {
 }
 
 #[cfg(feature = "std")]
-static G_PRECOMP_CELL: LazyLock<VartimeEdwardsPrecomputation> =
-  LazyLock::new(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]));
+static G_PRECOMP_CELL: OnceLock<VartimeEdwardsPrecomputation> = OnceLock::new();
 /// A cached (if std) pre-computation of the Ed25519 generator, G.
 #[cfg(feature = "std")]
 #[allow(non_snake_case)]
 pub fn G_PRECOMP() -> &'static VartimeEdwardsPrecomputation {
-  &G_PRECOMP_CELL
+  G_PRECOMP_CELL.get_or_init(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]))
 }
 /// A cached (if std) pre-computation of the Ed25519 generator, G.
 #[cfg(not(feature = "std"))]
@@ -106,55 +105,18 @@ impl Commitment {
   /// Calculate the Pedersen commitment, as a point, from this transparent structure.
   pub fn calculate(&self) -> EdwardsPoint {
-    EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H, &self.mask)
+    EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H(), &self.mask)
   }
-
-  /// Write the Commitment.
-  ///
-  /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
-  /// defined serialization.
-  pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
-    w.write_all(&self.mask.to_bytes())?;
-    w.write_all(&self.amount.to_le_bytes())
-  }
-
-  /// Serialize the Commitment to a `Vec<u8>`.
-  ///
-  /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
-  /// defined serialization.
-  pub fn serialize(&self) -> Vec<u8> {
-    let mut res = Vec::with_capacity(32 + 8);
-    self.write(&mut res).unwrap();
-    res
-  }
-
-  /// Read a Commitment.
-  ///
-  /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
-  /// defined serialization.
-  pub fn read<R: io::Read>(r: &mut R) -> io::Result<Commitment> {
-    Ok(Commitment::new(read_scalar(r)?, read_u64(r)?))
-  }
 }
 
 /// Decoy data, as used for producing Monero's ring signatures.
-#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
 pub struct Decoys {
   offsets: Vec<u64>,
   signer_index: u8,
   ring: Vec<[EdwardsPoint; 2]>,
 }
 
-impl core::fmt::Debug for Decoys {
-  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
-    fmt
-      .debug_struct("Decoys")
-      .field("offsets", &self.offsets)
-      .field("ring", &self.ring)
-      .finish_non_exhaustive()
-  }
-}
-
 #[allow(clippy::len_without_is_empty)]
 impl Decoys {
   /// Create a new instance of decoy data.

View File

@@ -1,6 +1,6 @@
 use core::cmp::Ordering;
 
 use std_shims::{
-  sync::LazyLock,
+  sync::OnceLock,
   io::{self, *},
 };
@@ -10,14 +10,18 @@ use curve25519_dalek::scalar::Scalar;
 use monero_io::*;
 
+static PRECOMPUTED_SCALARS_CELL: OnceLock<[Scalar; 8]> = OnceLock::new();
 // Precomputed scalars used to recover an incorrectly reduced scalar.
-static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| {
-  let mut precomputed_scalars = [Scalar::ONE; 8];
-  for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
-    *scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap());
-  }
-  precomputed_scalars
-});
+#[allow(non_snake_case)]
+fn PRECOMPUTED_SCALARS() -> [Scalar; 8] {
+  *PRECOMPUTED_SCALARS_CELL.get_or_init(|| {
+    let mut precomputed_scalars = [Scalar::ONE; 8];
+    for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
+      *scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap());
+    }
+    precomputed_scalars
+  })
+}
 
 /// An unreduced scalar.
 ///
@@ -123,12 +127,14 @@ impl UnreducedScalar {
       return Scalar::from_bytes_mod_order(self.0);
     }
 
+    let precomputed_scalars = PRECOMPUTED_SCALARS();
     let mut recovered = Scalar::ZERO;
     for &numb in self.non_adjacent_form().iter().rev() {
      recovered += recovered;
       match numb.cmp(&0) {
-        Ordering::Greater => recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).unwrap() / 2],
-        Ordering::Less => recovered -= PRECOMPUTED_SCALARS[usize::try_from(-numb).unwrap() / 2],
+        Ordering::Greater => recovered += precomputed_scalars[usize::try_from(numb).unwrap() / 2],
+        Ordering::Less => recovered -= precomputed_scalars[usize::try_from(-numb).unwrap() / 2],
         Ordering::Equal => (),
       }
     }

View File

@@ -3,10 +3,10 @@ name = "monero-borromean"
version = "0.1.0" version = "0.1.0"
description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol" description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/borromean" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/borromean"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true

View File

@@ -3,10 +3,10 @@ name = "monero-bulletproofs"
version = "0.1.0" version = "0.1.0"
description = "Bulletproofs(+) range proofs, as defined by the Monero protocol" description = "Bulletproofs(+) range proofs, as defined by the Monero protocol"
license = "MIT" license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/networks/monero/ringct/bulletproofs" repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/bulletproofs"
authors = ["Luke Parker <lukeparker5132@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021" edition = "2021"
rust-version = "1.80" rust-version = "1.79"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -18,10 +18,11 @@ workspace = true
[dependencies] [dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false } std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "2", default-features = false } thiserror = { version = "1", default-features = false, optional = true }
rand_core = { version = "0.6", default-features = false } rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] } zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
# Cryptographic dependencies # Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] } curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
@@ -42,10 +43,11 @@ hex-literal = "0.4"
std = [ std = [
"std-shims/std", "std-shims/std",
"thiserror/std", "thiserror",
"rand_core/std", "rand_core/std",
"zeroize/std", "zeroize/std",
"subtle/std",
"monero-io/std", "monero-io/std",
"monero-generators/std", "monero-generators/std",

View File

@@ -40,14 +40,17 @@ fn generators(prefix: &'static str, path: &str) {
     .write_all(
       format!(
         "
-pub(crate) static GENERATORS: LazyLock<Generators> = LazyLock::new(|| Generators {{
-  G: std_shims::vec![
-    {G_str}
-  ],
-  H: std_shims::vec![
-    {H_str}
-  ],
-}});
+static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
+pub(crate) fn GENERATORS() -> &'static Generators {{
+  GENERATORS_CELL.get_or_init(|| Generators {{
+    G: std_shims::vec![
+      {G_str}
+    ],
+    H: std_shims::vec![
+      {H_str}
+    ],
+  }})
+}}
         ",
       )
       .as_bytes(),
@@ -64,9 +67,12 @@ fn generators(prefix: &'static str, path: &str) {
     .write_all(
       format!(
         r#"
-pub(crate) static GENERATORS: LazyLock<Generators> = LazyLock::new(|| {{
-  monero_generators::bulletproofs_generators(b"{prefix}")
-}});
+static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
+pub(crate) fn GENERATORS() -> &'static Generators {{
+  GENERATORS_CELL.get_or_init(|| {{
+    monero_generators::bulletproofs_generators(b"{prefix}")
+  }})
+}}
         "#,
       )
      .as_bytes(),

View File

@@ -7,7 +7,7 @@ use curve25519_dalek::{
   edwards::EdwardsPoint,
 };
 
-use monero_generators::{H as MONERO_H, Generators};
+use monero_generators::{H, Generators};
 
 use crate::{original, plus};
@@ -57,7 +57,7 @@ pub(crate) struct BulletproofsBatchVerifier(pub(crate) InternalBatchVerifier);
 impl BulletproofsBatchVerifier {
   #[must_use]
   pub(crate) fn verify(self) -> bool {
-    self.0.verify(ED25519_BASEPOINT_POINT, *MONERO_H, &original::GENERATORS)
+    self.0.verify(ED25519_BASEPOINT_POINT, H(), original::GENERATORS())
   }
 }
@@ -68,7 +68,7 @@ impl BulletproofsPlusBatchVerifier {
   pub(crate) fn verify(self) -> bool {
     // Bulletproofs+ is written as per the paper, with G for the value and H for the mask
     // Monero uses H for the value and G for the mask
-    self.0.verify(*MONERO_H, ED25519_BASEPOINT_POINT, &plus::GENERATORS)
+    self.0.verify(H(), ED25519_BASEPOINT_POINT, plus::GENERATORS())
   }
 }

View File

@@ -20,8 +20,6 @@ pub use monero_generators::MAX_COMMITMENTS;
 use monero_primitives::Commitment;
 
 pub(crate) mod scalar_vector;
-pub(crate) mod point_vector;
 pub(crate) mod core;
 use crate::core::LOG_COMMITMENT_BITS;
@@ -30,28 +28,23 @@ use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};
 pub use batch_verifier::BatchVerifier;
 
 pub(crate) mod original;
-use crate::original::{
-  IpProof, AggregateRangeStatement as OriginalStatement, AggregateRangeWitness as OriginalWitness,
-  AggregateRangeProof as OriginalProof,
-};
+use crate::original::OriginalStruct;
 pub(crate) mod plus;
-use crate::plus::{
-  WipProof, AggregateRangeStatement as PlusStatement, AggregateRangeWitness as PlusWitness,
-  AggregateRangeProof as PlusProof,
-};
+use crate::plus::*;
 
 #[cfg(test)]
 mod tests;
 
 /// An error from proving/verifying Bulletproofs(+).
-#[derive(Clone, Copy, PartialEq, Eq, Debug, thiserror::Error)]
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "std", derive(thiserror::Error))]
 pub enum BulletproofError {
   /// Proving/verifying a Bulletproof(+) range proof with no commitments.
-  #[error("no commitments to prove the range for")]
+  #[cfg_attr(feature = "std", error("no commitments to prove the range for"))]
   NoCommitments,
   /// Proving/verifying a Bulletproof(+) range proof with more commitments than supported.
-  #[error("too many commitments to prove the range for")]
+  #[cfg_attr(feature = "std", error("too many commitments to prove the range for"))]
   TooManyCommitments,
 }
@@ -62,9 +55,9 @@ pub enum BulletproofError {
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub enum Bulletproof {
   /// A Bulletproof.
-  Original(OriginalProof),
+  Original(OriginalStruct),
   /// A Bulletproof+.
-  Plus(PlusProof),
+  Plus(AggregateRangeProof),
 }
 
 impl Bulletproof {
@@ -107,7 +100,7 @@ impl Bulletproof {
   /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
   pub fn prove<R: RngCore + CryptoRng>(
     rng: &mut R,
-    outputs: Vec<Commitment>,
+    outputs: &[Commitment],
   ) -> Result<Bulletproof, BulletproofError> {
     if outputs.is_empty() {
       Err(BulletproofError::NoCommitments)?;
@@ -115,13 +108,7 @@ impl Bulletproof {
     if outputs.len() > MAX_COMMITMENTS {
       Err(BulletproofError::TooManyCommitments)?;
     }
-    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
-    Ok(Bulletproof::Original(
-      OriginalStatement::new(&commitments)
-        .unwrap()
-        .prove(rng, OriginalWitness::new(outputs).unwrap())
-        .unwrap(),
-    ))
+    Ok(Bulletproof::Original(OriginalStruct::prove(rng, outputs)))
   }
 
   /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof+.
@@ -135,11 +122,10 @@ impl Bulletproof {
     if outputs.len() > MAX_COMMITMENTS {
       Err(BulletproofError::TooManyCommitments)?;
     }
-    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
     Ok(Bulletproof::Plus(
-      PlusStatement::new(&commitments)
+      AggregateRangeStatement::new(outputs.iter().map(Commitment::calculate).collect())
         .unwrap()
-        .prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
+        .prove(rng, &Zeroizing::new(AggregateRangeWitness::new(outputs).unwrap()))
         .unwrap(),
     ))
   }
@@ -150,17 +136,14 @@ impl Bulletproof {
     match self {
       Bulletproof::Original(bp) => {
         let mut verifier = BulletproofsBatchVerifier::default();
-        let Some(statement) = OriginalStatement::new(commitments) else {
-          return false;
-        };
-        if !statement.verify(rng, &mut verifier, bp.clone()) {
+        if !bp.verify(rng, &mut verifier, commitments) {
           return false;
         }
         verifier.verify()
       }
       Bulletproof::Plus(bp) => {
         let mut verifier = BulletproofsPlusBatchVerifier::default();
-        let Some(statement) = PlusStatement::new(commitments) else {
+        let Some(statement) = AggregateRangeStatement::new(commitments.to_vec()) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, bp.clone()) {
@@ -187,14 +170,9 @@
     commitments: &[EdwardsPoint],
   ) -> bool {
     match self {
-      Bulletproof::Original(bp) => {
-        let Some(statement) = OriginalStatement::new(commitments) else {
-          return false;
-        };
-        statement.verify(rng, &mut verifier.original, bp.clone())
-      }
+      Bulletproof::Original(bp) => bp.verify(rng, &mut verifier.original, commitments),
       Bulletproof::Plus(bp) => {
-        let Some(statement) = PlusStatement::new(commitments) else {
+        let Some(statement) = AggregateRangeStatement::new(commitments.to_vec()) else {
           return false;
         };
         statement.verify(rng, &mut verifier.plus, bp.clone())
@@ -215,11 +193,11 @@ impl Bulletproof {
         write_point(&bp.T2, w)?;
         write_scalar(&bp.tau_x, w)?;
         write_scalar(&bp.mu, w)?;
-        specific_write_vec(&bp.ip.L, w)?;
-        specific_write_vec(&bp.ip.R, w)?;
-        write_scalar(&bp.ip.a, w)?;
-        write_scalar(&bp.ip.b, w)?;
-        write_scalar(&bp.t_hat, w)
+        specific_write_vec(&bp.L, w)?;
+        specific_write_vec(&bp.R, w)?;
+        write_scalar(&bp.a, w)?;
+        write_scalar(&bp.b, w)?;
+        write_scalar(&bp.t, w)
       }
       Bulletproof::Plus(bp) => {
@@ -256,26 +234,24 @@ impl Bulletproof {
   /// Read a Bulletproof.
   pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
-    Ok(Bulletproof::Original(OriginalProof {
+    Ok(Bulletproof::Original(OriginalStruct {
       A: read_point(r)?,
       S: read_point(r)?,
       T1: read_point(r)?,
       T2: read_point(r)?,
       tau_x: read_scalar(r)?,
       mu: read_scalar(r)?,
-      ip: IpProof {
-        L: read_vec(read_point, r)?,
-        R: read_vec(read_point, r)?,
-        a: read_scalar(r)?,
-        b: read_scalar(r)?,
-      },
-      t_hat: read_scalar(r)?,
+      L: read_vec(read_point, r)?,
+      R: read_vec(read_point, r)?,
+      a: read_scalar(r)?,
+      b: read_scalar(r)?,
+      t: read_scalar(r)?,
     }))
   }
 
   /// Read a Bulletproof+.
   pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
-    Ok(Bulletproof::Plus(PlusProof {
+    Ok(Bulletproof::Plus(AggregateRangeProof {
       A: read_point(r)?,
       wip: WipProof {
         A: read_point(r)?,

View File

@@ -0,0 +1,395 @@
use std_shims::{vec, vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use subtle::{Choice, ConditionallySelectable};
use curve25519_dalek::{
constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
scalar::Scalar,
edwards::EdwardsPoint,
};
use monero_generators::{H, Generators};
use monero_primitives::{INV_EIGHT, Commitment, keccak256_to_scalar};
use crate::{core::*, ScalarVector, batch_verifier::BulletproofsBatchVerifier};
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
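// TWO_N below is the vector (2^0, 2^1, .., 2^63) and IP12 is its sum <1^n, 2^n> = 2^64 - 1,
// constants reused when evaluating the range proof's polynomial identity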
static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
fn TWO_N() -> &'static ScalarVector {
TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS))
}
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
fn IP12() -> Scalar {
*IP12_CELL.get_or_init(|| ScalarVector(vec![Scalar::ONE; COMMITMENT_BITS]).inner_product(TWO_N()))
}
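// Calculate (log2(MN), M, MN) for the given amount of outputs, where M is the padded amount
// of commitments (the smallest power of two >= outputs, capped at MAX_COMMITMENTS) and N is
// COMMITMENT_BITS (64); three outputs, for example, pad to (8, 4, 256)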
fn MN(outputs: usize) -> (usize, usize, usize) {
let mut logM = 0;
let mut M;
while {
M = 1 << logM;
(M <= MAX_COMMITMENTS) && (M < outputs)
} {
logM += 1;
}
(logM + LOG_COMMITMENT_BITS, M, M * COMMITMENT_BITS)
}
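// Decompose the amounts into aL, the little-endian bits of each amount, and aR = aL - 1, so
// that aL and aR multiply element-wise to zero and <aL, TWO_N> reopens each amount (the
// relations the range proof proves), selecting the bit values in constant time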
fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
let (_, M, MN) = MN(commitments.len());
let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
let mut aL = ScalarVector::new(MN);
let mut aR = ScalarVector::new(MN);
for j in 0 .. M {
for i in (0 .. COMMITMENT_BITS).rev() {
let bit =
if j < sv.len() { Choice::from((sv[j][i / 8] >> (i % 8)) & 1) } else { Choice::from(0) };
aL.0[(j * COMMITMENT_BITS) + i] =
Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
aR.0[(j * COMMITMENT_BITS) + i] =
Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
}
}
(aL, aR)
}
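// Transcript the commitments, multiplied by the inverse of eight. The commitments themselves
// aren't transmitted in this form, yet the other proof elements are (so their cofactors may
// be cleared), and the transcript follows that convention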
fn hash_commitments<C: IntoIterator<Item = EdwardsPoint>>(
commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
let V = commitments.into_iter().map(|c| c * INV_EIGHT()).collect::<Vec<_>>();
(keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}
fn alpha_rho<R: RngCore + CryptoRng>(
rng: &mut R,
generators: &Generators,
aL: &ScalarVector,
aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
fn vector_exponent(generators: &Generators, a: &ScalarVector, b: &ScalarVector) -> EdwardsPoint {
debug_assert_eq!(a.len(), b.len());
(a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}
let ar = Scalar::random(rng);
(ar, (vector_exponent(generators, aL, aR) + (ED25519_BASEPOINT_TABLE * &ar)) * INV_EIGHT())
}
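// Build the multiexp terms for one of the L/R cross-terms within the inner-product argument:
// one half of a against a half of the G generators, one half of b against a half of the H
// generators, and the cross inner product cL (or cR) against U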
fn LR_statements(
a: &ScalarVector,
G_i: &[EdwardsPoint],
b: &ScalarVector,
H_i: &[EdwardsPoint],
cL: Scalar,
U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
let mut res = a
.0
.iter()
.copied()
.zip(G_i.iter().copied())
.chain(b.0.iter().copied().zip(H_i.iter().copied()))
.collect::<Vec<_>>();
res.push((cL, U));
res
}
fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
let slice =
&[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
.concat();
*cache = keccak256_to_scalar(slice);
*cache
}
fn hadamard_fold(
l: &[EdwardsPoint],
r: &[EdwardsPoint],
a: Scalar,
b: Scalar,
) -> Vec<EdwardsPoint> {
let mut res = Vec::with_capacity(l.len() / 2);
for i in 0 .. l.len() {
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
}
res
}
/// Internal structure representing a Bulletproof, as defined by Monero.
#[doc(hidden)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
pub(crate) A: EdwardsPoint,
pub(crate) S: EdwardsPoint,
pub(crate) T1: EdwardsPoint,
pub(crate) T2: EdwardsPoint,
pub(crate) tau_x: Scalar,
pub(crate) mu: Scalar,
pub(crate) L: Vec<EdwardsPoint>,
pub(crate) R: Vec<EdwardsPoint>,
pub(crate) a: Scalar,
pub(crate) b: Scalar,
pub(crate) t: Scalar,
}
impl OriginalStruct {
pub(crate) fn prove<R: RngCore + CryptoRng>(
rng: &mut R,
commitments: &[Commitment],
) -> OriginalStruct {
let (logMN, M, MN) = MN(commitments.len());
let (aL, aR) = bit_decompose(commitments);
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
let (mut cache, _) = hash_commitments(commitments_points.clone());
let (sL, sR) =
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
let generators = GENERATORS();
let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
let mut cache = keccak256_to_scalar(y.to_bytes());
let z = cache;
let l0 = aL - z;
let l1 = sL;
let mut zero_twos = Vec::with_capacity(MN);
let zpow = ScalarVector::powers(z, M + 2);
for j in 0 .. M {
for i in 0 .. COMMITMENT_BITS {
zero_twos.push(zpow[j + 2] * TWO_N()[i]);
}
}
let yMN = ScalarVector::powers(y, MN);
let r0 = ((aR + z) * &yMN) + &ScalarVector(zero_twos);
let r1 = yMN * &sR;
let (T1, T2, x, mut tau_x) = {
let t1 = l0.clone().inner_product(&r1) + r0.clone().inner_product(&l1);
let t2 = l1.clone().inner_product(&r1);
let mut tau1 = Scalar::random(&mut *rng);
let mut tau2 = Scalar::random(&mut *rng);
let T1 = multiexp(&[(t1, H()), (tau1, ED25519_BASEPOINT_POINT)]) * INV_EIGHT();
let T2 = multiexp(&[(t2, H()), (tau2, ED25519_BASEPOINT_POINT)]) * INV_EIGHT();
let x =
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
let tau_x = (tau2 * (x * x)) + (tau1 * x);
tau1.zeroize();
tau2.zeroize();
(T1, T2, x, tau_x)
};
let mu = (x * rho) + alpha;
alpha.zeroize();
rho.zeroize();
for (i, gamma) in commitments.iter().map(|c| c.mask).enumerate() {
tau_x += zpow[i + 2] * gamma;
}
let l = l0 + &(l1 * x);
let r = r0 + &(r1 * x);
let t = l.clone().inner_product(&r);
let x_ip =
hash_cache(&mut cache, &[x.to_bytes(), tau_x.to_bytes(), mu.to_bytes(), t.to_bytes()]);
let mut a = l;
let mut b = r;
let yinv = y.invert();
let yinvpow = ScalarVector::powers(yinv, MN);
let mut G_proof = generators.G[.. a.len()].to_vec();
let mut H_proof = generators.H[.. a.len()].to_vec();
H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
let U = H() * x_ip;
let mut L = Vec::with_capacity(logMN);
let mut R = Vec::with_capacity(logMN);
while a.len() != 1 {
let (aL, aR) = a.split();
let (bL, bR) = b.split();
let cL = aL.clone().inner_product(&bR);
let cR = aR.clone().inner_product(&bL);
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
let L_i = multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U)) * INV_EIGHT();
let R_i = multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U)) * INV_EIGHT();
L.push(L_i);
R.push(R_i);
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
let w_inv = w.invert();
a = (aL * w) + &(aR * w_inv);
b = (bL * w_inv) + &(bR * w);
if a.len() != 1 {
G_proof = hadamard_fold(G_L, G_R, w_inv, w);
H_proof = hadamard_fold(H_L, H_R, w, w_inv);
}
}
let res = OriginalStruct { A, S, T1, T2, tau_x, mu, L, R, a: a[0], b: b[0], t };
#[cfg(debug_assertions)]
{
let mut verifier = BulletproofsBatchVerifier::default();
debug_assert!(res.verify(rng, &mut verifier, &commitments_points));
debug_assert!(verifier.verify());
}
res
}
#[must_use]
pub(crate) fn verify<R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BulletproofsBatchVerifier,
commitments: &[EdwardsPoint],
) -> bool {
// Verify commitments are valid
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
return false;
}
// Verify L and R are properly sized
if self.L.len() != self.R.len() {
return false;
}
let (logMN, M, MN) = MN(commitments.len());
if self.L.len() != logMN {
return false;
}
// Rebuild all challenges
let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);
let z = keccak256_to_scalar(y.to_bytes());
cache = z;
let x = hash_cache(
&mut cache,
&[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
);
let x_ip = hash_cache(
&mut cache,
&[x.to_bytes(), self.tau_x.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
);
let mut w_and_w_inv = Vec::with_capacity(logMN);
for (L, R) in self.L.iter().zip(&self.R) {
let w = hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]);
let w_inv = w.invert();
w_and_w_inv.push((w, w_inv));
}
// Convert the proof from * INV_EIGHT to its actual form
let normalize = |point: &EdwardsPoint| point.mul_by_cofactor();
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
let T1 = normalize(&self.T1);
let T2 = normalize(&self.T2);
let A = normalize(&self.A);
let S = normalize(&self.S);
let commitments = commitments.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
// Verify it
let zpow = ScalarVector::powers(z, M + 3);
// First multiexp
{
let verifier_weight = Scalar::random(rng);
let ip1y = ScalarVector::powers(y, M * COMMITMENT_BITS).sum();
let mut k = -(zpow[2] * ip1y);
for j in 1 ..= M {
k -= zpow[j + 2] * IP12();
}
let y1 = self.t - ((z * ip1y) + k);
verifier.0.h -= verifier_weight * y1;
verifier.0.g -= verifier_weight * self.tau_x;
for (j, commitment) in commitments.iter().enumerate() {
verifier.0.other.push((verifier_weight * zpow[j + 2], *commitment));
}
verifier.0.other.push((verifier_weight * x, T1));
verifier.0.other.push((verifier_weight * (x * x), T2));
}
// Second multiexp
{
let verifier_weight = Scalar::random(rng);
let z3 = (self.t - (self.a * self.b)) * x_ip;
verifier.0.h += verifier_weight * z3;
verifier.0.g -= verifier_weight * self.mu;
verifier.0.other.push((verifier_weight, A));
verifier.0.other.push((verifier_weight * x, S));
{
let ypow = ScalarVector::powers(y, MN);
let yinv = y.invert();
let yinvpow = ScalarVector::powers(yinv, MN);
let w_cache = challenge_products(&w_and_w_inv);
while verifier.0.g_bold.len() < MN {
verifier.0.g_bold.push(Scalar::ZERO);
}
while verifier.0.h_bold.len() < MN {
verifier.0.h_bold.push(Scalar::ZERO);
}
for i in 0 .. MN {
let g = (self.a * w_cache[i]) + z;
verifier.0.g_bold[i] -= verifier_weight * g;
let mut h = self.b * yinvpow[i] * w_cache[(!i) & (MN - 1)];
h -= ((zpow[(i / COMMITMENT_BITS) + 2] * TWO_N()[i % COMMITMENT_BITS]) + (z * ypow[i])) *
yinvpow[i];
verifier.0.h_bold[i] -= verifier_weight * h;
}
}
for i in 0 .. logMN {
verifier.0.other.push((verifier_weight * (w_and_w_inv[i].0 * w_and_w_inv[i].0), L[i]));
verifier.0.other.push((verifier_weight * (w_and_w_inv[i].1 * w_and_w_inv[i].1), R[i]));
}
}
true
}
}
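As an editorial note on the code above (not part of this diff): the first multiexp within `verify` batches the standard Bulletproofs polynomial-commitment identity, with Monero's `H` serving as the value generator and the basepoint `G` as the blinding generator, while the second batches the folded inner-product argument. Reading the weights off the code, the first check asserts

$$
t \cdot H + \tau_x \cdot G
  = \sum_{j=0}^{M-1} z^{j+2} \cdot V_j + \delta(y, z) \cdot H + x \cdot T_1 + x^2 \cdot T_2,
$$

$$
\delta(y, z)
  = (z - z^2) \cdot \langle \mathbf{1}^{MN}, \mathbf{y}^{MN} \rangle
    - \sum_{j=1}^{M} z^{j+2} \cdot \langle \mathbf{1}^{N}, \mathbf{2}^{N} \rangle,
$$

where `ip1y` and `k` together compute $\delta(y, z)$ and `IP12` is $\langle \mathbf{1}^{N}, \mathbf{2}^{N} \rangle = 2^{64} - 1$.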

View File

@@ -20,9 +20,15 @@ use crate::{
 // Figure 3 of the Bulletproofs+ Paper
 #[derive(Clone, Debug)]
-pub(crate) struct AggregateRangeStatement<'a> {
+pub(crate) struct AggregateRangeStatement {
   generators: BpPlusGenerators,
-  V: &'a [EdwardsPoint],
+  V: Vec<EdwardsPoint>,
+}
+
+impl Zeroize for AggregateRangeStatement {
+  fn zeroize(&mut self) {
+    self.V.zeroize();
+  }
 }
 
 #[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
@@ -55,8 +61,8 @@ struct AHatComputation {
   A_hat: EdwardsPoint,
 }
 
-impl<'a> AggregateRangeStatement<'a> {
-  pub(crate) fn new(V: &'a [EdwardsPoint]) -> Option<Self> {
+impl AggregateRangeStatement {
+  pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
     if V.is_empty() || (V.len() > MAX_COMMITMENTS) {
       return None;
     }
@@ -174,7 +180,7 @@ impl<'a> AggregateRangeStatement<'a> {
     // Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
     // clearing its cofactor without mutating the value
     // For some reason, these values are transcripted * INV_EIGHT, not as transmitted
-    let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
+    let V = V.into_iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
     let mut transcript = initial_transcript(V.iter());
     let mut V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
@@ -242,7 +248,7 @@ impl AggregateRangeStatement {
   ) -> bool {
     let Self { generators, V } = self;
 
-    let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
+    let V = V.into_iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
     let mut transcript = initial_transcript(V.iter());
     let V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();

Some files were not shown because too many files have changed in this diff.