mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-12 14:09:25 +00:00
Compare commits
85 Commits
testnet-2
...
ee10692b23
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ee10692b23 | ||
|
|
7a68b065e0 | ||
|
|
3ddf1eec0c | ||
|
|
84f0e6c26e | ||
|
|
5bb3256d1f | ||
|
|
774424b70b | ||
|
|
ed662568e2 | ||
|
|
b744ac9a76 | ||
|
|
d7f7f69738 | ||
|
|
a2c3aba82b | ||
|
|
703c6a2358 | ||
|
|
52bb918cc9 | ||
|
|
ba244e8090 | ||
|
|
3e99d68cfe | ||
|
|
4d9c2df38c | ||
|
|
8ab6f9c36e | ||
|
|
253cf3253d | ||
|
|
03445b3020 | ||
|
|
9af111b4aa | ||
|
|
41ce5b1738 | ||
|
|
2a05cf3225 | ||
|
|
f4147c39b2 | ||
|
|
cd69f3b9d6 | ||
|
|
1d2beb3ee4 | ||
|
|
ac709b2945 | ||
|
|
a473800c26 | ||
|
|
09aac20293 | ||
|
|
f93214012d | ||
|
|
400319cd29 | ||
|
|
a0a7d63dad | ||
|
|
fb7d12ee6e | ||
|
|
11ec9e3535 | ||
|
|
ae8a27b876 | ||
|
|
af79586488 | ||
|
|
d27d93480a | ||
|
|
02c4417a46 | ||
|
|
79a79db399 | ||
|
|
0c9dd5048e | ||
|
|
5501de1f3a | ||
|
|
21123590bb | ||
|
|
bc1dec7991 | ||
|
|
cef63a631a | ||
|
|
d57fef8999 | ||
|
|
d1474e9188 | ||
|
|
b39c751403 | ||
|
|
cc7202e0bf | ||
|
|
19e68f7f75 | ||
|
|
d94c9a4a5e | ||
|
|
43dc036660 | ||
|
|
95591218bb | ||
|
|
7dd587a864 | ||
|
|
023275bcb6 | ||
|
|
8cef9eff6f | ||
|
|
b5e22dca8f | ||
|
|
a41329c027 | ||
|
|
a25e6330bd | ||
|
|
558a2bfa46 | ||
|
|
c73acb3d62 | ||
|
|
933b17aa91 | ||
|
|
5fa7e3d450 | ||
|
|
749d783b1e | ||
|
|
5a3ea80943 | ||
|
|
fddbebc7c0 | ||
|
|
e01848aa9e | ||
|
|
320b5627b5 | ||
|
|
be7780e69d | ||
|
|
0ddbaefb38 | ||
|
|
0f0db14f05 | ||
|
|
43083dfd49 | ||
|
|
523d2ac911 | ||
|
|
fd4f247917 | ||
|
|
ac9e356af4 | ||
|
|
bba7d2a356 | ||
|
|
4c349ae605 | ||
|
|
a4428761f7 | ||
|
|
940e9553fd | ||
|
|
593aefd229 | ||
|
|
5830c2463d | ||
|
|
bcc88c3e86 | ||
|
|
fea16df567 | ||
|
|
4960c3222e | ||
|
|
6b4df4f2c0 | ||
|
|
dac46c8d7d | ||
|
|
db2e8376df | ||
|
|
33dd412e67 |
2
.github/actions/bitcoin/action.yml
vendored
2
.github/actions/bitcoin/action.yml
vendored
@@ -5,7 +5,7 @@ inputs:
|
|||||||
version:
|
version:
|
||||||
description: "Version to download and run"
|
description: "Version to download and run"
|
||||||
required: false
|
required: false
|
||||||
default: 24.0.1
|
default: "27.0"
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
|
|||||||
6
.github/actions/test-dependencies/action.yml
vendored
6
.github/actions/test-dependencies/action.yml
vendored
@@ -10,7 +10,7 @@ inputs:
|
|||||||
bitcoin-version:
|
bitcoin-version:
|
||||||
description: "Bitcoin version to download and run as a regtest node"
|
description: "Bitcoin version to download and run as a regtest node"
|
||||||
required: false
|
required: false
|
||||||
default: 24.0.1
|
default: "27.0"
|
||||||
|
|
||||||
runs:
|
runs:
|
||||||
using: "composite"
|
using: "composite"
|
||||||
@@ -19,9 +19,9 @@ runs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Install Foundry
|
- name: Install Foundry
|
||||||
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
|
uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
|
||||||
with:
|
with:
|
||||||
version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
|
version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9
|
||||||
cache: false
|
cache: false
|
||||||
|
|
||||||
- name: Run a Monero Regtest Node
|
- name: Run a Monero Regtest Node
|
||||||
|
|||||||
2
.github/nightly-version
vendored
2
.github/nightly-version
vendored
@@ -1 +1 @@
|
|||||||
nightly-2024-02-07
|
nightly-2024-07-01
|
||||||
|
|||||||
18
.github/workflows/coins-tests.yml
vendored
18
.github/workflows/coins-tests.yml
vendored
@@ -30,6 +30,22 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||||
-p bitcoin-serai \
|
-p bitcoin-serai \
|
||||||
|
-p alloy-simple-request-transport \
|
||||||
-p ethereum-serai \
|
-p ethereum-serai \
|
||||||
|
-p serai-ethereum-relayer \
|
||||||
|
-p monero-io \
|
||||||
-p monero-generators \
|
-p monero-generators \
|
||||||
-p monero-serai
|
-p monero-primitives \
|
||||||
|
-p monero-mlsag \
|
||||||
|
-p monero-clsag \
|
||||||
|
-p monero-borromean \
|
||||||
|
-p monero-bulletproofs \
|
||||||
|
-p monero-serai \
|
||||||
|
-p monero-rpc \
|
||||||
|
-p monero-simple-request-rpc \
|
||||||
|
-p monero-address \
|
||||||
|
-p monero-wallet \
|
||||||
|
-p monero-seed \
|
||||||
|
-p polyseed \
|
||||||
|
-p monero-wallet-util \
|
||||||
|
-p monero-serai-verify-chain
|
||||||
|
|||||||
3
.github/workflows/common-tests.yml
vendored
3
.github/workflows/common-tests.yml
vendored
@@ -28,4 +28,5 @@ jobs:
|
|||||||
-p std-shims \
|
-p std-shims \
|
||||||
-p zalloc \
|
-p zalloc \
|
||||||
-p serai-db \
|
-p serai-db \
|
||||||
-p serai-env
|
-p serai-env \
|
||||||
|
-p simple-request
|
||||||
|
|||||||
2
.github/workflows/coordinator-tests.yml
vendored
2
.github/workflows/coordinator-tests.yml
vendored
@@ -37,4 +37,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run coordinator Docker tests
|
- name: Run coordinator Docker tests
|
||||||
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests
|
||||||
|
|||||||
2
.github/workflows/full-stack-tests.yml
vendored
2
.github/workflows/full-stack-tests.yml
vendored
@@ -19,4 +19,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run Full Stack Docker tests
|
- name: Run Full Stack Docker tests
|
||||||
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests
|
||||||
|
|||||||
2
.github/workflows/message-queue-tests.yml
vendored
2
.github/workflows/message-queue-tests.yml
vendored
@@ -33,4 +33,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run message-queue Docker tests
|
- name: Run message-queue Docker tests
|
||||||
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests
|
||||||
|
|||||||
31
.github/workflows/monero-tests.yaml
vendored
31
.github/workflows/monero-tests.yaml
vendored
@@ -26,7 +26,22 @@ jobs:
|
|||||||
uses: ./.github/actions/test-dependencies
|
uses: ./.github/actions/test-dependencies
|
||||||
|
|
||||||
- name: Run Unit Tests Without Features
|
- name: Run Unit Tests Without Features
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
run: |
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib
|
||||||
|
|
||||||
# Doesn't run unit tests with features as the tests workflow will
|
# Doesn't run unit tests with features as the tests workflow will
|
||||||
|
|
||||||
@@ -46,11 +61,17 @@ jobs:
|
|||||||
monero-version: ${{ matrix.version }}
|
monero-version: ${{ matrix.version }}
|
||||||
|
|
||||||
- name: Run Integration Tests Without Features
|
- name: Run Integration Tests Without Features
|
||||||
# Runs with the binaries feature so the binaries build
|
run: |
|
||||||
# https://github.com/rust-lang/cargo/issues/8396
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'
|
||||||
|
|
||||||
- name: Run Integration Tests
|
- name: Run Integration Tests
|
||||||
# Don't run if the the tests workflow also will
|
# Don't run if the the tests workflow also will
|
||||||
if: ${{ matrix.version != 'v0.18.2.0' }}
|
if: ${{ matrix.version != 'v0.18.2.0' }}
|
||||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
run: |
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
|
||||||
|
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'
|
||||||
|
|||||||
2
.github/workflows/no-std.yml
vendored
2
.github/workflows/no-std.yml
vendored
@@ -32,4 +32,4 @@ jobs:
|
|||||||
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
||||||
|
|
||||||
- name: Verify no-std builds
|
- name: Verify no-std builds
|
||||||
run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf
|
run: CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf -p serai-no-std-tests
|
||||||
|
|||||||
2
.github/workflows/processor-tests.yml
vendored
2
.github/workflows/processor-tests.yml
vendored
@@ -37,4 +37,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run processor Docker tests
|
- name: Run processor Docker tests
|
||||||
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests
|
||||||
|
|||||||
2
.github/workflows/reproducible-runtime.yml
vendored
2
.github/workflows/reproducible-runtime.yml
vendored
@@ -33,4 +33,4 @@ jobs:
|
|||||||
uses: ./.github/actions/build-dependencies
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
- name: Run Reproducible Runtime tests
|
- name: Run Reproducible Runtime tests
|
||||||
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests
|
||||||
|
|||||||
3
.github/workflows/tests.yml
vendored
3
.github/workflows/tests.yml
vendored
@@ -43,6 +43,7 @@ jobs:
|
|||||||
-p tendermint-machine \
|
-p tendermint-machine \
|
||||||
-p tributary-chain \
|
-p tributary-chain \
|
||||||
-p serai-coordinator \
|
-p serai-coordinator \
|
||||||
|
-p serai-orchestrator \
|
||||||
-p serai-docker-tests
|
-p serai-docker-tests
|
||||||
|
|
||||||
test-substrate:
|
test-substrate:
|
||||||
@@ -64,7 +65,9 @@ jobs:
|
|||||||
-p serai-validator-sets-pallet \
|
-p serai-validator-sets-pallet \
|
||||||
-p serai-in-instructions-primitives \
|
-p serai-in-instructions-primitives \
|
||||||
-p serai-in-instructions-pallet \
|
-p serai-in-instructions-pallet \
|
||||||
|
-p serai-signals-primitives \
|
||||||
-p serai-signals-pallet \
|
-p serai-signals-pallet \
|
||||||
|
-p serai-abi \
|
||||||
-p serai-runtime \
|
-p serai-runtime \
|
||||||
-p serai-node
|
-p serai-node
|
||||||
|
|
||||||
|
|||||||
2625
Cargo.lock
generated
2625
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
30
Cargo.toml
30
Cargo.toml
@@ -2,6 +2,8 @@
|
|||||||
resolver = "2"
|
resolver = "2"
|
||||||
members = [
|
members = [
|
||||||
# Version patches
|
# Version patches
|
||||||
|
"patches/parking_lot_core",
|
||||||
|
"patches/parking_lot",
|
||||||
"patches/zstd",
|
"patches/zstd",
|
||||||
"patches/rocksdb",
|
"patches/rocksdb",
|
||||||
"patches/proc-macro-crate",
|
"patches/proc-macro-crate",
|
||||||
@@ -36,9 +38,27 @@ members = [
|
|||||||
"crypto/schnorrkel",
|
"crypto/schnorrkel",
|
||||||
|
|
||||||
"coins/bitcoin",
|
"coins/bitcoin",
|
||||||
|
|
||||||
|
"coins/ethereum/alloy-simple-request-transport",
|
||||||
"coins/ethereum",
|
"coins/ethereum",
|
||||||
|
"coins/ethereum/relayer",
|
||||||
|
|
||||||
|
"coins/monero/io",
|
||||||
"coins/monero/generators",
|
"coins/monero/generators",
|
||||||
|
"coins/monero/primitives",
|
||||||
|
"coins/monero/ringct/mlsag",
|
||||||
|
"coins/monero/ringct/clsag",
|
||||||
|
"coins/monero/ringct/borromean",
|
||||||
|
"coins/monero/ringct/bulletproofs",
|
||||||
"coins/monero",
|
"coins/monero",
|
||||||
|
"coins/monero/rpc",
|
||||||
|
"coins/monero/rpc/simple-request",
|
||||||
|
"coins/monero/wallet/address",
|
||||||
|
"coins/monero/wallet",
|
||||||
|
"coins/monero/wallet/seed",
|
||||||
|
"coins/monero/wallet/polyseed",
|
||||||
|
"coins/monero/wallet/util",
|
||||||
|
"coins/monero/verify-chain",
|
||||||
|
|
||||||
"message-queue",
|
"message-queue",
|
||||||
|
|
||||||
@@ -54,12 +74,14 @@ members = [
|
|||||||
"substrate/coins/primitives",
|
"substrate/coins/primitives",
|
||||||
"substrate/coins/pallet",
|
"substrate/coins/pallet",
|
||||||
|
|
||||||
"substrate/in-instructions/primitives",
|
"substrate/dex/pallet",
|
||||||
"substrate/in-instructions/pallet",
|
|
||||||
|
|
||||||
"substrate/validator-sets/primitives",
|
"substrate/validator-sets/primitives",
|
||||||
"substrate/validator-sets/pallet",
|
"substrate/validator-sets/pallet",
|
||||||
|
|
||||||
|
"substrate/in-instructions/primitives",
|
||||||
|
"substrate/in-instructions/pallet",
|
||||||
|
|
||||||
"substrate/signals/primitives",
|
"substrate/signals/primitives",
|
||||||
"substrate/signals/pallet",
|
"substrate/signals/pallet",
|
||||||
|
|
||||||
@@ -109,8 +131,10 @@ panic = "unwind"
|
|||||||
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
||||||
|
|
||||||
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
|
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
|
||||||
dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
|
dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" }
|
||||||
|
|
||||||
|
parking_lot_core = { path = "patches/parking_lot_core" }
|
||||||
|
parking_lot = { path = "patches/parking_lot" }
|
||||||
# wasmtime pulls in an old version for this
|
# wasmtime pulls in an old version for this
|
||||||
zstd = { path = "patches/zstd" }
|
zstd = { path = "patches/zstd" }
|
||||||
# Needed for WAL compression
|
# Needed for WAL compression
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ license = "MIT"
|
|||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.74"
|
rust-version = "1.79"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -23,7 +23,7 @@ thiserror = { version = "1", default-features = false, optional = true }
|
|||||||
zeroize = { version = "^1.5", default-features = false }
|
zeroize = { version = "^1.5", default-features = false }
|
||||||
rand_core = { version = "0.6", default-features = false }
|
rand_core = { version = "0.6", default-features = false }
|
||||||
|
|
||||||
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
|
bitcoin = { version = "0.32", default-features = false }
|
||||||
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
||||||
|
|
||||||
@@ -36,7 +36,7 @@ serde_json = { version = "1", default-features = false, optional = true }
|
|||||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
|
secp256k1 = { version = "0.29", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
||||||
|
|
||||||
|
|||||||
@@ -195,13 +195,13 @@ impl Rpc {
|
|||||||
// If this was already successfully published, consider this having succeeded
|
// If this was already successfully published, consider this having succeeded
|
||||||
if let RpcError::RequestError(Error { code, .. }) = e {
|
if let RpcError::RequestError(Error { code, .. }) = e {
|
||||||
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
|
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
|
||||||
return Ok(tx.txid());
|
return Ok(tx.compute_txid());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(e)?
|
Err(e)?
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if txid != tx.txid() {
|
if txid != tx.compute_txid() {
|
||||||
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
|
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
|
||||||
}
|
}
|
||||||
Ok(txid)
|
Ok(txid)
|
||||||
@@ -215,7 +215,7 @@ impl Rpc {
|
|||||||
let tx: Transaction = encode::deserialize(&bytes)
|
let tx: Transaction = encode::deserialize(&bytes)
|
||||||
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
|
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
|
||||||
|
|
||||||
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
|
let mut tx_hash = *tx.compute_txid().as_raw_hash().as_byte_array();
|
||||||
tx_hash.reverse();
|
tx_hash.reverse();
|
||||||
if hash != &tx_hash {
|
if hash != &tx_hash {
|
||||||
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
|
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ fn test_algorithm() {
|
|||||||
.verify_schnorr(
|
.verify_schnorr(
|
||||||
&Signature::from_slice(&sig)
|
&Signature::from_slice(&sig)
|
||||||
.expect("couldn't convert produced signature to secp256k1::Signature"),
|
.expect("couldn't convert produced signature to secp256k1::Signature"),
|
||||||
&Message::from(Hash::hash(MESSAGE)),
|
&Message::from_digest_slice(Hash::hash(MESSAGE).as_ref()).unwrap(),
|
||||||
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ use std_shims::{
|
|||||||
io::{self, Write},
|
io::{self, Write},
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
use std_shims::io::Read;
|
use std::io::{Read, BufReader};
|
||||||
|
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
||||||
@@ -18,8 +18,8 @@ use frost::{
|
|||||||
};
|
};
|
||||||
|
|
||||||
use bitcoin::{
|
use bitcoin::{
|
||||||
consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
|
consensus::encode::serialize, key::TweakedPublicKey, OutPoint, ScriptBuf, TxOut, Transaction,
|
||||||
TxOut, Transaction, Block,
|
Block,
|
||||||
};
|
};
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
use bitcoin::consensus::encode::Decodable;
|
use bitcoin::consensus::encode::Decodable;
|
||||||
@@ -46,12 +46,12 @@ pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
|
|||||||
/// Return the Taproot address payload for a public key.
|
/// Return the Taproot address payload for a public key.
|
||||||
///
|
///
|
||||||
/// If the key is odd, this will return None.
|
/// If the key is odd, this will return None.
|
||||||
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
|
pub fn p2tr_script_buf(key: ProjectivePoint) -> Option<ScriptBuf> {
|
||||||
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
|
Some(ScriptBuf::new_p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A spendable output.
|
/// A spendable output.
|
||||||
@@ -89,11 +89,17 @@ impl ReceivedOutput {
|
|||||||
/// Read a ReceivedOutput from a generic satisfying Read.
|
/// Read a ReceivedOutput from a generic satisfying Read.
|
||||||
#[cfg(feature = "std")]
|
#[cfg(feature = "std")]
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
|
||||||
Ok(ReceivedOutput {
|
let offset = Secp256k1::read_F(r)?;
|
||||||
offset: Secp256k1::read_F(r)?,
|
let output;
|
||||||
output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
|
let outpoint;
|
||||||
outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
|
{
|
||||||
})
|
let mut buf_r = BufReader::with_capacity(0, r);
|
||||||
|
output =
|
||||||
|
TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))?;
|
||||||
|
outpoint =
|
||||||
|
OutPoint::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid OutPoint"))?;
|
||||||
|
}
|
||||||
|
Ok(ReceivedOutput { offset, output, outpoint })
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Write a ReceivedOutput to a generic satisfying Write.
|
/// Write a ReceivedOutput to a generic satisfying Write.
|
||||||
@@ -124,7 +130,7 @@ impl Scanner {
|
|||||||
/// Returns None if this key can't be scanned for.
|
/// Returns None if this key can't be scanned for.
|
||||||
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
|
||||||
let mut scripts = HashMap::new();
|
let mut scripts = HashMap::new();
|
||||||
scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
|
scripts.insert(p2tr_script_buf(key)?, Scalar::ZERO);
|
||||||
Some(Scanner { key, scripts })
|
Some(Scanner { key, scripts })
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -141,9 +147,8 @@ impl Scanner {
|
|||||||
// chance of being even
|
// chance of being even
|
||||||
// That means this should terminate within a very small amount of iterations
|
// That means this should terminate within a very small amount of iterations
|
||||||
loop {
|
loop {
|
||||||
match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
|
match p2tr_script_buf(self.key + (ProjectivePoint::GENERATOR * offset)) {
|
||||||
Some(address) => {
|
Some(script) => {
|
||||||
let script = address.script_pubkey();
|
|
||||||
if self.scripts.contains_key(&script) {
|
if self.scripts.contains_key(&script) {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
@@ -166,7 +171,7 @@ impl Scanner {
|
|||||||
res.push(ReceivedOutput {
|
res.push(ReceivedOutput {
|
||||||
offset: *offset,
|
offset: *offset,
|
||||||
output: output.clone(),
|
output: output.clone(),
|
||||||
outpoint: OutPoint::new(tx.txid(), vout),
|
outpoint: OutPoint::new(tx.compute_txid(), vout),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,12 +18,12 @@ use bitcoin::{
|
|||||||
absolute::LockTime,
|
absolute::LockTime,
|
||||||
script::{PushBytesBuf, ScriptBuf},
|
script::{PushBytesBuf, ScriptBuf},
|
||||||
transaction::{Version, Transaction},
|
transaction::{Version, Transaction},
|
||||||
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
|
OutPoint, Sequence, Witness, TxIn, Amount, TxOut,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
crypto::Schnorr,
|
crypto::Schnorr,
|
||||||
wallet::{ReceivedOutput, address_payload},
|
wallet::{ReceivedOutput, p2tr_script_buf},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
@@ -61,7 +61,11 @@ pub struct SignableTransaction {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SignableTransaction {
|
impl SignableTransaction {
|
||||||
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
|
fn calculate_weight(
|
||||||
|
inputs: usize,
|
||||||
|
payments: &[(ScriptBuf, u64)],
|
||||||
|
change: Option<&ScriptBuf>,
|
||||||
|
) -> u64 {
|
||||||
// Expand this a full transaction in order to use the bitcoin library's weight function
|
// Expand this a full transaction in order to use the bitcoin library's weight function
|
||||||
let mut tx = Transaction {
|
let mut tx = Transaction {
|
||||||
version: Version(2),
|
version: Version(2),
|
||||||
@@ -86,14 +90,14 @@ impl SignableTransaction {
|
|||||||
// The script pub key is not of a fixed size and does have to be used here
|
// The script pub key is not of a fixed size and does have to be used here
|
||||||
.map(|payment| TxOut {
|
.map(|payment| TxOut {
|
||||||
value: Amount::from_sat(payment.1),
|
value: Amount::from_sat(payment.1),
|
||||||
script_pubkey: payment.0.script_pubkey(),
|
script_pubkey: payment.0.clone(),
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
};
|
};
|
||||||
if let Some(change) = change {
|
if let Some(change) = change {
|
||||||
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
// Use a 0 value since we're currently unsure what the change amount will be, and since
|
||||||
// the value is fixed size (so any value could be used here)
|
// the value is fixed size (so any value could be used here)
|
||||||
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
|
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() });
|
||||||
}
|
}
|
||||||
u64::from(tx.weight())
|
u64::from(tx.weight())
|
||||||
}
|
}
|
||||||
@@ -121,8 +125,8 @@ impl SignableTransaction {
|
|||||||
/// If data is specified, an OP_RETURN output will be added with it.
|
/// If data is specified, an OP_RETURN output will be added with it.
|
||||||
pub fn new(
|
pub fn new(
|
||||||
mut inputs: Vec<ReceivedOutput>,
|
mut inputs: Vec<ReceivedOutput>,
|
||||||
payments: &[(Address, u64)],
|
payments: &[(ScriptBuf, u64)],
|
||||||
change: Option<&Address>,
|
change: Option<ScriptBuf>,
|
||||||
data: Option<Vec<u8>>,
|
data: Option<Vec<u8>>,
|
||||||
fee_per_weight: u64,
|
fee_per_weight: u64,
|
||||||
) -> Result<SignableTransaction, TransactionError> {
|
) -> Result<SignableTransaction, TransactionError> {
|
||||||
@@ -159,10 +163,7 @@ impl SignableTransaction {
|
|||||||
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
|
||||||
let mut tx_outs = payments
|
let mut tx_outs = payments
|
||||||
.iter()
|
.iter()
|
||||||
.map(|payment| TxOut {
|
.map(|payment| TxOut { value: Amount::from_sat(payment.1), script_pubkey: payment.0.clone() })
|
||||||
value: Amount::from_sat(payment.1),
|
|
||||||
script_pubkey: payment.0.script_pubkey(),
|
|
||||||
})
|
|
||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
// Add the OP_RETURN output
|
// Add the OP_RETURN output
|
||||||
@@ -213,12 +214,11 @@ impl SignableTransaction {
|
|||||||
|
|
||||||
// If there's a change address, check if there's change to give it
|
// If there's a change address, check if there's change to give it
|
||||||
if let Some(change) = change {
|
if let Some(change) = change {
|
||||||
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
|
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(&change));
|
||||||
let fee_with_change = fee_per_weight * weight_with_change;
|
let fee_with_change = fee_per_weight * weight_with_change;
|
||||||
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
|
||||||
if value >= DUST {
|
if value >= DUST {
|
||||||
tx_outs
|
tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change });
|
||||||
.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
|
|
||||||
weight = weight_with_change;
|
weight = weight_with_change;
|
||||||
needed_fee = fee_with_change;
|
needed_fee = fee_with_change;
|
||||||
}
|
}
|
||||||
@@ -248,7 +248,7 @@ impl SignableTransaction {
|
|||||||
|
|
||||||
/// Returns the TX ID of the transaction this will create.
|
/// Returns the TX ID of the transaction this will create.
|
||||||
pub fn txid(&self) -> [u8; 32] {
|
pub fn txid(&self) -> [u8; 32] {
|
||||||
let mut res = self.tx.txid().to_byte_array();
|
let mut res = self.tx.compute_txid().to_byte_array();
|
||||||
res.reverse();
|
res.reverse();
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
@@ -288,7 +288,7 @@ impl SignableTransaction {
|
|||||||
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
|
||||||
|
|
||||||
let offset = keys.clone().offset(self.offsets[i]);
|
let offset = keys.clone().offset(self.offsets[i]);
|
||||||
if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
|
if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -375,7 +375,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
|
|||||||
msg: &[u8],
|
msg: &[u8],
|
||||||
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
|
||||||
if !msg.is_empty() {
|
if !msg.is_empty() {
|
||||||
panic!("message was passed to the TransactionMachine when it generates its own");
|
panic!("message was passed to the TransactionSignMachine when it generates its own");
|
||||||
}
|
}
|
||||||
|
|
||||||
let commitments = (0 .. self.sigs.len())
|
let commitments = (0 .. self.sigs.len())
|
||||||
|
|||||||
@@ -22,11 +22,10 @@ use bitcoin_serai::{
|
|||||||
hashes::Hash as HashTrait,
|
hashes::Hash as HashTrait,
|
||||||
blockdata::opcodes::all::OP_RETURN,
|
blockdata::opcodes::all::OP_RETURN,
|
||||||
script::{PushBytesBuf, Instruction, Instructions, Script},
|
script::{PushBytesBuf, Instruction, Instructions, Script},
|
||||||
address::NetworkChecked,
|
|
||||||
OutPoint, Amount, TxOut, Transaction, Network, Address,
|
OutPoint, Amount, TxOut, Transaction, Network, Address,
|
||||||
},
|
},
|
||||||
wallet::{
|
wallet::{
|
||||||
tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
|
tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
|
||||||
},
|
},
|
||||||
rpc::Rpc,
|
rpc::Rpc,
|
||||||
};
|
};
|
||||||
@@ -48,7 +47,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
"generatetoaddress",
|
"generatetoaddress",
|
||||||
serde_json::json!([
|
serde_json::json!([
|
||||||
1,
|
1,
|
||||||
Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
|
Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap()
|
||||||
]),
|
]),
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
@@ -69,7 +68,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
|||||||
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
|
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
|
||||||
|
|
||||||
assert_eq!(outputs.len(), 1);
|
assert_eq!(outputs.len(), 1);
|
||||||
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
|
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].compute_txid(), 0));
|
||||||
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
|
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
@@ -193,7 +192,7 @@ async_sequential! {
|
|||||||
assert_eq!(output.offset(), Scalar::ZERO);
|
assert_eq!(output.offset(), Scalar::ZERO);
|
||||||
|
|
||||||
let inputs = vec![output];
|
let inputs = vec![output];
|
||||||
let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
|
let addr = || p2tr_script_buf(key).unwrap();
|
||||||
let payments = vec![(addr(), 1000)];
|
let payments = vec![(addr(), 1000)];
|
||||||
|
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
|
||||||
@@ -206,7 +205,7 @@ async_sequential! {
|
|||||||
// No change
|
// No change
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
|
||||||
// Consolidation TX
|
// Consolidation TX
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
|
||||||
// Data
|
// Data
|
||||||
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
|
||||||
// No outputs
|
// No outputs
|
||||||
@@ -229,7 +228,7 @@ async_sequential! {
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
|
SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
|
||||||
Err(TransactionError::TooLowFee),
|
Err(TransactionError::TooLowFee),
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -261,20 +260,19 @@ async_sequential! {
|
|||||||
|
|
||||||
// Declare payments, change, fee
|
// Declare payments, change, fee
|
||||||
let payments = [
|
let payments = [
|
||||||
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
|
(p2tr_script_buf(key).unwrap(), 1005),
|
||||||
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
|
(p2tr_script_buf(offset_key).unwrap(), 1007)
|
||||||
];
|
];
|
||||||
|
|
||||||
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
|
||||||
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
|
||||||
let change_addr =
|
let change_addr = p2tr_script_buf(change_key).unwrap();
|
||||||
Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
|
|
||||||
|
|
||||||
// Create and sign the TX
|
// Create and sign the TX
|
||||||
let tx = SignableTransaction::new(
|
let tx = SignableTransaction::new(
|
||||||
vec![output.clone(), offset_output.clone()],
|
vec![output.clone(), offset_output.clone()],
|
||||||
&payments,
|
&payments,
|
||||||
Some(&change_addr),
|
Some(change_addr.clone()),
|
||||||
None,
|
None,
|
||||||
FEE
|
FEE
|
||||||
).unwrap();
|
).unwrap();
|
||||||
@@ -287,7 +285,7 @@ async_sequential! {
|
|||||||
// Ensure we can scan it
|
// Ensure we can scan it
|
||||||
let outputs = scanner.scan_transaction(&tx);
|
let outputs = scanner.scan_transaction(&tx);
|
||||||
for (o, output) in outputs.iter().enumerate() {
|
for (o, output) in outputs.iter().enumerate() {
|
||||||
assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
|
assert_eq!(output.outpoint(), &OutPoint::new(tx.compute_txid(), u32::try_from(o).unwrap()));
|
||||||
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
|
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -299,7 +297,7 @@ async_sequential! {
|
|||||||
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
output,
|
output,
|
||||||
&TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
|
&TxOut { script_pubkey: payment.0.clone(), value: Amount::from_sat(payment.1) },
|
||||||
);
|
);
|
||||||
assert_eq!(scanned.value(), payment.1 );
|
assert_eq!(scanned.value(), payment.1 );
|
||||||
}
|
}
|
||||||
@@ -314,13 +312,13 @@ async_sequential! {
|
|||||||
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
tx.output[2],
|
tx.output[2],
|
||||||
TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
|
TxOut { script_pubkey: change_addr, value: Amount::from_sat(change_amount) },
|
||||||
);
|
);
|
||||||
|
|
||||||
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
|
||||||
// effectively test
|
// effectively test
|
||||||
rpc.send_raw_transaction(&tx).await.unwrap();
|
rpc.send_raw_transaction(&tx).await.unwrap();
|
||||||
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
|
let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array();
|
||||||
hash.reverse();
|
hash.reverse();
|
||||||
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
|
||||||
assert_eq!(expected_id, hash);
|
assert_eq!(expected_id, hash);
|
||||||
@@ -344,7 +342,7 @@ async_sequential! {
|
|||||||
&SignableTransaction::new(
|
&SignableTransaction::new(
|
||||||
vec![output],
|
vec![output],
|
||||||
&[],
|
&[],
|
||||||
Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
|
Some(p2tr_script_buf(key).unwrap()),
|
||||||
Some(data.clone()),
|
Some(data.clone()),
|
||||||
FEE
|
FEE
|
||||||
).unwrap()
|
).unwrap()
|
||||||
|
|||||||
4
coins/ethereum/.gitignore
vendored
4
coins/ethereum/.gitignore
vendored
@@ -1,7 +1,3 @@
|
|||||||
# Solidity build outputs
|
# Solidity build outputs
|
||||||
cache
|
cache
|
||||||
artifacts
|
artifacts
|
||||||
|
|
||||||
# Auto-generated ABI files
|
|
||||||
src/abi/schnorr.rs
|
|
||||||
src/abi/router.rs
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
|
|||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
publish = false
|
publish = false
|
||||||
rust-version = "1.74"
|
rust-version = "1.79"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -18,28 +18,32 @@ workspace = true
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
thiserror = { version = "1", default-features = false }
|
thiserror = { version = "1", default-features = false }
|
||||||
eyre = { version = "0.6", default-features = false }
|
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false, features = ["std"] }
|
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
|
||||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
|
|
||||||
|
|
||||||
ethers-core = { version = "2", default-features = false }
|
|
||||||
ethers-providers = { version = "2", default-features = false }
|
|
||||||
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
rand_core = { version = "0.6", default-features = false, features = ["std"] }
|
rand_core = { version = "0.6", default-features = false, features = ["std"] }
|
||||||
|
|
||||||
hex = { version = "0.4", default-features = false, features = ["std"] }
|
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["recommended"] }
|
||||||
serde = { version = "1", default-features = false, features = ["std"] }
|
|
||||||
serde_json = { version = "1", default-features = false, features = ["std"] }
|
|
||||||
|
|
||||||
sha2 = { version = "0.10", default-features = false, features = ["std"] }
|
group = { version = "0.13", default-features = false }
|
||||||
|
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] }
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["secp256k1"] }
|
||||||
|
|
||||||
|
alloy-core = { version = "0.7", default-features = false }
|
||||||
|
alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] }
|
||||||
|
alloy-consensus = { version = "0.1", default-features = false, features = ["k256"] }
|
||||||
|
alloy-network = { version = "0.1", default-features = false }
|
||||||
|
alloy-rpc-types-eth = { version = "0.1", default-features = false }
|
||||||
|
alloy-rpc-client = { version = "0.1", default-features = false }
|
||||||
|
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
|
||||||
|
alloy-provider = { version = "0.1", default-features = false }
|
||||||
|
|
||||||
|
alloy-node-bindings = { version = "0.1", default-features = false, optional = true }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }
|
||||||
|
|
||||||
tokio = { version = "1", features = ["macros"] }
|
tokio = { version = "1", features = ["macros"] }
|
||||||
|
|
||||||
|
alloy-node-bindings = { version = "0.1", default-features = false }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
tests = ["alloy-node-bindings", "frost/tests"]
|
||||||
|
|||||||
@@ -3,6 +3,12 @@
|
|||||||
This package contains Ethereum-related functionality, specifically deploying and
|
This package contains Ethereum-related functionality, specifically deploying and
|
||||||
interacting with Serai contracts.
|
interacting with Serai contracts.
|
||||||
|
|
||||||
|
While `monero-serai` and `bitcoin-serai` are general purpose libraries,
|
||||||
|
`ethereum-serai` is Serai specific. If any of the utilities are generally
|
||||||
|
desired, please fork and maintain your own copy to ensure the desired
|
||||||
|
functionality is preserved, or open an issue to request we make this library
|
||||||
|
general purpose.
|
||||||
|
|
||||||
### Dependencies
|
### Dependencies
|
||||||
|
|
||||||
- solc
|
- solc
|
||||||
|
|||||||
29
coins/ethereum/alloy-simple-request-transport/Cargo.toml
Normal file
29
coins/ethereum/alloy-simple-request-transport/Cargo.toml
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
[package]
|
||||||
|
name = "alloy-simple-request-transport"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "A transport for alloy based off simple-request"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/alloy-simple-request-transport"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.74"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
tower = "0.4"
|
||||||
|
|
||||||
|
serde_json = { version = "1", default-features = false }
|
||||||
|
simple-request = { path = "../../../common/request", default-features = false }
|
||||||
|
|
||||||
|
alloy-json-rpc = { version = "0.1", default-features = false }
|
||||||
|
alloy-transport = { version = "0.1", default-features = false }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
default = ["tls"]
|
||||||
|
tls = ["simple-request/tls"]
|
||||||
21
coins/ethereum/alloy-simple-request-transport/LICENSE
Normal file
21
coins/ethereum/alloy-simple-request-transport/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2024 Luke Parker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
4
coins/ethereum/alloy-simple-request-transport/README.md
Normal file
4
coins/ethereum/alloy-simple-request-transport/README.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Alloy Simple Request Transport
|
||||||
|
|
||||||
|
A transport for alloy based on simple-request, a small HTTP client built around
|
||||||
|
hyper.
|
||||||
60
coins/ethereum/alloy-simple-request-transport/src/lib.rs
Normal file
60
coins/ethereum/alloy-simple-request-transport/src/lib.rs
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
|
||||||
|
use core::task;
|
||||||
|
use std::io;
|
||||||
|
|
||||||
|
use alloy_json_rpc::{RequestPacket, ResponsePacket};
|
||||||
|
use alloy_transport::{TransportError, TransportErrorKind, TransportFut};
|
||||||
|
|
||||||
|
use simple_request::{hyper, Request, Client};
|
||||||
|
|
||||||
|
use tower::Service;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct SimpleRequest {
|
||||||
|
client: Client,
|
||||||
|
url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SimpleRequest {
|
||||||
|
pub fn new(url: String) -> Self {
|
||||||
|
Self { client: Client::with_connection_pool(), url }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Service<RequestPacket> for SimpleRequest {
|
||||||
|
type Response = ResponsePacket;
|
||||||
|
type Error = TransportError;
|
||||||
|
type Future = TransportFut<'static>;
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> task::Poll<Result<(), Self::Error>> {
|
||||||
|
task::Poll::Ready(Ok(()))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[inline]
|
||||||
|
fn call(&mut self, req: RequestPacket) -> Self::Future {
|
||||||
|
let inner = self.clone();
|
||||||
|
Box::pin(async move {
|
||||||
|
let packet = req.serialize().map_err(TransportError::SerError)?;
|
||||||
|
let request = Request::from(
|
||||||
|
hyper::Request::post(&inner.url)
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.body(serde_json::to_vec(&packet).map_err(TransportError::SerError)?.into())
|
||||||
|
.unwrap(),
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut res = inner
|
||||||
|
.client
|
||||||
|
.request(request)
|
||||||
|
.await
|
||||||
|
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?
|
||||||
|
.body()
|
||||||
|
.await
|
||||||
|
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?;
|
||||||
|
|
||||||
|
serde_json::from_reader(&mut res).map_err(|e| TransportError::deser_err(e, ""))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
use std::process::Command;
|
use std::process::Command;
|
||||||
|
|
||||||
use ethers_contract::Abigen;
|
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
println!("cargo:rerun-if-changed=contracts/*");
|
println!("cargo:rerun-if-changed=contracts/*");
|
||||||
println!("cargo:rerun-if-changed=artifacts/*");
|
println!("cargo:rerun-if-changed=artifacts/*");
|
||||||
@@ -21,22 +19,23 @@ fn main() {
|
|||||||
"--base-path", ".",
|
"--base-path", ".",
|
||||||
"-o", "./artifacts", "--overwrite",
|
"-o", "./artifacts", "--overwrite",
|
||||||
"--bin", "--abi",
|
"--bin", "--abi",
|
||||||
"--optimize",
|
"--via-ir", "--optimize",
|
||||||
"./contracts/Schnorr.sol", "./contracts/Router.sol",
|
|
||||||
|
"./contracts/IERC20.sol",
|
||||||
|
|
||||||
|
"./contracts/Schnorr.sol",
|
||||||
|
"./contracts/Deployer.sol",
|
||||||
|
"./contracts/Sandbox.sol",
|
||||||
|
"./contracts/Router.sol",
|
||||||
|
|
||||||
|
"./src/tests/contracts/Schnorr.sol",
|
||||||
|
"./src/tests/contracts/ERC20.sol",
|
||||||
|
|
||||||
|
"--no-color",
|
||||||
];
|
];
|
||||||
assert!(Command::new("solc").args(args).status().unwrap().success());
|
let solc = Command::new("solc").args(args).output().unwrap();
|
||||||
|
assert!(solc.status.success());
|
||||||
Abigen::new("Schnorr", "./artifacts/Schnorr.abi")
|
for line in String::from_utf8(solc.stderr).unwrap().lines() {
|
||||||
.unwrap()
|
assert!(!line.starts_with("Error:"));
|
||||||
.generate()
|
}
|
||||||
.unwrap()
|
|
||||||
.write_to_file("./src/abi/schnorr.rs")
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
Abigen::new("Router", "./artifacts/Router.abi")
|
|
||||||
.unwrap()
|
|
||||||
.generate()
|
|
||||||
.unwrap()
|
|
||||||
.write_to_file("./src/abi/router.rs")
|
|
||||||
.unwrap();
|
|
||||||
}
|
}
|
||||||
|
|||||||
52
coins/ethereum/contracts/Deployer.sol
Normal file
52
coins/ethereum/contracts/Deployer.sol
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
/*
|
||||||
|
The expected deployment process of the Router is as follows:
|
||||||
|
|
||||||
|
1) A transaction deploying Deployer is made. Then, a deterministic signature is
|
||||||
|
created such that an account with an unknown private key is the creator of
|
||||||
|
the contract. Anyone can fund this address, and once anyone does, the
|
||||||
|
transaction deploying Deployer can be published by anyone. No other
|
||||||
|
transaction may be made from that account.
|
||||||
|
|
||||||
|
2) Anyone deploys the Router through the Deployer. This uses a sequential nonce
|
||||||
|
such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible.
|
||||||
|
While such attacks would still be feasible if the Deployer's address was
|
||||||
|
controllable, the usage of a deterministic signature with a NUMS method
|
||||||
|
prevents that.
|
||||||
|
|
||||||
|
This doesn't have any denial-of-service risks and will resolve once anyone steps
|
||||||
|
forward as deployer. This does fail to guarantee an identical address across
|
||||||
|
every chain, though it enables letting anyone efficiently ask the Deployer for
|
||||||
|
the address (with the Deployer having an identical address on every chain).
|
||||||
|
|
||||||
|
Unfortunately, guaranteeing identical addresses aren't feasible. We'd need the
|
||||||
|
Deployer contract to use a consistent salt for the Router, yet the Router must
|
||||||
|
be deployed with a specific public key for Serai. Since Ethereum isn't able to
|
||||||
|
determine a valid public key (one the result of a Serai DKG) from a dishonest
|
||||||
|
public key, we have to allow multiple deployments with Serai being the one to
|
||||||
|
determine which to use.
|
||||||
|
|
||||||
|
The alternative would be to have a council publish the Serai key on-Ethereum,
|
||||||
|
with Serai verifying the published result. This would introduce a DoS risk in
|
||||||
|
the council not publishing the correct key/not publishing any key.
|
||||||
|
*/
|
||||||
|
|
||||||
|
contract Deployer {
|
||||||
|
event Deployment(bytes32 indexed init_code_hash, address created);
|
||||||
|
|
||||||
|
error DeploymentFailed();
|
||||||
|
|
||||||
|
function deploy(bytes memory init_code) external {
|
||||||
|
address created;
|
||||||
|
assembly {
|
||||||
|
created := create(0, add(init_code, 0x20), mload(init_code))
|
||||||
|
}
|
||||||
|
if (created == address(0)) {
|
||||||
|
revert DeploymentFailed();
|
||||||
|
}
|
||||||
|
// These may be emitted out of order upon re-entrancy
|
||||||
|
emit Deployment(keccak256(init_code), created);
|
||||||
|
}
|
||||||
|
}
|
||||||
20
coins/ethereum/contracts/IERC20.sol
Normal file
20
coins/ethereum/contracts/IERC20.sol
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
// SPDX-License-Identifier: CC0
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
interface IERC20 {
|
||||||
|
event Transfer(address indexed from, address indexed to, uint256 value);
|
||||||
|
event Approval(address indexed owner, address indexed spender, uint256 value);
|
||||||
|
|
||||||
|
function name() external view returns (string memory);
|
||||||
|
function symbol() external view returns (string memory);
|
||||||
|
function decimals() external view returns (uint8);
|
||||||
|
|
||||||
|
function totalSupply() external view returns (uint256);
|
||||||
|
|
||||||
|
function balanceOf(address owner) external view returns (uint256);
|
||||||
|
function transfer(address to, uint256 value) external returns (bool);
|
||||||
|
function transferFrom(address from, address to, uint256 value) external returns (bool);
|
||||||
|
|
||||||
|
function approve(address spender, uint256 value) external returns (bool);
|
||||||
|
function allowance(address owner, address spender) external view returns (uint256);
|
||||||
|
}
|
||||||
@@ -1,27 +1,24 @@
|
|||||||
// SPDX-License-Identifier: AGPLv3
|
// SPDX-License-Identifier: AGPLv3
|
||||||
pragma solidity ^0.8.0;
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
import "./IERC20.sol";
|
||||||
|
|
||||||
import "./Schnorr.sol";
|
import "./Schnorr.sol";
|
||||||
|
import "./Sandbox.sol";
|
||||||
|
|
||||||
contract Router is Schnorr {
|
contract Router {
|
||||||
// Contract initializer
|
// Nonce is incremented for each batch of transactions executed/key update
|
||||||
// TODO: Replace with a MuSig of the genesis validators
|
|
||||||
address public initializer;
|
|
||||||
|
|
||||||
// Nonce is incremented for each batch of transactions executed
|
|
||||||
uint256 public nonce;
|
uint256 public nonce;
|
||||||
|
|
||||||
// fixed parity for the public keys used in this contract
|
// Current public key's x-coordinate
|
||||||
uint8 constant public KEY_PARITY = 27;
|
// This key must always have the parity defined within the Schnorr contract
|
||||||
|
|
||||||
// current public key's x-coordinate
|
|
||||||
// note: this key must always use the fixed parity defined above
|
|
||||||
bytes32 public seraiKey;
|
bytes32 public seraiKey;
|
||||||
|
|
||||||
struct OutInstruction {
|
struct OutInstruction {
|
||||||
address to;
|
address to;
|
||||||
|
Call[] calls;
|
||||||
|
|
||||||
uint256 value;
|
uint256 value;
|
||||||
bytes data;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
struct Signature {
|
struct Signature {
|
||||||
@@ -29,62 +26,197 @@ contract Router is Schnorr {
|
|||||||
bytes32 s;
|
bytes32 s;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
event SeraiKeyUpdated(
|
||||||
|
uint256 indexed nonce,
|
||||||
|
bytes32 indexed key,
|
||||||
|
Signature signature
|
||||||
|
);
|
||||||
|
event InInstruction(
|
||||||
|
address indexed from,
|
||||||
|
address indexed coin,
|
||||||
|
uint256 amount,
|
||||||
|
bytes instruction
|
||||||
|
);
|
||||||
// success is a uint256 representing a bitfield of transaction successes
|
// success is a uint256 representing a bitfield of transaction successes
|
||||||
event Executed(uint256 nonce, bytes32 batch, uint256 success);
|
event Executed(
|
||||||
|
uint256 indexed nonce,
|
||||||
|
bytes32 indexed batch,
|
||||||
|
uint256 success,
|
||||||
|
Signature signature
|
||||||
|
);
|
||||||
|
|
||||||
// error types
|
// error types
|
||||||
error NotInitializer();
|
|
||||||
error AlreadyInitialized();
|
|
||||||
error InvalidKey();
|
error InvalidKey();
|
||||||
|
error InvalidSignature();
|
||||||
|
error InvalidAmount();
|
||||||
|
error FailedTransfer();
|
||||||
error TooManyTransactions();
|
error TooManyTransactions();
|
||||||
|
|
||||||
constructor() {
|
modifier _updateSeraiKeyAtEndOfFn(
|
||||||
initializer = msg.sender;
|
uint256 _nonce,
|
||||||
|
bytes32 key,
|
||||||
|
Signature memory sig
|
||||||
|
) {
|
||||||
|
if (
|
||||||
|
(key == bytes32(0)) ||
|
||||||
|
((bytes32(uint256(key) % Schnorr.Q)) != key)
|
||||||
|
) {
|
||||||
|
revert InvalidKey();
|
||||||
|
}
|
||||||
|
|
||||||
|
_;
|
||||||
|
|
||||||
|
seraiKey = key;
|
||||||
|
emit SeraiKeyUpdated(_nonce, key, sig);
|
||||||
}
|
}
|
||||||
|
|
||||||
// initSeraiKey can be called by the contract initializer to set the first
|
constructor(bytes32 _seraiKey) _updateSeraiKeyAtEndOfFn(
|
||||||
// public key, only if the public key has yet to be set.
|
0,
|
||||||
function initSeraiKey(bytes32 _seraiKey) external {
|
_seraiKey,
|
||||||
if (msg.sender != initializer) revert NotInitializer();
|
Signature({ c: bytes32(0), s: bytes32(0) })
|
||||||
if (seraiKey != 0) revert AlreadyInitialized();
|
) {
|
||||||
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
nonce = 1;
|
||||||
seraiKey = _seraiKey;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// updateSeraiKey validates the given Schnorr signature against the current public key,
|
// updateSeraiKey validates the given Schnorr signature against the current
|
||||||
// and if successful, updates the contract's public key to the given one.
|
// public key, and if successful, updates the contract's public key to the
|
||||||
|
// given one.
|
||||||
function updateSeraiKey(
|
function updateSeraiKey(
|
||||||
bytes32 _seraiKey,
|
bytes32 _seraiKey,
|
||||||
Signature memory sig
|
Signature calldata sig
|
||||||
) public {
|
) external _updateSeraiKeyAtEndOfFn(nonce, _seraiKey, sig) {
|
||||||
if (_seraiKey == bytes32(0)) revert InvalidKey();
|
bytes memory message =
|
||||||
bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey));
|
abi.encodePacked("updateSeraiKey", block.chainid, nonce, _seraiKey);
|
||||||
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
nonce++;
|
||||||
seraiKey = _seraiKey;
|
|
||||||
|
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
|
||||||
|
revert InvalidSignature();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// execute accepts a list of transactions to execute as well as a Schnorr signature.
|
function inInstruction(
|
||||||
|
address coin,
|
||||||
|
uint256 amount,
|
||||||
|
bytes memory instruction
|
||||||
|
) external payable {
|
||||||
|
if (coin == address(0)) {
|
||||||
|
if (amount != msg.value) {
|
||||||
|
revert InvalidAmount();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(bool success, bytes memory res) =
|
||||||
|
address(coin).call(
|
||||||
|
abi.encodeWithSelector(
|
||||||
|
IERC20.transferFrom.selector,
|
||||||
|
msg.sender,
|
||||||
|
address(this),
|
||||||
|
amount
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
// Require there was nothing returned, which is done by some non-standard
|
||||||
|
// tokens, or that the ERC20 contract did in fact return true
|
||||||
|
bool nonStandardResOrTrue =
|
||||||
|
(res.length == 0) || abi.decode(res, (bool));
|
||||||
|
if (!(success && nonStandardResOrTrue)) {
|
||||||
|
revert FailedTransfer();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
Due to fee-on-transfer tokens, emitting the amount directly is frowned upon.
|
||||||
|
The amount instructed to transfer may not actually be the amount
|
||||||
|
transferred.
|
||||||
|
|
||||||
|
If we add nonReentrant to every single function which can effect the
|
||||||
|
balance, we can check the amount exactly matches. This prevents transfers of
|
||||||
|
less value than expected occurring, at least, not without an additional
|
||||||
|
transfer to top up the difference (which isn't routed through this contract
|
||||||
|
and accordingly isn't trying to artificially create events).
|
||||||
|
|
||||||
|
If we don't add nonReentrant, a transfer can be started, and then a new
|
||||||
|
transfer for the difference can follow it up (again and again until a
|
||||||
|
rounding error is reached). This contract would believe all transfers were
|
||||||
|
done in full, despite each only being done in part (except for the last
|
||||||
|
one).
|
||||||
|
|
||||||
|
Given fee-on-transfer tokens aren't intended to be supported, the only
|
||||||
|
token planned to be supported is Dai and it doesn't have any fee-on-transfer
|
||||||
|
logic, fee-on-transfer tokens aren't even able to be supported at this time,
|
||||||
|
we simply classify this entire class of tokens as non-standard
|
||||||
|
implementations which induce undefined behavior. It is the Serai network's
|
||||||
|
role not to add support for any non-standard implementations.
|
||||||
|
*/
|
||||||
|
emit InInstruction(msg.sender, coin, amount, instruction);
|
||||||
|
}
|
||||||
|
|
||||||
|
// execute accepts a list of transactions to execute as well as a signature.
|
||||||
// if signature verification passes, the given transactions are executed.
|
// if signature verification passes, the given transactions are executed.
|
||||||
// if signature verification fails, this function will revert.
|
// if signature verification fails, this function will revert.
|
||||||
function execute(
|
function execute(
|
||||||
OutInstruction[] calldata transactions,
|
OutInstruction[] calldata transactions,
|
||||||
Signature memory sig
|
Signature calldata sig
|
||||||
) public {
|
) external {
|
||||||
if (transactions.length > 256) revert TooManyTransactions();
|
if (transactions.length > 256) {
|
||||||
|
revert TooManyTransactions();
|
||||||
|
}
|
||||||
|
|
||||||
bytes32 message = keccak256(abi.encode("execute", nonce, transactions));
|
bytes memory message =
|
||||||
|
abi.encode("execute", block.chainid, nonce, transactions);
|
||||||
|
uint256 executed_with_nonce = nonce;
|
||||||
// This prevents re-entrancy from causing double spends yet does allow
|
// This prevents re-entrancy from causing double spends yet does allow
|
||||||
// out-of-order execution via re-entrancy
|
// out-of-order execution via re-entrancy
|
||||||
nonce++;
|
nonce++;
|
||||||
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
|
|
||||||
|
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
|
||||||
|
revert InvalidSignature();
|
||||||
|
}
|
||||||
|
|
||||||
uint256 successes;
|
uint256 successes;
|
||||||
for(uint256 i = 0; i < transactions.length; i++) {
|
for (uint256 i = 0; i < transactions.length; i++) {
|
||||||
(bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data);
|
bool success;
|
||||||
|
|
||||||
|
// If there are no calls, send to `to` the value
|
||||||
|
if (transactions[i].calls.length == 0) {
|
||||||
|
(success, ) = transactions[i].to.call{
|
||||||
|
value: transactions[i].value,
|
||||||
|
gas: 5_000
|
||||||
|
}("");
|
||||||
|
} else {
|
||||||
|
// If there are calls, ignore `to`. Deploy a new Sandbox and proxy the
|
||||||
|
// calls through that
|
||||||
|
//
|
||||||
|
// We could use a single sandbox in order to reduce gas costs, yet that
|
||||||
|
// risks one person creating an approval that's hooked before another
|
||||||
|
// user's intended action executes, in order to drain their coins
|
||||||
|
//
|
||||||
|
// While technically, that would be a flaw in the sandboxed flow, this
|
||||||
|
// is robust and prevents such flaws from being possible
|
||||||
|
//
|
||||||
|
// We also don't want people to set state via the Sandbox and expect it
|
||||||
|
// future available when anyone else could set a distinct value
|
||||||
|
Sandbox sandbox = new Sandbox();
|
||||||
|
(success, ) = address(sandbox).call{
|
||||||
|
value: transactions[i].value,
|
||||||
|
// TODO: Have the Call specify the gas up front
|
||||||
|
gas: 350_000
|
||||||
|
}(
|
||||||
|
abi.encodeWithSelector(
|
||||||
|
Sandbox.sandbox.selector,
|
||||||
|
transactions[i].calls
|
||||||
|
)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
assembly {
|
assembly {
|
||||||
successes := or(successes, shl(i, success))
|
successes := or(successes, shl(i, success))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
emit Executed(nonce, message, successes);
|
emit Executed(
|
||||||
|
executed_with_nonce,
|
||||||
|
keccak256(message),
|
||||||
|
successes,
|
||||||
|
sig
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
48
coins/ethereum/contracts/Sandbox.sol
Normal file
48
coins/ethereum/contracts/Sandbox.sol
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.24;
|
||||||
|
|
||||||
|
struct Call {
|
||||||
|
address to;
|
||||||
|
uint256 value;
|
||||||
|
bytes data;
|
||||||
|
}
|
||||||
|
|
||||||
|
// A minimal sandbox focused on gas efficiency.
|
||||||
|
//
|
||||||
|
// The first call is executed if any of the calls fail, making it a fallback.
|
||||||
|
// All other calls are executed sequentially.
|
||||||
|
contract Sandbox {
|
||||||
|
error AlreadyCalled();
|
||||||
|
error CallsFailed();
|
||||||
|
|
||||||
|
function sandbox(Call[] calldata calls) external payable {
|
||||||
|
// Prevent re-entrancy due to this executing arbitrary calls from anyone
|
||||||
|
// and anywhere
|
||||||
|
bool called;
|
||||||
|
assembly { called := tload(0) }
|
||||||
|
if (called) {
|
||||||
|
revert AlreadyCalled();
|
||||||
|
}
|
||||||
|
assembly { tstore(0, 1) }
|
||||||
|
|
||||||
|
// Execute the calls, starting from 1
|
||||||
|
for (uint256 i = 1; i < calls.length; i++) {
|
||||||
|
(bool success, ) =
|
||||||
|
calls[i].to.call{ value: calls[i].value }(calls[i].data);
|
||||||
|
|
||||||
|
// If this call failed, execute the fallback (call 0)
|
||||||
|
if (!success) {
|
||||||
|
(success, ) =
|
||||||
|
calls[0].to.call{ value: address(this).balance }(calls[0].data);
|
||||||
|
// If this call also failed, revert entirely
|
||||||
|
if (!success) {
|
||||||
|
revert CallsFailed();
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// We don't clear the re-entrancy guard as this contract should never be
|
||||||
|
// called again, so there's no reason to spend the effort
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,38 +2,43 @@
|
|||||||
pragma solidity ^0.8.0;
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
// see https://github.com/noot/schnorr-verify for implementation details
|
// see https://github.com/noot/schnorr-verify for implementation details
|
||||||
contract Schnorr {
|
library Schnorr {
|
||||||
// secp256k1 group order
|
// secp256k1 group order
|
||||||
uint256 constant public Q =
|
uint256 constant public Q =
|
||||||
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
|
||||||
|
|
||||||
error InvalidSOrA();
|
// Fixed parity for the public keys used in this contract
|
||||||
error InvalidSignature();
|
// This avoids spending a word passing the parity in a similar style to
|
||||||
|
// Bitcoin's Taproot
|
||||||
|
uint8 constant public KEY_PARITY = 27;
|
||||||
|
|
||||||
// parity := public key y-coord parity (27 or 28)
|
error InvalidSOrA();
|
||||||
// px := public key x-coord
|
error MalformedSignature();
|
||||||
|
|
||||||
|
// px := public key x-coord, where the public key has a parity of KEY_PARITY
|
||||||
// message := 32-byte hash of the message
|
// message := 32-byte hash of the message
|
||||||
// c := schnorr signature challenge
|
// c := schnorr signature challenge
|
||||||
// s := schnorr signature
|
// s := schnorr signature
|
||||||
function verify(
|
function verify(
|
||||||
uint8 parity,
|
|
||||||
bytes32 px,
|
bytes32 px,
|
||||||
bytes32 message,
|
bytes memory message,
|
||||||
bytes32 c,
|
bytes32 c,
|
||||||
bytes32 s
|
bytes32 s
|
||||||
) public view returns (bool) {
|
) internal pure returns (bool) {
|
||||||
// ecrecover = (m, v, r, s);
|
// ecrecover = (m, v, r, s) -> key
|
||||||
|
// We instead pass the following to obtain the nonce (not the key)
|
||||||
|
// Then we hash it and verify it matches the challenge
|
||||||
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
|
||||||
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
|
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
|
||||||
|
|
||||||
|
// For safety, we want each input to ecrecover to be 0 (sa, px, ca)
|
||||||
|
// The ecreover precomple checks `r` and `s` (`px` and `ca`) are non-zero
|
||||||
|
// That leaves us to check `sa` are non-zero
|
||||||
if (sa == 0) revert InvalidSOrA();
|
if (sa == 0) revert InvalidSOrA();
|
||||||
// the ecrecover precompile implementation checks that the `r` and `s`
|
address R = ecrecover(sa, KEY_PARITY, px, ca);
|
||||||
// inputs are non-zero (in this case, `px` and `ca`), thus we don't need to
|
if (R == address(0)) revert MalformedSignature();
|
||||||
// check if they're zero.
|
|
||||||
address R = ecrecover(sa, parity, px, ca);
|
// Check the signature is correct by rebuilding the challenge
|
||||||
if (R == address(0)) revert InvalidSignature();
|
return c == keccak256(abi.encodePacked(R, px, message));
|
||||||
return c == keccak256(
|
|
||||||
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
30
coins/ethereum/relayer/Cargo.toml
Normal file
30
coins/ethereum/relayer/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[package]
|
||||||
|
name = "serai-ethereum-relayer"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "A relayer for Serai's Ethereum transactions"
|
||||||
|
license = "AGPL-3.0-only"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/relayer"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
keywords = []
|
||||||
|
edition = "2021"
|
||||||
|
publish = false
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
log = { version = "0.4", default-features = false, features = ["std"] }
|
||||||
|
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
|
||||||
|
|
||||||
|
tokio = { version = "1", default-features = false, features = ["rt", "time", "io-util", "net", "macros"] }
|
||||||
|
|
||||||
|
serai-env = { path = "../../../common/env" }
|
||||||
|
serai-db = { path = "../../../common/db" }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
parity-db = ["serai-db/parity-db"]
|
||||||
|
rocksdb = ["serai-db/rocksdb"]
|
||||||
15
coins/ethereum/relayer/LICENSE
Normal file
15
coins/ethereum/relayer/LICENSE
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
AGPL-3.0-only license
|
||||||
|
|
||||||
|
Copyright (c) 2023-2024 Luke Parker
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU Affero General Public License Version 3 as
|
||||||
|
published by the Free Software Foundation.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU Affero General Public License
|
||||||
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
4
coins/ethereum/relayer/README.md
Normal file
4
coins/ethereum/relayer/README.md
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Ethereum Transaction Relayer
|
||||||
|
|
||||||
|
This server collects Ethereum router commands to be published, offering an RPC
|
||||||
|
to fetch them.
|
||||||
100
coins/ethereum/relayer/src/main.rs
Normal file
100
coins/ethereum/relayer/src/main.rs
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
pub(crate) use tokio::{
|
||||||
|
io::{AsyncReadExt, AsyncWriteExt},
|
||||||
|
net::TcpListener,
|
||||||
|
};
|
||||||
|
|
||||||
|
use serai_db::{Get, DbTxn, Db as DbTrait};
|
||||||
|
|
||||||
|
#[tokio::main(flavor = "current_thread")]
|
||||||
|
async fn main() {
|
||||||
|
// Override the panic handler with one which will panic if any tokio task panics
|
||||||
|
{
|
||||||
|
let existing = std::panic::take_hook();
|
||||||
|
std::panic::set_hook(Box::new(move |panic| {
|
||||||
|
existing(panic);
|
||||||
|
const MSG: &str = "exiting the process due to a task panicking";
|
||||||
|
println!("{MSG}");
|
||||||
|
log::error!("{MSG}");
|
||||||
|
std::process::exit(1);
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
if std::env::var("RUST_LOG").is_err() {
|
||||||
|
std::env::set_var("RUST_LOG", serai_env::var("RUST_LOG").unwrap_or_else(|| "info".to_string()));
|
||||||
|
}
|
||||||
|
env_logger::init();
|
||||||
|
|
||||||
|
log::info!("Starting Ethereum relayer server...");
|
||||||
|
|
||||||
|
// Open the DB
|
||||||
|
#[allow(unused_variables, unreachable_code)]
|
||||||
|
let db = {
|
||||||
|
#[cfg(all(feature = "parity-db", feature = "rocksdb"))]
|
||||||
|
panic!("built with parity-db and rocksdb");
|
||||||
|
#[cfg(all(feature = "parity-db", not(feature = "rocksdb")))]
|
||||||
|
let db =
|
||||||
|
serai_db::new_parity_db(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
|
||||||
|
#[cfg(feature = "rocksdb")]
|
||||||
|
let db =
|
||||||
|
serai_db::new_rocksdb(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
|
||||||
|
db
|
||||||
|
};
|
||||||
|
|
||||||
|
// Start command recipience server
|
||||||
|
// This should not be publicly exposed
|
||||||
|
// TODO: Add auth
|
||||||
|
tokio::spawn({
|
||||||
|
let db = db.clone();
|
||||||
|
async move {
|
||||||
|
// 5132 ^ ((b'E' << 8) | b'R')
|
||||||
|
let server = TcpListener::bind("0.0.0.0:20830").await.unwrap();
|
||||||
|
loop {
|
||||||
|
let (mut socket, _) = server.accept().await.unwrap();
|
||||||
|
let db = db.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let mut db = db.clone();
|
||||||
|
loop {
|
||||||
|
let Ok(msg_len) = socket.read_u32_le().await else { break };
|
||||||
|
let mut buf = vec![0; usize::try_from(msg_len).unwrap()];
|
||||||
|
let Ok(_) = socket.read_exact(&mut buf).await else { break };
|
||||||
|
|
||||||
|
if buf.len() < 5 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
let nonce = u32::from_le_bytes(buf[.. 4].try_into().unwrap());
|
||||||
|
let mut txn = db.txn();
|
||||||
|
txn.put(nonce.to_le_bytes(), &buf[4 ..]);
|
||||||
|
txn.commit();
|
||||||
|
|
||||||
|
let Ok(()) = socket.write_all(&[1]).await else { break };
|
||||||
|
|
||||||
|
log::info!("received signed command #{nonce}");
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start command fetch server
|
||||||
|
// 5132 ^ ((b'E' << 8) | b'R') + 1
|
||||||
|
let server = TcpListener::bind("0.0.0.0:20831").await.unwrap();
|
||||||
|
loop {
|
||||||
|
let (mut socket, _) = server.accept().await.unwrap();
|
||||||
|
let db = db.clone();
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let db = db.clone();
|
||||||
|
loop {
|
||||||
|
// Nonce to get the router comamnd for
|
||||||
|
let mut buf = vec![0; 4];
|
||||||
|
let Ok(_) = socket.read_exact(&mut buf).await else { break };
|
||||||
|
|
||||||
|
let command = db.get(&buf[.. 4]).unwrap_or(vec![]);
|
||||||
|
let Ok(()) = socket.write_all(&u32::try_from(command.len()).unwrap().to_le_bytes()).await
|
||||||
|
else {
|
||||||
|
break;
|
||||||
|
};
|
||||||
|
let Ok(()) = socket.write_all(&command).await else { break };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,37 @@
|
|||||||
|
use alloy_sol_types::sol;
|
||||||
|
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
|
#[allow(warnings)]
|
||||||
|
#[allow(needless_pass_by_value)]
|
||||||
#[allow(clippy::all)]
|
#[allow(clippy::all)]
|
||||||
pub(crate) mod schnorr;
|
#[allow(clippy::ignored_unit_patterns)]
|
||||||
|
#[allow(clippy::redundant_closure_for_method_calls)]
|
||||||
|
mod erc20_container {
|
||||||
|
use super::*;
|
||||||
|
sol!("contracts/IERC20.sol");
|
||||||
|
}
|
||||||
|
pub use erc20_container::IERC20 as erc20;
|
||||||
|
|
||||||
#[rustfmt::skip]
|
#[rustfmt::skip]
|
||||||
|
#[allow(warnings)]
|
||||||
|
#[allow(needless_pass_by_value)]
|
||||||
#[allow(clippy::all)]
|
#[allow(clippy::all)]
|
||||||
pub(crate) mod router;
|
#[allow(clippy::ignored_unit_patterns)]
|
||||||
|
#[allow(clippy::redundant_closure_for_method_calls)]
|
||||||
|
mod deployer_container {
|
||||||
|
use super::*;
|
||||||
|
sol!("contracts/Deployer.sol");
|
||||||
|
}
|
||||||
|
pub use deployer_container::Deployer as deployer;
|
||||||
|
|
||||||
|
#[rustfmt::skip]
|
||||||
|
#[allow(warnings)]
|
||||||
|
#[allow(needless_pass_by_value)]
|
||||||
|
#[allow(clippy::all)]
|
||||||
|
#[allow(clippy::ignored_unit_patterns)]
|
||||||
|
#[allow(clippy::redundant_closure_for_method_calls)]
|
||||||
|
mod router_container {
|
||||||
|
use super::*;
|
||||||
|
sol!(Router, "artifacts/Router.abi");
|
||||||
|
}
|
||||||
|
pub use router_container::Router as router;
|
||||||
|
|||||||
@@ -1,91 +1,188 @@
|
|||||||
use sha3::{Digest, Keccak256};
|
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
use group::ff::PrimeField;
|
||||||
use k256::{
|
use k256::{
|
||||||
elliptic_curve::{
|
elliptic_curve::{ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint},
|
||||||
bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint,
|
ProjectivePoint, Scalar, U256 as KU256,
|
||||||
},
|
|
||||||
ProjectivePoint, Scalar, U256,
|
|
||||||
};
|
};
|
||||||
|
#[cfg(test)]
|
||||||
|
use k256::{elliptic_curve::point::DecompressPoint, AffinePoint};
|
||||||
|
|
||||||
use frost::{
|
use frost::{
|
||||||
algorithm::{Hram, SchnorrSignature},
|
algorithm::{Hram, SchnorrSignature},
|
||||||
curve::Secp256k1,
|
curve::{Ciphersuite, Secp256k1},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use alloy_core::primitives::{Parity, Signature as AlloySignature};
|
||||||
|
use alloy_consensus::{SignableTransaction, Signed, TxLegacy};
|
||||||
|
|
||||||
|
use crate::abi::router::{Signature as AbiSignature};
|
||||||
|
|
||||||
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
|
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
|
||||||
Keccak256::digest(data).into()
|
alloy_core::primitives::keccak256(data).into()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] {
|
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||||
|
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(data).into())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
|
||||||
let encoded_point = point.to_encoded_point(false);
|
let encoded_point = point.to_encoded_point(false);
|
||||||
// Last 20 bytes of the hash of the concatenated x and y coordinates
|
// Last 20 bytes of the hash of the concatenated x and y coordinates
|
||||||
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
|
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
|
||||||
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
|
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Deterministically sign a transaction.
|
||||||
|
///
|
||||||
|
/// This function panics if passed a transaction with a non-None chain ID.
|
||||||
|
pub fn deterministically_sign(tx: &TxLegacy) -> Signed<TxLegacy> {
|
||||||
|
assert!(
|
||||||
|
tx.chain_id.is_none(),
|
||||||
|
"chain ID was Some when deterministically signing a TX (causing a non-deterministic signer)"
|
||||||
|
);
|
||||||
|
|
||||||
|
let sig_hash = tx.signature_hash().0;
|
||||||
|
let mut r = hash_to_scalar(&[sig_hash.as_slice(), b"r"].concat());
|
||||||
|
let mut s = hash_to_scalar(&[sig_hash.as_slice(), b"s"].concat());
|
||||||
|
loop {
|
||||||
|
let r_bytes: [u8; 32] = r.to_repr().into();
|
||||||
|
let s_bytes: [u8; 32] = s.to_repr().into();
|
||||||
|
let v = Parity::NonEip155(false);
|
||||||
|
let signature =
|
||||||
|
AlloySignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), v).unwrap();
|
||||||
|
let tx = tx.clone().into_signed(signature);
|
||||||
|
if tx.recover_signer().is_ok() {
|
||||||
|
return tx;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Re-hash until valid
|
||||||
|
r = hash_to_scalar(r_bytes.as_ref());
|
||||||
|
s = hash_to_scalar(s_bytes.as_ref());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The public key for a Schnorr-signing account.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub struct PublicKey {
|
pub struct PublicKey {
|
||||||
pub A: ProjectivePoint,
|
pub(crate) A: ProjectivePoint,
|
||||||
pub px: Scalar,
|
pub(crate) px: Scalar,
|
||||||
pub parity: u8,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PublicKey {
|
impl PublicKey {
|
||||||
|
/// Construct a new `PublicKey`.
|
||||||
|
///
|
||||||
|
/// This will return None if the provided point isn't eligible to be a public key (due to
|
||||||
|
/// bounds such as parity).
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
|
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
|
||||||
let affine = A.to_affine();
|
let affine = A.to_affine();
|
||||||
let parity = u8::from(bool::from(affine.y_is_odd())) + 27;
|
// Only allow even keys to save a word within Ethereum
|
||||||
if parity != 27 {
|
let is_odd = bool::from(affine.y_is_odd());
|
||||||
|
if is_odd {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let x_coord = affine.x();
|
let x_coord = affine.x();
|
||||||
let x_coord_scalar = <Scalar as Reduce<U256>>::reduce_bytes(&x_coord);
|
let x_coord_scalar = <Scalar as Reduce<KU256>>::reduce_bytes(&x_coord);
|
||||||
// Return None if a reduction would occur
|
// Return None if a reduction would occur
|
||||||
|
// Reductions would be incredibly unlikely and shouldn't be an issue, yet it's one less
|
||||||
|
// headache/concern to have
|
||||||
|
// This does ban a trivial amoount of public keys
|
||||||
if x_coord_scalar.to_repr() != x_coord {
|
if x_coord_scalar.to_repr() != x_coord {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(PublicKey { A, px: x_coord_scalar, parity })
|
Some(PublicKey { A, px: x_coord_scalar })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn point(&self) -> ProjectivePoint {
|
||||||
|
self.A
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn eth_repr(&self) -> [u8; 32] {
|
||||||
|
self.px.to_repr().into()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub(crate) fn from_eth_repr(repr: [u8; 32]) -> Option<Self> {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let A = Option::<AffinePoint>::from(AffinePoint::decompress(&repr.into(), 0.into()))?.into();
|
||||||
|
Option::from(Scalar::from_repr(repr.into())).map(|px| PublicKey { A, px })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The HRAm to use for the Schnorr contract.
|
||||||
#[derive(Clone, Default)]
|
#[derive(Clone, Default)]
|
||||||
pub struct EthereumHram {}
|
pub struct EthereumHram {}
|
||||||
impl Hram<Secp256k1> for EthereumHram {
|
impl Hram<Secp256k1> for EthereumHram {
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
||||||
let a_encoded_point = A.to_encoded_point(true);
|
let x_coord = A.to_affine().x();
|
||||||
let mut a_encoded = a_encoded_point.as_ref().to_owned();
|
|
||||||
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
|
|
||||||
assert!((a_encoded[0] == 27) || (a_encoded[0] == 28));
|
|
||||||
let mut data = address(R).to_vec();
|
let mut data = address(R).to_vec();
|
||||||
data.append(&mut a_encoded);
|
data.extend(x_coord.as_slice());
|
||||||
data.extend(m);
|
data.extend(m);
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
|
|
||||||
|
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(&data).into())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// A signature for the Schnorr contract.
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
pub struct Signature {
|
pub struct Signature {
|
||||||
pub(crate) c: Scalar,
|
pub(crate) c: Scalar,
|
||||||
pub(crate) s: Scalar,
|
pub(crate) s: Scalar,
|
||||||
}
|
}
|
||||||
impl Signature {
|
impl Signature {
|
||||||
|
pub fn verify(&self, public_key: &PublicKey, message: &[u8]) -> bool {
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let R = (Secp256k1::generator() * self.s) - (public_key.A * self.c);
|
||||||
|
EthereumHram::hram(&R, &public_key.A, message) == self.c
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a new `Signature`.
|
||||||
|
///
|
||||||
|
/// This will return None if the signature is invalid.
|
||||||
pub fn new(
|
pub fn new(
|
||||||
public_key: &PublicKey,
|
public_key: &PublicKey,
|
||||||
chain_id: U256,
|
message: &[u8],
|
||||||
m: &[u8],
|
|
||||||
signature: SchnorrSignature<Secp256k1>,
|
signature: SchnorrSignature<Secp256k1>,
|
||||||
) -> Option<Signature> {
|
) -> Option<Signature> {
|
||||||
let c = EthereumHram::hram(
|
let c = EthereumHram::hram(&signature.R, &public_key.A, message);
|
||||||
&signature.R,
|
|
||||||
&public_key.A,
|
|
||||||
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
|
||||||
);
|
|
||||||
if !signature.verify(public_key.A, c) {
|
if !signature.verify(public_key.A, c) {
|
||||||
None?;
|
None?;
|
||||||
}
|
}
|
||||||
Some(Signature { c, s: signature.s })
|
|
||||||
|
let res = Signature { c, s: signature.s };
|
||||||
|
assert!(res.verify(public_key, message));
|
||||||
|
Some(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn c(&self) -> Scalar {
|
||||||
|
self.c
|
||||||
|
}
|
||||||
|
pub fn s(&self) -> Scalar {
|
||||||
|
self.s
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn to_bytes(&self) -> [u8; 64] {
|
||||||
|
let mut res = [0; 64];
|
||||||
|
res[.. 32].copy_from_slice(self.c.to_repr().as_ref());
|
||||||
|
res[32 ..].copy_from_slice(self.s.to_repr().as_ref());
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_bytes(bytes: [u8; 64]) -> std::io::Result<Self> {
|
||||||
|
let mut reader = bytes.as_slice();
|
||||||
|
let c = Secp256k1::read_F(&mut reader)?;
|
||||||
|
let s = Secp256k1::read_F(&mut reader)?;
|
||||||
|
Ok(Signature { c, s })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<&Signature> for AbiSignature {
|
||||||
|
fn from(sig: &Signature) -> AbiSignature {
|
||||||
|
let c: [u8; 32] = sig.c.to_repr().into();
|
||||||
|
let s: [u8; 32] = sig.s.to_repr().into();
|
||||||
|
AbiSignature { c: c.into(), s: s.into() }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
113
coins/ethereum/src/deployer.rs
Normal file
113
coins/ethereum/src/deployer.rs
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
|
use alloy_core::primitives::{hex::FromHex, Address, B256, U256, Bytes, TxKind};
|
||||||
|
use alloy_consensus::{Signed, TxLegacy};
|
||||||
|
|
||||||
|
use alloy_sol_types::{SolCall, SolEvent};
|
||||||
|
|
||||||
|
use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
|
||||||
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
Error,
|
||||||
|
crypto::{self, keccak256, PublicKey},
|
||||||
|
router::Router,
|
||||||
|
};
|
||||||
|
pub use crate::abi::deployer as abi;
|
||||||
|
|
||||||
|
/// The Deployer contract for the Router contract.
|
||||||
|
///
|
||||||
|
/// This Deployer has a deterministic address, letting it be immediately identified on any
|
||||||
|
/// compatible chain. It then supports retrieving the Router contract's address (which isn't
|
||||||
|
/// deterministic) using a single log query.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Deployer;
|
||||||
|
impl Deployer {
|
||||||
|
/// Obtain the transaction to deploy this contract, already signed.
|
||||||
|
///
|
||||||
|
/// The account this transaction is sent from (which is populated in `from`) must be sufficiently
|
||||||
|
/// funded for this transaction to be submitted. This account has no known private key to anyone,
|
||||||
|
/// so ETH sent can be neither misappropriated nor returned.
|
||||||
|
pub fn deployment_tx() -> Signed<TxLegacy> {
|
||||||
|
let bytecode = include_str!("../artifacts/Deployer.bin");
|
||||||
|
let bytecode =
|
||||||
|
Bytes::from_hex(bytecode).expect("compiled-in Deployer bytecode wasn't valid hex");
|
||||||
|
|
||||||
|
let tx = TxLegacy {
|
||||||
|
chain_id: None,
|
||||||
|
nonce: 0,
|
||||||
|
gas_price: 100_000_000_000u128,
|
||||||
|
// TODO: Use a more accurate gas limit
|
||||||
|
gas_limit: 1_000_000u128,
|
||||||
|
to: TxKind::Create,
|
||||||
|
value: U256::ZERO,
|
||||||
|
input: bytecode,
|
||||||
|
};
|
||||||
|
|
||||||
|
crypto::deterministically_sign(&tx)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Obtain the deterministic address for this contract.
|
||||||
|
pub fn address() -> [u8; 20] {
|
||||||
|
let deployer_deployer =
|
||||||
|
Self::deployment_tx().recover_signer().expect("deployment_tx didn't have a valid signature");
|
||||||
|
**Address::create(&deployer_deployer, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Construct a new view of the `Deployer`.
|
||||||
|
pub async fn new(provider: Arc<RootProvider<SimpleRequest>>) -> Result<Option<Self>, Error> {
|
||||||
|
let address = Self::address();
|
||||||
|
let code = provider.get_code_at(address.into()).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
// Contract has yet to be deployed
|
||||||
|
if code.is_empty() {
|
||||||
|
return Ok(None);
|
||||||
|
}
|
||||||
|
Ok(Some(Self))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Yield the `ContractCall` necessary to deploy the Router.
|
||||||
|
pub fn deploy_router(&self, key: &PublicKey) -> TxLegacy {
|
||||||
|
TxLegacy {
|
||||||
|
to: TxKind::Call(Self::address().into()),
|
||||||
|
input: abi::deployCall::new((Router::init_code(key).into(),)).abi_encode().into(),
|
||||||
|
gas_limit: 1_000_000,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Find the first Router deployed with the specified key as its first key.
|
||||||
|
///
|
||||||
|
/// This is the Router Serai will use, and is the only way to construct a `Router`.
|
||||||
|
pub async fn find_router(
|
||||||
|
&self,
|
||||||
|
provider: Arc<RootProvider<SimpleRequest>>,
|
||||||
|
key: &PublicKey,
|
||||||
|
) -> Result<Option<Router>, Error> {
|
||||||
|
let init_code = Router::init_code(key);
|
||||||
|
let init_code_hash = keccak256(&init_code);
|
||||||
|
|
||||||
|
#[cfg(not(test))]
|
||||||
|
let to_block = BlockNumberOrTag::Finalized;
|
||||||
|
#[cfg(test)]
|
||||||
|
let to_block = BlockNumberOrTag::Latest;
|
||||||
|
|
||||||
|
// Find the first log using this init code (where the init code is binding to the key)
|
||||||
|
// TODO: Make an abstraction for event filtering (de-duplicating common code)
|
||||||
|
let filter =
|
||||||
|
Filter::new().from_block(0).to_block(to_block).address(Address::from(Self::address()));
|
||||||
|
let filter = filter.event_signature(abi::Deployment::SIGNATURE_HASH);
|
||||||
|
let filter = filter.topic1(B256::from(init_code_hash));
|
||||||
|
let logs = provider.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
let Some(first_log) = logs.first() else { return Ok(None) };
|
||||||
|
let router = first_log
|
||||||
|
.log_decode::<abi::Deployment>()
|
||||||
|
.map_err(|_| Error::ConnectionError)?
|
||||||
|
.inner
|
||||||
|
.data
|
||||||
|
.created;
|
||||||
|
|
||||||
|
Ok(Some(Router::new(provider, router)))
|
||||||
|
}
|
||||||
|
}
|
||||||
105
coins/ethereum/src/erc20.rs
Normal file
105
coins/ethereum/src/erc20.rs
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
use std::{sync::Arc, collections::HashSet};
|
||||||
|
|
||||||
|
use alloy_core::primitives::{Address, B256, U256};
|
||||||
|
|
||||||
|
use alloy_sol_types::{SolInterface, SolEvent};
|
||||||
|
|
||||||
|
use alloy_rpc_types_eth::Filter;
|
||||||
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
use crate::Error;
|
||||||
|
pub use crate::abi::erc20 as abi;
|
||||||
|
use abi::{IERC20Calls, Transfer, transferCall, transferFromCall};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct TopLevelErc20Transfer {
|
||||||
|
pub id: [u8; 32],
|
||||||
|
pub from: [u8; 20],
|
||||||
|
pub amount: U256,
|
||||||
|
pub data: Vec<u8>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A view for an ERC20 contract.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Erc20(Arc<RootProvider<SimpleRequest>>, Address);
|
||||||
|
impl Erc20 {
|
||||||
|
/// Construct a new view of the specified ERC20 contract.
|
||||||
|
pub fn new(provider: Arc<RootProvider<SimpleRequest>>, address: [u8; 20]) -> Self {
|
||||||
|
Self(provider, Address::from(&address))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn top_level_transfers(
|
||||||
|
&self,
|
||||||
|
block: u64,
|
||||||
|
to: [u8; 20],
|
||||||
|
) -> Result<Vec<TopLevelErc20Transfer>, Error> {
|
||||||
|
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
|
||||||
|
let filter = filter.event_signature(Transfer::SIGNATURE_HASH);
|
||||||
|
let mut to_topic = [0; 32];
|
||||||
|
to_topic[12 ..].copy_from_slice(&to);
|
||||||
|
let filter = filter.topic2(B256::from(to_topic));
|
||||||
|
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
let mut handled = HashSet::new();
|
||||||
|
|
||||||
|
let mut top_level_transfers = vec![];
|
||||||
|
for log in logs {
|
||||||
|
// Double check the address which emitted this log
|
||||||
|
if log.address() != self.1 {
|
||||||
|
Err(Error::ConnectionError)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?;
|
||||||
|
let tx =
|
||||||
|
self.0.get_transaction_by_hash(tx_id).await.ok().flatten().ok_or(Error::ConnectionError)?;
|
||||||
|
|
||||||
|
// If this is a top-level call...
|
||||||
|
if tx.to == Some(self.1) {
|
||||||
|
// And we recognize the call...
|
||||||
|
// Don't validate the encoding as this can't be re-encoded to an identical bytestring due
|
||||||
|
// to the InInstruction appended
|
||||||
|
if let Ok(call) = IERC20Calls::abi_decode(&tx.input, false) {
|
||||||
|
// Extract the top-level call's from/to/value
|
||||||
|
let (from, call_to, value) = match call {
|
||||||
|
IERC20Calls::transfer(transferCall { to: call_to, value }) => (tx.from, call_to, value),
|
||||||
|
IERC20Calls::transferFrom(transferFromCall { from, to: call_to, value }) => {
|
||||||
|
(from, call_to, value)
|
||||||
|
}
|
||||||
|
// Treat any other function selectors as unrecognized
|
||||||
|
_ => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
let log = log.log_decode::<Transfer>().map_err(|_| Error::ConnectionError)?.inner.data;
|
||||||
|
|
||||||
|
// Ensure the top-level transfer is equivalent, and this presumably isn't a log for an
|
||||||
|
// internal transfer
|
||||||
|
if (log.from != from) || (call_to != to) || (value != log.value) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now that the top-level transfer is confirmed to be equivalent to the log, ensure it's
|
||||||
|
// the only log we handle
|
||||||
|
if handled.contains(&tx_id) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
handled.insert(tx_id);
|
||||||
|
|
||||||
|
// Read the data appended after
|
||||||
|
let encoded = call.abi_encode();
|
||||||
|
let data = tx.input.as_ref()[encoded.len() ..].to_vec();
|
||||||
|
|
||||||
|
// Push the transfer
|
||||||
|
top_level_transfers.push(TopLevelErc20Transfer {
|
||||||
|
// Since we'll only handle one log for this TX, set the ID to the TX ID
|
||||||
|
id: *tx_id,
|
||||||
|
from: *log.from.0,
|
||||||
|
amount: log.value,
|
||||||
|
data,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(top_level_transfers)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,16 +1,35 @@
|
|||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
pub mod alloy {
|
||||||
|
pub use alloy_core::primitives;
|
||||||
|
pub use alloy_core as core;
|
||||||
|
pub use alloy_sol_types as sol_types;
|
||||||
|
|
||||||
|
pub use alloy_consensus as consensus;
|
||||||
|
pub use alloy_network as network;
|
||||||
|
pub use alloy_rpc_types_eth as rpc_types;
|
||||||
|
pub use alloy_simple_request_transport as simple_request_transport;
|
||||||
|
pub use alloy_rpc_client as rpc_client;
|
||||||
|
pub use alloy_provider as provider;
|
||||||
|
}
|
||||||
|
|
||||||
pub mod crypto;
|
pub mod crypto;
|
||||||
|
|
||||||
pub(crate) mod abi;
|
pub(crate) mod abi;
|
||||||
pub mod schnorr;
|
|
||||||
|
pub mod erc20;
|
||||||
|
pub mod deployer;
|
||||||
pub mod router;
|
pub mod router;
|
||||||
|
|
||||||
#[cfg(test)]
|
pub mod machine;
|
||||||
mod tests;
|
|
||||||
|
|
||||||
#[derive(Error, Debug)]
|
#[cfg(any(test, feature = "tests"))]
|
||||||
|
pub mod tests;
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
#[error("failed to verify Schnorr signature")]
|
#[error("failed to verify Schnorr signature")]
|
||||||
InvalidSignature,
|
InvalidSignature,
|
||||||
|
#[error("couldn't make call/send TX")]
|
||||||
|
ConnectionError,
|
||||||
}
|
}
|
||||||
|
|||||||
414
coins/ethereum/src/machine.rs
Normal file
414
coins/ethereum/src/machine.rs
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
use std::{
|
||||||
|
io::{self, Read},
|
||||||
|
collections::HashMap,
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use transcript::{Transcript, RecommendedTranscript};
|
||||||
|
|
||||||
|
use group::GroupEncoding;
|
||||||
|
use frost::{
|
||||||
|
curve::{Ciphersuite, Secp256k1},
|
||||||
|
Participant, ThresholdKeys, FrostError,
|
||||||
|
algorithm::Schnorr,
|
||||||
|
sign::*,
|
||||||
|
};
|
||||||
|
|
||||||
|
use alloy_core::primitives::U256;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
crypto::{PublicKey, EthereumHram, Signature},
|
||||||
|
router::{
|
||||||
|
abi::{Call as AbiCall, OutInstruction as AbiOutInstruction},
|
||||||
|
Router,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct Call {
|
||||||
|
pub to: [u8; 20],
|
||||||
|
pub value: U256,
|
||||||
|
pub data: Vec<u8>,
|
||||||
|
}
|
||||||
|
impl Call {
|
||||||
|
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let mut to = [0; 20];
|
||||||
|
reader.read_exact(&mut to)?;
|
||||||
|
|
||||||
|
let value = {
|
||||||
|
let mut value_bytes = [0; 32];
|
||||||
|
reader.read_exact(&mut value_bytes)?;
|
||||||
|
U256::from_le_slice(&value_bytes)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut data_len = {
|
||||||
|
let mut data_len = [0; 4];
|
||||||
|
reader.read_exact(&mut data_len)?;
|
||||||
|
usize::try_from(u32::from_le_bytes(data_len)).expect("u32 couldn't fit within a usize")
|
||||||
|
};
|
||||||
|
|
||||||
|
// A valid DoS would be to claim a 4 GB data is present for only 4 bytes
|
||||||
|
// We read this in 1 KB chunks to only read data actually present (with a max DoS of 1 KB)
|
||||||
|
let mut data = vec![];
|
||||||
|
while data_len > 0 {
|
||||||
|
let chunk_len = data_len.min(1024);
|
||||||
|
let mut chunk = vec![0; chunk_len];
|
||||||
|
reader.read_exact(&mut chunk)?;
|
||||||
|
data.extend(&chunk);
|
||||||
|
data_len -= chunk_len;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Call { to, value, data })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
writer.write_all(&self.to)?;
|
||||||
|
writer.write_all(&self.value.as_le_bytes())?;
|
||||||
|
|
||||||
|
let data_len = u32::try_from(self.data.len())
|
||||||
|
.map_err(|_| io::Error::other("call data length exceeded 2**32"))?;
|
||||||
|
writer.write_all(&data_len.to_le_bytes())?;
|
||||||
|
writer.write_all(&self.data)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<Call> for AbiCall {
|
||||||
|
fn from(call: Call) -> AbiCall {
|
||||||
|
AbiCall { to: call.to.into(), value: call.value, data: call.data.into() }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub enum OutInstructionTarget {
|
||||||
|
Direct([u8; 20]),
|
||||||
|
Calls(Vec<Call>),
|
||||||
|
}
|
||||||
|
impl OutInstructionTarget {
|
||||||
|
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let mut kind = [0xff];
|
||||||
|
reader.read_exact(&mut kind)?;
|
||||||
|
|
||||||
|
match kind[0] {
|
||||||
|
0 => {
|
||||||
|
let mut addr = [0; 20];
|
||||||
|
reader.read_exact(&mut addr)?;
|
||||||
|
Ok(OutInstructionTarget::Direct(addr))
|
||||||
|
}
|
||||||
|
1 => {
|
||||||
|
let mut calls_len = [0; 4];
|
||||||
|
reader.read_exact(&mut calls_len)?;
|
||||||
|
let calls_len = u32::from_le_bytes(calls_len);
|
||||||
|
|
||||||
|
let mut calls = vec![];
|
||||||
|
for _ in 0 .. calls_len {
|
||||||
|
calls.push(Call::read(reader)?);
|
||||||
|
}
|
||||||
|
Ok(OutInstructionTarget::Calls(calls))
|
||||||
|
}
|
||||||
|
_ => Err(io::Error::other("unrecognized OutInstructionTarget"))?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
OutInstructionTarget::Direct(addr) => {
|
||||||
|
writer.write_all(&[0])?;
|
||||||
|
writer.write_all(addr)?;
|
||||||
|
}
|
||||||
|
OutInstructionTarget::Calls(calls) => {
|
||||||
|
writer.write_all(&[1])?;
|
||||||
|
let call_len = u32::try_from(calls.len())
|
||||||
|
.map_err(|_| io::Error::other("amount of calls exceeded 2**32"))?;
|
||||||
|
writer.write_all(&call_len.to_le_bytes())?;
|
||||||
|
for call in calls {
|
||||||
|
call.write(writer)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct OutInstruction {
|
||||||
|
pub target: OutInstructionTarget,
|
||||||
|
pub value: U256,
|
||||||
|
}
|
||||||
|
impl OutInstruction {
|
||||||
|
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let target = OutInstructionTarget::read(reader)?;
|
||||||
|
|
||||||
|
let value = {
|
||||||
|
let mut value_bytes = [0; 32];
|
||||||
|
reader.read_exact(&mut value_bytes)?;
|
||||||
|
U256::from_le_slice(&value_bytes)
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(OutInstruction { target, value })
|
||||||
|
}
|
||||||
|
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
self.target.write(writer)?;
|
||||||
|
writer.write_all(&self.value.as_le_bytes())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<OutInstruction> for AbiOutInstruction {
|
||||||
|
fn from(instruction: OutInstruction) -> AbiOutInstruction {
|
||||||
|
match instruction.target {
|
||||||
|
OutInstructionTarget::Direct(addr) => {
|
||||||
|
AbiOutInstruction { to: addr.into(), calls: vec![], value: instruction.value }
|
||||||
|
}
|
||||||
|
OutInstructionTarget::Calls(calls) => AbiOutInstruction {
|
||||||
|
to: [0; 20].into(),
|
||||||
|
calls: calls.into_iter().map(Into::into).collect(),
|
||||||
|
value: instruction.value,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub enum RouterCommand {
|
||||||
|
UpdateSeraiKey { chain_id: U256, nonce: U256, key: PublicKey },
|
||||||
|
Execute { chain_id: U256, nonce: U256, outs: Vec<OutInstruction> },
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RouterCommand {
|
||||||
|
pub fn msg(&self) -> Vec<u8> {
|
||||||
|
match self {
|
||||||
|
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
|
||||||
|
Router::update_serai_key_message(*chain_id, *nonce, key)
|
||||||
|
}
|
||||||
|
RouterCommand::Execute { chain_id, nonce, outs } => Router::execute_message(
|
||||||
|
*chain_id,
|
||||||
|
*nonce,
|
||||||
|
outs.iter().map(|out| out.clone().into()).collect(),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let mut kind = [0xff];
|
||||||
|
reader.read_exact(&mut kind)?;
|
||||||
|
|
||||||
|
match kind[0] {
|
||||||
|
0 => {
|
||||||
|
let mut chain_id = [0; 32];
|
||||||
|
reader.read_exact(&mut chain_id)?;
|
||||||
|
|
||||||
|
let mut nonce = [0; 32];
|
||||||
|
reader.read_exact(&mut nonce)?;
|
||||||
|
|
||||||
|
let key = PublicKey::new(Secp256k1::read_G(reader)?)
|
||||||
|
.ok_or(io::Error::other("key for RouterCommand doesn't have an eth representation"))?;
|
||||||
|
Ok(RouterCommand::UpdateSeraiKey {
|
||||||
|
chain_id: U256::from_le_slice(&chain_id),
|
||||||
|
nonce: U256::from_le_slice(&nonce),
|
||||||
|
key,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
1 => {
|
||||||
|
let mut chain_id = [0; 32];
|
||||||
|
reader.read_exact(&mut chain_id)?;
|
||||||
|
let chain_id = U256::from_le_slice(&chain_id);
|
||||||
|
|
||||||
|
let mut nonce = [0; 32];
|
||||||
|
reader.read_exact(&mut nonce)?;
|
||||||
|
let nonce = U256::from_le_slice(&nonce);
|
||||||
|
|
||||||
|
let mut outs_len = [0; 4];
|
||||||
|
reader.read_exact(&mut outs_len)?;
|
||||||
|
let outs_len = u32::from_le_bytes(outs_len);
|
||||||
|
|
||||||
|
let mut outs = vec![];
|
||||||
|
for _ in 0 .. outs_len {
|
||||||
|
outs.push(OutInstruction::read(reader)?);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(RouterCommand::Execute { chain_id, nonce, outs })
|
||||||
|
}
|
||||||
|
_ => Err(io::Error::other("reading unknown type of RouterCommand"))?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
match self {
|
||||||
|
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
|
||||||
|
writer.write_all(&[0])?;
|
||||||
|
writer.write_all(&chain_id.as_le_bytes())?;
|
||||||
|
writer.write_all(&nonce.as_le_bytes())?;
|
||||||
|
writer.write_all(&key.A.to_bytes())
|
||||||
|
}
|
||||||
|
RouterCommand::Execute { chain_id, nonce, outs } => {
|
||||||
|
writer.write_all(&[1])?;
|
||||||
|
writer.write_all(&chain_id.as_le_bytes())?;
|
||||||
|
writer.write_all(&nonce.as_le_bytes())?;
|
||||||
|
writer.write_all(&u32::try_from(outs.len()).unwrap().to_le_bytes())?;
|
||||||
|
for out in outs {
|
||||||
|
out.write(writer)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut res = vec![];
|
||||||
|
self.write(&mut res).unwrap();
|
||||||
|
res
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct SignedRouterCommand {
|
||||||
|
command: RouterCommand,
|
||||||
|
signature: Signature,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SignedRouterCommand {
|
||||||
|
pub fn new(key: &PublicKey, command: RouterCommand, signature: &[u8; 64]) -> Option<Self> {
|
||||||
|
let c = Secp256k1::read_F(&mut &signature[.. 32]).ok()?;
|
||||||
|
let s = Secp256k1::read_F(&mut &signature[32 ..]).ok()?;
|
||||||
|
let signature = Signature { c, s };
|
||||||
|
|
||||||
|
if !signature.verify(key, &command.msg()) {
|
||||||
|
None?
|
||||||
|
}
|
||||||
|
Some(SignedRouterCommand { command, signature })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn command(&self) -> &RouterCommand {
|
||||||
|
&self.command
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn signature(&self) -> &Signature {
|
||||||
|
&self.signature
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let command = RouterCommand::read(reader)?;
|
||||||
|
|
||||||
|
let mut sig = [0; 64];
|
||||||
|
reader.read_exact(&mut sig)?;
|
||||||
|
let signature = Signature::from_bytes(sig)?;
|
||||||
|
|
||||||
|
Ok(SignedRouterCommand { command, signature })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
self.command.write(writer)?;
|
||||||
|
writer.write_all(&self.signature.to_bytes())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct RouterCommandMachine {
|
||||||
|
key: PublicKey,
|
||||||
|
command: RouterCommand,
|
||||||
|
machine: AlgorithmMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RouterCommandMachine {
|
||||||
|
pub fn new(keys: ThresholdKeys<Secp256k1>, command: RouterCommand) -> Option<Self> {
|
||||||
|
// The Schnorr algorithm should be fine without this, even when using the IETF variant
|
||||||
|
// If this is better and more comprehensive, we should do it, even if not necessary
|
||||||
|
let mut transcript = RecommendedTranscript::new(b"ethereum-serai RouterCommandMachine v0.1");
|
||||||
|
let key = keys.group_key();
|
||||||
|
transcript.append_message(b"key", key.to_bytes());
|
||||||
|
transcript.append_message(b"command", command.serialize());
|
||||||
|
|
||||||
|
Some(Self {
|
||||||
|
key: PublicKey::new(key)?,
|
||||||
|
command,
|
||||||
|
machine: AlgorithmMachine::new(Schnorr::new(transcript), keys),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PreprocessMachine for RouterCommandMachine {
|
||||||
|
type Preprocess = Preprocess<Secp256k1, ()>;
|
||||||
|
type Signature = SignedRouterCommand;
|
||||||
|
type SignMachine = RouterCommandSignMachine;
|
||||||
|
|
||||||
|
fn preprocess<R: RngCore + CryptoRng>(
|
||||||
|
self,
|
||||||
|
rng: &mut R,
|
||||||
|
) -> (Self::SignMachine, Self::Preprocess) {
|
||||||
|
let (machine, preprocess) = self.machine.preprocess(rng);
|
||||||
|
|
||||||
|
(RouterCommandSignMachine { key: self.key, command: self.command, machine }, preprocess)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct RouterCommandSignMachine {
|
||||||
|
key: PublicKey,
|
||||||
|
command: RouterCommand,
|
||||||
|
machine: AlgorithmSignMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SignMachine<SignedRouterCommand> for RouterCommandSignMachine {
|
||||||
|
type Params = ();
|
||||||
|
type Keys = ThresholdKeys<Secp256k1>;
|
||||||
|
type Preprocess = Preprocess<Secp256k1, ()>;
|
||||||
|
type SignatureShare = SignatureShare<Secp256k1>;
|
||||||
|
type SignatureMachine = RouterCommandSignatureMachine;
|
||||||
|
|
||||||
|
fn cache(self) -> CachedPreprocess {
|
||||||
|
unimplemented!(
|
||||||
|
"RouterCommand machines don't support caching their preprocesses due to {}",
|
||||||
|
"being already bound to a specific command"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn from_cache(
|
||||||
|
(): (),
|
||||||
|
_: ThresholdKeys<Secp256k1>,
|
||||||
|
_: CachedPreprocess,
|
||||||
|
) -> (Self, Self::Preprocess) {
|
||||||
|
unimplemented!(
|
||||||
|
"RouterCommand machines don't support caching their preprocesses due to {}",
|
||||||
|
"being already bound to a specific command"
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
|
||||||
|
self.machine.read_preprocess(reader)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn sign(
|
||||||
|
self,
|
||||||
|
commitments: HashMap<Participant, Self::Preprocess>,
|
||||||
|
msg: &[u8],
|
||||||
|
) -> Result<(RouterCommandSignatureMachine, Self::SignatureShare), FrostError> {
|
||||||
|
if !msg.is_empty() {
|
||||||
|
panic!("message was passed to a RouterCommand machine when it generates its own");
|
||||||
|
}
|
||||||
|
|
||||||
|
let (machine, share) = self.machine.sign(commitments, &self.command.msg())?;
|
||||||
|
|
||||||
|
Ok((RouterCommandSignatureMachine { key: self.key, command: self.command, machine }, share))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct RouterCommandSignatureMachine {
|
||||||
|
key: PublicKey,
|
||||||
|
command: RouterCommand,
|
||||||
|
machine:
|
||||||
|
AlgorithmSignatureMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SignatureMachine<SignedRouterCommand> for RouterCommandSignatureMachine {
|
||||||
|
type SignatureShare = SignatureShare<Secp256k1>;
|
||||||
|
|
||||||
|
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
|
||||||
|
self.machine.read_share(reader)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn complete(
|
||||||
|
self,
|
||||||
|
shares: HashMap<Participant, Self::SignatureShare>,
|
||||||
|
) -> Result<SignedRouterCommand, FrostError> {
|
||||||
|
let sig = self.machine.complete(shares)?;
|
||||||
|
let signature = Signature::new(&self.key, &self.command.msg(), sig)
|
||||||
|
.expect("machine produced an invalid signature");
|
||||||
|
Ok(SignedRouterCommand { command: self.command, signature })
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,30 +1,443 @@
|
|||||||
pub use crate::abi::router::*;
|
use std::{sync::Arc, io, collections::HashSet};
|
||||||
|
|
||||||
/*
|
use k256::{
|
||||||
use crate::crypto::{ProcessedSignature, PublicKey};
|
elliptic_curve::{group::GroupEncoding, sec1},
|
||||||
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
|
ProjectivePoint,
|
||||||
use eyre::Result;
|
};
|
||||||
use std::{convert::From, fs::File, sync::Arc};
|
|
||||||
|
|
||||||
pub async fn router_update_public_key<M: Middleware + 'static>(
|
use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
|
||||||
contract: &Router<M>,
|
#[cfg(test)]
|
||||||
public_key: &PublicKey,
|
use alloy_core::primitives::B256;
|
||||||
signature: &ProcessedSignature,
|
use alloy_consensus::TxLegacy;
|
||||||
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
|
|
||||||
let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into());
|
use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
|
||||||
let pending_tx = tx.send().await?;
|
|
||||||
let receipt = pending_tx.await?;
|
use alloy_rpc_types_eth::Filter;
|
||||||
Ok(receipt)
|
#[cfg(test)]
|
||||||
|
use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
|
||||||
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
pub use crate::{
|
||||||
|
Error,
|
||||||
|
crypto::{PublicKey, Signature},
|
||||||
|
abi::{erc20::Transfer, router as abi},
|
||||||
|
};
|
||||||
|
use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent};
|
||||||
|
|
||||||
|
/// A coin the Router supports: either native ether or an ERC20 token.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
  Ether,
  Erc20([u8; 20]),
}

impl Coin {
  /// Deserialize a Coin from a reader.
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let mut kind = [0xff];
    reader.read_exact(&mut kind)?;
    Ok(match kind[0] {
      0 => Coin::Ether,
      1 => {
        let mut address = [0; 20];
        reader.read_exact(&mut address)?;
        Coin::Erc20(address)
      }
      _ => Err(io::Error::other("unrecognized Coin type"))?,
    })
  }

  /// Serialize a Coin to a writer.
  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    match self {
      Coin::Ether => writer.write_all(&[0]),
      Coin::Erc20(token) => {
        writer.write_all(&[1])?;
        writer.write_all(token)
      }
    }
  }
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub struct InInstruction {
|
||||||
|
pub id: ([u8; 32], u64),
|
||||||
|
pub from: [u8; 20],
|
||||||
|
pub coin: Coin,
|
||||||
|
pub amount: U256,
|
||||||
|
pub data: Vec<u8>,
|
||||||
|
pub key_at_end_of_block: ProjectivePoint,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InInstruction {
|
||||||
|
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||||
|
let id = {
|
||||||
|
let mut id_hash = [0; 32];
|
||||||
|
reader.read_exact(&mut id_hash)?;
|
||||||
|
let mut id_pos = [0; 8];
|
||||||
|
reader.read_exact(&mut id_pos)?;
|
||||||
|
let id_pos = u64::from_le_bytes(id_pos);
|
||||||
|
(id_hash, id_pos)
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut from = [0; 20];
|
||||||
|
reader.read_exact(&mut from)?;
|
||||||
|
|
||||||
|
let coin = Coin::read(reader)?;
|
||||||
|
let mut amount = [0; 32];
|
||||||
|
reader.read_exact(&mut amount)?;
|
||||||
|
let amount = U256::from_le_slice(&amount);
|
||||||
|
|
||||||
|
let mut data_len = [0; 4];
|
||||||
|
reader.read_exact(&mut data_len)?;
|
||||||
|
let data_len = usize::try_from(u32::from_le_bytes(data_len))
|
||||||
|
.map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
|
||||||
|
let mut data = vec![0; data_len];
|
||||||
|
reader.read_exact(&mut data)?;
|
||||||
|
|
||||||
|
let mut key_at_end_of_block = <ProjectivePoint as GroupEncoding>::Repr::default();
|
||||||
|
reader.read_exact(&mut key_at_end_of_block)?;
|
||||||
|
let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block))
|
||||||
|
.ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?;
|
||||||
|
|
||||||
|
Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block })
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
|
||||||
|
writer.write_all(&self.id.0)?;
|
||||||
|
writer.write_all(&self.id.1.to_le_bytes())?;
|
||||||
|
|
||||||
|
writer.write_all(&self.from)?;
|
||||||
|
|
||||||
|
self.coin.write(writer)?;
|
||||||
|
writer.write_all(&self.amount.as_le_bytes())?;
|
||||||
|
|
||||||
|
writer.write_all(
|
||||||
|
&u32::try_from(self.data.len())
|
||||||
|
.map_err(|_| {
|
||||||
|
io::Error::other("InInstruction being written had data exceeding 2**32 in length")
|
||||||
|
})?
|
||||||
|
.to_le_bytes(),
|
||||||
|
)?;
|
||||||
|
writer.write_all(&self.data)?;
|
||||||
|
|
||||||
|
writer.write_all(&self.key_at_end_of_block.to_bytes())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An Executed event's contents: a batch of OutInstructions executed on-chain.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Executed {
  // The hash of the transaction which executed this batch
  pub tx_id: [u8; 32],
  // The nonce this execution consumed
  pub nonce: u64,
  // The signature authorizing this execution
  pub signature: [u8; 64],
}
|
||||||
|
|
||||||
|
/// The contract Serai uses to manage its state.
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
|
||||||
|
impl Router {
|
||||||
|
pub(crate) fn code() -> Vec<u8> {
|
||||||
|
let bytecode = include_str!("../artifacts/Router.bin");
|
||||||
|
Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
|
||||||
|
let mut bytecode = Self::code();
|
||||||
|
// Append the constructor arguments
|
||||||
|
bytecode.extend((abi::constructorCall { _seraiKey: key.eth_repr().into() }).abi_encode());
|
||||||
|
bytecode
|
||||||
|
}
|
||||||
|
|
||||||
|
// This isn't pub in order to force users to use `Deployer::find_router`.
|
||||||
|
pub(crate) fn new(provider: Arc<RootProvider<SimpleRequest>>, address: Address) -> Self {
|
||||||
|
Self(provider, address)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn address(&self) -> [u8; 20] {
|
||||||
|
**self.1
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the key for Serai at the specified block.
|
||||||
|
#[cfg(test)]
|
||||||
|
pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, Error> {
|
||||||
|
let call = TransactionRequest::default()
|
||||||
|
.to(self.1)
|
||||||
|
.input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
|
||||||
|
let bytes = self
|
||||||
|
.0
|
||||||
|
.call(&call)
|
||||||
|
.block(BlockId::Hash(B256::from(at).into()))
|
||||||
|
.await
|
||||||
|
.map_err(|_| Error::ConnectionError)?;
|
||||||
|
let res =
|
||||||
|
abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
|
||||||
|
PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the message to be signed in order to update the key for Serai.
|
||||||
|
pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
|
||||||
|
let mut buffer = b"updateSeraiKey".to_vec();
|
||||||
|
buffer.extend(&chain_id.to_be_bytes::<32>());
|
||||||
|
buffer.extend(&nonce.to_be_bytes::<32>());
|
||||||
|
buffer.extend(&key.eth_repr());
|
||||||
|
buffer
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Update the key representing Serai.
|
||||||
|
pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
|
||||||
|
// TODO: Set a more accurate gas
|
||||||
|
TxLegacy {
|
||||||
|
to: TxKind::Call(self.1),
|
||||||
|
input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
|
||||||
|
.abi_encode()
|
||||||
|
.into(),
|
||||||
|
gas_limit: 100_000,
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the current nonce for the published batches.
|
||||||
|
#[cfg(test)]
|
||||||
|
pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, Error> {
|
||||||
|
let call = TransactionRequest::default()
|
||||||
|
.to(self.1)
|
||||||
|
.input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
|
||||||
|
let bytes = self
|
||||||
|
.0
|
||||||
|
.call(&call)
|
||||||
|
.block(BlockId::Hash(B256::from(at).into()))
|
||||||
|
.await
|
||||||
|
.map_err(|_| Error::ConnectionError)?;
|
||||||
|
let res =
|
||||||
|
abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
|
||||||
|
Ok(res._0)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get the message to be signed in order to update the key for Serai.
|
||||||
|
pub(crate) fn execute_message(
|
||||||
|
chain_id: U256,
|
||||||
|
nonce: U256,
|
||||||
|
outs: Vec<abi::OutInstruction>,
|
||||||
|
) -> Vec<u8> {
|
||||||
|
("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Execute a batch of `OutInstruction`s.
|
||||||
|
pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy {
|
||||||
|
TxLegacy {
|
||||||
|
to: TxKind::Call(self.1),
|
||||||
|
input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
|
||||||
|
// TODO
|
||||||
|
gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn key_at_end_of_block(&self, block: u64) -> Result<Option<ProjectivePoint>, Error> {
|
||||||
|
let filter = Filter::new().from_block(0).to_block(block).address(self.1);
|
||||||
|
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
|
||||||
|
let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
if all_keys.is_empty() {
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?;
|
||||||
|
let last_key_x_coordinate = last_key_x_coordinate_log
|
||||||
|
.log_decode::<SeraiKeyUpdated>()
|
||||||
|
.map_err(|_| Error::ConnectionError)?
|
||||||
|
.inner
|
||||||
|
.data
|
||||||
|
.key;
|
||||||
|
|
||||||
|
let mut compressed_point = <ProjectivePoint as GroupEncoding>::Repr::default();
|
||||||
|
compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY);
|
||||||
|
compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice());
|
||||||
|
|
||||||
|
let key =
|
||||||
|
Option::from(ProjectivePoint::from_bytes(&compressed_point)).ok_or(Error::ConnectionError)?;
|
||||||
|
Ok(Some(key))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn in_instructions(
|
||||||
|
&self,
|
||||||
|
block: u64,
|
||||||
|
allowed_tokens: &HashSet<[u8; 20]>,
|
||||||
|
) -> Result<Vec<InInstruction>, Error> {
|
||||||
|
let Some(key_at_end_of_block) = self.key_at_end_of_block(block).await? else {
|
||||||
|
return Ok(vec![]);
|
||||||
|
};
|
||||||
|
|
||||||
|
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
|
||||||
|
let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
|
||||||
|
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
let mut transfer_check = HashSet::new();
|
||||||
|
let mut in_instructions = vec![];
|
||||||
|
for log in logs {
|
||||||
|
// Double check the address which emitted this log
|
||||||
|
if log.address() != self.1 {
|
||||||
|
Err(Error::ConnectionError)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let id = (
|
||||||
|
log.block_hash.ok_or(Error::ConnectionError)?.into(),
|
||||||
|
log.log_index.ok_or(Error::ConnectionError)?,
|
||||||
|
);
|
||||||
|
|
||||||
|
let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?;
|
||||||
|
let tx = self
|
||||||
|
.0
|
||||||
|
.get_transaction_by_hash(tx_hash)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
.ok_or(Error::ConnectionError)?;
|
||||||
|
|
||||||
|
let log =
|
||||||
|
log.log_decode::<InInstructionEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
|
||||||
|
|
||||||
|
let coin = if log.coin.0 == [0; 20] {
|
||||||
|
Coin::Ether
|
||||||
|
} else {
|
||||||
|
let token = *log.coin.0;
|
||||||
|
|
||||||
|
if !allowed_tokens.contains(&token) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If this also counts as a top-level transfer via the token, drop it
|
||||||
|
//
|
||||||
|
// Necessary in order to handle a potential edge case with some theoretical token
|
||||||
|
// implementations
|
||||||
|
//
|
||||||
|
// This will either let it be handled by the top-level transfer hook or will drop it
|
||||||
|
// entirely on the side of caution
|
||||||
|
if tx.to == Some(token.into()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get all logs for this TX
|
||||||
|
let receipt = self
|
||||||
|
.0
|
||||||
|
.get_transaction_receipt(tx_hash)
|
||||||
|
.await
|
||||||
|
.map_err(|_| Error::ConnectionError)?
|
||||||
|
.ok_or(Error::ConnectionError)?;
|
||||||
|
let tx_logs = receipt.inner.logs();
|
||||||
|
|
||||||
|
// Find a matching transfer log
|
||||||
|
let mut found_transfer = false;
|
||||||
|
for tx_log in tx_logs {
|
||||||
|
let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?;
|
||||||
|
// Ensure we didn't already use this transfer to check a distinct InInstruction event
|
||||||
|
if transfer_check.contains(&log_index) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if this log is from the token we expected to be transferred
|
||||||
|
if tx_log.address().0 != token {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// Check if this is a transfer log
|
||||||
|
// https://github.com/alloy-rs/core/issues/589
|
||||||
|
if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
|
||||||
|
// Check if this is a transfer to us for the expected amount
|
||||||
|
if (transfer.to == self.1) && (transfer.value == log.amount) {
|
||||||
|
transfer_check.insert(log_index);
|
||||||
|
found_transfer = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !found_transfer {
|
||||||
|
// This shouldn't be a ConnectionError
|
||||||
|
// This is an exploit, a non-conforming ERC20, or an invalid connection
|
||||||
|
// This should halt the process which is sufficient, yet this is sub-optimal
|
||||||
|
// TODO
|
||||||
|
Err(Error::ConnectionError)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Coin::Erc20(token)
|
||||||
|
};
|
||||||
|
|
||||||
|
in_instructions.push(InInstruction {
|
||||||
|
id,
|
||||||
|
from: *log.from.0,
|
||||||
|
coin,
|
||||||
|
amount: log.amount,
|
||||||
|
data: log.instruction.as_ref().to_vec(),
|
||||||
|
key_at_end_of_block,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(in_instructions)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn executed_commands(&self, block: u64) -> Result<Vec<Executed>, Error> {
|
||||||
|
let mut res = vec![];
|
||||||
|
|
||||||
|
{
|
||||||
|
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
|
||||||
|
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
|
||||||
|
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
for log in logs {
|
||||||
|
// Double check the address which emitted this log
|
||||||
|
if log.address() != self.1 {
|
||||||
|
Err(Error::ConnectionError)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
|
||||||
|
|
||||||
|
let log =
|
||||||
|
log.log_decode::<SeraiKeyUpdated>().map_err(|_| Error::ConnectionError)?.inner.data;
|
||||||
|
|
||||||
|
let mut signature = [0; 64];
|
||||||
|
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
|
||||||
|
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
|
||||||
|
res.push(Executed {
|
||||||
|
tx_id,
|
||||||
|
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
|
||||||
|
signature,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
|
||||||
|
let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
|
||||||
|
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
for log in logs {
|
||||||
|
// Double check the address which emitted this log
|
||||||
|
if log.address() != self.1 {
|
||||||
|
Err(Error::ConnectionError)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
|
||||||
|
|
||||||
|
let log = log.log_decode::<ExecutedEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
|
||||||
|
|
||||||
|
let mut signature = [0; 64];
|
||||||
|
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
|
||||||
|
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
|
||||||
|
res.push(Executed {
|
||||||
|
tx_id,
|
||||||
|
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
|
||||||
|
signature,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "tests")]
|
||||||
|
pub fn key_updated_filter(&self) -> Filter {
|
||||||
|
Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
|
||||||
|
}
|
||||||
|
#[cfg(feature = "tests")]
|
||||||
|
pub fn executed_filter(&self) -> Filter {
|
||||||
|
Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
*/
|
|
||||||
|
|||||||
@@ -1,34 +0,0 @@
|
|||||||
use eyre::{eyre, Result};
|
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
|
||||||
|
|
||||||
use ethers_providers::{Provider, Http};
|
|
||||||
|
|
||||||
use crate::{
|
|
||||||
Error,
|
|
||||||
crypto::{keccak256, PublicKey, Signature},
|
|
||||||
};
|
|
||||||
pub use crate::abi::schnorr::*;
|
|
||||||
|
|
||||||
pub async fn call_verify(
|
|
||||||
contract: &Schnorr<Provider<Http>>,
|
|
||||||
public_key: &PublicKey,
|
|
||||||
message: &[u8],
|
|
||||||
signature: &Signature,
|
|
||||||
) -> Result<()> {
|
|
||||||
if contract
|
|
||||||
.verify(
|
|
||||||
public_key.parity,
|
|
||||||
public_key.px.to_repr().into(),
|
|
||||||
keccak256(message),
|
|
||||||
signature.c.to_repr().into(),
|
|
||||||
signature.s.to_repr().into(),
|
|
||||||
)
|
|
||||||
.call()
|
|
||||||
.await?
|
|
||||||
{
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
Err(eyre!(Error::InvalidSignature))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
13
coins/ethereum/src/tests/abi/mod.rs
Normal file
13
coins/ethereum/src/tests/abi/mod.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
use alloy_sol_types::sol;
|
||||||
|
|
||||||
|
#[rustfmt::skip]
|
||||||
|
#[allow(warnings)]
|
||||||
|
#[allow(needless_pass_by_value)]
|
||||||
|
#[allow(clippy::all)]
|
||||||
|
#[allow(clippy::ignored_unit_patterns)]
|
||||||
|
#[allow(clippy::redundant_closure_for_method_calls)]
|
||||||
|
mod schnorr_container {
|
||||||
|
use super::*;
|
||||||
|
sol!("src/tests/contracts/Schnorr.sol");
|
||||||
|
}
|
||||||
|
pub(crate) use schnorr_container::TestSchnorr as schnorr;
|
||||||
51
coins/ethereum/src/tests/contracts/ERC20.sol
Normal file
51
coins/ethereum/src/tests/contracts/ERC20.sol
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
contract TestERC20 {
|
||||||
|
event Transfer(address indexed from, address indexed to, uint256 value);
|
||||||
|
event Approval(address indexed owner, address indexed spender, uint256 value);
|
||||||
|
|
||||||
|
function name() public pure returns (string memory) {
|
||||||
|
return "Test ERC20";
|
||||||
|
}
|
||||||
|
function symbol() public pure returns (string memory) {
|
||||||
|
return "TEST";
|
||||||
|
}
|
||||||
|
function decimals() public pure returns (uint8) {
|
||||||
|
return 18;
|
||||||
|
}
|
||||||
|
|
||||||
|
function totalSupply() public pure returns (uint256) {
|
||||||
|
return 1_000_000 * 10e18;
|
||||||
|
}
|
||||||
|
|
||||||
|
mapping(address => uint256) balances;
|
||||||
|
mapping(address => mapping(address => uint256)) allowances;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
balances[msg.sender] = totalSupply();
|
||||||
|
}
|
||||||
|
|
||||||
|
function balanceOf(address owner) public view returns (uint256) {
|
||||||
|
return balances[owner];
|
||||||
|
}
|
||||||
|
function transfer(address to, uint256 value) public returns (bool) {
|
||||||
|
balances[msg.sender] -= value;
|
||||||
|
balances[to] += value;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function transferFrom(address from, address to, uint256 value) public returns (bool) {
|
||||||
|
allowances[from][msg.sender] -= value;
|
||||||
|
balances[from] -= value;
|
||||||
|
balances[to] += value;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function approve(address spender, uint256 value) public returns (bool) {
|
||||||
|
allowances[msg.sender][spender] = value;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
function allowance(address owner, address spender) public view returns (uint256) {
|
||||||
|
return allowances[owner][spender];
|
||||||
|
}
|
||||||
|
}
|
||||||
15
coins/ethereum/src/tests/contracts/Schnorr.sol
Normal file
15
coins/ethereum/src/tests/contracts/Schnorr.sol
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
// SPDX-License-Identifier: AGPLv3
|
||||||
|
pragma solidity ^0.8.0;
|
||||||
|
|
||||||
|
import "../../../contracts/Schnorr.sol";
|
||||||
|
|
||||||
|
contract TestSchnorr {
|
||||||
|
function verify(
|
||||||
|
bytes32 px,
|
||||||
|
bytes calldata message,
|
||||||
|
bytes32 c,
|
||||||
|
bytes32 s
|
||||||
|
) external pure returns (bool) {
|
||||||
|
return Schnorr.verify(px, message, c, s);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,49 +1,33 @@
|
|||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use sha2::Sha256;
|
use group::ff::{Field, PrimeField};
|
||||||
use sha3::{Digest, Keccak256};
|
|
||||||
|
|
||||||
use group::Group;
|
|
||||||
use k256::{
|
use k256::{
|
||||||
ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey},
|
ecdsa::{
|
||||||
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint},
|
self, hazmat::SignPrimitive, signature::hazmat::PrehashVerifier, SigningKey, VerifyingKey,
|
||||||
U256, Scalar, AffinePoint, ProjectivePoint,
|
},
|
||||||
|
Scalar, ProjectivePoint,
|
||||||
};
|
};
|
||||||
|
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::Secp256k1,
|
curve::{Ciphersuite, Secp256k1},
|
||||||
algorithm::{Hram, IetfSchnorr},
|
algorithm::{Hram, IetfSchnorr},
|
||||||
tests::{algorithm_machines, sign},
|
tests::{algorithm_machines, sign},
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{crypto::*, tests::key_gen};
|
use crate::{crypto::*, tests::key_gen};
|
||||||
|
|
||||||
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
// The ecrecover opcode, yet with parity replacing v
|
||||||
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
|
pub(crate) fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
||||||
}
|
let sig = ecdsa::Signature::from_scalars(r, s).ok()?;
|
||||||
|
let message: [u8; 32] = message.to_repr().into();
|
||||||
pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
|
alloy_core::primitives::Signature::from_signature_and_parity(
|
||||||
if r.is_zero().into() || s.is_zero().into() || !((v == 27) || (v == 28)) {
|
sig,
|
||||||
return None;
|
alloy_core::primitives::Parity::Parity(odd_y),
|
||||||
}
|
)
|
||||||
|
.ok()?
|
||||||
#[allow(non_snake_case)]
|
.recover_address_from_prehash(&alloy_core::primitives::B256::from(message))
|
||||||
let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into());
|
.ok()
|
||||||
#[allow(non_snake_case)]
|
.map(Into::into)
|
||||||
if let Some(R) = Option::<AffinePoint>::from(R) {
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let R = ProjectivePoint::from(R);
|
|
||||||
|
|
||||||
let r = r.invert().unwrap();
|
|
||||||
let u1 = ProjectivePoint::GENERATOR * (-message * r);
|
|
||||||
let u2 = R * (s * r);
|
|
||||||
let key: ProjectivePoint = u1 + u2;
|
|
||||||
if !bool::from(key.is_identity()) {
|
|
||||||
return Some(address(&key));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -55,20 +39,23 @@ fn test_ecrecover() {
|
|||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
let (sig, recovery_id) = private
|
let (sig, recovery_id) = private
|
||||||
.as_nonzero_scalar()
|
.as_nonzero_scalar()
|
||||||
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
|
.try_sign_prehashed(
|
||||||
|
<Secp256k1 as Ciphersuite>::F::random(&mut OsRng),
|
||||||
|
&keccak256(MESSAGE).into(),
|
||||||
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Sanity check the signature verifies
|
// Sanity check the signature verifies
|
||||||
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
|
||||||
{
|
{
|
||||||
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
|
assert_eq!(public.verify_prehash(&keccak256(MESSAGE), &sig).unwrap(), ());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Perform the ecrecover
|
// Perform the ecrecover
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
ecrecover(
|
ecrecover(
|
||||||
hash_to_scalar(MESSAGE),
|
hash_to_scalar(MESSAGE),
|
||||||
u8::from(recovery_id.unwrap().is_y_odd()) + 27,
|
u8::from(recovery_id.unwrap().is_y_odd()) == 1,
|
||||||
*sig.r(),
|
*sig.r(),
|
||||||
*sig.s()
|
*sig.s()
|
||||||
)
|
)
|
||||||
@@ -93,18 +80,13 @@ fn test_signing() {
|
|||||||
pub fn preprocess_signature_for_ecrecover(
|
pub fn preprocess_signature_for_ecrecover(
|
||||||
R: ProjectivePoint,
|
R: ProjectivePoint,
|
||||||
public_key: &PublicKey,
|
public_key: &PublicKey,
|
||||||
chain_id: U256,
|
|
||||||
m: &[u8],
|
m: &[u8],
|
||||||
s: Scalar,
|
s: Scalar,
|
||||||
) -> (u8, Scalar, Scalar) {
|
) -> (Scalar, Scalar) {
|
||||||
let c = EthereumHram::hram(
|
let c = EthereumHram::hram(&R, &public_key.A, m);
|
||||||
&R,
|
|
||||||
&public_key.A,
|
|
||||||
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
|
|
||||||
);
|
|
||||||
let sa = -(s * public_key.px);
|
let sa = -(s * public_key.px);
|
||||||
let ca = -(c * public_key.px);
|
let ca = -(c * public_key.px);
|
||||||
(public_key.parity, sa, ca)
|
(sa, ca)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -112,21 +94,12 @@ fn test_ecrecover_hack() {
|
|||||||
let (keys, public_key) = key_gen();
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
let chain_id = U256::ONE;
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
let sig = sign(
|
let sig =
|
||||||
&mut OsRng,
|
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
|
||||||
&algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
|
|
||||||
let (parity, sa, ca) =
|
let (sa, ca) = preprocess_signature_for_ecrecover(sig.R, &public_key, MESSAGE, sig.s);
|
||||||
preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s);
|
let q = ecrecover(sa, false, public_key.px, ca).unwrap();
|
||||||
let q = ecrecover(sa, parity, public_key.px, ca).unwrap();
|
|
||||||
assert_eq!(q, address(&sig.R));
|
assert_eq!(q, address(&sig.R));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,22 +1,30 @@
|
|||||||
use std::{sync::Arc, time::Duration, fs::File, collections::HashMap};
|
use std::{sync::Arc, collections::HashMap};
|
||||||
|
|
||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
|
||||||
use k256::{Scalar, ProjectivePoint};
|
use k256::{Scalar, ProjectivePoint};
|
||||||
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
|
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
|
||||||
|
|
||||||
use ethers_core::{
|
use alloy_core::{
|
||||||
types::{H160, Signature as EthersSignature},
|
primitives::{Address, U256, Bytes, TxKind},
|
||||||
abi::Abi,
|
hex::FromHex,
|
||||||
};
|
};
|
||||||
use ethers_contract::ContractFactory;
|
use alloy_consensus::{SignableTransaction, TxLegacy};
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
|
||||||
|
|
||||||
use crate::crypto::PublicKey;
|
use alloy_rpc_types_eth::TransactionReceipt;
|
||||||
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
use crate::crypto::{address, deterministically_sign, PublicKey};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
mod crypto;
|
mod crypto;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod abi;
|
||||||
|
#[cfg(test)]
|
||||||
mod schnorr;
|
mod schnorr;
|
||||||
|
#[cfg(test)]
|
||||||
mod router;
|
mod router;
|
||||||
|
|
||||||
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
|
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
|
||||||
@@ -36,57 +44,88 @@ pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey)
|
|||||||
(keys, public_key)
|
(keys, public_key)
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
|
// TODO: Use a proper error here
|
||||||
// to fund the deployer, not create/pass a wallet
|
pub async fn send(
|
||||||
// TODO: Deterministic deployments across chains
|
provider: &RootProvider<SimpleRequest>,
|
||||||
|
wallet: &k256::ecdsa::SigningKey,
|
||||||
|
mut tx: TxLegacy,
|
||||||
|
) -> Option<TransactionReceipt> {
|
||||||
|
let verifying_key = *wallet.verifying_key().as_affine();
|
||||||
|
let address = Address::from(address(&verifying_key.into()));
|
||||||
|
|
||||||
|
// https://github.com/alloy-rs/alloy/issues/539
|
||||||
|
// let chain_id = provider.get_chain_id().await.unwrap();
|
||||||
|
// tx.chain_id = Some(chain_id);
|
||||||
|
tx.chain_id = None;
|
||||||
|
tx.nonce = provider.get_transaction_count(address).await.unwrap();
|
||||||
|
// 100 gwei
|
||||||
|
tx.gas_price = 100_000_000_000u128;
|
||||||
|
|
||||||
|
let sig = wallet.sign_prehash_recoverable(tx.signature_hash().as_ref()).unwrap();
|
||||||
|
assert_eq!(address, tx.clone().into_signed(sig.into()).recover_signer().unwrap());
|
||||||
|
assert!(
|
||||||
|
provider.get_balance(address).await.unwrap() >
|
||||||
|
((U256::from(tx.gas_price) * U256::from(tx.gas_limit)) + tx.value)
|
||||||
|
);
|
||||||
|
|
||||||
|
let mut bytes = vec![];
|
||||||
|
tx.encode_with_signature_fields(&sig.into(), &mut bytes);
|
||||||
|
let pending_tx = provider.send_raw_transaction(&bytes).await.ok()?;
|
||||||
|
pending_tx.get_receipt().await.ok()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn fund_account(
|
||||||
|
provider: &RootProvider<SimpleRequest>,
|
||||||
|
wallet: &k256::ecdsa::SigningKey,
|
||||||
|
to_fund: Address,
|
||||||
|
value: U256,
|
||||||
|
) -> Option<()> {
|
||||||
|
let funding_tx =
|
||||||
|
TxLegacy { to: TxKind::Call(to_fund), gas_limit: 21_000, value, ..Default::default() };
|
||||||
|
assert!(send(provider, wallet, funding_tx).await.unwrap().status());
|
||||||
|
|
||||||
|
Some(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Use a proper error here
|
||||||
pub async fn deploy_contract(
|
pub async fn deploy_contract(
|
||||||
chain_id: u32,
|
client: Arc<RootProvider<SimpleRequest>>,
|
||||||
client: Arc<Provider<Http>>,
|
|
||||||
wallet: &k256::ecdsa::SigningKey,
|
wallet: &k256::ecdsa::SigningKey,
|
||||||
name: &str,
|
name: &str,
|
||||||
) -> eyre::Result<H160> {
|
) -> Option<Address> {
|
||||||
let abi: Abi =
|
|
||||||
serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap();
|
|
||||||
|
|
||||||
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
|
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
|
||||||
let hex_bin =
|
let hex_bin =
|
||||||
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
|
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
|
||||||
let bin = hex::decode(hex_bin).unwrap();
|
let bin = Bytes::from_hex(hex_bin).unwrap();
|
||||||
let factory = ContractFactory::new(abi, bin.into(), client.clone());
|
|
||||||
|
|
||||||
let mut deployment_tx = factory.deploy(())?.tx;
|
let deployment_tx = TxLegacy {
|
||||||
deployment_tx.set_chain_id(chain_id);
|
chain_id: None,
|
||||||
deployment_tx.set_gas(1_000_000);
|
nonce: 0,
|
||||||
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
|
// 100 gwei
|
||||||
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
|
gas_price: 100_000_000_000u128,
|
||||||
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
|
gas_limit: 1_000_000,
|
||||||
|
to: TxKind::Create,
|
||||||
|
value: U256::ZERO,
|
||||||
|
input: bin,
|
||||||
|
};
|
||||||
|
|
||||||
let sig_hash = deployment_tx.sighash();
|
let deployment_tx = deterministically_sign(&deployment_tx);
|
||||||
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
|
|
||||||
|
|
||||||
// EIP-155 v
|
// Fund the deployer address
|
||||||
let mut v = u64::from(rid.to_byte());
|
fund_account(
|
||||||
assert!((v == 0) || (v == 1));
|
&client,
|
||||||
v += u64::from((chain_id * 2) + 35);
|
wallet,
|
||||||
|
deployment_tx.recover_signer().unwrap(),
|
||||||
|
U256::from(deployment_tx.tx().gas_limit) * U256::from(deployment_tx.tx().gas_price),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let r = sig.r().to_repr();
|
let (deployment_tx, sig, _) = deployment_tx.into_parts();
|
||||||
let r_ref: &[u8] = r.as_ref();
|
let mut bytes = vec![];
|
||||||
let s = sig.s().to_repr();
|
deployment_tx.encode_with_signature_fields(&sig, &mut bytes);
|
||||||
let s_ref: &[u8] = s.as_ref();
|
let pending_tx = client.send_raw_transaction(&bytes).await.ok()?;
|
||||||
let deployment_tx =
|
let receipt = pending_tx.get_receipt().await.ok()?;
|
||||||
deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v });
|
assert!(receipt.status());
|
||||||
|
|
||||||
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
|
Some(receipt.contract_address.unwrap())
|
||||||
|
|
||||||
let mut receipt;
|
|
||||||
while {
|
|
||||||
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
|
|
||||||
receipt.is_none()
|
|
||||||
} {
|
|
||||||
tokio::time::sleep(Duration::from_secs(6)).await;
|
|
||||||
}
|
|
||||||
let receipt = receipt.unwrap();
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
Ok(receipt.contract_address.unwrap())
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,8 @@ use std::{convert::TryFrom, sync::Arc, collections::HashMap};
|
|||||||
|
|
||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use group::ff::PrimeField;
|
use group::Group;
|
||||||
|
use k256::ProjectivePoint;
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::Secp256k1,
|
curve::Secp256k1,
|
||||||
Participant, ThresholdKeys,
|
Participant, ThresholdKeys,
|
||||||
@@ -10,100 +11,174 @@ use frost::{
|
|||||||
tests::{algorithm_machines, sign},
|
tests::{algorithm_machines, sign},
|
||||||
};
|
};
|
||||||
|
|
||||||
use ethers_core::{
|
use alloy_core::primitives::{Address, U256};
|
||||||
types::{H160, U256, Bytes},
|
|
||||||
abi::AbiEncode,
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
utils::{Anvil, AnvilInstance},
|
use alloy_rpc_types_eth::BlockTransactionsKind;
|
||||||
};
|
use alloy_rpc_client::ClientBuilder;
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
use alloy_node_bindings::{Anvil, AnvilInstance};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
crypto::{keccak256, PublicKey, EthereumHram, Signature},
|
crypto::*,
|
||||||
router::{self, *},
|
deployer::Deployer,
|
||||||
tests::{key_gen, deploy_contract},
|
router::{Router, abi as router},
|
||||||
|
tests::{key_gen, send, fund_account},
|
||||||
};
|
};
|
||||||
|
|
||||||
async fn setup_test() -> (
|
async fn setup_test() -> (
|
||||||
u32,
|
|
||||||
AnvilInstance,
|
AnvilInstance,
|
||||||
Router<Provider<Http>>,
|
Arc<RootProvider<SimpleRequest>>,
|
||||||
|
u64,
|
||||||
|
Router,
|
||||||
HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
||||||
PublicKey,
|
PublicKey,
|
||||||
) {
|
) {
|
||||||
let anvil = Anvil::new().spawn();
|
let anvil = Anvil::new().spawn();
|
||||||
|
|
||||||
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
let provider = RootProvider::new(
|
||||||
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
|
||||||
|
);
|
||||||
|
let chain_id = provider.get_chain_id().await.unwrap();
|
||||||
let wallet = anvil.keys()[0].clone().into();
|
let wallet = anvil.keys()[0].clone().into();
|
||||||
let client = Arc::new(provider);
|
let client = Arc::new(provider);
|
||||||
|
|
||||||
let contract_address =
|
// Make sure the Deployer constructor returns None, as it doesn't exist yet
|
||||||
deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap();
|
assert!(Deployer::new(client.clone()).await.unwrap().is_none());
|
||||||
let contract = Router::new(contract_address, client.clone());
|
|
||||||
|
// Deploy the Deployer
|
||||||
|
let tx = Deployer::deployment_tx();
|
||||||
|
fund_account(
|
||||||
|
&client,
|
||||||
|
&wallet,
|
||||||
|
tx.recover_signer().unwrap(),
|
||||||
|
U256::from(tx.tx().gas_limit) * U256::from(tx.tx().gas_price),
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
let (tx, sig, _) = tx.into_parts();
|
||||||
|
let mut bytes = vec![];
|
||||||
|
tx.encode_with_signature_fields(&sig, &mut bytes);
|
||||||
|
|
||||||
|
let pending_tx = client.send_raw_transaction(&bytes).await.unwrap();
|
||||||
|
let receipt = pending_tx.get_receipt().await.unwrap();
|
||||||
|
assert!(receipt.status());
|
||||||
|
let deployer =
|
||||||
|
Deployer::new(client.clone()).await.expect("network error").expect("deployer wasn't deployed");
|
||||||
|
|
||||||
let (keys, public_key) = key_gen();
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
// Set the key to the threshold keys
|
// Verify the Router constructor returns None, as it doesn't exist yet
|
||||||
let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000);
|
assert!(deployer.find_router(client.clone(), &public_key).await.unwrap().is_none());
|
||||||
let pending_tx = tx.send().await.unwrap();
|
|
||||||
let receipt = pending_tx.await.unwrap().unwrap();
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
(chain_id, anvil, contract, keys, public_key)
|
// Deploy the router
|
||||||
|
let receipt = send(&client, &anvil.keys()[0].clone().into(), deployer.deploy_router(&public_key))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(receipt.status());
|
||||||
|
let contract = deployer.find_router(client.clone(), &public_key).await.unwrap().unwrap();
|
||||||
|
|
||||||
|
(anvil, client, chain_id, contract, keys, public_key)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
|
||||||
|
client
|
||||||
|
.get_block(client.get_block_number().await.unwrap().into(), BlockTransactionsKind::Hashes)
|
||||||
|
.await
|
||||||
|
.unwrap()
|
||||||
|
.unwrap()
|
||||||
|
.header
|
||||||
|
.hash
|
||||||
|
.unwrap()
|
||||||
|
.0
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_deploy_contract() {
|
async fn test_deploy_contract() {
|
||||||
setup_test().await;
|
let (_anvil, client, _, router, _, public_key) = setup_test().await;
|
||||||
|
|
||||||
|
let block_hash = latest_block_hash(&client).await;
|
||||||
|
assert_eq!(router.serai_key(block_hash).await.unwrap(), public_key);
|
||||||
|
assert_eq!(router.nonce(block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
|
||||||
|
// TODO: Check it emitted SeraiKeyUpdated(public_key) at its genesis
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hash_and_sign(
|
pub fn hash_and_sign(
|
||||||
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
|
||||||
public_key: &PublicKey,
|
public_key: &PublicKey,
|
||||||
chain_id: U256,
|
|
||||||
message: &[u8],
|
message: &[u8],
|
||||||
) -> Signature {
|
) -> Signature {
|
||||||
let hashed_message = keccak256(message);
|
|
||||||
|
|
||||||
let mut chain_id_bytes = [0; 32];
|
|
||||||
chain_id.to_big_endian(&mut chain_id_bytes);
|
|
||||||
let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
let sig = sign(
|
let sig =
|
||||||
&mut OsRng,
|
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, keys), message);
|
||||||
&algo,
|
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
|
|
||||||
Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap()
|
Signature::new(public_key, message, sig).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn test_router_update_serai_key() {
|
||||||
|
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
|
||||||
|
|
||||||
|
let next_key = loop {
|
||||||
|
let point = ProjectivePoint::random(&mut OsRng);
|
||||||
|
let Some(next_key) = PublicKey::new(point) else { continue };
|
||||||
|
break next_key;
|
||||||
|
};
|
||||||
|
|
||||||
|
let message = Router::update_serai_key_message(
|
||||||
|
U256::try_from(chain_id).unwrap(),
|
||||||
|
U256::try_from(1u64).unwrap(),
|
||||||
|
&next_key,
|
||||||
|
);
|
||||||
|
let sig = hash_and_sign(&keys, &public_key, &message);
|
||||||
|
|
||||||
|
let first_block_hash = latest_block_hash(&client).await;
|
||||||
|
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
|
||||||
|
|
||||||
|
let receipt =
|
||||||
|
send(&client, &anvil.keys()[0].clone().into(), contract.update_serai_key(&next_key, &sig))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
assert!(receipt.status());
|
||||||
|
|
||||||
|
let second_block_hash = latest_block_hash(&client).await;
|
||||||
|
assert_eq!(contract.serai_key(second_block_hash).await.unwrap(), next_key);
|
||||||
|
// Check this does still offer the historical state
|
||||||
|
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
|
||||||
|
// TODO: Check logs
|
||||||
|
|
||||||
|
println!("gas used: {:?}", receipt.gas_used);
|
||||||
|
// println!("logs: {:?}", receipt.logs);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_router_execute() {
|
async fn test_router_execute() {
|
||||||
let (chain_id, _anvil, contract, keys, public_key) = setup_test().await;
|
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
|
||||||
|
|
||||||
let to = H160([0u8; 20]);
|
let to = Address::from([0; 20]);
|
||||||
let value = U256([0u64; 4]);
|
let value = U256::ZERO;
|
||||||
let data = Bytes::from([0]);
|
let tx = router::OutInstruction { to, value, calls: vec![] };
|
||||||
let tx = OutInstruction { to, value, data: data.clone() };
|
let txs = vec![tx];
|
||||||
|
|
||||||
let nonce_call = contract.nonce();
|
let first_block_hash = latest_block_hash(&client).await;
|
||||||
let nonce = nonce_call.call().await.unwrap();
|
let nonce = contract.nonce(first_block_hash).await.unwrap();
|
||||||
|
assert_eq!(nonce, U256::try_from(1u64).unwrap());
|
||||||
|
|
||||||
let encoded =
|
let message = Router::execute_message(U256::try_from(chain_id).unwrap(), nonce, txs.clone());
|
||||||
("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode();
|
let sig = hash_and_sign(&keys, &public_key, &message);
|
||||||
let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded);
|
|
||||||
|
|
||||||
let tx = contract
|
let receipt =
|
||||||
.execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() })
|
send(&client, &anvil.keys()[0].clone().into(), contract.execute(&txs, &sig)).await.unwrap();
|
||||||
.gas(300_000);
|
assert!(receipt.status());
|
||||||
let pending_tx = tx.send().await.unwrap();
|
|
||||||
let receipt = dbg!(pending_tx.await.unwrap().unwrap());
|
|
||||||
assert!(receipt.status == Some(1.into()));
|
|
||||||
|
|
||||||
println!("gas used: {:?}", receipt.cumulative_gas_used);
|
let second_block_hash = latest_block_hash(&client).await;
|
||||||
println!("logs: {:?}", receipt.logs);
|
assert_eq!(contract.nonce(second_block_hash).await.unwrap(), U256::try_from(2u64).unwrap());
|
||||||
|
// Check this does still offer the historical state
|
||||||
|
assert_eq!(contract.nonce(first_block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
|
||||||
|
// TODO: Check logs
|
||||||
|
|
||||||
|
println!("gas used: {:?}", receipt.gas_used);
|
||||||
|
// println!("logs: {:?}", receipt.logs);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,11 +1,9 @@
|
|||||||
use std::{convert::TryFrom, sync::Arc};
|
use std::sync::Arc;
|
||||||
|
|
||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar};
|
use group::ff::PrimeField;
|
||||||
|
use k256::Scalar;
|
||||||
use ethers_core::utils::{keccak256, Anvil, AnvilInstance};
|
|
||||||
use ethers_providers::{Middleware, Provider, Http};
|
|
||||||
|
|
||||||
use frost::{
|
use frost::{
|
||||||
curve::Secp256k1,
|
curve::Secp256k1,
|
||||||
@@ -13,24 +11,34 @@ use frost::{
|
|||||||
tests::{algorithm_machines, sign},
|
tests::{algorithm_machines, sign},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use alloy_core::primitives::Address;
|
||||||
|
|
||||||
|
use alloy_sol_types::SolCall;
|
||||||
|
|
||||||
|
use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
|
||||||
|
use alloy_simple_request_transport::SimpleRequest;
|
||||||
|
use alloy_rpc_client::ClientBuilder;
|
||||||
|
use alloy_provider::{Provider, RootProvider};
|
||||||
|
|
||||||
|
use alloy_node_bindings::{Anvil, AnvilInstance};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
|
Error,
|
||||||
crypto::*,
|
crypto::*,
|
||||||
schnorr::*,
|
tests::{key_gen, deploy_contract, abi::schnorr as abi},
|
||||||
tests::{key_gen, deploy_contract},
|
|
||||||
};
|
};
|
||||||
|
|
||||||
async fn setup_test() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
|
async fn setup_test() -> (AnvilInstance, Arc<RootProvider<SimpleRequest>>, Address) {
|
||||||
let anvil = Anvil::new().spawn();
|
let anvil = Anvil::new().spawn();
|
||||||
|
|
||||||
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
|
let provider = RootProvider::new(
|
||||||
let chain_id = provider.get_chainid().await.unwrap().as_u32();
|
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
|
||||||
|
);
|
||||||
let wallet = anvil.keys()[0].clone().into();
|
let wallet = anvil.keys()[0].clone().into();
|
||||||
let client = Arc::new(provider);
|
let client = Arc::new(provider);
|
||||||
|
|
||||||
let contract_address =
|
let address = deploy_contract(client.clone(), &wallet, "TestSchnorr").await.unwrap();
|
||||||
deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap();
|
(anvil, client, address)
|
||||||
let contract = Schnorr::new(contract_address, client.clone());
|
|
||||||
(chain_id, anvil, contract)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
@@ -38,30 +46,48 @@ async fn test_deploy_contract() {
|
|||||||
setup_test().await;
|
setup_test().await;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub async fn call_verify(
|
||||||
|
provider: &RootProvider<SimpleRequest>,
|
||||||
|
contract: Address,
|
||||||
|
public_key: &PublicKey,
|
||||||
|
message: &[u8],
|
||||||
|
signature: &Signature,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let px: [u8; 32] = public_key.px.to_repr().into();
|
||||||
|
let c_bytes: [u8; 32] = signature.c.to_repr().into();
|
||||||
|
let s_bytes: [u8; 32] = signature.s.to_repr().into();
|
||||||
|
let call = TransactionRequest::default().to(contract).input(TransactionInput::new(
|
||||||
|
abi::verifyCall::new((px.into(), message.to_vec().into(), c_bytes.into(), s_bytes.into()))
|
||||||
|
.abi_encode()
|
||||||
|
.into(),
|
||||||
|
));
|
||||||
|
let bytes = provider.call(&call).await.map_err(|_| Error::ConnectionError)?;
|
||||||
|
let res =
|
||||||
|
abi::verifyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
|
||||||
|
|
||||||
|
if res._0 {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(Error::InvalidSignature)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
async fn test_ecrecover_hack() {
|
async fn test_ecrecover_hack() {
|
||||||
let (chain_id, _anvil, contract) = setup_test().await;
|
let (_anvil, client, contract) = setup_test().await;
|
||||||
let chain_id = U256::from(chain_id);
|
|
||||||
|
|
||||||
let (keys, public_key) = key_gen();
|
let (keys, public_key) = key_gen();
|
||||||
|
|
||||||
const MESSAGE: &[u8] = b"Hello, World!";
|
const MESSAGE: &[u8] = b"Hello, World!";
|
||||||
let hashed_message = keccak256(MESSAGE);
|
|
||||||
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
|
|
||||||
|
|
||||||
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
|
||||||
let sig = sign(
|
let sig =
|
||||||
&mut OsRng,
|
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
|
||||||
&algo,
|
let sig = Signature::new(&public_key, MESSAGE, sig).unwrap();
|
||||||
keys.clone(),
|
|
||||||
algorithm_machines(&mut OsRng, &algo, &keys),
|
|
||||||
full_message,
|
|
||||||
);
|
|
||||||
let sig = Signature::new(&public_key, chain_id, MESSAGE, sig).unwrap();
|
|
||||||
|
|
||||||
call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap();
|
call_verify(&client, contract, &public_key, MESSAGE, &sig).await.unwrap();
|
||||||
// Test an invalid signature fails
|
// Test an invalid signature fails
|
||||||
let mut sig = sig;
|
let mut sig = sig;
|
||||||
sig.s += Scalar::ONE;
|
sig.s += Scalar::ONE;
|
||||||
assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err());
|
assert!(call_verify(&client, contract, &public_key, MESSAGE, &sig).await.is_err());
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ license = "MIT"
|
|||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.74"
|
rust-version = "1.79"
|
||||||
|
|
||||||
[package.metadata.docs.rs]
|
[package.metadata.docs.rs]
|
||||||
all-features = true
|
all-features = true
|
||||||
@@ -18,96 +18,35 @@ workspace = true
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
|
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
async-trait = { version = "0.1", default-features = false }
|
|
||||||
thiserror = { version = "1", default-features = false, optional = true }
|
|
||||||
|
|
||||||
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
subtle = { version = "^2.4", default-features = false }
|
|
||||||
|
|
||||||
rand_core = { version = "0.6", default-features = false }
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
# Used to send transactions
|
|
||||||
rand = { version = "0.8", default-features = false }
|
|
||||||
rand_chacha = { version = "0.3", default-features = false }
|
|
||||||
# Used to select decoys
|
|
||||||
rand_distr = { version = "0.4", default-features = false }
|
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false }
|
|
||||||
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
|
|
||||||
|
|
||||||
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
|
||||||
|
|
||||||
# Used for the hash to curve, along with the more complicated proofs
|
|
||||||
group = { version = "0.13", default-features = false }
|
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
|
||||||
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }
|
|
||||||
|
|
||||||
# Needed for multisig
|
|
||||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
|
||||||
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }
|
|
||||||
|
|
||||||
|
monero-io = { path = "io", version = "0.1", default-features = false }
|
||||||
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
||||||
|
monero-primitives = { path = "primitives", version = "0.1", default-features = false }
|
||||||
async-lock = { version = "3", default-features = false, optional = true }
|
monero-mlsag = { path = "ringct/mlsag", version = "0.1", default-features = false }
|
||||||
|
monero-clsag = { path = "ringct/clsag", version = "0.1", default-features = false }
|
||||||
|
monero-borromean = { path = "ringct/borromean", version = "0.1", default-features = false }
|
||||||
|
monero-bulletproofs = { path = "ringct/bulletproofs", version = "0.1", default-features = false }
|
||||||
|
|
||||||
hex-literal = "0.4"
|
hex-literal = "0.4"
|
||||||
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
|
||||||
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
|
|
||||||
serde_json = { version = "1", default-features = false, features = ["alloc"] }
|
|
||||||
|
|
||||||
base58-monero = { version = "2", default-features = false, features = ["check"] }
|
|
||||||
|
|
||||||
# Used for the provided HTTP RPC
|
|
||||||
digest_auth = { version = "0.3", default-features = false, optional = true }
|
|
||||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
|
|
||||||
tokio = { version = "1", default-features = false, optional = true }
|
|
||||||
|
|
||||||
[build-dependencies]
|
|
||||||
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
|
||||||
monero-generators = { path = "generators", version = "0.4", default-features = false }
|
|
||||||
|
|
||||||
[dev-dependencies]
|
|
||||||
tokio = { version = "1", features = ["sync", "macros"] }
|
|
||||||
|
|
||||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = [
|
std = [
|
||||||
"std-shims/std",
|
"std-shims/std",
|
||||||
|
|
||||||
"thiserror",
|
|
||||||
|
|
||||||
"zeroize/std",
|
"zeroize/std",
|
||||||
"subtle/std",
|
|
||||||
|
|
||||||
"rand_core/std",
|
|
||||||
"rand/std",
|
|
||||||
"rand_chacha/std",
|
|
||||||
"rand_distr/std",
|
|
||||||
|
|
||||||
"sha3/std",
|
|
||||||
"pbkdf2/std",
|
|
||||||
|
|
||||||
"multiexp/std",
|
|
||||||
|
|
||||||
"transcript/std",
|
|
||||||
"dleq/std",
|
|
||||||
|
|
||||||
|
"monero-io/std",
|
||||||
"monero-generators/std",
|
"monero-generators/std",
|
||||||
|
"monero-primitives/std",
|
||||||
"async-lock?/std",
|
"monero-mlsag/std",
|
||||||
|
"monero-clsag/std",
|
||||||
"hex/std",
|
"monero-borromean/std",
|
||||||
"serde/std",
|
"monero-bulletproofs/std",
|
||||||
"serde_json/std",
|
|
||||||
|
|
||||||
"base58-monero/std",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
cache-distribution = ["async-lock"]
|
compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"]
|
||||||
http-rpc = ["digest_auth", "simple-request", "tokio"]
|
multisig = ["monero-clsag/multisig", "std"]
|
||||||
multisig = ["transcript", "frost", "dleq", "std"]
|
default = ["std", "compile-time-generators"]
|
||||||
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
|
|
||||||
experimental = []
|
|
||||||
|
|
||||||
default = ["std", "http-rpc"]
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -1,49 +1,28 @@
|
|||||||
# monero-serai
|
# monero-serai
|
||||||
|
|
||||||
A modern Monero transaction library intended for usage in wallets. It prides
|
A modern Monero transaction library. It provides a modern, Rust-friendly view of
|
||||||
itself on accuracy, correctness, and removing common pit falls developers may
|
the Monero protocol.
|
||||||
face.
|
|
||||||
|
|
||||||
monero-serai also offers the following features:
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
- Featured Addresses
|
### Wallet Functionality
|
||||||
- A FROST-based multisig orders of magnitude more performant than Monero's
|
|
||||||
|
|
||||||
### Purpose and support
|
monero-serai originally included wallet functionality. That has been moved to
|
||||||
|
monero-wallet.
|
||||||
|
|
||||||
|
### Purpose and Support
|
||||||
|
|
||||||
monero-serai was written for Serai, a decentralized exchange aiming to support
|
monero-serai was written for Serai, a decentralized exchange aiming to support
|
||||||
Monero. Despite this, monero-serai is intended to be a widely usable library,
|
Monero. Despite this, monero-serai is intended to be a widely usable library,
|
||||||
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
|
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
|
||||||
yet will not deprive functionality from other users.
|
yet does not include any functionality specific to Serai.
|
||||||
|
|
||||||
Various legacy transaction formats are not currently implemented, yet we are
|
### Cargo Features
|
||||||
willing to add support for them. There aren't active development efforts around
|
|
||||||
them however.
|
|
||||||
|
|
||||||
### Caveats
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
This library DOES attempt to do the following:
|
- `compile-time-generators` (on by default): Derives the generators at
|
||||||
|
compile-time so they don't need to be derived at runtime. This is recommended
|
||||||
- Create on-chain transactions identical to how wallet2 would (unless told not
|
if program size doesn't need to be kept minimal.
|
||||||
to)
|
- `multisig`: Enables the `multisig` feature for all dependencies.
|
||||||
- Not be detectable as monero-serai when scanning outputs
|
|
||||||
- Not reveal spent outputs to the connected RPC node
|
|
||||||
|
|
||||||
This library DOES NOT attempt to do the following:
|
|
||||||
|
|
||||||
- Have identical RPC behavior when creating transactions
|
|
||||||
- Be a wallet
|
|
||||||
|
|
||||||
This means that monero-serai shouldn't be fingerprintable on-chain. It also
|
|
||||||
shouldn't be fingerprintable if a targeted attack occurs to detect if the
|
|
||||||
receiving wallet is monero-serai or wallet2. It also should be generally safe
|
|
||||||
for usage with remote nodes.
|
|
||||||
|
|
||||||
It won't hide from remote nodes it's monero-serai however, potentially
|
|
||||||
allowing a remote node to profile you. The implications of this are left to the
|
|
||||||
user to consider.
|
|
||||||
|
|
||||||
It also won't act as a wallet, just as a transaction library. wallet2 has
|
|
||||||
several *non-transaction-level* policies, such as always attempting to use two
|
|
||||||
inputs to create transactions. These are considered out of scope to
|
|
||||||
monero-serai.
|
|
||||||
|
|||||||
@@ -1,67 +0,0 @@
|
|||||||
use std::{
|
|
||||||
io::Write,
|
|
||||||
env,
|
|
||||||
path::Path,
|
|
||||||
fs::{File, remove_file},
|
|
||||||
};
|
|
||||||
|
|
||||||
use dalek_ff_group::EdwardsPoint;
|
|
||||||
|
|
||||||
use monero_generators::bulletproofs_generators;
|
|
||||||
|
|
||||||
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
|
|
||||||
for generator in points {
|
|
||||||
generators_string.extend(
|
|
||||||
format!(
|
|
||||||
"
|
|
||||||
dalek_ff_group::EdwardsPoint(
|
|
||||||
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
|
|
||||||
),
|
|
||||||
",
|
|
||||||
generator.compress().to_bytes()
|
|
||||||
)
|
|
||||||
.chars(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn generators(prefix: &'static str, path: &str) {
|
|
||||||
let generators = bulletproofs_generators(prefix.as_bytes());
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut G_str = String::new();
|
|
||||||
serialize(&mut G_str, &generators.G);
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
let mut H_str = String::new();
|
|
||||||
serialize(&mut H_str, &generators.H);
|
|
||||||
|
|
||||||
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
|
|
||||||
let _ = remove_file(&path);
|
|
||||||
File::create(&path)
|
|
||||||
.unwrap()
|
|
||||||
.write_all(
|
|
||||||
format!(
|
|
||||||
"
|
|
||||||
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
|
||||||
pub fn GENERATORS() -> &'static Generators {{
|
|
||||||
GENERATORS_CELL.get_or_init(|| Generators {{
|
|
||||||
G: vec![
|
|
||||||
{G_str}
|
|
||||||
],
|
|
||||||
H: vec![
|
|
||||||
{H_str}
|
|
||||||
],
|
|
||||||
}})
|
|
||||||
}}
|
|
||||||
",
|
|
||||||
)
|
|
||||||
.as_bytes(),
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
println!("cargo:rerun-if-changed=build.rs");
|
|
||||||
|
|
||||||
generators("bulletproof", "generators.rs");
|
|
||||||
generators("bulletproof_plus", "generators_plus.rs");
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "monero-generators"
|
name = "monero-generators"
|
||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
description = "Monero's hash_to_point and generators"
|
description = "Monero's hash to point function and generators"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
|
||||||
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
@@ -20,15 +20,27 @@ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-fe
|
|||||||
subtle = { version = "^2.4", default-features = false }
|
subtle = { version = "^2.4", default-features = false }
|
||||||
|
|
||||||
sha3 = { version = "0.10", default-features = false }
|
sha3 = { version = "0.10", default-features = false }
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
|
|
||||||
|
|
||||||
group = { version = "0.13", default-features = false }
|
group = { version = "0.13", default-features = false }
|
||||||
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
monero-io = { path = "../io", version = "0.1", default-features = false }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
hex = "0.4"
|
hex = "0.4"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
|
std = [
|
||||||
|
"std-shims/std",
|
||||||
|
|
||||||
|
"subtle/std",
|
||||||
|
|
||||||
|
"sha3/std",
|
||||||
|
|
||||||
|
"group/alloc",
|
||||||
|
"dalek-ff-group/std",
|
||||||
|
|
||||||
|
"monero-io/std"
|
||||||
|
]
|
||||||
default = ["std"]
|
default = ["std"]
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
MIT License
|
MIT License
|
||||||
|
|
||||||
Copyright (c) 2022-2023 Luke Parker
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -1,7 +1,13 @@
|
|||||||
# Monero Generators
|
# Monero Generators
|
||||||
|
|
||||||
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
||||||
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
An implementation of Monero's `hash_to_ec` is included, as needed to generate
|
||||||
`hash_to_point` here, is included, as needed to generate generators.
|
the generators.
|
||||||
|
|
||||||
This library is usable under no-std when the `std` feature is disabled.
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
|
### Cargo Features
|
||||||
|
|
||||||
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
|
|||||||
@@ -1,27 +1,20 @@
|
|||||||
use subtle::ConditionallySelectable;
|
use subtle::ConditionallySelectable;
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
use group::ff::{Field, PrimeField};
|
use group::ff::{Field, PrimeField};
|
||||||
use dalek_ff_group::FieldElement;
|
use dalek_ff_group::FieldElement;
|
||||||
|
|
||||||
use crate::hash;
|
use monero_io::decompress_point;
|
||||||
|
|
||||||
/// Decompress canonically encoded ed25519 point
|
use crate::keccak256;
|
||||||
/// It does not check if the point is in the prime order subgroup
|
|
||||||
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
|
|
||||||
CompressedEdwardsY(bytes)
|
|
||||||
.decompress()
|
|
||||||
// Ban points which are either unreduced or -0
|
|
||||||
.filter(|point| point.compress().to_bytes() == bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Monero's hash to point function, as named `hash_to_ec`.
|
/// Monero's `hash_to_ec` function.
|
||||||
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let A = FieldElement::from(486662u64);
|
let A = FieldElement::from(486662u64);
|
||||||
|
|
||||||
let v = FieldElement::from_square(hash(&bytes)).double();
|
let v = FieldElement::from_square(keccak256(&bytes)).double();
|
||||||
let w = v + FieldElement::ONE;
|
let w = v + FieldElement::ONE;
|
||||||
let x = w.square() + (-A.square() * v);
|
let x = w.square() + (-A.square() * v);
|
||||||
|
|
||||||
|
|||||||
@@ -1,45 +1,46 @@
|
|||||||
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
//!
|
#![doc = include_str!("../README.md")]
|
||||||
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
|
#![deny(missing_docs)]
|
||||||
//! `hash_to_point` here, is included, as needed to generate generators.
|
|
||||||
|
|
||||||
#![cfg_attr(not(feature = "std"), no_std)]
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
use std_shims::{sync::OnceLock, vec::Vec};
|
use std_shims::{sync::OnceLock, vec::Vec};
|
||||||
|
|
||||||
use sha3::{Digest, Keccak256};
|
use sha3::{Digest, Keccak256};
|
||||||
|
|
||||||
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, edwards::EdwardsPoint};
|
||||||
|
|
||||||
use group::{Group, GroupEncoding};
|
use monero_io::{write_varint, decompress_point};
|
||||||
use dalek_ff_group::EdwardsPoint;
|
|
||||||
|
|
||||||
mod varint;
|
|
||||||
use varint::write_varint;
|
|
||||||
|
|
||||||
mod hash_to_point;
|
mod hash_to_point;
|
||||||
pub use hash_to_point::{hash_to_point, decompress_point};
|
pub use hash_to_point::hash_to_point;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests;
|
mod tests;
|
||||||
|
|
||||||
fn hash(data: &[u8]) -> [u8; 32] {
|
fn keccak256(data: &[u8]) -> [u8; 32] {
|
||||||
Keccak256::digest(data).into()
|
Keccak256::digest(data).into()
|
||||||
}
|
}
|
||||||
|
|
||||||
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
|
static H_CELL: OnceLock<EdwardsPoint> = OnceLock::new();
|
||||||
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
|
/// Monero's `H` generator.
|
||||||
|
///
|
||||||
|
/// Contrary to convention (`G` for values, `H` for randomness), `H` is used by Monero for amounts
|
||||||
|
/// within Pedersen commitments.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub fn H() -> DalekPoint {
|
pub fn H() -> EdwardsPoint {
|
||||||
*H_CELL.get_or_init(|| {
|
*H_CELL.get_or_init(|| {
|
||||||
decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
|
decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
|
||||||
|
.unwrap()
|
||||||
|
.mul_by_cofactor()
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
|
static H_POW_2_CELL: OnceLock<[EdwardsPoint; 64]> = OnceLock::new();
|
||||||
/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
|
/// Monero's `H` generator, multiplied by 2**i for i in 1 ..= 64.
|
||||||
|
///
|
||||||
|
/// This table is useful when working with amounts, which are u64s.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
|
pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
|
||||||
H_POW_2_CELL.get_or_init(|| {
|
H_POW_2_CELL.get_or_init(|| {
|
||||||
let mut res = [H(); 64];
|
let mut res = [H(); 64];
|
||||||
for i in 1 .. 64 {
|
for i in 1 .. 64 {
|
||||||
@@ -49,31 +50,45 @@ pub fn H_pow_2() -> &'static [DalekPoint; 64] {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
const MAX_M: usize = 16;
|
/// The maximum amount of commitments provable for within a single range proof.
|
||||||
const N: usize = 64;
|
pub const MAX_COMMITMENTS: usize = 16;
|
||||||
const MAX_MN: usize = MAX_M * N;
|
/// The amount of bits a value within a commitment may use.
|
||||||
|
pub const COMMITMENT_BITS: usize = 64;
|
||||||
|
/// The logarithm (over 2) of the amount of bits a value within a commitment may use.
|
||||||
|
pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N
|
||||||
|
|
||||||
/// Container struct for Bulletproofs(+) generators.
|
/// Container struct for Bulletproofs(+) generators.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub struct Generators {
|
pub struct Generators {
|
||||||
|
/// The G (bold) vector of generators.
|
||||||
pub G: Vec<EdwardsPoint>,
|
pub G: Vec<EdwardsPoint>,
|
||||||
|
/// The H (bold) vector of generators.
|
||||||
pub H: Vec<EdwardsPoint>,
|
pub H: Vec<EdwardsPoint>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
/// Generate generators as needed for Bulletproofs(+), as Monero does.
|
||||||
|
///
|
||||||
|
/// Consumers should not call this function ad-hoc, yet call it within a build script or use a
|
||||||
|
/// once-initialized static.
|
||||||
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
|
||||||
|
// The maximum amount of bits used within a single range proof.
|
||||||
|
const MAX_MN: usize = MAX_COMMITMENTS * COMMITMENT_BITS;
|
||||||
|
|
||||||
|
let mut preimage = H().compress().to_bytes().to_vec();
|
||||||
|
preimage.extend(dst);
|
||||||
|
|
||||||
let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
|
let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
|
||||||
for i in 0 .. MAX_MN {
|
for i in 0 .. MAX_MN {
|
||||||
|
// We generate a pair of generators per iteration
|
||||||
let i = 2 * i;
|
let i = 2 * i;
|
||||||
|
|
||||||
let mut even = H().compress().to_bytes().to_vec();
|
let mut even = preimage.clone();
|
||||||
even.extend(dst);
|
write_varint(&i, &mut even).unwrap();
|
||||||
let mut odd = even.clone();
|
res.H.push(hash_to_point(keccak256(&even)));
|
||||||
|
|
||||||
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
|
let mut odd = preimage.clone();
|
||||||
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
|
write_varint(&(i + 1), &mut odd).unwrap();
|
||||||
res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
|
res.G.push(hash_to_point(keccak256(&odd)));
|
||||||
res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
|
|
||||||
}
|
}
|
||||||
res
|
res
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,38 +0,0 @@
|
|||||||
use crate::{decompress_point, hash_to_point};
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn crypto_tests() {
|
|
||||||
// tests.txt file copied from monero repo
|
|
||||||
// https://github.com/monero-project/monero/
|
|
||||||
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
|
|
||||||
let reader = include_str!("./tests.txt");
|
|
||||||
|
|
||||||
for line in reader.lines() {
|
|
||||||
let mut words = line.split_whitespace();
|
|
||||||
let command = words.next().unwrap();
|
|
||||||
|
|
||||||
match command {
|
|
||||||
"check_key" => {
|
|
||||||
let key = words.next().unwrap();
|
|
||||||
let expected = match words.next().unwrap() {
|
|
||||||
"true" => true,
|
|
||||||
"false" => false,
|
|
||||||
_ => unreachable!("invalid result"),
|
|
||||||
};
|
|
||||||
|
|
||||||
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
|
|
||||||
|
|
||||||
assert_eq!(actual.is_some(), expected);
|
|
||||||
}
|
|
||||||
"hash_to_ec" => {
|
|
||||||
let bytes = words.next().unwrap();
|
|
||||||
let expected = words.next().unwrap();
|
|
||||||
|
|
||||||
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
|
|
||||||
|
|
||||||
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
|
|
||||||
}
|
|
||||||
_ => unreachable!("unknown command"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1 +1,36 @@
|
|||||||
mod hash_to_point;
|
use crate::{decompress_point, hash_to_point};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_vectors() {
|
||||||
|
// tests.txt file copied from monero repo
|
||||||
|
// https://github.com/monero-project/monero/
|
||||||
|
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
|
||||||
|
let reader = include_str!("./tests.txt");
|
||||||
|
|
||||||
|
for line in reader.lines() {
|
||||||
|
let mut words = line.split_whitespace();
|
||||||
|
let command = words.next().unwrap();
|
||||||
|
|
||||||
|
match command {
|
||||||
|
"check_key" => {
|
||||||
|
let key = words.next().unwrap();
|
||||||
|
let expected = match words.next().unwrap() {
|
||||||
|
"true" => true,
|
||||||
|
"false" => false,
|
||||||
|
_ => unreachable!("invalid result"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
|
||||||
|
assert_eq!(actual.is_some(), expected);
|
||||||
|
}
|
||||||
|
"hash_to_ec" => {
|
||||||
|
let bytes = words.next().unwrap();
|
||||||
|
let expected = words.next().unwrap();
|
||||||
|
|
||||||
|
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
|
||||||
|
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
|
||||||
|
}
|
||||||
|
_ => unreachable!("unknown command"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
use std_shims::io::{self, Write};
|
|
||||||
|
|
||||||
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
|
||||||
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
|
|
||||||
let mut varint = *varint;
|
|
||||||
while {
|
|
||||||
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
|
||||||
varint >>= 7;
|
|
||||||
if varint != 0 {
|
|
||||||
b |= VARINT_CONTINUATION_MASK;
|
|
||||||
}
|
|
||||||
w.write_all(&[b])?;
|
|
||||||
varint != 0
|
|
||||||
} {}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
24
coins/monero/io/Cargo.toml
Normal file
24
coins/monero/io/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
[package]
|
||||||
|
name = "monero-io"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Serialization functions, as within the Monero protocol"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/io"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc"] }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
std = ["std-shims/std"]
|
||||||
|
default = ["std"]
|
||||||
21
coins/monero/io/LICENSE
Normal file
21
coins/monero/io/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
11
coins/monero/io/README.md
Normal file
11
coins/monero/io/README.md
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Monero IO
|
||||||
|
|
||||||
|
Serialization functions, as within the Monero protocol.
|
||||||
|
|
||||||
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
|
### Cargo Features
|
||||||
|
|
||||||
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
219
coins/monero/io/src/lib.rs
Normal file
219
coins/monero/io/src/lib.rs
Normal file
@@ -0,0 +1,219 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
|
use core::fmt::Debug;
|
||||||
|
use std_shims::{
|
||||||
|
vec,
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use curve25519_dalek::{
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::{EdwardsPoint, CompressedEdwardsY},
|
||||||
|
};
|
||||||
|
|
||||||
|
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
|
||||||
|
|
||||||
|
mod sealed {
|
||||||
|
/// A trait for a number readable/writable as a VarInt.
|
||||||
|
///
|
||||||
|
/// This is sealed to prevent unintended implementations.
|
||||||
|
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
|
||||||
|
const BITS: usize;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl VarInt for u8 {
|
||||||
|
const BITS: usize = 8;
|
||||||
|
}
|
||||||
|
impl VarInt for u32 {
|
||||||
|
const BITS: usize = 32;
|
||||||
|
}
|
||||||
|
impl VarInt for u64 {
|
||||||
|
const BITS: usize = 64;
|
||||||
|
}
|
||||||
|
impl VarInt for usize {
|
||||||
|
const BITS: usize = core::mem::size_of::<usize>() * 8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The amount of bytes this number will take when serialized as a VarInt.
|
||||||
|
///
|
||||||
|
/// This function will panic if the VarInt exceeds u64::MAX.
|
||||||
|
pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
|
||||||
|
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
|
||||||
|
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a byte.
|
||||||
|
///
|
||||||
|
/// This is used as a building block within generic functions.
|
||||||
|
pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&[*byte])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a number, VarInt-encoded.
|
||||||
|
///
|
||||||
|
/// This will panic if the VarInt exceeds u64::MAX.
|
||||||
|
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
|
||||||
|
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
|
||||||
|
while {
|
||||||
|
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
|
||||||
|
varint >>= 7;
|
||||||
|
if varint != 0 {
|
||||||
|
b |= VARINT_CONTINUATION_MASK;
|
||||||
|
}
|
||||||
|
write_byte(&b, w)?;
|
||||||
|
varint != 0
|
||||||
|
} {}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a scalar.
|
||||||
|
pub fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&scalar.to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a point.
|
||||||
|
pub fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&point.compress().to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a list of elements, without length-prefixing.
|
||||||
|
pub fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
||||||
|
f: F,
|
||||||
|
values: &[T],
|
||||||
|
w: &mut W,
|
||||||
|
) -> io::Result<()> {
|
||||||
|
for value in values {
|
||||||
|
f(value, w)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write a list of elements, with length-prefixing.
|
||||||
|
pub fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
|
||||||
|
f: F,
|
||||||
|
values: &[T],
|
||||||
|
w: &mut W,
|
||||||
|
) -> io::Result<()> {
|
||||||
|
write_varint(&values.len(), w)?;
|
||||||
|
write_raw_vec(f, values, w)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a constant amount of bytes.
|
||||||
|
pub fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
|
||||||
|
let mut res = [0; N];
|
||||||
|
r.read_exact(&mut res)?;
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a single byte.
|
||||||
|
pub fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
|
||||||
|
Ok(read_bytes::<_, 1>(r)?[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a u16, little-endian encoded.
|
||||||
|
pub fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
|
||||||
|
read_bytes(r).map(u16::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a u32, little-endian encoded.
|
||||||
|
pub fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
|
||||||
|
read_bytes(r).map(u32::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a u64, little-endian encoded.
|
||||||
|
pub fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
|
||||||
|
read_bytes(r).map(u64::from_le_bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a canonically-encoded VarInt.
|
||||||
|
pub fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
|
||||||
|
let mut bits = 0;
|
||||||
|
let mut res = 0;
|
||||||
|
while {
|
||||||
|
let b = read_byte(r)?;
|
||||||
|
if (bits != 0) && (b == 0) {
|
||||||
|
Err(io::Error::other("non-canonical varint"))?;
|
||||||
|
}
|
||||||
|
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
|
||||||
|
Err(io::Error::other("varint overflow"))?;
|
||||||
|
}
|
||||||
|
|
||||||
|
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
|
||||||
|
bits += 7;
|
||||||
|
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
|
||||||
|
} {}
|
||||||
|
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a canonically-encoded scalar.
|
||||||
|
///
|
||||||
|
/// Some scalars within the Monero protocol are not enforced to be canonically encoded. For such
|
||||||
|
/// scalars, they should be represented as `[u8; 32]` and later converted to scalars as relevant.
|
||||||
|
pub fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
|
||||||
|
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
|
||||||
|
.ok_or_else(|| io::Error::other("unreduced scalar"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decompress a canonically-encoded Ed25519 point.
|
||||||
|
///
|
||||||
|
/// Ed25519 is of order `8 * l`. This function ensures each of those `8 * l` points have a singular
|
||||||
|
/// encoding by checking points aren't encoded with an unreduced field element, and aren't negative
|
||||||
|
/// when the negative is equivalent (0 == -0).
|
||||||
|
///
|
||||||
|
/// Since this decodes an Ed25519 point, it does not check the point is in the prime-order
|
||||||
|
/// subgroup. Torsioned points do have a canonical encoding, and only aren't canonical when
|
||||||
|
/// considered in relation to the prime-order subgroup.
|
||||||
|
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
|
||||||
|
CompressedEdwardsY(bytes)
|
||||||
|
.decompress()
|
||||||
|
// Ban points which are either unreduced or -0
|
||||||
|
.filter(|point| point.compress().to_bytes() == bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a canonically-encoded Ed25519 point.
|
||||||
|
///
|
||||||
|
/// This internally calls `decompress_point` and has the same definition of canonicity. This
|
||||||
|
/// function does not check the resulting point is within the prime-order subgroup.
|
||||||
|
pub fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
|
let bytes = read_bytes(r)?;
|
||||||
|
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a canonically-encoded Ed25519 point, within the prime-order subgroup.
|
||||||
|
pub fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
|
||||||
|
read_point(r)
|
||||||
|
.ok()
|
||||||
|
.filter(EdwardsPoint::is_torsion_free)
|
||||||
|
.ok_or_else(|| io::Error::other("invalid point"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a variable-length list of elements, without length-prefixing.
|
||||||
|
pub fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
|
||||||
|
f: F,
|
||||||
|
len: usize,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<Vec<T>> {
|
||||||
|
let mut res = vec![];
|
||||||
|
for _ in 0 .. len {
|
||||||
|
res.push(f(r)?);
|
||||||
|
}
|
||||||
|
Ok(res)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a constant-length list of elements.
|
||||||
|
pub fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
|
||||||
|
f: F,
|
||||||
|
r: &mut R,
|
||||||
|
) -> io::Result<[T; N]> {
|
||||||
|
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a length-prefixed variable-length list of elements.
|
||||||
|
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
|
||||||
|
read_raw_vec(f, read_varint(r)?, r)
|
||||||
|
}
|
||||||
44
coins/monero/primitives/Cargo.toml
Normal file
44
coins/monero/primitives/Cargo.toml
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
[package]
|
||||||
|
name = "monero-primitives"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Primitives for the Monero protocol"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/primitives"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.79"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
|
|
||||||
|
# Cryptographic dependencies
|
||||||
|
sha3 = { version = "0.10", default-features = false }
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
|
|
||||||
|
# Other Monero dependencies
|
||||||
|
monero-io = { path = "../io", version = "0.1", default-features = false }
|
||||||
|
monero-generators = { path = "../generators", version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
hex = { version = "0.4", default-features = false, features = ["alloc"] }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
std = [
|
||||||
|
"std-shims/std",
|
||||||
|
|
||||||
|
"zeroize/std",
|
||||||
|
|
||||||
|
"sha3/std",
|
||||||
|
|
||||||
|
"monero-generators/std",
|
||||||
|
]
|
||||||
|
default = ["std"]
|
||||||
21
coins/monero/primitives/LICENSE
Normal file
21
coins/monero/primitives/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
11
coins/monero/primitives/README.md
Normal file
11
coins/monero/primitives/README.md
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
# Monero Primitives
|
||||||
|
|
||||||
|
Primitive structures and functions for the Monero protocol.
|
||||||
|
|
||||||
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
|
### Cargo Features
|
||||||
|
|
||||||
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
248
coins/monero/primitives/src/lib.rs
Normal file
248
coins/monero/primitives/src/lib.rs
Normal file
@@ -0,0 +1,248 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
|
||||||
|
use std_shims::{io, vec::Vec};
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
use std_shims::sync::OnceLock;
|
||||||
|
|
||||||
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
|
use sha3::{Digest, Keccak256};
|
||||||
|
use curve25519_dalek::{
|
||||||
|
constants::ED25519_BASEPOINT_POINT,
|
||||||
|
traits::VartimePrecomputedMultiscalarMul,
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
|
||||||
|
};
|
||||||
|
|
||||||
|
use monero_io::*;
|
||||||
|
use monero_generators::H;
|
||||||
|
|
||||||
|
mod unreduced_scalar;
|
||||||
|
pub use unreduced_scalar::UnreducedScalar;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
// On std, we cache some variables in statics.
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
|
||||||
|
/// The inverse of 8 over l.
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn INV_EIGHT() -> Scalar {
|
||||||
|
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
|
||||||
|
}
|
||||||
|
// In no-std environments, we prefer the reduced memory use and calculate it ad-hoc.
|
||||||
|
/// The inverse of 8 over l.
|
||||||
|
#[cfg(not(feature = "std"))]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn INV_EIGHT() -> Scalar {
|
||||||
|
Scalar::from(8u8).invert()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
static G_PRECOMP_CELL: OnceLock<VartimeEdwardsPrecomputation> = OnceLock::new();
|
||||||
|
/// A cached (if std) pre-computation of the Ed25519 generator, G.
|
||||||
|
#[cfg(feature = "std")]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn G_PRECOMP() -> &'static VartimeEdwardsPrecomputation {
|
||||||
|
G_PRECOMP_CELL.get_or_init(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]))
|
||||||
|
}
|
||||||
|
/// A cached (if std) pre-computation of the Ed25519 generator, G.
|
||||||
|
#[cfg(not(feature = "std"))]
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
pub fn G_PRECOMP() -> VartimeEdwardsPrecomputation {
|
||||||
|
VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The Keccak-256 hash function.
|
||||||
|
pub fn keccak256(data: impl AsRef<[u8]>) -> [u8; 32] {
|
||||||
|
Keccak256::digest(data.as_ref()).into()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Hash the provided data to a scalar via keccak256(data) % l.
|
||||||
|
///
|
||||||
|
/// This function panics if it finds the Keccak-256 preimage for [0; 32].
|
||||||
|
pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar {
|
||||||
|
let scalar = Scalar::from_bytes_mod_order(keccak256(data.as_ref()));
|
||||||
|
// Monero will explicitly error in this case
|
||||||
|
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
||||||
|
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
||||||
|
// not generate/verify a proof we believe to be valid when it isn't
|
||||||
|
assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref());
|
||||||
|
scalar
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Transparent structure representing a Pedersen commitment's contents.
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||||
|
pub struct Commitment {
|
||||||
|
/// The mask for this commitment.
|
||||||
|
pub mask: Scalar,
|
||||||
|
/// The amount committed to by this commitment.
|
||||||
|
pub amount: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl core::fmt::Debug for Commitment {
|
||||||
|
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||||
|
fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Commitment {
|
||||||
|
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
|
||||||
|
pub fn zero() -> Commitment {
|
||||||
|
Commitment { mask: Scalar::ONE, amount: 0 }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create a new Commitment.
|
||||||
|
pub fn new(mask: Scalar, amount: u64) -> Commitment {
|
||||||
|
Commitment { mask, amount }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Calculate the Pedersen commitment, as a point, from this transparent structure.
|
||||||
|
pub fn calculate(&self) -> EdwardsPoint {
|
||||||
|
EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H(), &self.mask)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write the Commitment.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
|
w.write_all(&self.mask.to_bytes())?;
|
||||||
|
w.write_all(&self.amount.to_le_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize the Commitment to a `Vec<u8>`.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut res = Vec::with_capacity(32 + 8);
|
||||||
|
self.write(&mut res).unwrap();
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a Commitment.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Commitment> {
|
||||||
|
Ok(Commitment::new(read_scalar(r)?, read_u64(r)?))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Decoy data, as used for producing Monero's ring signatures.
|
||||||
|
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
|
||||||
|
pub struct Decoys {
|
||||||
|
offsets: Vec<u64>,
|
||||||
|
signer_index: u8,
|
||||||
|
ring: Vec<[EdwardsPoint; 2]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl core::fmt::Debug for Decoys {
|
||||||
|
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
|
||||||
|
fmt
|
||||||
|
.debug_struct("Decoys")
|
||||||
|
.field("offsets", &self.offsets)
|
||||||
|
.field("ring", &self.ring)
|
||||||
|
.finish_non_exhaustive()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[allow(clippy::len_without_is_empty)]
|
||||||
|
impl Decoys {
|
||||||
|
/// Create a new instance of decoy data.
|
||||||
|
///
|
||||||
|
/// `offsets` are the positions of each ring member within the Monero blockchain, offset from the
|
||||||
|
/// prior member's position (with the initial ring member offset from 0).
|
||||||
|
pub fn new(offsets: Vec<u64>, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option<Self> {
|
||||||
|
if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
Some(Decoys { offsets, signer_index, ring })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The length of the ring.
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.offsets.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The positions of the ring members within the Monero blockchain, as their offsets.
|
||||||
|
///
|
||||||
|
/// The list is formatted as the position of the first ring member, then the offset from each
|
||||||
|
/// ring member to its prior.
|
||||||
|
pub fn offsets(&self) -> &[u64] {
|
||||||
|
&self.offsets
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The positions of the ring members within the Monero blockchain.
|
||||||
|
pub fn positions(&self) -> Vec<u64> {
|
||||||
|
let mut res = Vec::with_capacity(self.len());
|
||||||
|
res.push(self.offsets[0]);
|
||||||
|
for m in 1 .. self.len() {
|
||||||
|
res.push(res[m - 1] + self.offsets[m]);
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The index of the signer within the ring.
|
||||||
|
pub fn signer_index(&self) -> u8 {
|
||||||
|
self.signer_index
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The ring.
|
||||||
|
pub fn ring(&self) -> &[[EdwardsPoint; 2]] {
|
||||||
|
&self.ring
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The [key, commitment] pair of the signer.
|
||||||
|
pub fn signer_ring_members(&self) -> [EdwardsPoint; 2] {
|
||||||
|
self.ring[usize::from(self.signer_index)]
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Write the Decoys.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> {
|
||||||
|
write_vec(write_varint, &self.offsets, w)?;
|
||||||
|
w.write_all(&[self.signer_index])?;
|
||||||
|
write_vec(
|
||||||
|
|pair, w| {
|
||||||
|
write_point(&pair[0], w)?;
|
||||||
|
write_point(&pair[1], w)
|
||||||
|
},
|
||||||
|
&self.ring,
|
||||||
|
w,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Serialize the Decoys to a `Vec<u8>`.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn serialize(&self) -> Vec<u8> {
|
||||||
|
let mut res =
|
||||||
|
Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
|
||||||
|
self.write(&mut res).unwrap();
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Read a set of Decoys.
|
||||||
|
///
|
||||||
|
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
|
||||||
|
/// defined serialization.
|
||||||
|
pub fn read(r: &mut impl io::Read) -> io::Result<Decoys> {
|
||||||
|
Decoys::new(
|
||||||
|
read_vec(read_varint, r)?,
|
||||||
|
read_byte(r)?,
|
||||||
|
read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?,
|
||||||
|
)
|
||||||
|
.ok_or_else(|| io::Error::other("invalid Decoys"))
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
use curve25519_dalek::scalar::Scalar;
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
|
||||||
use crate::unreduced_scalar::*;
|
use crate::UnreducedScalar;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn recover_scalars() {
|
fn recover_scalars() {
|
||||||
@@ -1,18 +1,19 @@
|
|||||||
use core::cmp::Ordering;
|
use core::cmp::Ordering;
|
||||||
|
|
||||||
use std_shims::{
|
use std_shims::{
|
||||||
sync::OnceLock,
|
sync::OnceLock,
|
||||||
io::{self, *},
|
io::{self, *},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
use curve25519_dalek::scalar::Scalar;
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
|
||||||
use crate::serialize::*;
|
use monero_io::*;
|
||||||
|
|
||||||
static PRECOMPUTED_SCALARS_CELL: OnceLock<[Scalar; 8]> = OnceLock::new();
|
static PRECOMPUTED_SCALARS_CELL: OnceLock<[Scalar; 8]> = OnceLock::new();
|
||||||
/// Precomputed scalars used to recover an incorrectly reduced scalar.
|
// Precomputed scalars used to recover an incorrectly reduced scalar.
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
pub(crate) fn PRECOMPUTED_SCALARS() -> [Scalar; 8] {
|
fn PRECOMPUTED_SCALARS() -> [Scalar; 8] {
|
||||||
*PRECOMPUTED_SCALARS_CELL.get_or_init(|| {
|
*PRECOMPUTED_SCALARS_CELL.get_or_init(|| {
|
||||||
let mut precomputed_scalars = [Scalar::ONE; 8];
|
let mut precomputed_scalars = [Scalar::ONE; 8];
|
||||||
for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
|
for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
|
||||||
@@ -22,22 +23,27 @@ pub(crate) fn PRECOMPUTED_SCALARS() -> [Scalar; 8] {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
/// An unreduced scalar.
|
||||||
|
///
|
||||||
|
/// While most of modern Monero enforces scalars be reduced, certain legacy parts of the code did
|
||||||
|
/// not. These section can generally simply be read as a scalar/reduced into a scalar when the time
|
||||||
|
/// comes, yet a couple have non-standard reductions performed.
|
||||||
|
///
|
||||||
|
/// This struct delays scalar conversions and offers the non-standard reduction.
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct UnreducedScalar(pub [u8; 32]);
|
pub struct UnreducedScalar(pub [u8; 32]);
|
||||||
|
|
||||||
impl UnreducedScalar {
|
impl UnreducedScalar {
|
||||||
|
/// Write an UnreducedScalar.
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
w.write_all(&self.0)
|
w.write_all(&self.0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Read an UnreducedScalar.
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<UnreducedScalar> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<UnreducedScalar> {
|
||||||
Ok(UnreducedScalar(read_bytes(r)?))
|
Ok(UnreducedScalar(read_bytes(r)?))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_bytes(&self) -> &[u8; 32] {
|
|
||||||
&self.0
|
|
||||||
}
|
|
||||||
|
|
||||||
fn as_bits(&self) -> [u8; 256] {
|
fn as_bits(&self) -> [u8; 256] {
|
||||||
let mut bits = [0; 256];
|
let mut bits = [0; 256];
|
||||||
for (i, bit) in bits.iter_mut().enumerate() {
|
for (i, bit) in bits.iter_mut().enumerate() {
|
||||||
@@ -47,12 +53,12 @@ impl UnreducedScalar {
|
|||||||
bits
|
bits
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Computes the non-adjacent form of this scalar with width 5.
|
// Computes the non-adjacent form of this scalar with width 5.
|
||||||
///
|
//
|
||||||
/// This matches Monero's `slide` function and intentionally gives incorrect outputs under
|
// This matches Monero's `slide` function and intentionally gives incorrect outputs under
|
||||||
/// certain conditions in order to match Monero.
|
// certain conditions in order to match Monero.
|
||||||
///
|
//
|
||||||
/// This function does not execute in constant time.
|
// This function does not execute in constant time.
|
||||||
fn non_adjacent_form(&self) -> [i8; 256] {
|
fn non_adjacent_form(&self) -> [i8; 256] {
|
||||||
let bits = self.as_bits();
|
let bits = self.as_bits();
|
||||||
let mut naf = [0i8; 256];
|
let mut naf = [0i8; 256];
|
||||||
@@ -108,11 +114,11 @@ impl UnreducedScalar {
|
|||||||
/// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide`
|
/// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide`
|
||||||
/// function.
|
/// function.
|
||||||
///
|
///
|
||||||
/// In Borromean range proofs Monero was not checking that the scalars used were
|
/// In Borromean range proofs, Monero was not checking that the scalars used were
|
||||||
/// reduced. This lead to the scalar stored being interpreted as a different scalar,
|
/// reduced. This lead to the scalar stored being interpreted as a different scalar.
|
||||||
/// this function recovers that scalar.
|
/// This function recovers that scalar.
|
||||||
///
|
///
|
||||||
/// See: https://github.com/monero-project/monero/issues/8438
|
/// See <https://github.com/monero-project/monero/issues/8438> for more info.
|
||||||
pub fn recover_monero_slide_scalar(&self) -> Scalar {
|
pub fn recover_monero_slide_scalar(&self) -> Scalar {
|
||||||
if self.0[31] & 128 == 0 {
|
if self.0[31] & 128 == 0 {
|
||||||
// Computing the w-NAF of a number can only give an output with 1 more bit than
|
// Computing the w-NAF of a number can only give an output with 1 more bit than
|
||||||
41
coins/monero/ringct/borromean/Cargo.toml
Normal file
41
coins/monero/ringct/borromean/Cargo.toml
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
[package]
|
||||||
|
name = "monero-borromean"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/borromean"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.79"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
|
|
||||||
|
# Cryptographic dependencies
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
|
|
||||||
|
# Other Monero dependencies
|
||||||
|
monero-io = { path = "../../io", version = "0.1", default-features = false }
|
||||||
|
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
|
||||||
|
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
|
||||||
|
|
||||||
|
[features]
|
||||||
|
std = [
|
||||||
|
"std-shims/std",
|
||||||
|
|
||||||
|
"zeroize/std",
|
||||||
|
|
||||||
|
"monero-io/std",
|
||||||
|
"monero-generators/std",
|
||||||
|
"monero-primitives/std",
|
||||||
|
]
|
||||||
|
default = ["std"]
|
||||||
21
coins/monero/ringct/borromean/LICENSE
Normal file
21
coins/monero/ringct/borromean/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
12
coins/monero/ringct/borromean/README.md
Normal file
12
coins/monero/ringct/borromean/README.md
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Monero Borromean
|
||||||
|
|
||||||
|
Borromean ring signatures arranged into a range proof, as done by the Monero
|
||||||
|
protocol.
|
||||||
|
|
||||||
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
|
### Cargo Features
|
||||||
|
|
||||||
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
@@ -1,26 +1,35 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use core::fmt::Debug;
|
use core::fmt::Debug;
|
||||||
use std_shims::io::{self, Read, Write};
|
use std_shims::io::{self, Read, Write};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_io::*;
|
||||||
use monero_generators::H_pow_2;
|
use monero_generators::H_pow_2;
|
||||||
|
use monero_primitives::{keccak256_to_scalar, UnreducedScalar};
|
||||||
|
|
||||||
use crate::{hash_to_scalar, unreduced_scalar::UnreducedScalar, serialize::*};
|
// 64 Borromean ring signatures, as needed for a 64-bit range proof.
|
||||||
|
//
|
||||||
/// 64 Borromean ring signatures.
|
// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
|
||||||
///
|
// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
|
||||||
/// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
|
// algorithm which was in use.
|
||||||
/// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
/// algorithm which was in use.
|
struct BorromeanSignatures {
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
s0: [UnreducedScalar; 64],
|
||||||
pub struct BorromeanSignatures {
|
s1: [UnreducedScalar; 64],
|
||||||
pub s0: [UnreducedScalar; 64],
|
ee: Scalar,
|
||||||
pub s1: [UnreducedScalar; 64],
|
|
||||||
pub ee: Scalar,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanSignatures {
|
impl BorromeanSignatures {
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
|
// Read a set of BorromeanSignatures.
|
||||||
|
fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
|
||||||
Ok(BorromeanSignatures {
|
Ok(BorromeanSignatures {
|
||||||
s0: read_array(UnreducedScalar::read, r)?,
|
s0: read_array(UnreducedScalar::read, r)?,
|
||||||
s1: read_array(UnreducedScalar::read, r)?,
|
s1: read_array(UnreducedScalar::read, r)?,
|
||||||
@@ -28,7 +37,8 @@ impl BorromeanSignatures {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
// Write the set of BorromeanSignatures.
|
||||||
|
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
for s0 in &self.s0 {
|
for s0 in &self.s0 {
|
||||||
s0.write(w)?;
|
s0.write(w)?;
|
||||||
}
|
}
|
||||||
@@ -50,36 +60,41 @@ impl BorromeanSignatures {
|
|||||||
);
|
);
|
||||||
#[allow(non_snake_case)]
|
#[allow(non_snake_case)]
|
||||||
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||||
&hash_to_scalar(LL.compress().as_bytes()),
|
&keccak256_to_scalar(LL.compress().as_bytes()),
|
||||||
&keys_b[i],
|
&keys_b[i],
|
||||||
&self.s1[i].recover_monero_slide_scalar(),
|
&self.s1[i].recover_monero_slide_scalar(),
|
||||||
);
|
);
|
||||||
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
||||||
}
|
}
|
||||||
|
|
||||||
hash_to_scalar(&transcript) == self.ee
|
keccak256_to_scalar(transcript) == self.ee
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A range proof premised on Borromean ring signatures.
|
/// A range proof premised on Borromean ring signatures.
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct BorromeanRange {
|
pub struct BorromeanRange {
|
||||||
pub sigs: BorromeanSignatures,
|
sigs: BorromeanSignatures,
|
||||||
pub bit_commitments: [EdwardsPoint; 64],
|
bit_commitments: [EdwardsPoint; 64],
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BorromeanRange {
|
impl BorromeanRange {
|
||||||
|
/// Read a BorromeanRange proof.
|
||||||
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
|
pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanRange> {
|
||||||
Ok(BorromeanRange {
|
Ok(BorromeanRange {
|
||||||
sigs: BorromeanSignatures::read(r)?,
|
sigs: BorromeanSignatures::read(r)?,
|
||||||
bit_commitments: read_array(read_point, r)?,
|
bit_commitments: read_array(read_point, r)?,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Write the BorromeanRange proof.
|
||||||
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||||
self.sigs.write(w)?;
|
self.sigs.write(w)?;
|
||||||
write_raw_vec(write_point, &self.bit_commitments, w)
|
write_raw_vec(write_point, &self.bit_commitments, w)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Verify the commitment contains a 64-bit value.
|
||||||
|
#[must_use]
|
||||||
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
|
||||||
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
|
||||||
return false;
|
return false;
|
||||||
57
coins/monero/ringct/bulletproofs/Cargo.toml
Normal file
57
coins/monero/ringct/bulletproofs/Cargo.toml
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
[package]
|
||||||
|
name = "monero-bulletproofs"
|
||||||
|
version = "0.1.0"
|
||||||
|
description = "Bulletproofs(+) range proofs, as defined by the Monero protocol"
|
||||||
|
license = "MIT"
|
||||||
|
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/bulletproofs"
|
||||||
|
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
|
||||||
|
edition = "2021"
|
||||||
|
rust-version = "1.79"
|
||||||
|
|
||||||
|
[package.metadata.docs.rs]
|
||||||
|
all-features = true
|
||||||
|
rustdoc-args = ["--cfg", "docsrs"]
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
|
||||||
|
|
||||||
|
thiserror = { version = "1", default-features = false, optional = true }
|
||||||
|
|
||||||
|
rand_core = { version = "0.6", default-features = false }
|
||||||
|
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
|
||||||
|
subtle = { version = "^2.4", default-features = false }
|
||||||
|
|
||||||
|
# Cryptographic dependencies
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
|
|
||||||
|
# Other Monero dependencies
|
||||||
|
monero-io = { path = "../../io", version = "0.1", default-features = false }
|
||||||
|
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
|
||||||
|
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
|
||||||
|
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
hex-literal = "0.4"
|
||||||
|
|
||||||
|
[features]
|
||||||
|
std = [
|
||||||
|
"std-shims/std",
|
||||||
|
|
||||||
|
"thiserror",
|
||||||
|
|
||||||
|
"rand_core/std",
|
||||||
|
"zeroize/std",
|
||||||
|
"subtle/std",
|
||||||
|
|
||||||
|
"monero-io/std",
|
||||||
|
"monero-generators/std",
|
||||||
|
"monero-primitives/std",
|
||||||
|
]
|
||||||
|
compile-time-generators = ["curve25519-dalek/precomputed-tables"]
|
||||||
|
default = ["std", "compile-time-generators"]
|
||||||
21
coins/monero/ringct/bulletproofs/LICENSE
Normal file
21
coins/monero/ringct/bulletproofs/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2022-2024 Luke Parker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
14
coins/monero/ringct/bulletproofs/README.md
Normal file
14
coins/monero/ringct/bulletproofs/README.md
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
# Monero Bulletproofs(+)
|
||||||
|
|
||||||
|
Bulletproofs(+) range proofs, as defined by the Monero protocol.
|
||||||
|
|
||||||
|
This library is usable under no-std when the `std` feature (on by default) is
|
||||||
|
disabled.
|
||||||
|
|
||||||
|
### Cargo Features
|
||||||
|
|
||||||
|
- `std` (on by default): Enables `std` (and with it, more efficient internal
|
||||||
|
implementations).
|
||||||
|
- `compile-time-generators` (on by default): Derives the generators at
|
||||||
|
compile-time so they don't need to be derived at runtime. This is recommended
|
||||||
|
if program size doesn't need to be kept minimal.
|
||||||
88
coins/monero/ringct/bulletproofs/build.rs
Normal file
88
coins/monero/ringct/bulletproofs/build.rs
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
use std::{
|
||||||
|
io::Write,
|
||||||
|
env,
|
||||||
|
path::Path,
|
||||||
|
fs::{File, remove_file},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(feature = "compile-time-generators")]
|
||||||
|
fn generators(prefix: &'static str, path: &str) {
|
||||||
|
use curve25519_dalek::EdwardsPoint;
|
||||||
|
|
||||||
|
use monero_generators::bulletproofs_generators;
|
||||||
|
|
||||||
|
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
|
||||||
|
for generator in points {
|
||||||
|
generators_string.extend(
|
||||||
|
format!(
|
||||||
|
"
|
||||||
|
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
|
||||||
|
",
|
||||||
|
generator.compress().to_bytes()
|
||||||
|
)
|
||||||
|
.chars(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let generators = bulletproofs_generators(prefix.as_bytes());
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut G_str = String::new();
|
||||||
|
serialize(&mut G_str, &generators.G);
|
||||||
|
#[allow(non_snake_case)]
|
||||||
|
let mut H_str = String::new();
|
||||||
|
serialize(&mut H_str, &generators.H);
|
||||||
|
|
||||||
|
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
|
||||||
|
let _ = remove_file(&path);
|
||||||
|
File::create(&path)
|
||||||
|
.unwrap()
|
||||||
|
.write_all(
|
||||||
|
format!(
|
||||||
|
"
|
||||||
|
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
||||||
|
pub(crate) fn GENERATORS() -> &'static Generators {{
|
||||||
|
GENERATORS_CELL.get_or_init(|| Generators {{
|
||||||
|
G: std_shims::vec![
|
||||||
|
{G_str}
|
||||||
|
],
|
||||||
|
H: std_shims::vec![
|
||||||
|
{H_str}
|
||||||
|
],
|
||||||
|
}})
|
||||||
|
}}
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.as_bytes(),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(not(feature = "compile-time-generators"))]
|
||||||
|
fn generators(prefix: &'static str, path: &str) {
|
||||||
|
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
|
||||||
|
let _ = remove_file(&path);
|
||||||
|
File::create(&path)
|
||||||
|
.unwrap()
|
||||||
|
.write_all(
|
||||||
|
format!(
|
||||||
|
r#"
|
||||||
|
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
|
||||||
|
pub(crate) fn GENERATORS() -> &'static Generators {{
|
||||||
|
GENERATORS_CELL.get_or_init(|| {{
|
||||||
|
monero_generators::bulletproofs_generators(b"{prefix}")
|
||||||
|
}})
|
||||||
|
}}
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.as_bytes(),
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn main() {
|
||||||
|
println!("cargo:rerun-if-changed=build.rs");
|
||||||
|
|
||||||
|
generators("bulletproof", "generators.rs");
|
||||||
|
generators("bulletproof_plus", "generators_plus.rs");
|
||||||
|
}
|
||||||
101
coins/monero/ringct/bulletproofs/src/batch_verifier.rs
Normal file
101
coins/monero/ringct/bulletproofs/src/batch_verifier.rs
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
|
use curve25519_dalek::{
|
||||||
|
constants::ED25519_BASEPOINT_POINT,
|
||||||
|
traits::{IsIdentity, VartimeMultiscalarMul},
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::EdwardsPoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
use monero_generators::{H, Generators};
|
||||||
|
|
||||||
|
use crate::{original, plus};
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub(crate) struct InternalBatchVerifier {
|
||||||
|
pub(crate) g: Scalar,
|
||||||
|
pub(crate) h: Scalar,
|
||||||
|
pub(crate) g_bold: Vec<Scalar>,
|
||||||
|
pub(crate) h_bold: Vec<Scalar>,
|
||||||
|
pub(crate) other: Vec<(Scalar, EdwardsPoint)>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl InternalBatchVerifier {
|
||||||
|
#[must_use]
|
||||||
|
fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool {
|
||||||
|
let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len();
|
||||||
|
let mut scalars = Vec::with_capacity(capacity);
|
||||||
|
let mut points = Vec::with_capacity(capacity);
|
||||||
|
|
||||||
|
scalars.push(self.g);
|
||||||
|
points.push(G);
|
||||||
|
|
||||||
|
scalars.push(self.h);
|
||||||
|
points.push(H);
|
||||||
|
|
||||||
|
for (i, g_bold) in self.g_bold.into_iter().enumerate() {
|
||||||
|
scalars.push(g_bold);
|
||||||
|
points.push(generators.G[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i, h_bold) in self.h_bold.into_iter().enumerate() {
|
||||||
|
scalars.push(h_bold);
|
||||||
|
points.push(generators.H[i]);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (scalar, point) in self.other {
|
||||||
|
scalars.push(scalar);
|
||||||
|
points.push(point);
|
||||||
|
}
|
||||||
|
|
||||||
|
EdwardsPoint::vartime_multiscalar_mul(scalars, points).is_identity()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub(crate) struct BulletproofsBatchVerifier(pub(crate) InternalBatchVerifier);
|
||||||
|
impl BulletproofsBatchVerifier {
|
||||||
|
#[must_use]
|
||||||
|
pub(crate) fn verify(self) -> bool {
|
||||||
|
self.0.verify(ED25519_BASEPOINT_POINT, H(), original::GENERATORS())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
pub(crate) struct BulletproofsPlusBatchVerifier(pub(crate) InternalBatchVerifier);
|
||||||
|
impl BulletproofsPlusBatchVerifier {
|
||||||
|
#[must_use]
|
||||||
|
pub(crate) fn verify(self) -> bool {
|
||||||
|
// Bulletproofs+ is written as per the paper, with G for the value and H for the mask
|
||||||
|
// Monero uses H for the value and G for the mask
|
||||||
|
self.0.verify(H(), ED25519_BASEPOINT_POINT, plus::GENERATORS())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A batch verifier for Bulletproofs(+).
|
||||||
|
///
|
||||||
|
/// This uses a fixed layout such that all fixed points only incur a single point scaling,
|
||||||
|
/// regardless of the amounts of proofs verified. For all variable points (commitments), they're
|
||||||
|
/// accumulated with the fixed points into a single multiscalar multiplication.
|
||||||
|
#[derive(Default)]
|
||||||
|
pub struct BatchVerifier {
|
||||||
|
pub(crate) original: BulletproofsBatchVerifier,
|
||||||
|
pub(crate) plus: BulletproofsPlusBatchVerifier,
|
||||||
|
}
|
||||||
|
impl BatchVerifier {
|
||||||
|
/// Create a new batch verifier.
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self {
|
||||||
|
original: BulletproofsBatchVerifier(InternalBatchVerifier::default()),
|
||||||
|
plus: BulletproofsPlusBatchVerifier(InternalBatchVerifier::default()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify all of the proofs queued within this batch verifier.
|
||||||
|
///
|
||||||
|
/// This uses a variable-time multiscalar multiplication internally.
|
||||||
|
#[must_use]
|
||||||
|
pub fn verify(self) -> bool {
|
||||||
|
self.original.verify() && self.plus.verify()
|
||||||
|
}
|
||||||
|
}
|
||||||
74
coins/monero/ringct/bulletproofs/src/core.rs
Normal file
74
coins/monero/ringct/bulletproofs/src/core.rs
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
use std_shims::{vec, vec::Vec};
|
||||||
|
|
||||||
|
use curve25519_dalek::{
|
||||||
|
traits::{MultiscalarMul, VartimeMultiscalarMul},
|
||||||
|
scalar::Scalar,
|
||||||
|
edwards::EdwardsPoint,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS};
|
||||||
|
|
||||||
|
pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
|
||||||
|
let mut buf_scalars = Vec::with_capacity(pairs.len());
|
||||||
|
let mut buf_points = Vec::with_capacity(pairs.len());
|
||||||
|
for (scalar, point) in pairs {
|
||||||
|
buf_scalars.push(scalar);
|
||||||
|
buf_points.push(point);
|
||||||
|
}
|
||||||
|
EdwardsPoint::multiscalar_mul(buf_scalars, buf_points)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn multiexp_vartime(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
|
||||||
|
let mut buf_scalars = Vec::with_capacity(pairs.len());
|
||||||
|
let mut buf_points = Vec::with_capacity(pairs.len());
|
||||||
|
for (scalar, point) in pairs {
|
||||||
|
buf_scalars.push(scalar);
|
||||||
|
buf_points.push(point);
|
||||||
|
}
|
||||||
|
EdwardsPoint::vartime_multiscalar_mul(buf_scalars, buf_points)
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
This has room for optimization worth investigating further. It currently takes
|
||||||
|
an iterative approach. It can be optimized further via divide and conquer.
|
||||||
|
|
||||||
|
Assume there are 4 challenges.
|
||||||
|
|
||||||
|
Iterative approach (current):
|
||||||
|
1. Do the optimal multiplications across challenge column 0 and 1.
|
||||||
|
2. Do the optimal multiplications across that result and column 2.
|
||||||
|
3. Do the optimal multiplications across that result and column 3.
|
||||||
|
|
||||||
|
Divide and conquer (worth investigating further):
|
||||||
|
1. Do the optimal multiplications across challenge column 0 and 1.
|
||||||
|
2. Do the optimal multiplications across challenge column 2 and 3.
|
||||||
|
3. Multiply both results together.
|
||||||
|
|
||||||
|
When there are 4 challenges (n=16), the iterative approach does 28 multiplications
|
||||||
|
versus divide and conquer's 24.
|
||||||
|
*/
|
||||||
|
pub(crate) fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
|
||||||
|
let mut products = vec![Scalar::ONE; 1 << challenges.len()];
|
||||||
|
|
||||||
|
if !challenges.is_empty() {
|
||||||
|
products[0] = challenges[0].1;
|
||||||
|
products[1] = challenges[0].0;
|
||||||
|
|
||||||
|
for (j, challenge) in challenges.iter().enumerate().skip(1) {
|
||||||
|
let mut slots = (1 << (j + 1)) - 1;
|
||||||
|
while slots > 0 {
|
||||||
|
products[slots] = products[slots / 2] * challenge.0;
|
||||||
|
products[slots - 1] = products[slots / 2] * challenge.1;
|
||||||
|
|
||||||
|
slots = slots.saturating_sub(2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sanity check since if the above failed to populate, it'd be critical
|
||||||
|
for product in &products {
|
||||||
|
debug_assert!(*product != Scalar::ZERO);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
products
|
||||||
|
}
|
||||||
292
coins/monero/ringct/bulletproofs/src/lib.rs
Normal file
292
coins/monero/ringct/bulletproofs/src/lib.rs
Normal file
@@ -0,0 +1,292 @@
|
|||||||
|
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
|
||||||
|
#![doc = include_str!("../README.md")]
|
||||||
|
#![deny(missing_docs)]
|
||||||
|
#![cfg_attr(not(feature = "std"), no_std)]
|
||||||
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
|
use std_shims::{
|
||||||
|
vec,
|
||||||
|
vec::Vec,
|
||||||
|
io::{self, Read, Write},
|
||||||
|
};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
use zeroize::Zeroizing;
|
||||||
|
|
||||||
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
|
use monero_io::*;
|
||||||
|
pub use monero_generators::MAX_COMMITMENTS;
|
||||||
|
use monero_primitives::Commitment;
|
||||||
|
|
||||||
|
pub(crate) mod scalar_vector;
|
||||||
|
pub(crate) mod point_vector;
|
||||||
|
|
||||||
|
pub(crate) mod core;
|
||||||
|
use crate::core::LOG_COMMITMENT_BITS;
|
||||||
|
|
||||||
|
pub(crate) mod batch_verifier;
|
||||||
|
use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};
|
||||||
|
pub use batch_verifier::BatchVerifier;
|
||||||
|
|
||||||
|
pub(crate) mod original;
|
||||||
|
use crate::original::{
|
||||||
|
IpProof, AggregateRangeStatement as OriginalStatement, AggregateRangeWitness as OriginalWitness,
|
||||||
|
AggregateRangeProof as OriginalProof,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub(crate) mod plus;
|
||||||
|
use crate::plus::{
|
||||||
|
WipProof, AggregateRangeStatement as PlusStatement, AggregateRangeWitness as PlusWitness,
|
||||||
|
AggregateRangeProof as PlusProof,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests;
|
||||||
|
|
||||||
|
/// An error from proving/verifying Bulletproofs(+).
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
|
||||||
|
#[cfg_attr(feature = "std", derive(thiserror::Error))]
|
||||||
|
pub enum BulletproofError {
|
||||||
|
/// Proving/verifying a Bulletproof(+) range proof with no commitments.
|
||||||
|
#[cfg_attr(feature = "std", error("no commitments to prove the range for"))]
|
||||||
|
NoCommitments,
|
||||||
|
/// Proving/verifying a Bulletproof(+) range proof with more commitments than supported.
|
||||||
|
#[cfg_attr(feature = "std", error("too many commitments to prove the range for"))]
|
||||||
|
TooManyCommitments,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A Bulletproof(+).
|
||||||
|
///
|
||||||
|
/// This encapsulates either a Bulletproof or a Bulletproof+.
|
||||||
|
#[allow(clippy::large_enum_variant)]
|
||||||
|
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||||
|
pub enum Bulletproof {
|
||||||
|
/// A Bulletproof.
|
||||||
|
Original(OriginalProof),
|
||||||
|
/// A Bulletproof+.
|
||||||
|
Plus(PlusProof),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Bulletproof {
  // The amount of fixed 32-byte fields within a proof's serialization: 6 for a Bulletproof+,
  // 9 for an original Bulletproof (the logarithmically-sized L/R vectors are excluded).
  fn bp_fields(plus: bool) -> usize {
    if plus {
      6
    } else {
      9
    }
  }

  /// Calculate the weight penalty for the Bulletproof(+).
  ///
  /// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone
  /// accordingly doesn't properly represent the burden of the proof. Monero 'claws back' some of
  /// the weight lost by using a proof smaller than it is fast to compensate for this.
  // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
  // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
  pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
    #[allow(non_snake_case)]
    let mut LR_len = 0;
    // Round the amount of outputs up to the next power of two, tracking its log2 in LR_len
    let mut n_padded_outputs = 1;
    while n_padded_outputs < n_outputs {
      LR_len += 1;
      n_padded_outputs = 1 << LR_len;
    }
    // The L/R vectors are sized to the log2 of the padded amount of rows
    // (n_padded_outputs * 2^LOG_COMMITMENT_BITS)
    LR_len += LOG_COMMITMENT_BITS;

    let mut bp_clawback = 0;
    if n_padded_outputs > 2 {
      let fields = Bulletproof::bp_fields(plus);
      // Per the linked Monero code: half the size of a proof with LR_len of
      // LOG_COMMITMENT_BITS + 1, used as the reference point for linear scaling
      let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2;
      // The actual serialized size of this proof
      let size = (fields + (2 * LR_len)) * 32;
      // Claw back 80% of the difference between the linearly-scaled size and the actual size
      bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
    }

    (bp_clawback, LR_len)
  }

  /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
  ///
  /// Returns an error if there are no commitments or more than `MAX_COMMITMENTS`.
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: Vec<Commitment>,
  ) -> Result<Bulletproof, BulletproofError> {
    if outputs.is_empty() {
      Err(BulletproofError::NoCommitments)?;
    }
    if outputs.len() > MAX_COMMITMENTS {
      Err(BulletproofError::TooManyCommitments)?;
    }
    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // These unwraps are safe as the amount of commitments was checked to be in-range above
    Ok(Bulletproof::Original(
      OriginalStatement::new(&commitments)
        .unwrap()
        .prove(rng, OriginalWitness::new(outputs).unwrap())
        .unwrap(),
    ))
  }

  /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof+.
  ///
  /// Returns an error if there are no commitments or more than `MAX_COMMITMENTS`.
  pub fn prove_plus<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: Vec<Commitment>,
  ) -> Result<Bulletproof, BulletproofError> {
    if outputs.is_empty() {
      Err(BulletproofError::NoCommitments)?;
    }
    if outputs.len() > MAX_COMMITMENTS {
      Err(BulletproofError::TooManyCommitments)?;
    }
    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // These unwraps are safe as the amount of commitments was checked to be in-range above
    Ok(Bulletproof::Plus(
      PlusStatement::new(&commitments)
        .unwrap()
        .prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
        .unwrap(),
    ))
  }

  /// Verify the given Bulletproof(+).
  ///
  /// This internally creates its own batch verifier and immediately executes it, returning
  /// whether the proof verified for the given commitments.
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Bulletproof::Original(bp) => {
        let mut verifier = BulletproofsBatchVerifier::default();
        // Statement creation fails if the commitments are empty or too many
        let Some(statement) = OriginalStatement::new(commitments) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, bp.clone()) {
          return false;
        }
        verifier.verify()
      }
      Bulletproof::Plus(bp) => {
        let mut verifier = BulletproofsPlusBatchVerifier::default();
        let Some(statement) = PlusStatement::new(commitments) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, bp.clone()) {
          return false;
        }
        verifier.verify()
      }
    }
  }

  /// Accumulate the verification for the given Bulletproof(+) into the specified BatchVerifier.
  ///
  /// Returns false if the Bulletproof(+) isn't sane, leaving the BatchVerifier in an undefined
  /// state.
  ///
  /// Returns true if the Bulletproof(+) is sane, regardless of its validity.
  ///
  /// The BatchVerifier must have its verification function executed to actually verify this proof.
  #[must_use]
  pub fn batch_verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Bulletproof::Original(bp) => {
        let Some(statement) = OriginalStatement::new(commitments) else {
          return false;
        };
        statement.verify(rng, &mut verifier.original, bp.clone())
      }
      Bulletproof::Plus(bp) => {
        let Some(statement) = PlusStatement::new(commitments) else {
          return false;
        };
        statement.verify(rng, &mut verifier.plus, bp.clone())
      }
    }
  }

  // Write the proof's elements, using `specific_write_vec` for the L/R point vectors.
  // The standard and in-signature encodings differ solely in how these vectors are written
  // (see `write` and `signature_write`).
  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Bulletproof::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.tau_x, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.ip.L, w)?;
        specific_write_vec(&bp.ip.R, w)?;
        write_scalar(&bp.ip.a, w)?;
        write_scalar(&bp.ip.b, w)?;
        write_scalar(&bp.t_hat, w)
      }

      Bulletproof::Plus(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.wip.A, w)?;
        write_point(&bp.wip.B, w)?;
        write_scalar(&bp.wip.r_answer, w)?;
        write_scalar(&bp.wip.s_answer, w)?;
        write_scalar(&bp.wip.delta_answer, w)?;
        specific_write_vec(&bp.wip.L, w)?;
        specific_write_vec(&bp.wip.R, w)
      }
    }
  }

  /// Write a Bulletproof(+) for the message signed by a transaction's signature.
  ///
  /// This has a distinct encoding from the standard encoding.
  // Writes the L/R vectors without a length prefix (`write_raw_vec`), unlike `write`
  pub fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  /// Write a Bulletproof(+).
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }

  /// Serialize a Bulletproof(+) to a `Vec<u8>`.
  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    // Writing to a Vec is infallible
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read a Bulletproof.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
    Ok(Bulletproof::Original(OriginalProof {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      tau_x: read_scalar(r)?,
      mu: read_scalar(r)?,
      ip: IpProof {
        L: read_vec(read_point, r)?,
        R: read_vec(read_point, r)?,
        a: read_scalar(r)?,
        b: read_scalar(r)?,
      },
      t_hat: read_scalar(r)?,
    }))
  }

  /// Read a Bulletproof+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
    Ok(Bulletproof::Plus(PlusProof {
      A: read_point(r)?,
      wip: WipProof {
        A: read_point(r)?,
        B: read_point(r)?,
        r_answer: read_scalar(r)?,
        s_answer: read_scalar(r)?,
        delta_answer: read_scalar(r)?,
        L: read_vec(read_point, r)?.into_iter().collect(),
        R: read_vec(read_point, r)?.into_iter().collect(),
      },
    }))
  }
}
|
||||||
303
coins/monero/ringct/bulletproofs/src/original/inner_product.rs
Normal file
303
coins/monero/ringct/bulletproofs/src/original/inner_product.rs
Normal file
@@ -0,0 +1,303 @@
|
|||||||
|
use std_shims::{vec, vec::Vec};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
|
use curve25519_dalek::{Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_generators::H;
|
||||||
|
use monero_primitives::{INV_EIGHT, keccak256_to_scalar};
|
||||||
|
use crate::{
|
||||||
|
core::{multiexp_vartime, challenge_products},
|
||||||
|
scalar_vector::ScalarVector,
|
||||||
|
point_vector::PointVector,
|
||||||
|
BulletproofsBatchVerifier,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// An error from proving/verifying Inner-Product statements.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub(crate) enum IpError {
  /// The amount of weights/L-R terms didn't match the amount of generators in use.
  IncorrectAmountOfGenerators,
  /// The proof's L and R vectors had differing lengths.
  DifferingLrLengths,
}
|
||||||
|
|
||||||
|
/// The Bulletproofs Inner-Product statement.
///
/// This is for usage with Protocol 2 from the Bulletproofs paper.
#[derive(Clone, Debug)]
pub(crate) struct IpStatement {
  // Weights for h_bold (applied to the fixed H generators when proving/verifying)
  h_bold_weights: ScalarVector,
  // u as the discrete logarithm of G
  u: Scalar,
}
|
||||||
|
|
||||||
|
/// The witness for the Bulletproofs Inner-Product statement.
///
/// The inner product of `a` and `b` is the value the statement is proven for.
#[derive(Clone, Debug)]
pub(crate) struct IpWitness {
  // a
  a: ScalarVector,
  // b
  b: ScalarVector,
}
|
||||||
|
|
||||||
|
impl IpWitness {
|
||||||
|
/// Construct a new witness for an Inner-Product statement.
|
||||||
|
///
|
||||||
|
/// This functions return None if the lengths of a, b are mismatched, not a power of two, or are
|
||||||
|
/// empty.
|
||||||
|
pub(crate) fn new(a: ScalarVector, b: ScalarVector) -> Option<Self> {
|
||||||
|
if a.0.is_empty() || (a.len() != b.len()) {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut power_of_2 = 1;
|
||||||
|
while power_of_2 < a.len() {
|
||||||
|
power_of_2 <<= 1;
|
||||||
|
}
|
||||||
|
if power_of_2 != a.len() {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(Self { a, b })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A proof for the Bulletproofs Inner-Product statement.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct IpProof {
  // The L terms, one per round of the protocol
  pub(crate) L: Vec<EdwardsPoint>,
  // The R terms, one per round of the protocol
  pub(crate) R: Vec<EdwardsPoint>,
  // The final a scalar, sent once the vectors are reduced to length one
  pub(crate) a: Scalar,
  // The final b scalar, sent once the vectors are reduced to length one
  pub(crate) b: Scalar,
}
|
||||||
|
|
||||||
|
impl IpStatement {
  /// Create a new Inner-Product statement which won't transcript P.
  ///
  /// This MUST only be called when P is deterministic to already transcripted elements.
  pub(crate) fn new_without_P_transcript(h_bold_weights: ScalarVector, u: Scalar) -> Self {
    Self { h_bold_weights, u }
  }

  // Transcript a round of the protocol: the prior challenge followed by this round's L and R
  fn transcript_L_R(transcript: Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
    let mut transcript = transcript.to_bytes().to_vec();
    transcript.extend(L.compress().to_bytes());
    transcript.extend(R.compress().to_bytes());
    keccak256_to_scalar(transcript)
  }

  /// Prove for this Inner-Product statement.
  ///
  /// Returns an error if this statement couldn't be proven for (such as if the witness isn't
  /// consistent).
  // Line references in the comments below are to Protocol 2 of the Bulletproofs paper
  pub(crate) fn prove(
    self,
    mut transcript: Scalar,
    witness: IpWitness,
  ) -> Result<IpProof, IpError> {
    let generators = crate::original::GENERATORS();
    let g_bold_slice = &generators.G[.. witness.a.len()];
    let h_bold_slice = &generators.H[.. witness.a.len()];

    let (mut g_bold, mut h_bold, u, mut a, mut b) = {
      let IpStatement { h_bold_weights, u } = self;
      // u is a discrete-logarithm claim over H, so recover the actual point
      let u = H() * u;

      // Ensure we have the exact amount of weights
      if h_bold_weights.len() != g_bold_slice.len() {
        Err(IpError::IncorrectAmountOfGenerators)?;
      }
      // Acquire a local copy of the generators
      let g_bold = PointVector(g_bold_slice.to_vec());
      let h_bold = PointVector(h_bold_slice.to_vec()).mul_vec(&h_bold_weights);

      let IpWitness { a, b } = witness;

      (g_bold, h_bold, u, a, b)
    };

    let mut L_vec = vec![];
    let mut R_vec = vec![];

    // `else: (n > 1)` case, lines 18-35 of the Bulletproofs paper
    // This interprets `g_bold.len()` as `n`
    while g_bold.len() > 1 {
      // Split a, b, g_bold, h_bold as needed for lines 20-24
      let (a1, a2) = a.clone().split();
      let (b1, b2) = b.clone().split();

      let (g_bold1, g_bold2) = g_bold.split();
      let (h_bold1, h_bold2) = h_bold.split();

      let n_hat = g_bold1.len();

      // Sanity
      debug_assert_eq!(a1.len(), n_hat);
      debug_assert_eq!(a2.len(), n_hat);
      debug_assert_eq!(b1.len(), n_hat);
      debug_assert_eq!(b2.len(), n_hat);
      debug_assert_eq!(g_bold1.len(), n_hat);
      debug_assert_eq!(g_bold2.len(), n_hat);
      debug_assert_eq!(h_bold1.len(), n_hat);
      debug_assert_eq!(h_bold2.len(), n_hat);

      // cl, cr, lines 21-22
      let cl = a1.clone().inner_product(&b2);
      let cr = a2.clone().inner_product(&b1);

      let L = {
        let mut L_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
        for (a, g) in a1.0.iter().zip(g_bold2.0.iter()) {
          L_terms.push((*a, *g));
        }
        for (b, h) in b2.0.iter().zip(h_bold1.0.iter()) {
          L_terms.push((*b, *h));
        }
        L_terms.push((cl, u));
        // Uses vartime since this isn't a ZK proof
        multiexp_vartime(&L_terms)
      };
      // Multiplying by INV_EIGHT matches Monero's torsion-clearing serialization convention;
      // the verifier undoes it via mul_by_cofactor
      L_vec.push(L * INV_EIGHT());

      let R = {
        let mut R_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
        for (a, g) in a2.0.iter().zip(g_bold1.0.iter()) {
          R_terms.push((*a, *g));
        }
        for (b, h) in b1.0.iter().zip(h_bold2.0.iter()) {
          R_terms.push((*b, *h));
        }
        R_terms.push((cr, u));
        multiexp_vartime(&R_terms)
      };
      R_vec.push(R * INV_EIGHT());

      // Now that we've calculate L, R, transcript them to receive x (26-27)
      transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
      let x = transcript;
      let x_inv = x.invert();

      // The prover and verifier now calculate the following (28-31)
      g_bold = PointVector(Vec::with_capacity(g_bold1.len()));
      for (a, b) in g_bold1.0.into_iter().zip(g_bold2.0.into_iter()) {
        g_bold.0.push(multiexp_vartime(&[(x_inv, a), (x, b)]));
      }
      h_bold = PointVector(Vec::with_capacity(h_bold1.len()));
      for (a, b) in h_bold1.0.into_iter().zip(h_bold2.0.into_iter()) {
        h_bold.0.push(multiexp_vartime(&[(x, a), (x_inv, b)]));
      }

      // 32-34
      a = (a1 * x) + &(a2 * x_inv);
      b = (b1 * x_inv) + &(b2 * x);
    }

    // `if n = 1` case from line 14-17

    // Sanity
    debug_assert_eq!(g_bold.len(), 1);
    debug_assert_eq!(h_bold.len(), 1);
    debug_assert_eq!(a.len(), 1);
    debug_assert_eq!(b.len(), 1);

    // We simply send a/b
    Ok(IpProof { L: L_vec, R: R_vec, a: a[0], b: b[0] })
  }

  /// Queue an Inner-Product proof for batch verification.
  ///
  /// This will return Err if there is an error. This will return Ok if the proof was successfully
  /// queued for batch verification. The caller is required to verify the batch in order to ensure
  /// the proof is actually correct.
  pub(crate) fn verify(
    self,
    verifier: &mut BulletproofsBatchVerifier,
    ip_rows: usize,
    mut transcript: Scalar,
    verifier_weight: Scalar,
    proof: IpProof,
  ) -> Result<(), IpError> {
    let generators = crate::original::GENERATORS();
    let g_bold_slice = &generators.G[.. ip_rows];
    let h_bold_slice = &generators.H[.. ip_rows];

    let IpStatement { h_bold_weights, u } = self;

    // Verify the L/R lengths
    {
      // Calculate the discrete log w.r.t. 2 for the amount of generators present
      let mut lr_len = 0;
      while (1 << lr_len) < g_bold_slice.len() {
        lr_len += 1;
      }

      // This proof has less/more terms than the passed in generators are for
      if proof.L.len() != lr_len {
        Err(IpError::IncorrectAmountOfGenerators)?;
      }
      if proof.L.len() != proof.R.len() {
        Err(IpError::DifferingLrLengths)?;
      }
    }

    // Again, we start with the `else: (n > 1)` case

    // We need x, x_inv per lines 25-27 for lines 28-31
    let mut xs = Vec::with_capacity(proof.L.len());
    for (L, R) in proof.L.iter().zip(proof.R.iter()) {
      transcript = Self::transcript_L_R(transcript, *L, *R);
      xs.push(transcript);
    }

    // We calculate their inverse in batch
    let mut x_invs = xs.clone();
    Scalar::batch_invert(&mut x_invs);

    // Now, with x and x_inv, we need to calculate g_bold', h_bold', P'
    //
    // For the sake of performance, we solely want to calculate all of these in terms of scalings
    // for g_bold, h_bold, P, and don't want to actually perform intermediary scalings of the
    // points
    //
    // L and R are easy, as it's simply x**2, x**-2
    //
    // For the series of g_bold, h_bold, we use the `challenge_products` function
    // For how that works, please see its own documentation
    let product_cache = {
      let mut challenges = Vec::with_capacity(proof.L.len());

      let x_iter = xs.into_iter().zip(x_invs);
      let lr_iter = proof.L.into_iter().zip(proof.R);
      for ((x, x_inv), (L, R)) in x_iter.zip(lr_iter) {
        challenges.push((x, x_inv));
        // mul_by_cofactor undoes the prover's INV_EIGHT scaling and clears torsion
        verifier.0.other.push((verifier_weight * (x * x), L.mul_by_cofactor()));
        verifier.0.other.push((verifier_weight * (x_inv * x_inv), R.mul_by_cofactor()));
      }

      challenge_products(&challenges)
    };

    // And now for the `if n = 1` case
    let c = proof.a * proof.b;

    // The multiexp of these terms equate to the final permutation of P
    // We now add terms for a * g_bold' + b * h_bold' b + c * u, with the scalars negative such
    // that the terms sum to 0 for an honest prover

    // The g_bold * a term case from line 16
    #[allow(clippy::needless_range_loop)]
    for i in 0 .. g_bold_slice.len() {
      verifier.0.g_bold[i] -= verifier_weight * product_cache[i] * proof.a;
    }
    // The h_bold * b term case from line 16
    for i in 0 .. h_bold_slice.len() {
      verifier.0.h_bold[i] -=
        verifier_weight * product_cache[product_cache.len() - 1 - i] * proof.b * h_bold_weights[i];
    }
    // The c * u term case from line 16
    // NOTE(review): `u` here is a scalar weight on the verifier's H accumulator, consistent with
    // `new_without_P_transcript` taking u as a discrete logarithm of H — confirm against the
    // BulletproofsBatchVerifier definition
    verifier.0.h -= verifier_weight * c * u;

    Ok(())
  }
}
|
||||||
344
coins/monero/ringct/bulletproofs/src/original/mod.rs
Normal file
344
coins/monero/ringct/bulletproofs/src/original/mod.rs
Normal file
@@ -0,0 +1,344 @@
|
|||||||
|
use std_shims::{sync::OnceLock, vec::Vec};
|
||||||
|
|
||||||
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, Scalar, EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_generators::{H, Generators, MAX_COMMITMENTS, COMMITMENT_BITS};
|
||||||
|
use monero_primitives::{Commitment, INV_EIGHT, keccak256_to_scalar};
|
||||||
|
use crate::{core::multiexp, scalar_vector::ScalarVector, BulletproofsBatchVerifier};
|
||||||
|
|
||||||
|
pub(crate) mod inner_product;
|
||||||
|
use inner_product::*;
|
||||||
|
pub(crate) use inner_product::IpProof;
|
||||||
|
|
||||||
|
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
|
||||||
|
|
||||||
|
// The statement for an aggregate range proof: the Pedersen commitments the ranges are proven for.
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement<'a> {
  commitments: &'a [EdwardsPoint],
}
|
||||||
|
|
||||||
|
// The witness for an aggregate range proof: the openings (amount, mask) of the commitments.
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeWitness {
  commitments: Vec<Commitment>,
}
|
||||||
|
|
||||||
|
/// An aggregate range proof for the original Bulletproofs protocol.
// Element names follow the notation of the Bulletproofs paper's aggregated range-proof protocol.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
  pub(crate) A: EdwardsPoint,
  pub(crate) S: EdwardsPoint,
  pub(crate) T1: EdwardsPoint,
  pub(crate) T2: EdwardsPoint,
  pub(crate) tau_x: Scalar,
  pub(crate) mu: Scalar,
  pub(crate) t_hat: Scalar,
  // The Inner-Product argument proving knowledge of the l/r vectors
  pub(crate) ip: IpProof,
}
|
||||||
|
|
||||||
|
impl<'a> AggregateRangeStatement<'a> {
|
||||||
|
pub(crate) fn new(commitments: &'a [EdwardsPoint]) -> Option<Self> {
|
||||||
|
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
Some(Self { commitments })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl AggregateRangeWitness {
|
||||||
|
pub(crate) fn new(commitments: Vec<Commitment>) -> Option<Self> {
|
||||||
|
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
|
||||||
|
None?;
|
||||||
|
}
|
||||||
|
Some(Self { commitments })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> AggregateRangeStatement<'a> {
  // Seed the Fiat-Shamir transcript with the commitments (in their INV_EIGHT-scaled form, as
  // serialized on-chain), also returning those scaled commitments
  fn initial_transcript(&self) -> (Scalar, Vec<EdwardsPoint>) {
    let V = self.commitments.iter().map(|c| c * INV_EIGHT()).collect::<Vec<_>>();
    (keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
  }

  // Transcript A and S, deriving the y and z challenges
  fn transcript_A_S(transcript: Scalar, A: EdwardsPoint, S: EdwardsPoint) -> (Scalar, Scalar) {
    let mut buf = Vec::with_capacity(96);
    buf.extend(transcript.to_bytes());
    buf.extend(A.compress().to_bytes());
    buf.extend(S.compress().to_bytes());
    let y = keccak256_to_scalar(buf);
    // z is derived by hashing y again
    let z = keccak256_to_scalar(y.to_bytes());
    (y, z)
  }

  // Transcript T1 and T2, deriving the x challenge
  // NOTE(review): the transcript is deliberately included twice, presumably to match Monero's
  // `hash_cache_mash` transcript format — confirm against monero-project's bulletproofs.cc
  fn transcript_T12(transcript: Scalar, T1: EdwardsPoint, T2: EdwardsPoint) -> Scalar {
    let mut buf = Vec::with_capacity(128);
    buf.extend(transcript.to_bytes());
    buf.extend(transcript.to_bytes());
    buf.extend(T1.compress().to_bytes());
    buf.extend(T2.compress().to_bytes());
    keccak256_to_scalar(buf)
  }

  // Transcript tau_x, mu, t_hat, deriving the challenge for the Inner-Product argument
  // NOTE(review): the transcript is again included twice, as in transcript_T12
  fn transcript_tau_x_mu_t_hat(
    transcript: Scalar,
    tau_x: Scalar,
    mu: Scalar,
    t_hat: Scalar,
  ) -> Scalar {
    let mut buf = Vec::with_capacity(128);
    buf.extend(transcript.to_bytes());
    buf.extend(transcript.to_bytes());
    buf.extend(tau_x.to_bytes());
    buf.extend(mu.to_bytes());
    buf.extend(t_hat.to_bytes());
    keccak256_to_scalar(buf)
  }

  // Prove this statement with the given witness.
  //
  // Returns None if the witness's commitments don't match the statement's commitments.
  #[allow(clippy::needless_pass_by_value)]
  pub(crate) fn prove(
    self,
    rng: &mut (impl RngCore + CryptoRng),
    witness: AggregateRangeWitness,
  ) -> Option<AggregateRangeProof> {
    // The witness must actually open the statement's commitments
    if self.commitments != witness.commitments.iter().map(Commitment::calculate).collect::<Vec<_>>()
    {
      None?
    };

    let generators = GENERATORS();

    let (mut transcript, _) = self.initial_transcript();

    // Find out the padded amount of commitments
    let mut padded_pow_of_2 = 1;
    while padded_pow_of_2 < witness.commitments.len() {
      padded_pow_of_2 <<= 1;
    }

    // aL is the little-endian bit decomposition of each amount, COMMITMENT_BITS bits apiece
    let mut aL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    for (i, commitment) in witness.commitments.iter().enumerate() {
      let mut amount = commitment.amount;
      for j in 0 .. COMMITMENT_BITS {
        aL[(i * COMMITMENT_BITS) + j] = Scalar::from(amount & 1);
        amount >>= 1;
      }
    }
    // aR = aL - 1, so each aL[i] * aR[i] = 0 for a valid bit decomposition
    let aR = aL.clone() - Scalar::ONE;

    let alpha = Scalar::random(&mut *rng);

    // A commits to aL and aR with blinding factor alpha
    let A = {
      let mut terms = Vec::with_capacity(1 + (2 * aL.len()));
      terms.push((alpha, ED25519_BASEPOINT_POINT));
      for (aL, G) in aL.0.iter().zip(&generators.G) {
        terms.push((*aL, *G));
      }
      for (aR, H) in aR.0.iter().zip(&generators.H) {
        terms.push((*aR, *H));
      }
      let res = multiexp(&terms) * INV_EIGHT();
      // Zeroize the witness-derived scalars once used
      terms.zeroize();
      res
    };

    // sL, sR are random masking vectors for the bit vectors
    let mut sL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    let mut sR = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    for i in 0 .. (padded_pow_of_2 * COMMITMENT_BITS) {
      sL[i] = Scalar::random(&mut *rng);
      sR[i] = Scalar::random(&mut *rng);
    }
    let rho = Scalar::random(&mut *rng);

    // S commits to sL and sR with blinding factor rho
    let S = {
      let mut terms = Vec::with_capacity(1 + (2 * sL.len()));
      terms.push((rho, ED25519_BASEPOINT_POINT));
      for (sL, G) in sL.0.iter().zip(&generators.G) {
        terms.push((*sL, *G));
      }
      for (sR, H) in sR.0.iter().zip(&generators.H) {
        terms.push((*sR, *H));
      }
      let res = multiexp(&terms) * INV_EIGHT();
      terms.zeroize();
      res
    };

    let (y, z) = Self::transcript_A_S(transcript, A, S);
    transcript = z;

    let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);

    // l, r are the coefficient vectors (constant term, x term) of the polynomials l(x), r(x)
    let l = [aL - z, sL];
    let y_pow_n = ScalarVector::powers(y, aR.len());
    let mut r = [((aR + z) * &y_pow_n), sR * &y_pow_n];
    {
      // Fold z^(2+j) * 2^i into r's constant term, per the aggregated range-proof protocol
      let mut z_current = z * z;
      for j in 0 .. padded_pow_of_2 {
        for i in 0 .. COMMITMENT_BITS {
          r[0].0[(j * COMMITMENT_BITS) + i] += z_current * twos[i];
        }
        z_current *= z;
      }
    }
    // t1, t2 are the x and x^2 coefficients of t(x) = <l(x), r(x)>
    let t1 = (l[0].clone().inner_product(&r[1])) + (r[0].clone().inner_product(&l[1]));
    let t2 = l[1].clone().inner_product(&r[1]);

    // T1, T2 commit to t1, t2 with blinding factors tau_1, tau_2
    let tau_1 = Scalar::random(&mut *rng);
    let T1 = {
      let mut T1_terms = [(t1, H()), (tau_1, ED25519_BASEPOINT_POINT)];
      for term in &mut T1_terms {
        term.0 *= INV_EIGHT();
      }
      let T1 = multiexp(&T1_terms);
      T1_terms.zeroize();
      T1
    };
    let tau_2 = Scalar::random(&mut *rng);
    let T2 = {
      let mut T2_terms = [(t2, H()), (tau_2, ED25519_BASEPOINT_POINT)];
      for term in &mut T2_terms {
        term.0 *= INV_EIGHT();
      }
      let T2 = multiexp(&T2_terms);
      T2_terms.zeroize();
      T2
    };

    transcript = Self::transcript_T12(transcript, T1, T2);
    let x = transcript;

    // Evaluate l(x), r(x), and t_hat = <l(x), r(x)>
    let [l0, l1] = l;
    let l = l0 + &(l1 * x);
    let [r0, r1] = r;
    let r = r0 + &(r1 * x);
    let t_hat = l.clone().inner_product(&r);
    // tau_x blinds t_hat: tau_2 x^2 + tau_1 x, plus z^(2+j) * mask per commitment
    let mut tau_x = ((tau_2 * x) + tau_1) * x;
    {
      let mut z_current = z * z;
      for commitment in &witness.commitments {
        tau_x += z_current * commitment.mask;
        z_current *= z;
      }
    }
    let mu = alpha + (rho * x);

    let y_inv_pow_n = ScalarVector::powers(y.invert(), l.len());

    transcript = Self::transcript_tau_x_mu_t_hat(transcript, tau_x, mu, t_hat);
    let x_ip = transcript;

    // The Inner-Product argument proves knowledge of l, r with <l, r> = t_hat
    // The unwraps are safe as l/r are non-empty, equal-length, power-of-two-sized vectors
    let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
      .prove(transcript, IpWitness::new(l, r).unwrap())
      .unwrap();

    let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
    // In debug builds, sanity-check the freshly-created proof verifies
    #[cfg(debug_assertions)]
    {
      let mut verifier = BulletproofsBatchVerifier::default();
      debug_assert!(self.verify(rng, &mut verifier, res.clone()));
      debug_assert!(verifier.verify());
    }
    Some(res)
  }

  // Queue verification of this proof into the passed-in batch verifier.
  //
  // Returns false if the proof is malformed. Returns true if it was queued; the batch verifier
  // must still be executed for the proof to actually be verified.
  #[must_use]
  pub(crate) fn verify(
    self,
    rng: &mut (impl RngCore + CryptoRng),
    verifier: &mut BulletproofsBatchVerifier,
    mut proof: AggregateRangeProof,
  ) -> bool {
    // Find out the padded amount of commitments, and from it the amount of Inner-Product rows
    let mut padded_pow_of_2 = 1;
    while padded_pow_of_2 < self.commitments.len() {
      padded_pow_of_2 <<= 1;
    }
    let ip_rows = padded_pow_of_2 * COMMITMENT_BITS;

    // Grow the verifier's per-generator weight vectors to cover this proof's rows
    while verifier.0.g_bold.len() < ip_rows {
      verifier.0.g_bold.push(Scalar::ZERO);
      verifier.0.h_bold.push(Scalar::ZERO);
    }

    let (mut transcript, mut commitments) = self.initial_transcript();
    // Clear torsion/undo the INV_EIGHT scaling
    for commitment in &mut commitments {
      *commitment = commitment.mul_by_cofactor();
    }

    // Re-derive all challenges from the transcript
    let (y, z) = Self::transcript_A_S(transcript, proof.A, proof.S);
    transcript = z;
    transcript = Self::transcript_T12(transcript, proof.T1, proof.T2);
    let x = transcript;
    transcript = Self::transcript_tau_x_mu_t_hat(transcript, proof.tau_x, proof.mu, proof.t_hat);
    let x_ip = transcript;

    proof.A = proof.A.mul_by_cofactor();
    proof.S = proof.S.mul_by_cofactor();
    proof.T1 = proof.T1.mul_by_cofactor();
    proof.T2 = proof.T2.mul_by_cofactor();

    let y_pow_n = ScalarVector::powers(y, ip_rows);
    let y_inv_pow_n = ScalarVector::powers(y.invert(), ip_rows);

    let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);

    // 65
    // (line references are to the equations of the Bulletproofs paper)
    {
      // A fresh random weight per proof keeps distinct proofs in the batch from cancelling
      let weight = Scalar::random(&mut *rng);
      verifier.0.h += weight * proof.t_hat;
      verifier.0.g += weight * proof.tau_x;

      // Now that we've accumulated the lhs, negate the weight and accumulate the rhs
      // These will now sum to 0 if equal
      let weight = -weight;

      verifier.0.h += weight * (z - (z * z)) * y_pow_n.sum();

      let mut z_current = z * z;
      for commitment in &commitments {
        verifier.0.other.push((weight * z_current, *commitment));
        z_current *= z;
      }

      let mut z_current = z * z * z;
      for _ in 0 .. padded_pow_of_2 {
        verifier.0.h -= weight * z_current * twos.clone().sum();
        z_current *= z;
      }
      verifier.0.other.push((weight * x, proof.T1));
      verifier.0.other.push((weight * (x * x), proof.T2));
    }

    let ip_weight = Scalar::random(&mut *rng);

    // 66
    verifier.0.other.push((ip_weight, proof.A));
    verifier.0.other.push((ip_weight * x, proof.S));
    // TODO: g_sum
    for i in 0 .. ip_rows {
      verifier.0.g_bold[i] += ip_weight * -z;
    }
    // TODO: h_sum
    for i in 0 .. ip_rows {
      verifier.0.h_bold[i] += ip_weight * z;
    }
    {
      // Per-commitment z^(2+j) * 2^i terms, scaled by y^-i to map onto the h_bold generators
      let mut z_current = z * z;
      for j in 0 .. padded_pow_of_2 {
        for i in 0 .. COMMITMENT_BITS {
          let full_i = (j * COMMITMENT_BITS) + i;
          verifier.0.h_bold[full_i] += ip_weight * y_inv_pow_n[full_i] * z_current * twos[i];
        }
        z_current *= z;
      }
    }
    verifier.0.h += ip_weight * x_ip * proof.t_hat;

    // 67, 68
    verifier.0.g += ip_weight * -proof.mu;
    let res = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
      .verify(verifier, ip_rows, transcript, ip_weight, proof.ip);
    res.is_ok()
  }
}
|
||||||
@@ -1,93 +1,85 @@
|
|||||||
use std_shims::vec::Vec;
|
use std_shims::{vec, vec::Vec};
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
|
||||||
|
|
||||||
use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
|
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
use group::{
|
|
||||||
ff::{Field, PrimeField},
|
use monero_primitives::{INV_EIGHT, Commitment, keccak256_to_scalar};
|
||||||
Group, GroupEncoding,
|
|
||||||
};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
Commitment,
|
batch_verifier::BulletproofsPlusBatchVerifier,
|
||||||
ringct::{
|
core::{MAX_COMMITMENTS, COMMITMENT_BITS, multiexp, multiexp_vartime},
|
||||||
bulletproofs::core::{MAX_M, N},
|
plus::{
|
||||||
bulletproofs::plus::{
|
ScalarVector, PointVector, GeneratorsList, BpPlusGenerators,
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators,
|
transcript::*,
|
||||||
transcript::*,
|
weighted_inner_product::{WipStatement, WipWitness, WipProof},
|
||||||
weighted_inner_product::{WipStatement, WipWitness, WipProof},
|
padded_pow_of_2, u64_decompose,
|
||||||
padded_pow_of_2, u64_decompose,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
// Figure 3
|
// Figure 3 of the Bulletproofs+ Paper
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct AggregateRangeStatement {
|
pub(crate) struct AggregateRangeStatement<'a> {
|
||||||
generators: Generators,
|
generators: BpPlusGenerators,
|
||||||
V: Vec<EdwardsPoint>,
|
V: &'a [EdwardsPoint],
|
||||||
}
|
|
||||||
|
|
||||||
impl Zeroize for AggregateRangeStatement {
|
|
||||||
fn zeroize(&mut self) {
|
|
||||||
self.V.zeroize();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct AggregateRangeWitness {
|
pub(crate) struct AggregateRangeWitness(Vec<Commitment>);
|
||||||
values: Vec<u64>,
|
|
||||||
gammas: Vec<Scalar>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AggregateRangeWitness {
|
impl AggregateRangeWitness {
|
||||||
pub(crate) fn new(commitments: &[Commitment]) -> Option<Self> {
|
pub(crate) fn new(commitments: Vec<Commitment>) -> Option<Self> {
|
||||||
if commitments.is_empty() || (commitments.len() > MAX_M) {
|
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut values = Vec::with_capacity(commitments.len());
|
Some(AggregateRangeWitness(commitments))
|
||||||
let mut gammas = Vec::with_capacity(commitments.len());
|
|
||||||
for commitment in commitments {
|
|
||||||
values.push(commitment.amount);
|
|
||||||
gammas.push(Scalar(commitment.mask));
|
|
||||||
}
|
|
||||||
Some(AggregateRangeWitness { values, gammas })
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Internal structure representing a Bulletproof+, as defined by Monero..
|
||||||
|
#[doc(hidden)]
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub struct AggregateRangeProof {
|
pub struct AggregateRangeProof {
|
||||||
pub(crate) A: EdwardsPoint,
|
pub(crate) A: EdwardsPoint,
|
||||||
pub(crate) wip: WipProof,
|
pub(crate) wip: WipProof,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AggregateRangeStatement {
|
struct AHatComputation {
|
||||||
pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
|
y: Scalar,
|
||||||
if V.is_empty() || (V.len() > MAX_M) {
|
d_descending_y_plus_z: ScalarVector,
|
||||||
|
y_mn_plus_one: Scalar,
|
||||||
|
z: Scalar,
|
||||||
|
z_pow: ScalarVector,
|
||||||
|
A_hat: EdwardsPoint,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> AggregateRangeStatement<'a> {
|
||||||
|
pub(crate) fn new(V: &'a [EdwardsPoint]) -> Option<Self> {
|
||||||
|
if V.is_empty() || (V.len() > MAX_COMMITMENTS) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Self { generators: Generators::new(), V })
|
Some(Self { generators: BpPlusGenerators::new(), V })
|
||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
|
fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
|
||||||
let y = hash_to_scalar(&[transcript.to_repr().as_ref(), A.to_bytes().as_ref()].concat());
|
let y = keccak256_to_scalar(
|
||||||
let z = hash_to_scalar(y.to_bytes().as_ref());
|
[transcript.to_bytes().as_ref(), A.compress().to_bytes().as_ref()].concat(),
|
||||||
|
);
|
||||||
|
let z = keccak256_to_scalar(y.to_bytes().as_ref());
|
||||||
*transcript = z;
|
*transcript = z;
|
||||||
(y, z)
|
(y, z)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn d_j(j: usize, m: usize) -> ScalarVector {
|
fn d_j(j: usize, m: usize) -> ScalarVector {
|
||||||
let mut d_j = Vec::with_capacity(m * N);
|
let mut d_j = Vec::with_capacity(m * COMMITMENT_BITS);
|
||||||
for _ in 0 .. (j - 1) * N {
|
for _ in 0 .. (j - 1) * COMMITMENT_BITS {
|
||||||
d_j.push(Scalar::ZERO);
|
d_j.push(Scalar::ZERO);
|
||||||
}
|
}
|
||||||
d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), N).0);
|
d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS).0);
|
||||||
for _ in 0 .. (m - j) * N {
|
for _ in 0 .. (m - j) * COMMITMENT_BITS {
|
||||||
d_j.push(Scalar::ZERO);
|
d_j.push(Scalar::ZERO);
|
||||||
}
|
}
|
||||||
ScalarVector(d_j)
|
ScalarVector(d_j)
|
||||||
@@ -95,23 +87,26 @@ impl AggregateRangeStatement {
|
|||||||
|
|
||||||
fn compute_A_hat(
|
fn compute_A_hat(
|
||||||
mut V: PointVector,
|
mut V: PointVector,
|
||||||
generators: &Generators,
|
generators: &BpPlusGenerators,
|
||||||
transcript: &mut Scalar,
|
transcript: &mut Scalar,
|
||||||
mut A: EdwardsPoint,
|
mut A: EdwardsPoint,
|
||||||
) -> (Scalar, ScalarVector, Scalar, Scalar, ScalarVector, EdwardsPoint) {
|
) -> AHatComputation {
|
||||||
let (y, z) = Self::transcript_A(transcript, A);
|
let (y, z) = Self::transcript_A(transcript, A);
|
||||||
A = A.mul_by_cofactor();
|
A = A.mul_by_cofactor();
|
||||||
|
|
||||||
while V.len() < padded_pow_of_2(V.len()) {
|
while V.len() < padded_pow_of_2(V.len()) {
|
||||||
V.0.push(EdwardsPoint::identity());
|
V.0.push(EdwardsPoint::identity());
|
||||||
}
|
}
|
||||||
let mn = V.len() * N;
|
let mn = V.len() * COMMITMENT_BITS;
|
||||||
|
|
||||||
|
// 2, 4, 6, 8... powers of z, of length equivalent to the amount of commitments
|
||||||
let mut z_pow = Vec::with_capacity(V.len());
|
let mut z_pow = Vec::with_capacity(V.len());
|
||||||
|
// z**2
|
||||||
|
z_pow.push(z * z);
|
||||||
|
|
||||||
let mut d = ScalarVector::new(mn);
|
let mut d = ScalarVector::new(mn);
|
||||||
for j in 1 ..= V.len() {
|
for j in 1 ..= V.len() {
|
||||||
z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
|
z_pow.push(*z_pow.last().unwrap() * z_pow[0]);
|
||||||
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
|
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -137,23 +132,23 @@ impl AggregateRangeStatement {
|
|||||||
let neg_z = -z;
|
let neg_z = -z;
|
||||||
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
|
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
|
||||||
for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
|
for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
|
||||||
A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
|
A_terms.push((neg_z, generators.generator(GeneratorsList::GBold, i)));
|
||||||
A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold1, i)));
|
A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold, i)));
|
||||||
}
|
}
|
||||||
A_terms.push((y_mn_plus_one, commitment_accum));
|
A_terms.push((y_mn_plus_one, commitment_accum));
|
||||||
A_terms.push((
|
A_terms.push((
|
||||||
((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * z.square())),
|
((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * (z * z))),
|
||||||
Generators::g(),
|
BpPlusGenerators::g(),
|
||||||
));
|
));
|
||||||
|
|
||||||
(
|
AHatComputation {
|
||||||
y,
|
y,
|
||||||
d_descending_y_plus_z,
|
d_descending_y_plus_z,
|
||||||
y_mn_plus_one,
|
y_mn_plus_one,
|
||||||
z,
|
z,
|
||||||
ScalarVector(z_pow),
|
z_pow: ScalarVector(z_pow),
|
||||||
A + multiexp_vartime(&A_terms),
|
A_hat: A + multiexp_vartime(&A_terms),
|
||||||
)
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
||||||
@@ -162,13 +157,11 @@ impl AggregateRangeStatement {
|
|||||||
witness: &AggregateRangeWitness,
|
witness: &AggregateRangeWitness,
|
||||||
) -> Option<AggregateRangeProof> {
|
) -> Option<AggregateRangeProof> {
|
||||||
// Check for consistency with the witness
|
// Check for consistency with the witness
|
||||||
if self.V.len() != witness.values.len() {
|
if self.V.len() != witness.0.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
for (commitment, (value, gamma)) in
|
for (commitment, witness) in self.V.iter().zip(witness.0.iter()) {
|
||||||
self.V.iter().zip(witness.values.iter().zip(witness.gammas.iter()))
|
if witness.calculate() != *commitment {
|
||||||
{
|
|
||||||
if Commitment::new(**gamma, *value).calculate() != **commitment {
|
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -181,22 +174,28 @@ impl AggregateRangeStatement {
|
|||||||
// Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
|
// Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
|
||||||
// clearing its cofactor without mutating the value
|
// clearing its cofactor without mutating the value
|
||||||
// For some reason, these values are transcripted * INV_EIGHT, not as transmitted
|
// For some reason, these values are transcripted * INV_EIGHT, not as transmitted
|
||||||
let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
|
let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
|
||||||
let mut transcript = initial_transcript(V.iter());
|
let mut transcript = initial_transcript(V.iter());
|
||||||
V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());
|
let mut V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
||||||
|
|
||||||
// Pad V
|
// Pad V
|
||||||
while V.len() < padded_pow_of_2(V.len()) {
|
while V.len() < padded_pow_of_2(V.len()) {
|
||||||
V.push(EdwardsPoint::identity());
|
V.push(EdwardsPoint::identity());
|
||||||
}
|
}
|
||||||
|
|
||||||
let generators = generators.reduce(V.len() * N);
|
let generators = generators.reduce(V.len() * COMMITMENT_BITS);
|
||||||
|
|
||||||
let mut d_js = Vec::with_capacity(V.len());
|
let mut d_js = Vec::with_capacity(V.len());
|
||||||
let mut a_l = ScalarVector(Vec::with_capacity(V.len() * N));
|
let mut a_l = ScalarVector(Vec::with_capacity(V.len() * COMMITMENT_BITS));
|
||||||
for j in 1 ..= V.len() {
|
for j in 1 ..= V.len() {
|
||||||
d_js.push(Self::d_j(j, V.len()));
|
d_js.push(Self::d_j(j, V.len()));
|
||||||
a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
|
#[allow(clippy::map_unwrap_or)]
|
||||||
|
a_l.0.append(
|
||||||
|
&mut u64_decompose(
|
||||||
|
*witness.0.get(j - 1).map(|commitment| &commitment.amount).unwrap_or(&0),
|
||||||
|
)
|
||||||
|
.0,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
let a_r = a_l.clone() - Scalar::ONE;
|
let a_r = a_l.clone() - Scalar::ONE;
|
||||||
@@ -205,26 +204,26 @@ impl AggregateRangeStatement {
|
|||||||
|
|
||||||
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
|
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
|
||||||
for (i, a_l) in a_l.0.iter().enumerate() {
|
for (i, a_l) in a_l.0.iter().enumerate() {
|
||||||
A_terms.push((*a_l, generators.generator(GeneratorsList::GBold1, i)));
|
A_terms.push((*a_l, generators.generator(GeneratorsList::GBold, i)));
|
||||||
}
|
}
|
||||||
for (i, a_r) in a_r.0.iter().enumerate() {
|
for (i, a_r) in a_r.0.iter().enumerate() {
|
||||||
A_terms.push((*a_r, generators.generator(GeneratorsList::HBold1, i)));
|
A_terms.push((*a_r, generators.generator(GeneratorsList::HBold, i)));
|
||||||
}
|
}
|
||||||
A_terms.push((alpha, Generators::h()));
|
A_terms.push((alpha, BpPlusGenerators::h()));
|
||||||
let mut A = multiexp(&A_terms);
|
let mut A = multiexp(&A_terms);
|
||||||
A_terms.zeroize();
|
A_terms.zeroize();
|
||||||
|
|
||||||
// Multiply by INV_EIGHT per earlier commentary
|
// Multiply by INV_EIGHT per earlier commentary
|
||||||
A.0 *= crate::INV_EIGHT();
|
A *= INV_EIGHT();
|
||||||
|
|
||||||
let (y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat) =
|
let AHatComputation { y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat } =
|
||||||
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);
|
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);
|
||||||
|
|
||||||
let a_l = a_l - z;
|
let a_l = a_l - z;
|
||||||
let a_r = a_r + &d_descending_y_plus_z;
|
let a_r = a_r + &d_descending_y_plus_z;
|
||||||
let mut alpha = alpha;
|
let mut alpha = alpha;
|
||||||
for j in 1 ..= witness.gammas.len() {
|
for j in 1 ..= witness.0.len() {
|
||||||
alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;
|
alpha += z_pow[j - 1] * witness.0[j - 1].mask * y_mn_plus_one;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(AggregateRangeProof {
|
Some(AggregateRangeProof {
|
||||||
@@ -235,23 +234,22 @@ impl AggregateRangeStatement {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
|
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
verifier: &mut BatchVerifier<Id, EdwardsPoint>,
|
verifier: &mut BulletproofsPlusBatchVerifier,
|
||||||
id: Id,
|
|
||||||
proof: AggregateRangeProof,
|
proof: AggregateRangeProof,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let Self { generators, V } = self;
|
let Self { generators, V } = self;
|
||||||
|
|
||||||
let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
|
let V = V.iter().map(|V| V * INV_EIGHT()).collect::<Vec<_>>();
|
||||||
let mut transcript = initial_transcript(V.iter());
|
let mut transcript = initial_transcript(V.iter());
|
||||||
V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());
|
let V = V.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
|
||||||
|
|
||||||
let generators = generators.reduce(V.len() * N);
|
let generators = generators.reduce(V.len() * COMMITMENT_BITS);
|
||||||
|
|
||||||
let (y, _, _, _, _, A_hat) =
|
let AHatComputation { y, A_hat, .. } =
|
||||||
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
|
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
|
||||||
WipStatement::new(generators, A_hat, y).verify(rng, verifier, id, transcript, proof.wip)
|
WipStatement::new(generators, A_hat, y).verify(rng, verifier, transcript, proof.wip)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,11 +1,12 @@
|
|||||||
#![allow(non_snake_case)]
|
#![allow(non_snake_case)]
|
||||||
|
|
||||||
use group::Group;
|
use std_shims::sync::OnceLock;
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
pub(crate) use crate::ringct::bulletproofs::scalar_vector::ScalarVector;
|
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
mod point_vector;
|
|
||||||
pub(crate) use point_vector::PointVector;
|
use monero_generators::{H, Generators};
|
||||||
|
|
||||||
|
pub(crate) use crate::{scalar_vector::ScalarVector, point_vector::PointVector};
|
||||||
|
|
||||||
pub(crate) mod transcript;
|
pub(crate) mod transcript;
|
||||||
pub(crate) mod weighted_inner_product;
|
pub(crate) mod weighted_inner_product;
|
||||||
@@ -23,55 +24,50 @@ pub(crate) fn padded_pow_of_2(i: usize) -> usize {
|
|||||||
|
|
||||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||||
pub(crate) enum GeneratorsList {
|
pub(crate) enum GeneratorsList {
|
||||||
GBold1,
|
GBold,
|
||||||
HBold1,
|
HBold,
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: Table these
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct Generators {
|
pub(crate) struct BpPlusGenerators {
|
||||||
g_bold1: &'static [EdwardsPoint],
|
g_bold: &'static [EdwardsPoint],
|
||||||
h_bold1: &'static [EdwardsPoint],
|
h_bold: &'static [EdwardsPoint],
|
||||||
}
|
}
|
||||||
|
|
||||||
mod generators {
|
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
||||||
use std_shims::sync::OnceLock;
|
|
||||||
use monero_generators::Generators;
|
|
||||||
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Generators {
|
impl BpPlusGenerators {
|
||||||
#[allow(clippy::new_without_default)]
|
#[allow(clippy::new_without_default)]
|
||||||
pub(crate) fn new() -> Self {
|
pub(crate) fn new() -> Self {
|
||||||
let gens = generators::GENERATORS();
|
let gens = GENERATORS();
|
||||||
Generators { g_bold1: &gens.G, h_bold1: &gens.H }
|
BpPlusGenerators { g_bold: &gens.G, h_bold: &gens.H }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn len(&self) -> usize {
|
pub(crate) fn len(&self) -> usize {
|
||||||
self.g_bold1.len()
|
self.g_bold.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn g() -> EdwardsPoint {
|
pub(crate) fn g() -> EdwardsPoint {
|
||||||
dalek_ff_group::EdwardsPoint(crate::H())
|
H()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn h() -> EdwardsPoint {
|
pub(crate) fn h() -> EdwardsPoint {
|
||||||
EdwardsPoint::generator()
|
ED25519_BASEPOINT_POINT
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
|
pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
|
||||||
match list {
|
match list {
|
||||||
GeneratorsList::GBold1 => self.g_bold1[i],
|
GeneratorsList::GBold => self.g_bold[i],
|
||||||
GeneratorsList::HBold1 => self.h_bold1[i],
|
GeneratorsList::HBold => self.h_bold[i],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn reduce(&self, generators: usize) -> Self {
|
pub(crate) fn reduce(&self, generators: usize) -> Self {
|
||||||
// Round to the nearest power of 2
|
// Round to the nearest power of 2
|
||||||
let generators = padded_pow_of_2(generators);
|
let generators = padded_pow_of_2(generators);
|
||||||
assert!(generators <= self.g_bold1.len());
|
assert!(generators <= self.g_bold.len());
|
||||||
|
|
||||||
Generators { g_bold1: &self.g_bold1[.. generators], h_bold1: &self.h_bold1[.. generators] }
|
BpPlusGenerators { g_bold: &self.g_bold[.. generators], h_bold: &self.h_bold[.. generators] }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
20
coins/monero/ringct/bulletproofs/src/plus/transcript.rs
Normal file
20
coins/monero/ringct/bulletproofs/src/plus/transcript.rs
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
use std_shims::{sync::OnceLock, vec::Vec};
|
||||||
|
|
||||||
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
|
|
||||||
|
use monero_generators::hash_to_point;
|
||||||
|
use monero_primitives::{keccak256, keccak256_to_scalar};
|
||||||
|
|
||||||
|
// Monero starts BP+ transcripts with the following constant.
|
||||||
|
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
|
||||||
|
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
|
||||||
|
// Why this uses a hash_to_point is completely unknown.
|
||||||
|
*TRANSCRIPT_CELL
|
||||||
|
.get_or_init(|| hash_to_point(keccak256(b"bulletproof_plus_transcript")).compress().to_bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
|
||||||
|
let commitments_hash =
|
||||||
|
keccak256_to_scalar(commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
|
||||||
|
keccak256_to_scalar([TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
|
||||||
|
}
|
||||||
@@ -1,24 +1,21 @@
|
|||||||
use std_shims::vec::Vec;
|
use std_shims::{vec, vec::Vec};
|
||||||
|
|
||||||
use rand_core::{RngCore, CryptoRng};
|
use rand_core::{RngCore, CryptoRng};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use multiexp::{BatchVerifier, multiexp, multiexp_vartime};
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
use group::{
|
|
||||||
ff::{Field, PrimeField},
|
|
||||||
GroupEncoding,
|
|
||||||
};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::ringct::bulletproofs::plus::{
|
use monero_primitives::{INV_EIGHT, keccak256_to_scalar};
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, transcript::*,
|
use crate::{
|
||||||
|
core::{multiexp, multiexp_vartime, challenge_products},
|
||||||
|
batch_verifier::BulletproofsPlusBatchVerifier,
|
||||||
|
plus::{ScalarVector, PointVector, GeneratorsList, BpPlusGenerators, padded_pow_of_2},
|
||||||
};
|
};
|
||||||
|
|
||||||
// Figure 1
|
// Figure 1 of the Bulletproofs+ paper
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub(crate) struct WipStatement {
|
pub(crate) struct WipStatement {
|
||||||
generators: Generators,
|
generators: BpPlusGenerators,
|
||||||
P: EdwardsPoint,
|
P: EdwardsPoint,
|
||||||
y: ScalarVector,
|
y: ScalarVector,
|
||||||
}
|
}
|
||||||
@@ -68,7 +65,7 @@ pub(crate) struct WipProof {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl WipStatement {
|
impl WipStatement {
|
||||||
pub(crate) fn new(generators: Generators, P: EdwardsPoint, y: Scalar) -> Self {
|
pub(crate) fn new(generators: BpPlusGenerators, P: EdwardsPoint, y: Scalar) -> Self {
|
||||||
debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));
|
debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));
|
||||||
|
|
||||||
// y ** n
|
// y ** n
|
||||||
@@ -82,16 +79,26 @@ impl WipStatement {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
|
fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
|
||||||
let e = hash_to_scalar(
|
let e = keccak256_to_scalar(
|
||||||
&[transcript.to_repr().as_ref(), L.to_bytes().as_ref(), R.to_bytes().as_ref()].concat(),
|
[
|
||||||
|
transcript.to_bytes().as_ref(),
|
||||||
|
L.compress().to_bytes().as_ref(),
|
||||||
|
R.compress().to_bytes().as_ref(),
|
||||||
|
]
|
||||||
|
.concat(),
|
||||||
);
|
);
|
||||||
*transcript = e;
|
*transcript = e;
|
||||||
e
|
e
|
||||||
}
|
}
|
||||||
|
|
||||||
fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
|
fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
|
||||||
let e = hash_to_scalar(
|
let e = keccak256_to_scalar(
|
||||||
&[transcript.to_repr().as_ref(), A.to_bytes().as_ref(), B.to_bytes().as_ref()].concat(),
|
[
|
||||||
|
transcript.to_bytes().as_ref(),
|
||||||
|
A.compress().to_bytes().as_ref(),
|
||||||
|
B.compress().to_bytes().as_ref(),
|
||||||
|
]
|
||||||
|
.concat(),
|
||||||
);
|
);
|
||||||
*transcript = e;
|
*transcript = e;
|
||||||
e
|
e
|
||||||
@@ -100,9 +107,6 @@ impl WipStatement {
|
|||||||
// Prover's variant of the shared code block to calculate G/H/P when n > 1
|
// Prover's variant of the shared code block to calculate G/H/P when n > 1
|
||||||
// Returns each permutation of G/H since the prover needs to do operation on each permutation
|
// Returns each permutation of G/H since the prover needs to do operation on each permutation
|
||||||
// P is dropped as it's unused in the prover's path
|
// P is dropped as it's unused in the prover's path
|
||||||
// TODO: It'd still probably be faster to keep in terms of the original generators, both between
|
|
||||||
// the reduced amount of group operations and the potential tabling of the generators under
|
|
||||||
// multiexp
|
|
||||||
#[allow(clippy::too_many_arguments)]
|
#[allow(clippy::too_many_arguments)]
|
||||||
fn next_G_H(
|
fn next_G_H(
|
||||||
transcript: &mut Scalar,
|
transcript: &mut Scalar,
|
||||||
@@ -119,7 +123,7 @@ impl WipStatement {
|
|||||||
debug_assert_eq!(g_bold1.len(), h_bold1.len());
|
debug_assert_eq!(g_bold1.len(), h_bold1.len());
|
||||||
|
|
||||||
let e = Self::transcript_L_R(transcript, L, R);
|
let e = Self::transcript_L_R(transcript, L, R);
|
||||||
let inv_e = e.invert().unwrap();
|
let inv_e = e.invert();
|
||||||
|
|
||||||
// This vartime is safe as all of these arguments are public
|
// This vartime is safe as all of these arguments are public
|
||||||
let mut new_g_bold = Vec::with_capacity(g_bold1.len());
|
let mut new_g_bold = Vec::with_capacity(g_bold1.len());
|
||||||
@@ -133,57 +137,12 @@ impl WipStatement {
|
|||||||
new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
|
new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
|
||||||
}
|
}
|
||||||
|
|
||||||
let e_square = e.square();
|
let e_square = e * e;
|
||||||
let inv_e_square = inv_e.square();
|
let inv_e_square = inv_e * inv_e;
|
||||||
|
|
||||||
(e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
|
(e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
This has room for optimization worth investigating further. It currently takes
|
|
||||||
an iterative approach. It can be optimized further via divide and conquer.
|
|
||||||
|
|
||||||
Assume there are 4 challenges.
|
|
||||||
|
|
||||||
Iterative approach (current):
|
|
||||||
1. Do the optimal multiplications across challenge column 0 and 1.
|
|
||||||
2. Do the optimal multiplications across that result and column 2.
|
|
||||||
3. Do the optimal multiplications across that result and column 3.
|
|
||||||
|
|
||||||
Divide and conquer (worth investigating further):
|
|
||||||
1. Do the optimal multiplications across challenge column 0 and 1.
|
|
||||||
2. Do the optimal multiplications across challenge column 2 and 3.
|
|
||||||
3. Multiply both results together.
|
|
||||||
|
|
||||||
When there are 4 challenges (n=16), the iterative approach does 28 multiplications
|
|
||||||
versus divide and conquer's 24.
|
|
||||||
*/
|
|
||||||
fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
|
|
||||||
let mut products = vec![Scalar::ONE; 1 << challenges.len()];
|
|
||||||
|
|
||||||
if !challenges.is_empty() {
|
|
||||||
products[0] = challenges[0].1;
|
|
||||||
products[1] = challenges[0].0;
|
|
||||||
|
|
||||||
for (j, challenge) in challenges.iter().enumerate().skip(1) {
|
|
||||||
let mut slots = (1 << (j + 1)) - 1;
|
|
||||||
while slots > 0 {
|
|
||||||
products[slots] = products[slots / 2] * challenge.0;
|
|
||||||
products[slots - 1] = products[slots / 2] * challenge.1;
|
|
||||||
|
|
||||||
slots = slots.saturating_sub(2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sanity check since if the above failed to populate, it'd be critical
|
|
||||||
for product in &products {
|
|
||||||
debug_assert!(!bool::from(product.is_zero()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
products
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
pub(crate) fn prove<R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
@@ -197,12 +156,12 @@ impl WipStatement {
|
|||||||
if generators.len() != witness.a.len() {
|
if generators.len() != witness.a.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
let (g, h) = (Generators::g(), Generators::h());
|
let (g, h) = (BpPlusGenerators::g(), BpPlusGenerators::h());
|
||||||
let mut g_bold = vec![];
|
let mut g_bold = vec![];
|
||||||
let mut h_bold = vec![];
|
let mut h_bold = vec![];
|
||||||
for i in 0 .. generators.len() {
|
for i in 0 .. generators.len() {
|
||||||
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
|
g_bold.push(generators.generator(GeneratorsList::GBold, i));
|
||||||
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
|
h_bold.push(generators.generator(GeneratorsList::HBold, i));
|
||||||
}
|
}
|
||||||
let mut g_bold = PointVector(g_bold);
|
let mut g_bold = PointVector(g_bold);
|
||||||
let mut h_bold = PointVector(h_bold);
|
let mut h_bold = PointVector(h_bold);
|
||||||
@@ -261,7 +220,7 @@ impl WipStatement {
|
|||||||
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
|
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
|
||||||
|
|
||||||
// TODO: Calculate these with a batch inversion
|
// TODO: Calculate these with a batch inversion
|
||||||
let y_inv_n_hat = y_n_hat.invert().unwrap();
|
let y_inv_n_hat = y_n_hat.invert();
|
||||||
|
|
||||||
let mut L_terms = (a1.clone() * y_inv_n_hat)
|
let mut L_terms = (a1.clone() * y_inv_n_hat)
|
||||||
.0
|
.0
|
||||||
@@ -271,7 +230,7 @@ impl WipStatement {
|
|||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
L_terms.push((c_l, g));
|
L_terms.push((c_l, g));
|
||||||
L_terms.push((d_l, h));
|
L_terms.push((d_l, h));
|
||||||
let L = multiexp(&L_terms) * Scalar(crate::INV_EIGHT());
|
let L = multiexp(&L_terms) * INV_EIGHT();
|
||||||
L_vec.push(L);
|
L_vec.push(L);
|
||||||
L_terms.zeroize();
|
L_terms.zeroize();
|
||||||
|
|
||||||
@@ -283,7 +242,7 @@ impl WipStatement {
|
|||||||
.collect::<Vec<_>>();
|
.collect::<Vec<_>>();
|
||||||
R_terms.push((c_r, g));
|
R_terms.push((c_r, g));
|
||||||
R_terms.push((d_r, h));
|
R_terms.push((d_r, h));
|
||||||
let R = multiexp(&R_terms) * Scalar(crate::INV_EIGHT());
|
let R = multiexp(&R_terms) * INV_EIGHT();
|
||||||
R_vec.push(R);
|
R_vec.push(R);
|
||||||
R_terms.zeroize();
|
R_terms.zeroize();
|
||||||
|
|
||||||
@@ -316,33 +275,32 @@ impl WipStatement {
|
|||||||
|
|
||||||
let mut A_terms =
|
let mut A_terms =
|
||||||
vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
|
||||||
let A = multiexp(&A_terms) * Scalar(crate::INV_EIGHT());
|
let A = multiexp(&A_terms) * INV_EIGHT();
|
||||||
A_terms.zeroize();
|
A_terms.zeroize();
|
||||||
|
|
||||||
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
let mut B_terms = vec![(ry * s, g), (eta, h)];
|
||||||
let B = multiexp(&B_terms) * Scalar(crate::INV_EIGHT());
|
let B = multiexp(&B_terms) * INV_EIGHT();
|
||||||
B_terms.zeroize();
|
B_terms.zeroize();
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, A, B);
|
let e = Self::transcript_A_B(&mut transcript, A, B);
|
||||||
|
|
||||||
let r_answer = r + (a[0] * e);
|
let r_answer = r + (a[0] * e);
|
||||||
let s_answer = s + (b[0] * e);
|
let s_answer = s + (b[0] * e);
|
||||||
let delta_answer = eta + (delta * e) + (alpha * e.square());
|
let delta_answer = eta + (delta * e) + (alpha * (e * e));
|
||||||
|
|
||||||
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
|
pub(crate) fn verify<R: RngCore + CryptoRng>(
|
||||||
self,
|
self,
|
||||||
rng: &mut R,
|
rng: &mut R,
|
||||||
verifier: &mut BatchVerifier<Id, EdwardsPoint>,
|
verifier: &mut BulletproofsPlusBatchVerifier,
|
||||||
id: Id,
|
|
||||||
mut transcript: Scalar,
|
mut transcript: Scalar,
|
||||||
mut proof: WipProof,
|
mut proof: WipProof,
|
||||||
) -> bool {
|
) -> bool {
|
||||||
let WipStatement { generators, P, y } = self;
|
let verifier_weight = Scalar::random(rng);
|
||||||
|
|
||||||
let (g, h) = (Generators::g(), Generators::h());
|
let WipStatement { generators, P, y } = self;
|
||||||
|
|
||||||
// Verify the L/R lengths
|
// Verify the L/R lengths
|
||||||
{
|
{
|
||||||
@@ -359,7 +317,7 @@ impl WipStatement {
|
|||||||
}
|
}
|
||||||
|
|
||||||
let inv_y = {
|
let inv_y = {
|
||||||
let inv_y = y[0].invert().unwrap();
|
let inv_y = y[0].invert();
|
||||||
let mut res = Vec::with_capacity(y.len());
|
let mut res = Vec::with_capacity(y.len());
|
||||||
res.push(inv_y);
|
res.push(inv_y);
|
||||||
while res.len() < y.len() {
|
while res.len() < y.len() {
|
||||||
@@ -368,51 +326,49 @@ impl WipStatement {
|
|||||||
res
|
res
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut P_terms = vec![(Scalar::ONE, P)];
|
let mut e_is = Vec::with_capacity(proof.L.len());
|
||||||
P_terms.reserve(6 + (2 * generators.len()) + proof.L.len());
|
for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
|
||||||
|
e_is.push(Self::transcript_L_R(&mut transcript, *L, *R));
|
||||||
let mut challenges = Vec::with_capacity(proof.L.len());
|
*L = L.mul_by_cofactor();
|
||||||
let product_cache = {
|
*R = R.mul_by_cofactor();
|
||||||
let mut es = Vec::with_capacity(proof.L.len());
|
}
|
||||||
for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
|
|
||||||
es.push(Self::transcript_L_R(&mut transcript, *L, *R));
|
|
||||||
*L = L.mul_by_cofactor();
|
|
||||||
*R = R.mul_by_cofactor();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut inv_es = es.clone();
|
|
||||||
let mut scratch = vec![Scalar::ZERO; es.len()];
|
|
||||||
group::ff::BatchInverter::invert_with_external_scratch(&mut inv_es, &mut scratch);
|
|
||||||
drop(scratch);
|
|
||||||
|
|
||||||
debug_assert_eq!(es.len(), inv_es.len());
|
|
||||||
debug_assert_eq!(es.len(), proof.L.len());
|
|
||||||
debug_assert_eq!(es.len(), proof.R.len());
|
|
||||||
for ((e, inv_e), (L, R)) in
|
|
||||||
es.drain(..).zip(inv_es.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
|
|
||||||
{
|
|
||||||
debug_assert_eq!(e.invert().unwrap(), inv_e);
|
|
||||||
|
|
||||||
challenges.push((e, inv_e));
|
|
||||||
|
|
||||||
let e_square = e.square();
|
|
||||||
let inv_e_square = inv_e.square();
|
|
||||||
P_terms.push((e_square, *L));
|
|
||||||
P_terms.push((inv_e_square, *R));
|
|
||||||
}
|
|
||||||
|
|
||||||
Self::challenge_products(&challenges)
|
|
||||||
};
|
|
||||||
|
|
||||||
let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
|
let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
|
||||||
proof.A = proof.A.mul_by_cofactor();
|
proof.A = proof.A.mul_by_cofactor();
|
||||||
proof.B = proof.B.mul_by_cofactor();
|
proof.B = proof.B.mul_by_cofactor();
|
||||||
let neg_e_square = -e.square();
|
let neg_e_square = verifier_weight * -(e * e);
|
||||||
|
|
||||||
let mut multiexp = P_terms;
|
verifier.0.other.push((neg_e_square, P));
|
||||||
multiexp.reserve(4 + (2 * generators.len()));
|
|
||||||
for (scalar, _) in &mut multiexp {
|
let mut challenges = Vec::with_capacity(proof.L.len());
|
||||||
*scalar *= neg_e_square;
|
let product_cache = {
|
||||||
|
let mut inv_e_is = e_is.clone();
|
||||||
|
Scalar::batch_invert(&mut inv_e_is);
|
||||||
|
|
||||||
|
debug_assert_eq!(e_is.len(), inv_e_is.len());
|
||||||
|
debug_assert_eq!(e_is.len(), proof.L.len());
|
||||||
|
debug_assert_eq!(e_is.len(), proof.R.len());
|
||||||
|
for ((e_i, inv_e_i), (L, R)) in
|
||||||
|
e_is.drain(..).zip(inv_e_is.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
|
||||||
|
{
|
||||||
|
debug_assert_eq!(e_i.invert(), inv_e_i);
|
||||||
|
|
||||||
|
challenges.push((e_i, inv_e_i));
|
||||||
|
|
||||||
|
let e_i_square = e_i * e_i;
|
||||||
|
let inv_e_i_square = inv_e_i * inv_e_i;
|
||||||
|
verifier.0.other.push((neg_e_square * e_i_square, *L));
|
||||||
|
verifier.0.other.push((neg_e_square * inv_e_i_square, *R));
|
||||||
|
}
|
||||||
|
|
||||||
|
challenge_products(&challenges)
|
||||||
|
};
|
||||||
|
|
||||||
|
while verifier.0.g_bold.len() < generators.len() {
|
||||||
|
verifier.0.g_bold.push(Scalar::ZERO);
|
||||||
|
}
|
||||||
|
while verifier.0.h_bold.len() < generators.len() {
|
||||||
|
verifier.0.h_bold.push(Scalar::ZERO);
|
||||||
}
|
}
|
||||||
|
|
||||||
let re = proof.r_answer * e;
|
let re = proof.r_answer * e;
|
||||||
@@ -421,23 +377,18 @@ impl WipStatement {
|
|||||||
if i > 0 {
|
if i > 0 {
|
||||||
scalar *= inv_y[i - 1];
|
scalar *= inv_y[i - 1];
|
||||||
}
|
}
|
||||||
multiexp.push((scalar, generators.generator(GeneratorsList::GBold1, i)));
|
verifier.0.g_bold[i] += verifier_weight * scalar;
|
||||||
}
|
}
|
||||||
|
|
||||||
let se = proof.s_answer * e;
|
let se = proof.s_answer * e;
|
||||||
for i in 0 .. generators.len() {
|
for i in 0 .. generators.len() {
|
||||||
multiexp.push((
|
verifier.0.h_bold[i] += verifier_weight * (se * product_cache[product_cache.len() - 1 - i]);
|
||||||
se * product_cache[product_cache.len() - 1 - i],
|
|
||||||
generators.generator(GeneratorsList::HBold1, i),
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
multiexp.push((-e, proof.A));
|
verifier.0.other.push((verifier_weight * -e, proof.A));
|
||||||
multiexp.push((proof.r_answer * y[0] * proof.s_answer, g));
|
verifier.0.g += verifier_weight * (proof.r_answer * y[0] * proof.s_answer);
|
||||||
multiexp.push((proof.delta_answer, h));
|
verifier.0.h += verifier_weight * proof.delta_answer;
|
||||||
multiexp.push((-Scalar::ONE, proof.B));
|
verifier.0.other.push((-verifier_weight, proof.B));
|
||||||
|
|
||||||
verifier.queue(rng, id, multiexp);
|
|
||||||
|
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
@@ -1,16 +1,16 @@
|
|||||||
use core::ops::{Index, IndexMut};
|
use core::ops::{Index, IndexMut};
|
||||||
use std_shims::vec::Vec;
|
use std_shims::vec::Vec;
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::Zeroize;
|
||||||
|
|
||||||
use dalek_ff_group::EdwardsPoint;
|
use curve25519_dalek::edwards::EdwardsPoint;
|
||||||
|
|
||||||
|
use crate::scalar_vector::ScalarVector;
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
use multiexp::multiexp;
|
use crate::core::multiexp;
|
||||||
#[cfg(test)]
|
|
||||||
use crate::ringct::bulletproofs::plus::ScalarVector;
|
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||||
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);
|
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);
|
||||||
|
|
||||||
impl Index<usize> for PointVector {
|
impl Index<usize> for PointVector {
|
||||||
@@ -27,6 +27,15 @@ impl IndexMut<usize> for PointVector {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl PointVector {
|
impl PointVector {
|
||||||
|
pub(crate) fn mul_vec(&self, vector: &ScalarVector) -> Self {
|
||||||
|
assert_eq!(self.len(), vector.len());
|
||||||
|
let mut res = self.clone();
|
||||||
|
for (i, val) in res.0.iter_mut().enumerate() {
|
||||||
|
*val *= vector.0[i];
|
||||||
|
}
|
||||||
|
res
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
|
pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
|
||||||
debug_assert_eq!(self.len(), vector.len());
|
debug_assert_eq!(self.len(), vector.len());
|
||||||
@@ -2,13 +2,13 @@ use core::{
|
|||||||
borrow::Borrow,
|
borrow::Borrow,
|
||||||
ops::{Index, IndexMut, Add, Sub, Mul},
|
ops::{Index, IndexMut, Add, Sub, Mul},
|
||||||
};
|
};
|
||||||
use std_shims::vec::Vec;
|
use std_shims::{vec, vec::Vec};
|
||||||
|
|
||||||
use zeroize::{Zeroize, ZeroizeOnDrop};
|
use zeroize::{Zeroize, ZeroizeOnDrop};
|
||||||
|
|
||||||
use group::ff::Field;
|
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
use multiexp::multiexp;
|
use crate::core::multiexp;
|
||||||
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
|
||||||
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
|
||||||
56
coins/monero/ringct/bulletproofs/src/tests/mod.rs
Normal file
56
coins/monero/ringct/bulletproofs/src/tests/mod.rs
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
use rand_core::{RngCore, OsRng};
|
||||||
|
|
||||||
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
|
||||||
|
use monero_primitives::Commitment;
|
||||||
|
use crate::{batch_verifier::BatchVerifier, Bulletproof, BulletproofError};
|
||||||
|
|
||||||
|
mod original;
|
||||||
|
mod plus;
|
||||||
|
|
||||||
|
macro_rules! bulletproofs_tests {
|
||||||
|
($name: ident, $max: ident, $plus: literal) => {
|
||||||
|
#[test]
|
||||||
|
fn $name() {
|
||||||
|
// Create Bulletproofs for all possible output quantities
|
||||||
|
let mut verifier = BatchVerifier::new();
|
||||||
|
for i in 1 ..= 16 {
|
||||||
|
let commitments = (1 ..= i)
|
||||||
|
.map(|_| Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()))
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
|
let bp = if $plus {
|
||||||
|
Bulletproof::prove_plus(&mut OsRng, commitments.clone()).unwrap()
|
||||||
|
} else {
|
||||||
|
Bulletproof::prove(&mut OsRng, commitments.clone()).unwrap()
|
||||||
|
};
|
||||||
|
|
||||||
|
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
||||||
|
assert!(bp.verify(&mut OsRng, &commitments));
|
||||||
|
assert!(bp.batch_verify(&mut OsRng, &mut verifier, &commitments));
|
||||||
|
}
|
||||||
|
assert!(verifier.verify());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn $max() {
|
||||||
|
// Check Bulletproofs errors if we try to prove for too many outputs
|
||||||
|
let mut commitments = vec![];
|
||||||
|
for _ in 0 .. 17 {
|
||||||
|
commitments.push(Commitment::new(Scalar::ZERO, 0));
|
||||||
|
}
|
||||||
|
assert_eq!(
|
||||||
|
(if $plus {
|
||||||
|
Bulletproof::prove_plus(&mut OsRng, commitments)
|
||||||
|
} else {
|
||||||
|
Bulletproof::prove(&mut OsRng, commitments)
|
||||||
|
})
|
||||||
|
.unwrap_err(),
|
||||||
|
BulletproofError::TooManyCommitments,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
|
||||||
|
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);
|
||||||
@@ -0,0 +1,75 @@
|
|||||||
|
// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * b))
|
||||||
|
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use curve25519_dalek::Scalar;
|
||||||
|
|
||||||
|
use monero_generators::H;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
scalar_vector::ScalarVector,
|
||||||
|
point_vector::PointVector,
|
||||||
|
original::{
|
||||||
|
GENERATORS,
|
||||||
|
inner_product::{IpStatement, IpWitness},
|
||||||
|
},
|
||||||
|
BulletproofsBatchVerifier,
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_zero_inner_product() {
|
||||||
|
let statement =
|
||||||
|
IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; 1]), Scalar::ONE);
|
||||||
|
let witness = IpWitness::new(ScalarVector::new(1), ScalarVector::new(1)).unwrap();
|
||||||
|
|
||||||
|
let transcript = Scalar::random(&mut OsRng);
|
||||||
|
let proof = statement.clone().prove(transcript, witness).unwrap();
|
||||||
|
|
||||||
|
let mut verifier = BulletproofsBatchVerifier::default();
|
||||||
|
verifier.0.g_bold = vec![Scalar::ZERO; 1];
|
||||||
|
verifier.0.h_bold = vec![Scalar::ZERO; 1];
|
||||||
|
statement.verify(&mut verifier, 1, transcript, Scalar::random(&mut OsRng), proof).unwrap();
|
||||||
|
assert!(verifier.verify());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_inner_product() {
|
||||||
|
// P = sum(g_bold * a, h_bold * b, g * u * <a, b>)
|
||||||
|
let generators = GENERATORS();
|
||||||
|
let mut verifier = BulletproofsBatchVerifier::default();
|
||||||
|
verifier.0.g_bold = vec![Scalar::ZERO; 32];
|
||||||
|
verifier.0.h_bold = vec![Scalar::ZERO; 32];
|
||||||
|
for i in [1, 2, 4, 8, 16, 32] {
|
||||||
|
let g = H();
|
||||||
|
let mut g_bold = vec![];
|
||||||
|
let mut h_bold = vec![];
|
||||||
|
for i in 0 .. i {
|
||||||
|
g_bold.push(generators.G[i]);
|
||||||
|
h_bold.push(generators.H[i]);
|
||||||
|
}
|
||||||
|
let g_bold = PointVector(g_bold);
|
||||||
|
let h_bold = PointVector(h_bold);
|
||||||
|
|
||||||
|
let mut a = ScalarVector::new(i);
|
||||||
|
let mut b = ScalarVector::new(i);
|
||||||
|
|
||||||
|
for i in 0 .. i {
|
||||||
|
a[i] = Scalar::random(&mut OsRng);
|
||||||
|
b[i] = Scalar::random(&mut OsRng);
|
||||||
|
}
|
||||||
|
|
||||||
|
let P = g_bold.multiexp(&a) + h_bold.multiexp(&b) + (g * a.clone().inner_product(&b));
|
||||||
|
|
||||||
|
let statement =
|
||||||
|
IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; i]), Scalar::ONE);
|
||||||
|
let witness = IpWitness::new(a, b).unwrap();
|
||||||
|
|
||||||
|
let transcript = Scalar::random(&mut OsRng);
|
||||||
|
let proof = statement.clone().prove(transcript, witness).unwrap();
|
||||||
|
|
||||||
|
let weight = Scalar::random(&mut OsRng);
|
||||||
|
verifier.0.other.push((weight, P));
|
||||||
|
statement.verify(&mut verifier, i, transcript, weight, proof).unwrap();
|
||||||
|
}
|
||||||
|
assert!(verifier.verify());
|
||||||
|
}
|
||||||
62
coins/monero/ringct/bulletproofs/src/tests/original/mod.rs
Normal file
62
coins/monero/ringct/bulletproofs/src/tests/original/mod.rs
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
use hex_literal::hex;
|
||||||
|
use rand_core::OsRng;
|
||||||
|
|
||||||
|
use curve25519_dalek::scalar::Scalar;
|
||||||
|
|
||||||
|
use monero_io::decompress_point;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
original::{IpProof, AggregateRangeProof as OriginalProof},
|
||||||
|
Bulletproof,
|
||||||
|
};
|
||||||
|
|
||||||
|
mod inner_product;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn bulletproofs_vector() {
|
||||||
|
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
|
||||||
|
let point = |point| decompress_point(point).unwrap();
|
||||||
|
|
||||||
|
// Generated from Monero
|
||||||
|
assert!(Bulletproof::Original(OriginalProof {
|
||||||
|
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
|
||||||
|
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
|
||||||
|
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
|
||||||
|
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
|
||||||
|
tau_x: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
|
||||||
|
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
|
||||||
|
ip: IpProof {
|
||||||
|
L: vec![
|
||||||
|
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
|
||||||
|
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
|
||||||
|
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
|
||||||
|
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
|
||||||
|
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
|
||||||
|
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
|
||||||
|
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
|
||||||
|
],
|
||||||
|
R: vec![
|
||||||
|
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
|
||||||
|
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
|
||||||
|
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
|
||||||
|
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
|
||||||
|
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
|
||||||
|
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
|
||||||
|
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
|
||||||
|
],
|
||||||
|
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
|
||||||
|
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
|
||||||
|
},
|
||||||
|
t_hat: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
|
||||||
|
})
|
||||||
|
.verify(
|
||||||
|
&mut OsRng,
|
||||||
|
&[
|
||||||
|
// For some reason, these vectors are * INV_EIGHT
|
||||||
|
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
|
||||||
|
.mul_by_cofactor(),
|
||||||
|
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
|
||||||
|
.mul_by_cofactor(),
|
||||||
|
]
|
||||||
|
));
|
||||||
|
}
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
use rand_core::{RngCore, OsRng};
|
||||||
|
|
||||||
|
use curve25519_dalek::Scalar;
|
||||||
|
|
||||||
|
use monero_primitives::Commitment;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
batch_verifier::BulletproofsPlusBatchVerifier,
|
||||||
|
plus::aggregate_range_proof::{AggregateRangeStatement, AggregateRangeWitness},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_aggregate_range_proof() {
|
||||||
|
let mut verifier = BulletproofsPlusBatchVerifier::default();
|
||||||
|
for m in 1 ..= 16 {
|
||||||
|
let mut commitments = vec![];
|
||||||
|
for _ in 0 .. m {
|
||||||
|
commitments.push(Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()));
|
||||||
|
}
|
||||||
|
let commitment_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
||||||
|
let statement = AggregateRangeStatement::new(&commitment_points).unwrap();
|
||||||
|
let witness = AggregateRangeWitness::new(commitments).unwrap();
|
||||||
|
|
||||||
|
let proof = statement.clone().prove(&mut OsRng, &witness).unwrap();
|
||||||
|
statement.verify(&mut OsRng, &mut verifier, proof);
|
||||||
|
}
|
||||||
|
assert!(verifier.verify());
|
||||||
|
}
|
||||||
@@ -2,13 +2,14 @@
|
|||||||
|
|
||||||
use rand_core::OsRng;
|
use rand_core::OsRng;
|
||||||
|
|
||||||
use multiexp::BatchVerifier;
|
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
|
||||||
use group::{ff::Field, Group};
|
|
||||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
|
||||||
|
|
||||||
use crate::ringct::bulletproofs::plus::{
|
use crate::{
|
||||||
ScalarVector, PointVector, GeneratorsList, Generators,
|
batch_verifier::BulletproofsPlusBatchVerifier,
|
||||||
weighted_inner_product::{WipStatement, WipWitness},
|
plus::{
|
||||||
|
ScalarVector, PointVector, GeneratorsList, BpPlusGenerators,
|
||||||
|
weighted_inner_product::{WipStatement, WipWitness},
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@@ -17,33 +18,33 @@ fn test_zero_weighted_inner_product() {
|
|||||||
let P = EdwardsPoint::identity();
|
let P = EdwardsPoint::identity();
|
||||||
let y = Scalar::random(&mut OsRng);
|
let y = Scalar::random(&mut OsRng);
|
||||||
|
|
||||||
let generators = Generators::new().reduce(1);
|
let generators = BpPlusGenerators::new().reduce(1);
|
||||||
let statement = WipStatement::new(generators, P, y);
|
let statement = WipStatement::new(generators, P, y);
|
||||||
let witness = WipWitness::new(ScalarVector::new(1), ScalarVector::new(1), Scalar::ZERO).unwrap();
|
let witness = WipWitness::new(ScalarVector::new(1), ScalarVector::new(1), Scalar::ZERO).unwrap();
|
||||||
|
|
||||||
let transcript = Scalar::random(&mut OsRng);
|
let transcript = Scalar::random(&mut OsRng);
|
||||||
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
||||||
|
|
||||||
let mut verifier = BatchVerifier::new(1);
|
let mut verifier = BulletproofsPlusBatchVerifier::default();
|
||||||
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
|
statement.verify(&mut OsRng, &mut verifier, transcript, proof);
|
||||||
assert!(verifier.verify_vartime());
|
assert!(verifier.verify());
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_weighted_inner_product() {
|
fn test_weighted_inner_product() {
|
||||||
// P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)
|
// P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)
|
||||||
let mut verifier = BatchVerifier::new(6);
|
let mut verifier = BulletproofsPlusBatchVerifier::default();
|
||||||
let generators = Generators::new();
|
let generators = BpPlusGenerators::new();
|
||||||
for i in [1, 2, 4, 8, 16, 32] {
|
for i in [1, 2, 4, 8, 16, 32] {
|
||||||
let generators = generators.reduce(i);
|
let generators = generators.reduce(i);
|
||||||
let g = Generators::g();
|
let g = BpPlusGenerators::g();
|
||||||
let h = Generators::h();
|
let h = BpPlusGenerators::h();
|
||||||
assert_eq!(generators.len(), i);
|
assert_eq!(generators.len(), i);
|
||||||
let mut g_bold = vec![];
|
let mut g_bold = vec![];
|
||||||
let mut h_bold = vec![];
|
let mut h_bold = vec![];
|
||||||
for i in 0 .. i {
|
for i in 0 .. i {
|
||||||
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
|
g_bold.push(generators.generator(GeneratorsList::GBold, i));
|
||||||
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
|
h_bold.push(generators.generator(GeneratorsList::HBold, i));
|
||||||
}
|
}
|
||||||
let g_bold = PointVector(g_bold);
|
let g_bold = PointVector(g_bold);
|
||||||
let h_bold = PointVector(h_bold);
|
let h_bold = PointVector(h_bold);
|
||||||
@@ -75,7 +76,7 @@ fn test_weighted_inner_product() {
|
|||||||
|
|
||||||
let transcript = Scalar::random(&mut OsRng);
|
let transcript = Scalar::random(&mut OsRng);
|
||||||
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
let proof = statement.clone().prove(&mut OsRng, transcript, &witness).unwrap();
|
||||||
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
|
statement.verify(&mut OsRng, &mut verifier, transcript, proof);
|
||||||
}
|
}
|
||||||
assert!(verifier.verify_vartime());
|
assert!(verifier.verify());
|
||||||
}
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user