1 Commit

Author: Luke Parker
SHA1: 466af2bc19
Message: Experiment with a 3s message latency in the coordinator
         This effects a 10s block time.
Date: 2024-03-02 14:46:06 -05:00

468 changed files with 14547 additions and 25533 deletions

View File

@@ -5,7 +5,7 @@ inputs:
version: version:
description: "Version to download and run" description: "Version to download and run"
required: false required: false
default: "27.0" default: 24.0.1
runs: runs:
using: "composite" using: "composite"

View File

@@ -42,8 +42,8 @@ runs:
shell: bash shell: bash
run: | run: |
cargo install svm-rs cargo install svm-rs
svm install 0.8.25 svm install 0.8.16
svm use 0.8.25 svm use 0.8.16
# - name: Cache Rust # - name: Cache Rust
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43 # uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43

View File

@@ -10,7 +10,7 @@ inputs:
bitcoin-version: bitcoin-version:
description: "Bitcoin version to download and run as a regtest node" description: "Bitcoin version to download and run as a regtest node"
required: false required: false
default: "27.1" default: 24.0.1
runs: runs:
using: "composite" using: "composite"
@@ -19,9 +19,9 @@ runs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Install Foundry - name: Install Foundry
uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
with: with:
version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9 version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
cache: false cache: false
- name: Run a Monero Regtest Node - name: Run a Monero Regtest Node

View File

@@ -1 +1 @@
nightly-2024-07-01 nightly-2024-02-07

View File

@@ -30,22 +30,6 @@ jobs:
run: | run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \ GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
-p bitcoin-serai \ -p bitcoin-serai \
-p alloy-simple-request-transport \
-p ethereum-serai \ -p ethereum-serai \
-p serai-ethereum-relayer \
-p monero-io \
-p monero-generators \ -p monero-generators \
-p monero-primitives \ -p monero-serai
-p monero-mlsag \
-p monero-clsag \
-p monero-borromean \
-p monero-bulletproofs \
-p monero-serai \
-p monero-rpc \
-p monero-simple-request-rpc \
-p monero-address \
-p monero-wallet \
-p monero-seed \
-p polyseed \
-p monero-wallet-util \
-p monero-serai-verify-chain

View File

@@ -28,5 +28,4 @@ jobs:
-p std-shims \ -p std-shims \
-p zalloc \ -p zalloc \
-p serai-db \ -p serai-db \
-p serai-env \ -p serai-env
-p simple-request

View File

@@ -37,4 +37,4 @@ jobs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Run coordinator Docker tests - name: Run coordinator Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -19,4 +19,4 @@ jobs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Run Full Stack Docker tests - name: Run Full Stack Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -33,4 +33,4 @@ jobs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Run message-queue Docker tests - name: Run message-queue Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -26,22 +26,7 @@ jobs:
uses: ./.github/actions/test-dependencies uses: ./.github/actions/test-dependencies
- name: Run Unit Tests Without Features - name: Run Unit Tests Without Features
run: | run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib
# Doesn't run unit tests with features as the tests workflow will # Doesn't run unit tests with features as the tests workflow will
@@ -61,17 +46,11 @@ jobs:
monero-version: ${{ matrix.version }} monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features - name: Run Integration Tests Without Features
run: | # Runs with the binaries feature so the binaries build
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*' # https://github.com/rust-lang/cargo/issues/8396
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*' run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'
- name: Run Integration Tests - name: Run Integration Tests
# Don't run if the the tests workflow also will # Don't run if the the tests workflow also will
if: ${{ matrix.version != 'v0.18.2.0' }} if: ${{ matrix.version != 'v0.18.2.0' }}
run: | run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'

View File

@@ -32,4 +32,4 @@ jobs:
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
- name: Verify no-std builds - name: Verify no-std builds
run: CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf -p serai-no-std-tests run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf

View File

@@ -1,90 +0,0 @@
# MIT License
#
# Copyright (c) 2022 just-the-docs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll site to Pages
on:
push:
branches:
- "develop"
paths:
- "docs/**"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow one concurrent deployment
concurrency:
group: "pages"
cancel-in-progress: true
jobs:
# Build job
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: docs
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
cache-version: 0
working-directory: "${{ github.workspace }}/docs"
- name: Setup Pages
id: pages
uses: actions/configure-pages@v3
- name: Build with Jekyll
run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
env:
JEKYLL_ENV: production
- name: Upload artifact
uses: actions/upload-pages-artifact@v1
with:
path: "docs/_site/"
# Deployment job
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
needs: build
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v2

View File

@@ -37,4 +37,4 @@ jobs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Run processor Docker tests - name: Run processor Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -33,4 +33,4 @@ jobs:
uses: ./.github/actions/build-dependencies uses: ./.github/actions/build-dependencies
- name: Run Reproducible Runtime tests - name: Run Reproducible Runtime tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -43,7 +43,6 @@ jobs:
-p tendermint-machine \ -p tendermint-machine \
-p tributary-chain \ -p tributary-chain \
-p serai-coordinator \ -p serai-coordinator \
-p serai-orchestrator \
-p serai-docker-tests -p serai-docker-tests
test-substrate: test-substrate:
@@ -65,9 +64,7 @@ jobs:
-p serai-validator-sets-pallet \ -p serai-validator-sets-pallet \
-p serai-in-instructions-primitives \ -p serai-in-instructions-primitives \
-p serai-in-instructions-pallet \ -p serai-in-instructions-pallet \
-p serai-signals-primitives \
-p serai-signals-pallet \ -p serai-signals-pallet \
-p serai-abi \
-p serai-runtime \ -p serai-runtime \
-p serai-node -p serai-node

Cargo.lock (generated): 3033 lines changed

File diff suppressed because it is too large.

View File

@@ -2,10 +2,7 @@
resolver = "2" resolver = "2"
members = [ members = [
# Version patches # Version patches
"patches/parking_lot_core",
"patches/parking_lot",
"patches/zstd", "patches/zstd",
"patches/rocksdb",
"patches/proc-macro-crate", "patches/proc-macro-crate",
# std patches # std patches
@@ -38,27 +35,9 @@ members = [
"crypto/schnorrkel", "crypto/schnorrkel",
"coins/bitcoin", "coins/bitcoin",
"coins/ethereum/alloy-simple-request-transport",
"coins/ethereum", "coins/ethereum",
"coins/ethereum/relayer",
"coins/monero/io",
"coins/monero/generators", "coins/monero/generators",
"coins/monero/primitives",
"coins/monero/ringct/mlsag",
"coins/monero/ringct/clsag",
"coins/monero/ringct/borromean",
"coins/monero/ringct/bulletproofs",
"coins/monero", "coins/monero",
"coins/monero/rpc",
"coins/monero/rpc/simple-request",
"coins/monero/wallet/address",
"coins/monero/wallet",
"coins/monero/wallet/seed",
"coins/monero/wallet/polyseed",
"coins/monero/wallet/util",
"coins/monero/verify-chain",
"message-queue", "message-queue",
@@ -74,14 +53,12 @@ members = [
"substrate/coins/primitives", "substrate/coins/primitives",
"substrate/coins/pallet", "substrate/coins/pallet",
"substrate/dex/pallet", "substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",
"substrate/validator-sets/primitives", "substrate/validator-sets/primitives",
"substrate/validator-sets/pallet", "substrate/validator-sets/pallet",
"substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",
"substrate/signals/primitives", "substrate/signals/primitives",
"substrate/signals/pallet", "substrate/signals/pallet",
@@ -131,14 +108,10 @@ panic = "unwind"
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" } lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s # Needed due to dockertest's usage of `Rc`s when we need `Arc`s
dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" } dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
parking_lot_core = { path = "patches/parking_lot_core" }
parking_lot = { path = "patches/parking_lot" }
# wasmtime pulls in an old version for this # wasmtime pulls in an old version for this
zstd = { path = "patches/zstd" } zstd = { path = "patches/zstd" }
# Needed for WAL compression
rocksdb = { path = "patches/rocksdb" }
# proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3 # proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3
proc-macro-crate = { path = "patches/proc-macro-crate" } proc-macro-crate = { path = "patches/proc-macro-crate" }

View File

@@ -5,16 +5,13 @@ Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig experience. Funds are stored in an economically secured threshold-multisig
wallet. wallet.
[Getting Started](spec/Getting%20Started.md) [Getting Started](docs/Getting%20Started.md)
### Layout ### Layout
- `audits`: Audits for various parts of Serai. - `audits`: Audits for various parts of Serai.
- `spec`: The specification of the Serai protocol, both internally and as - `docs`: Documentation on the Serai protocol.
networked.
- `docs`: User-facing documentation on the Serai protocol.
- `common`: Crates containing utilities common to a variety of areas under - `common`: Crates containing utilities common to a variety of areas under
Serai, none neatly fitting under another category. Serai, none neatly fitting under another category.

View File

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin" repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"] authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021" edition = "2021"
rust-version = "1.79" rust-version = "1.74"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -23,7 +23,7 @@ thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false } zeroize = { version = "^1.5", default-features = false }
rand_core = { version = "0.6", default-features = false } rand_core = { version = "0.6", default-features = false }
bitcoin = { version = "0.32", default-features = false } bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] } k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
@@ -36,7 +36,7 @@ serde_json = { version = "1", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true } simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
[dev-dependencies] [dev-dependencies]
secp256k1 = { version = "0.29", default-features = false, features = ["std"] } secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }

View File

@@ -195,13 +195,13 @@ impl Rpc {
// If this was already successfully published, consider this having succeeded // If this was already successfully published, consider this having succeeded
if let RpcError::RequestError(Error { code, .. }) = e { if let RpcError::RequestError(Error { code, .. }) = e {
if code == RPC_VERIFY_ALREADY_IN_CHAIN { if code == RPC_VERIFY_ALREADY_IN_CHAIN {
return Ok(tx.compute_txid()); return Ok(tx.txid());
} }
} }
Err(e)? Err(e)?
} }
}; };
if txid != tx.compute_txid() { if txid != tx.txid() {
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?; Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
} }
Ok(txid) Ok(txid)
@@ -215,7 +215,7 @@ impl Rpc {
let tx: Transaction = encode::deserialize(&bytes) let tx: Transaction = encode::deserialize(&bytes)
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?; .map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
let mut tx_hash = *tx.compute_txid().as_raw_hash().as_byte_array(); let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
tx_hash.reverse(); tx_hash.reverse();
if hash != &tx_hash { if hash != &tx_hash {
Err(RpcError::InvalidResponse("node replied with a different transaction"))?; Err(RpcError::InvalidResponse("node replied with a different transaction"))?;

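The byte reversal on both sides of this hunk reflects Bitcoin's display convention: txids are printed in the reverse byte order of the raw hash. A minimal sketch of that conversion (the helper name is illustrative, not from the codebase):

```rust
// Convert a raw 32-byte transaction hash into the hex string Bitcoin
// conventionally displays, which is the raw hash in reverse byte order.
fn displayed_txid(raw_hash: &[u8; 32]) -> String {
    raw_hash.iter().rev().map(|byte| format!("{byte:02x}")).collect()
}
```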
View File

@@ -39,7 +39,7 @@ fn test_algorithm() {
.verify_schnorr( .verify_schnorr(
&Signature::from_slice(&sig) &Signature::from_slice(&sig)
.expect("couldn't convert produced signature to secp256k1::Signature"), .expect("couldn't convert produced signature to secp256k1::Signature"),
&Message::from_digest_slice(Hash::hash(MESSAGE).as_ref()).unwrap(), &Message::from(Hash::hash(MESSAGE)),
&x_only(&keys[&Participant::new(1).unwrap()].group_key()), &x_only(&keys[&Participant::new(1).unwrap()].group_key()),
) )
.unwrap() .unwrap()

View File

@@ -4,7 +4,7 @@ use std_shims::{
io::{self, Write}, io::{self, Write},
}; };
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::io::{Read, BufReader}; use std_shims::io::Read;
use k256::{ use k256::{
elliptic_curve::sec1::{Tag, ToEncodedPoint}, elliptic_curve::sec1::{Tag, ToEncodedPoint},
@@ -18,8 +18,8 @@ use frost::{
}; };
use bitcoin::{ use bitcoin::{
consensus::encode::serialize, key::TweakedPublicKey, OutPoint, ScriptBuf, TxOut, Transaction, consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
Block, TxOut, Transaction, Block,
}; };
#[cfg(feature = "std")] #[cfg(feature = "std")]
use bitcoin::consensus::encode::Decodable; use bitcoin::consensus::encode::Decodable;
@@ -46,12 +46,12 @@ pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
/// Return the Taproot address payload for a public key. /// Return the Taproot address payload for a public key.
/// ///
/// If the key is odd, this will return None. /// If the key is odd, this will return None.
pub fn p2tr_script_buf(key: ProjectivePoint) -> Option<ScriptBuf> { pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY { if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
return None; return None;
} }
Some(ScriptBuf::new_p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)))) Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
} }
/// A spendable output. /// A spendable output.
@@ -89,17 +89,11 @@ impl ReceivedOutput {
/// Read a ReceivedOutput from a generic satisfying Read. /// Read a ReceivedOutput from a generic satisfying Read.
#[cfg(feature = "std")] #[cfg(feature = "std")]
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> { pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
let offset = Secp256k1::read_F(r)?; Ok(ReceivedOutput {
let output; offset: Secp256k1::read_F(r)?,
let outpoint; output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
{ outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
let mut buf_r = BufReader::with_capacity(0, r); })
output =
TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))?;
outpoint =
OutPoint::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid OutPoint"))?;
}
Ok(ReceivedOutput { offset, output, outpoint })
} }
/// Write a ReceivedOutput to a generic satisfying Write. /// Write a ReceivedOutput to a generic satisfying Write.
@@ -130,7 +124,7 @@ impl Scanner {
/// Returns None if this key can't be scanned for. /// Returns None if this key can't be scanned for.
pub fn new(key: ProjectivePoint) -> Option<Scanner> { pub fn new(key: ProjectivePoint) -> Option<Scanner> {
let mut scripts = HashMap::new(); let mut scripts = HashMap::new();
scripts.insert(p2tr_script_buf(key)?, Scalar::ZERO); scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
Some(Scanner { key, scripts }) Some(Scanner { key, scripts })
} }
@@ -147,8 +141,9 @@ impl Scanner {
// chance of being even // chance of being even
// That means this should terminate within a very small amount of iterations // That means this should terminate within a very small amount of iterations
loop { loop {
match p2tr_script_buf(self.key + (ProjectivePoint::GENERATOR * offset)) { match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
Some(script) => { Some(address) => {
let script = address.script_pubkey();
if self.scripts.contains_key(&script) { if self.scripts.contains_key(&script) {
None?; None?;
} }
@@ -171,7 +166,7 @@ impl Scanner {
res.push(ReceivedOutput { res.push(ReceivedOutput {
offset: *offset, offset: *offset,
output: output.clone(), output: output.clone(),
outpoint: OutPoint::new(tx.compute_txid(), vout), outpoint: OutPoint::new(tx.txid(), vout),
}); });
} }
} }

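The `BufReader` dance on the new side of the `read` hunk exists because the updated bitcoin crate's `consensus_decode` requires a buffered reader. A rough sketch of the adapter pattern, under the assumption (consistent with the code above) that a zero-capacity `BufReader` forwards reads directly without pulling extra bytes from the underlying stream:

```rust
use std::io::{BufReader, Read};

// Adapt a plain `Read` into a buffered reader without over-consuming input:
// with a zero-capacity BufReader, every read exceeds the (empty) internal
// buffer and so bypasses it, reading straight from the underlying stream.
fn read_u32_le<R: Read>(r: &mut R) -> std::io::Result<u32> {
    let mut buf_r = BufReader::with_capacity(0, r);
    let mut bytes = [0u8; 4];
    buf_r.read_exact(&mut bytes)?;
    Ok(u32::from_le_bytes(bytes))
}
```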
View File

@@ -18,12 +18,12 @@ use bitcoin::{
absolute::LockTime, absolute::LockTime,
script::{PushBytesBuf, ScriptBuf}, script::{PushBytesBuf, ScriptBuf},
transaction::{Version, Transaction}, transaction::{Version, Transaction},
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
}; };
use crate::{ use crate::{
crypto::Schnorr, crypto::Schnorr,
wallet::{ReceivedOutput, p2tr_script_buf}, wallet::{ReceivedOutput, address_payload},
}; };
#[rustfmt::skip] #[rustfmt::skip]
@@ -61,11 +61,7 @@ pub struct SignableTransaction {
} }
impl SignableTransaction { impl SignableTransaction {
fn calculate_weight_vbytes( fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
inputs: usize,
payments: &[(ScriptBuf, u64)],
change: Option<&ScriptBuf>,
) -> (u64, u64) {
// Expand this a full transaction in order to use the bitcoin library's weight function // Expand this a full transaction in order to use the bitcoin library's weight function
let mut tx = Transaction { let mut tx = Transaction {
version: Version(2), version: Version(2),
@@ -90,42 +86,16 @@ impl SignableTransaction {
// The script pub key is not of a fixed size and does have to be used here // The script pub key is not of a fixed size and does have to be used here
.map(|payment| TxOut { .map(|payment| TxOut {
value: Amount::from_sat(payment.1), value: Amount::from_sat(payment.1),
script_pubkey: payment.0.clone(), script_pubkey: payment.0.script_pubkey(),
}) })
.collect(), .collect(),
}; };
if let Some(change) = change { if let Some(change) = change {
// Use a 0 value since we're currently unsure what the change amount will be, and since // Use a 0 value since we're currently unsure what the change amount will be, and since
// the value is fixed size (so any value could be used here) // the value is fixed size (so any value could be used here)
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() }); tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
} }
u64::from(tx.weight())
let weight = tx.weight();
// Now calculate the size in vbytes
/*
"Virtual transaction size" is weight ceildiv 4 per
https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04
/src/policy/policy.cpp#L295-L298
implements this almost as expected, with an additional consideration to signature operations
Signature operations (the second argument of the following call) do not count Taproot
signatures per https://github.com/bitcoin/bips/blob/master/bip-0342.mediawiki#cite_ref-11-0
We don't risk running afoul of the Taproot signature limit as it allows at least one per
input, which is all we use
*/
(
weight.to_wu(),
u64::try_from(bitcoin::policy::get_virtual_tx_size(
i64::try_from(weight.to_wu()).unwrap(),
0i64,
))
.unwrap(),
)
} }
/// Returns the fee necessary for this transaction to achieve the fee rate specified at /// Returns the fee necessary for this transaction to achieve the fee rate specified at
@@ -151,10 +121,10 @@ impl SignableTransaction {
/// If data is specified, an OP_RETURN output will be added with it. /// If data is specified, an OP_RETURN output will be added with it.
pub fn new( pub fn new(
mut inputs: Vec<ReceivedOutput>, mut inputs: Vec<ReceivedOutput>,
payments: &[(ScriptBuf, u64)], payments: &[(Address, u64)],
change: Option<ScriptBuf>, change: Option<&Address>,
data: Option<Vec<u8>>, data: Option<Vec<u8>>,
fee_per_vbyte: u64, fee_per_weight: u64,
) -> Result<SignableTransaction, TransactionError> { ) -> Result<SignableTransaction, TransactionError> {
if inputs.is_empty() { if inputs.is_empty() {
Err(TransactionError::NoInputs)?; Err(TransactionError::NoInputs)?;
@@ -189,7 +159,10 @@ impl SignableTransaction {
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>(); let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
let mut tx_outs = payments let mut tx_outs = payments
.iter() .iter()
.map(|payment| TxOut { value: Amount::from_sat(payment.1), script_pubkey: payment.0.clone() }) .map(|payment| TxOut {
value: Amount::from_sat(payment.1),
script_pubkey: payment.0.script_pubkey(),
})
.collect::<Vec<_>>(); .collect::<Vec<_>>();
// Add the OP_RETURN output // Add the OP_RETURN output
@@ -203,14 +176,34 @@ impl SignableTransaction {
}) })
} }
let (mut weight, vbytes) = Self::calculate_weight_vbytes(tx_ins.len(), payments, None); let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
let mut needed_fee = fee_per_weight * weight;
let mut needed_fee = fee_per_vbyte * vbytes; // "Virtual transaction size" is weight ceildiv 4 per
// https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
// src/policy/policy.cpp#L295-L298
// implements this as expected
// Technically, it takes whatever's greater, the weight or the amount of signature operations
// multiplied by DEFAULT_BYTES_PER_SIGOP (20)
// We only use 1 signature per input, and our inputs have a weight exceeding 20
// Accordingly, our inputs' weight will always be greater than the cost of the signature ops
let vsize = weight.div_ceil(4);
debug_assert_eq!(
u64::try_from(bitcoin::policy::get_virtual_tx_size(
weight.try_into().unwrap(),
tx_ins.len().try_into().unwrap()
))
.unwrap(),
vsize
);
// Technically, if there isn't change, this TX may still pay enough of a fee to pass the // Technically, if there isn't change, this TX may still pay enough of a fee to pass the
// minimum fee. Such edge cases aren't worth programming when they go against intent, as the // minimum fee. Such edge cases aren't worth programming when they go against intent, as the
// specified fee rate is too low to be valid // specified fee rate is too low to be valid
// bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte // bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vbytes) / 1000) { if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
Err(TransactionError::TooLowFee)?; Err(TransactionError::TooLowFee)?;
} }
@@ -220,12 +213,12 @@ impl SignableTransaction {
// If there's a change address, check if there's change to give it // If there's a change address, check if there's change to give it
if let Some(change) = change { if let Some(change) = change {
let (weight_with_change, vbytes_with_change) = let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
Self::calculate_weight_vbytes(tx_ins.len(), payments, Some(&change)); let fee_with_change = fee_per_weight * weight_with_change;
let fee_with_change = fee_per_vbyte * vbytes_with_change;
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) { if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
if value >= DUST { if value >= DUST {
tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change }); tx_outs
.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
weight = weight_with_change; weight = weight_with_change;
needed_fee = fee_with_change; needed_fee = fee_with_change;
} }
@@ -255,7 +248,7 @@ impl SignableTransaction {
/// Returns the TX ID of the transaction this will create. /// Returns the TX ID of the transaction this will create.
pub fn txid(&self) -> [u8; 32] { pub fn txid(&self) -> [u8; 32] {
let mut res = self.tx.compute_txid().to_byte_array(); let mut res = self.tx.txid().to_byte_array();
res.reverse(); res.reverse();
res res
} }
@@ -295,7 +288,7 @@ impl SignableTransaction {
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes()); transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
let offset = keys.clone().offset(self.offsets[i]); let offset = keys.clone().offset(self.offsets[i]);
if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey { if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
None?; None?;
} }
@@ -382,7 +375,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
msg: &[u8], msg: &[u8],
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> { ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() { if !msg.is_empty() {
panic!("message was passed to the TransactionSignMachine when it generates its own"); panic!("message was passed to the TransactionMachine when it generates its own");
} }
let commitments = (0 .. self.sigs.len()) let commitments = (0 .. self.sigs.len())

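The comments in this hunk encode two pieces of arithmetic worth seeing in isolation: virtual size is weight ceil-divided by 4 (per BIP-141), and `DEFAULT_MIN_RELAY_TX_FEE` is denominated in sats per kilo-vbyte. A self-contained sketch of both, with Bitcoin Core's default rate of 1000 sats/kvB inlined as an assumption:

```rust
// Minimum relay fee for a transaction of the given BIP-141 weight, mirroring
// the checks in the hunk above.
fn min_relay_fee(weight: u64) -> u64 {
    const MIN_RELAY_TX_FEE: u64 = 1000; // sats per kilo-vbyte (assumed default)
    // "Virtual transaction size" is weight ceildiv 4 per BIP-141
    let vsize = weight.div_ceil(4);
    (MIN_RELAY_TX_FEE * vsize) / 1000
}

fn main() {
    // A 561-weight transaction is ceil(561 / 4) = 141 vbytes, so it must pay
    // at least 141 sats to clear the default minimum relay fee
    assert_eq!(min_relay_fee(561), 141);
}
```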
View File

@@ -22,10 +22,11 @@ use bitcoin_serai::{
hashes::Hash as HashTrait, hashes::Hash as HashTrait,
blockdata::opcodes::all::OP_RETURN, blockdata::opcodes::all::OP_RETURN,
script::{PushBytesBuf, Instruction, Instructions, Script}, script::{PushBytesBuf, Instruction, Instructions, Script},
address::NetworkChecked,
OutPoint, Amount, TxOut, Transaction, Network, Address, OutPoint, Amount, TxOut, Transaction, Network, Address,
}, },
wallet::{ wallet::{
tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError, SignableTransaction, tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
}, },
rpc::Rpc, rpc::Rpc,
}; };
@@ -47,7 +48,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
"generatetoaddress", "generatetoaddress",
serde_json::json!([ serde_json::json!([
1, 1,
Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap() Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
]), ]),
) )
.await .await
@@ -68,7 +69,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0])); assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
assert_eq!(outputs.len(), 1); assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].compute_txid(), 0)); assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat()); assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
assert_eq!( assert_eq!(
@@ -192,7 +193,7 @@ async_sequential! {
assert_eq!(output.offset(), Scalar::ZERO); assert_eq!(output.offset(), Scalar::ZERO);
let inputs = vec![output]; let inputs = vec![output];
let addr = || p2tr_script_buf(key).unwrap(); let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
let payments = vec![(addr(), 1000)]; let payments = vec![(addr(), 1000)];
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok()); assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
@@ -205,7 +206,7 @@ async_sequential! {
// No change // No change
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok()); assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
// Consolidation TX // Consolidation TX
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok()); assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
// Data // Data
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok()); assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
// No outputs // No outputs
@@ -228,7 +229,7 @@ async_sequential! {
); );
assert_eq!( assert_eq!(
SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0), SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
Err(TransactionError::TooLowFee), Err(TransactionError::TooLowFee),
); );
@@ -260,19 +261,20 @@ async_sequential! {
// Declare payments, change, fee // Declare payments, change, fee
let payments = [ let payments = [
(p2tr_script_buf(key).unwrap(), 1005), (Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
(p2tr_script_buf(offset_key).unwrap(), 1007) (Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
]; ];
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap(); let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
let change_key = key + (ProjectivePoint::GENERATOR * change_offset); let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
let change_addr = p2tr_script_buf(change_key).unwrap(); let change_addr =
Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
// Create and sign the TX // Create and sign the TX
let tx = SignableTransaction::new( let tx = SignableTransaction::new(
vec![output.clone(), offset_output.clone()], vec![output.clone(), offset_output.clone()],
&payments, &payments,
Some(change_addr.clone()), Some(&change_addr),
None, None,
FEE FEE
).unwrap(); ).unwrap();
@@ -285,7 +287,7 @@ async_sequential! {
// Ensure we can scan it // Ensure we can scan it
let outputs = scanner.scan_transaction(&tx); let outputs = scanner.scan_transaction(&tx);
for (o, output) in outputs.iter().enumerate() { for (o, output) in outputs.iter().enumerate() {
assert_eq!(output.outpoint(), &OutPoint::new(tx.compute_txid(), u32::try_from(o).unwrap())); assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output); assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
} }
@@ -297,13 +299,13 @@ async_sequential! {
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) { for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
assert_eq!( assert_eq!(
output, output,
&TxOut { script_pubkey: payment.0.clone(), value: Amount::from_sat(payment.1) }, &TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
); );
assert_eq!(scanned.value(), payment.1 ); assert_eq!(scanned.value(), payment.1 );
} }
// Make sure the change is correct // Make sure the change is correct
assert_eq!(needed_fee, u64::try_from(tx.vsize()).unwrap() * FEE); assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
let input_value = output.value() + offset_output.value(); let input_value = output.value() + offset_output.value();
let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>(); let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
assert_eq!(input_value - output_value, needed_fee); assert_eq!(input_value - output_value, needed_fee);
@@ -312,13 +314,13 @@ async_sequential! {
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee; input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
assert_eq!( assert_eq!(
tx.output[2], tx.output[2],
TxOut { script_pubkey: change_addr, value: Amount::from_sat(change_amount) }, TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
); );
// This also tests send_raw_transaction and get_transaction, which the RPC test can't // This also tests send_raw_transaction and get_transaction, which the RPC test can't
// effectively test // effectively test
rpc.send_raw_transaction(&tx).await.unwrap(); rpc.send_raw_transaction(&tx).await.unwrap();
let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array(); let mut hash = *tx.txid().as_raw_hash().as_byte_array();
hash.reverse(); hash.reverse();
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap()); assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
assert_eq!(expected_id, hash); assert_eq!(expected_id, hash);
@@ -342,7 +344,7 @@ async_sequential! {
&SignableTransaction::new( &SignableTransaction::new(
vec![output], vec![output],
&[], &[],
Some(p2tr_script_buf(key).unwrap()), Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
Some(data.clone()), Some(data.clone()),
FEE FEE
).unwrap() ).unwrap()

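The change assertion near the end of this file reduces to simple accounting: whatever the inputs carry beyond the declared payments and the needed fee becomes the change output. Expressed directly (the values below are illustrative, not taken from the test):

```rust
// Change is the input value minus declared payments minus the required fee,
// matching the change_amount computation asserted in the test above.
fn change_amount(input_value: u64, payments: &[u64], needed_fee: u64) -> u64 {
    input_value - payments.iter().sum::<u64>() - needed_fee
}

fn main() {
    // With 10_000 sats in, payments of 1005 and 1007, and a 500 sat fee,
    // the change output carries 7488 sats
    assert_eq!(change_amount(10_000, &[1005, 1007], 500), 7488);
}
```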
View File

@@ -1,3 +1,3 @@
# Solidity build outputs # solidity build outputs
cache cache
artifacts artifacts

View File

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"] authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021" edition = "2021"
publish = false publish = false
rust-version = "1.79" rust-version = "1.74"
[package.metadata.docs.rs] [package.metadata.docs.rs]
all-features = true all-features = true
@@ -18,32 +18,25 @@ workspace = true
[dependencies] [dependencies]
thiserror = { version = "1", default-features = false } thiserror = { version = "1", default-features = false }
eyre = { version = "0.6", default-features = false }
rand_core = { version = "0.6", default-features = false, features = ["std"] } sha3 = { version = "0.10", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["recommended"] }
group = { version = "0.13", default-features = false } group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] } k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["secp256k1"] } frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
alloy-core = { version = "0.7", default-features = false } ethers-core = { version = "2", default-features = false }
alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] } ethers-providers = { version = "2", default-features = false }
alloy-consensus = { version = "0.1", default-features = false, features = ["k256"] } ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
alloy-network = { version = "0.1", default-features = false }
alloy-rpc-types-eth = { version = "0.1", default-features = false }
alloy-rpc-client = { version = "0.1", default-features = false }
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
alloy-provider = { version = "0.1", default-features = false }
alloy-node-bindings = { version = "0.1", default-features = false, optional = true }
[dev-dependencies] [dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] } rand_core = { version = "0.6", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["std"] }
serde_json = { version = "1", default-features = false, features = ["std"] }
sha2 = { version = "0.10", default-features = false, features = ["std"] }
tokio = { version = "1", features = ["macros"] } tokio = { version = "1", features = ["macros"] }
alloy-node-bindings = { version = "0.1", default-features = false }
[features]
tests = ["alloy-node-bindings", "frost/tests"]

View File

@@ -3,12 +3,6 @@
This package contains Ethereum-related functionality, specifically deploying and This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts. interacting with Serai contracts.
While `monero-serai` and `bitcoin-serai` are general purpose libraries,
`ethereum-serai` is Serai specific. If any of the utilities are generally
desired, please fork and maintain your own copy to ensure the desired
functionality is preserved, or open an issue to request we make this library
general purpose.
### Dependencies ### Dependencies
- solc - solc

View File

@@ -1,29 +0,0 @@
[package]
name = "alloy-simple-request-transport"
version = "0.1.0"
description = "A transport for alloy based off simple-request"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/alloy-simple-request-transport"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
tower = "0.4"
serde_json = { version = "1", default-features = false }
simple-request = { path = "../../../common/request", default-features = false }
alloy-json-rpc = { version = "0.1", default-features = false }
alloy-transport = { version = "0.1", default-features = false }
[features]
default = ["tls"]
tls = ["simple-request/tls"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,4 +0,0 @@
# Alloy Simple Request Transport
A transport for alloy based on simple-request, a small HTTP client built around
hyper.

View File

@@ -1,60 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
use core::task;
use std::io;
use alloy_json_rpc::{RequestPacket, ResponsePacket};
use alloy_transport::{TransportError, TransportErrorKind, TransportFut};
use simple_request::{hyper, Request, Client};
use tower::Service;
#[derive(Clone, Debug)]
pub struct SimpleRequest {
client: Client,
url: String,
}
impl SimpleRequest {
pub fn new(url: String) -> Self {
Self { client: Client::with_connection_pool(), url }
}
}
impl Service<RequestPacket> for SimpleRequest {
type Response = ResponsePacket;
type Error = TransportError;
type Future = TransportFut<'static>;
#[inline]
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> task::Poll<Result<(), Self::Error>> {
task::Poll::Ready(Ok(()))
}
#[inline]
fn call(&mut self, req: RequestPacket) -> Self::Future {
let inner = self.clone();
Box::pin(async move {
let packet = req.serialize().map_err(TransportError::SerError)?;
let request = Request::from(
hyper::Request::post(&inner.url)
.header("Content-Type", "application/json")
.body(serde_json::to_vec(&packet).map_err(TransportError::SerError)?.into())
.unwrap(),
);
let mut res = inner
.client
.request(request)
.await
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?
.body()
.await
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?;
serde_json::from_reader(&mut res).map_err(|e| TransportError::deser_err(e, ""))
})
}
}

View File

@@ -1,41 +1,15 @@
use std::process::Command;
fn main() { fn main() {
println!("cargo:rerun-if-changed=contracts/*"); println!("cargo:rerun-if-changed=contracts");
println!("cargo:rerun-if-changed=artifacts/*"); println!("cargo:rerun-if-changed=artifacts");
for line in String::from_utf8(Command::new("solc").args(["--version"]).output().unwrap().stdout)
.unwrap()
.lines()
{
if let Some(version) = line.strip_prefix("Version: ") {
let version = version.split('+').next().unwrap();
assert_eq!(version, "0.8.25");
}
}
#[rustfmt::skip] #[rustfmt::skip]
let args = [ let args = [
"--base-path", ".", "--base-path", ".",
"-o", "./artifacts", "--overwrite", "-o", "./artifacts", "--overwrite",
"--bin", "--abi", "--bin", "--abi",
"--via-ir", "--optimize", "--optimize",
"./contracts/Schnorr.sol"
"./contracts/IERC20.sol",
"./contracts/Schnorr.sol",
"./contracts/Deployer.sol",
"./contracts/Sandbox.sol",
"./contracts/Router.sol",
"./src/tests/contracts/Schnorr.sol",
"./src/tests/contracts/ERC20.sol",
"--no-color",
]; ];
let solc = Command::new("solc").args(args).output().unwrap();
assert!(solc.status.success()); assert!(std::process::Command::new("solc").args(args).status().unwrap().success());
for line in String::from_utf8(solc.stderr).unwrap().lines() {
assert!(!line.starts_with("Error:"));
}
} }

View File

@@ -1,52 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
/*
The expected deployment process of the Router is as follows:
1) A transaction deploying Deployer is made. Then, a deterministic signature is
created such that an account with an unknown private key is the creator of
the contract. Anyone can fund this address, and once anyone does, the
transaction deploying Deployer can be published by anyone. No other
transaction may be made from that account.
2) Anyone deploys the Router through the Deployer. This uses a sequential nonce
such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible.
While such attacks would still be feasible if the Deployer's address was
controllable, the usage of a deterministic signature with a NUMS method
prevents that.
This doesn't have any denial-of-service risks and will resolve once anyone steps
forward as deployer. This does fail to guarantee an identical address across
every chain, though it enables letting anyone efficiently ask the Deployer for
the address (with the Deployer having an identical address on every chain).
Unfortunately, guaranteeing identical addresses isn't feasible. We'd need the
Deployer contract to use a consistent salt for the Router, yet the Router must
be deployed with a specific public key for Serai. Since Ethereum isn't able to
determine a valid public key (one the result of a Serai DKG) from a dishonest
public key, we have to allow multiple deployments with Serai being the one to
determine which to use.
The alternative would be to have a council publish the Serai key on-Ethereum,
with Serai verifying the published result. This would introduce a DoS risk in
the council not publishing the correct key/not publishing any key.
*/
contract Deployer {
event Deployment(bytes32 indexed init_code_hash, address created);
error DeploymentFailed();
function deploy(bytes memory init_code) external {
address created;
assembly {
created := create(0, add(init_code, 0x20), mload(init_code))
}
if (created == address(0)) {
revert DeploymentFailed();
}
// These may be emitted out of order upon re-entrancy
emit Deployment(keccak256(init_code), created);
}
}

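The Deployer's meet-in-the-middle argument rests on CREATE deriving addresses from the deployer's address and a sequential nonce: created = keccak256(rlp([sender, nonce]))[12..]. A hedged sketch of that derivation, using the tiny-keccak crate as an assumed Keccak-256 implementation and hand-rolled RLP valid only for small nonces:

```rust
use tiny_keccak::{Hasher, Keccak};

// CREATE address derivation: keccak256(rlp([sender, nonce]))[12..32].
// The RLP here is hand-encoded and only valid for nonces in 1 ..= 127,
// where a nonce encodes as its own single byte.
fn create_address(sender: [u8; 20], nonce: u8) -> [u8; 20] {
    assert!((1 ..= 127).contains(&nonce));
    // 0xd6 = list prefix (0xc0 + 22-byte payload), 0x94 = 20-byte string prefix
    let rlp = [&[0xd6, 0x94][..], &sender, &[nonce]].concat();

    let mut hash = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(&rlp);
    keccak.finalize(&mut hash);

    let mut address = [0u8; 20];
    address.copy_from_slice(&hash[12 ..]);
    address
}
```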
View File

@@ -1,20 +0,0 @@
// SPDX-License-Identifier: CC0
pragma solidity ^0.8.0;
interface IERC20 {
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
function name() external view returns (string memory);
function symbol() external view returns (string memory);
function decimals() external view returns (uint8);
function totalSupply() external view returns (uint256);
function balanceOf(address owner) external view returns (uint256);
function transfer(address to, uint256 value) external returns (bool);
function transferFrom(address from, address to, uint256 value) external returns (bool);
function approve(address spender, uint256 value) external returns (bool);
function allowance(address owner, address spender) external view returns (uint256);
}

View File

@@ -1,222 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "./IERC20.sol";
import "./Schnorr.sol";
import "./Sandbox.sol";
contract Router {
// Nonce is incremented for each batch of transactions executed/key update
uint256 public nonce;
// Current public key's x-coordinate
// This key must always have the parity defined within the Schnorr contract
bytes32 public seraiKey;
struct OutInstruction {
address to;
Call[] calls;
uint256 value;
}
struct Signature {
bytes32 c;
bytes32 s;
}
event SeraiKeyUpdated(
uint256 indexed nonce,
bytes32 indexed key,
Signature signature
);
event InInstruction(
address indexed from,
address indexed coin,
uint256 amount,
bytes instruction
);
// success is a uint256 representing a bitfield of transaction successes
event Executed(
uint256 indexed nonce,
bytes32 indexed batch,
uint256 success,
Signature signature
);
// error types
error InvalidKey();
error InvalidSignature();
error InvalidAmount();
error FailedTransfer();
error TooManyTransactions();
modifier _updateSeraiKeyAtEndOfFn(
uint256 _nonce,
bytes32 key,
Signature memory sig
) {
if (
(key == bytes32(0)) ||
((bytes32(uint256(key) % Schnorr.Q)) != key)
) {
revert InvalidKey();
}
_;
seraiKey = key;
emit SeraiKeyUpdated(_nonce, key, sig);
}
constructor(bytes32 _seraiKey) _updateSeraiKeyAtEndOfFn(
0,
_seraiKey,
Signature({ c: bytes32(0), s: bytes32(0) })
) {
nonce = 1;
}
// updateSeraiKey validates the given Schnorr signature against the current
// public key, and if successful, updates the contract's public key to the
// given one.
function updateSeraiKey(
bytes32 _seraiKey,
Signature calldata sig
) external _updateSeraiKeyAtEndOfFn(nonce, _seraiKey, sig) {
bytes memory message =
abi.encodePacked("updateSeraiKey", block.chainid, nonce, _seraiKey);
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
}
function inInstruction(
address coin,
uint256 amount,
bytes memory instruction
) external payable {
if (coin == address(0)) {
if (amount != msg.value) {
revert InvalidAmount();
}
} else {
(bool success, bytes memory res) =
address(coin).call(
abi.encodeWithSelector(
IERC20.transferFrom.selector,
msg.sender,
address(this),
amount
)
);
// Require there was nothing returned, which is done by some non-standard
// tokens, or that the ERC20 contract did in fact return true
bool nonStandardResOrTrue =
(res.length == 0) || abi.decode(res, (bool));
if (!(success && nonStandardResOrTrue)) {
revert FailedTransfer();
}
}
/*
Due to fee-on-transfer tokens, emitting the amount directly is frowned upon.
The amount instructed to transfer may not actually be the amount
transferred.
If we add nonReentrant to every single function which can effect the
balance, we can check the amount exactly matches. This prevents transfers of
less value than expected occurring, at least, not without an additional
transfer to top up the difference (which isn't routed through this contract
and accordingly isn't trying to artificially create events).
If we don't add nonReentrant, a transfer can be started, and then a new
transfer for the difference can follow it up (again and again until a
rounding error is reached). This contract would believe all transfers were
done in full, despite each only being done in part (except for the last
one).
Given fee-on-transfer tokens aren't intended to be supported, the only
token planned to be supported is Dai and it doesn't have any fee-on-transfer
logic, fee-on-transfer tokens aren't even able to be supported at this time,
we simply classify this entire class of tokens as non-standard
implementations which induce undefined behavior. It is the Serai network's
role not to add support for any non-standard implementations.
*/
emit InInstruction(msg.sender, coin, amount, instruction);
}
// execute accepts a list of transactions to execute as well as a signature.
// if signature verification passes, the given transactions are executed.
// if signature verification fails, this function will revert.
function execute(
OutInstruction[] calldata transactions,
Signature calldata sig
) external {
if (transactions.length > 256) {
revert TooManyTransactions();
}
bytes memory message =
abi.encode("execute", block.chainid, nonce, transactions);
uint256 executed_with_nonce = nonce;
// This prevents re-entrancy from causing double spends yet does allow
// out-of-order execution via re-entrancy
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
uint256 successes;
for (uint256 i = 0; i < transactions.length; i++) {
bool success;
// If there are no calls, send to `to` the value
if (transactions[i].calls.length == 0) {
(success, ) = transactions[i].to.call{
value: transactions[i].value,
gas: 5_000
}("");
} else {
// If there are calls, ignore `to`. Deploy a new Sandbox and proxy the
// calls through that
//
// We could use a single sandbox in order to reduce gas costs, yet that
// risks one person creating an approval that's hooked before another
// user's intended action executes, in order to drain their coins
//
// While technically, that would be a flaw in the sandboxed flow, this
// is robust and prevents such flaws from being possible
//
// We also don't want people to set state via the Sandbox and expect it to
// be available in the future when anyone else could set a distinct value
Sandbox sandbox = new Sandbox();
(success, ) = address(sandbox).call{
value: transactions[i].value,
// TODO: Have the Call specify the gas up front
gas: 350_000
}(
abi.encodeWithSelector(
Sandbox.sandbox.selector,
transactions[i].calls
)
);
}
assembly {
successes := or(successes, shl(i, success))
}
}
emit Executed(
executed_with_nonce,
keccak256(message),
successes,
sig
);
}
}

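The `Executed` event packs per-transaction results into one word: the assembly `or(successes, shl(i, success))` sets bit i when transaction i succeeded. Reading it back out on the client side is a one-liner (this helper is illustrative, not from the repository):

```rust
// `bitfield` is the uint256 `success` value as 32 big-endian bytes; bit i is
// set iff transaction i in the executed batch succeeded.
fn transaction_succeeded(bitfield: &[u8; 32], i: usize) -> bool {
    assert!(i < 256);
    (bitfield[31 - (i / 8)] >> (i % 8)) & 1 == 1
}
```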
View File

@@ -1,48 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.24;
struct Call {
address to;
uint256 value;
bytes data;
}
// A minimal sandbox focused on gas efficiency.
//
// The first call (index 0) is executed only if one of the other calls
// fails, making it a fallback. All other calls are executed sequentially,
// starting from index 1.
contract Sandbox {
error AlreadyCalled();
error CallsFailed();
function sandbox(Call[] calldata calls) external payable {
// Prevent re-entrancy due to this executing arbitrary calls from anyone
// and anywhere
bool called;
assembly { called := tload(0) }
if (called) {
revert AlreadyCalled();
}
assembly { tstore(0, 1) }
// Execute the calls, starting from 1
for (uint256 i = 1; i < calls.length; i++) {
(bool success, ) =
calls[i].to.call{ value: calls[i].value }(calls[i].data);
// If this call failed, execute the fallback (call 0)
if (!success) {
(success, ) =
calls[0].to.call{ value: address(this).balance }(calls[0].data);
// If this call also failed, revert entirely
if (!success) {
revert CallsFailed();
}
return;
}
}
// We don't clear the re-entrancy guard as this contract should never be
// called again, so there's no reason to spend the effort
}
}
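Since call 0 only ever runs as the fallback, callers place their recovery route there and their intended actions afterwards. A minimal sketch of building such a call list with the `Call`/`OutInstructionTarget` types from the Rust `machine` module later in this diff (the `calls_with_fallback` helper, the recovery address, and the action call are hypothetical, not part of this codebase):

use alloy_core::primitives::U256;

use ethereum_serai::machine::{Call, OutInstructionTarget};

// calls[0] is executed only if a later call fails; the Sandbox forwards its
// whole remaining balance to it (the fallback's own `value` field is unused)
fn calls_with_fallback(recovery: [u8; 20], action: Call) -> OutInstructionTarget {
  let fallback = Call { to: recovery, value: U256::ZERO, data: vec![] };
  OutInstructionTarget::Calls(vec![fallback, action])
}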

View File

@@ -2,43 +2,35 @@
--- removed:
pragma solidity ^0.8.0;

// see https://github.com/noot/schnorr-verify for implementation details
library Schnorr {
  // secp256k1 group order
  uint256 constant public Q =
    0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;

  // Fixed parity for the public keys used in this contract
  // This avoids spending a word passing the parity in a similar style to
  // Bitcoin's Taproot
  uint8 constant public KEY_PARITY = 27;

  error InvalidSOrA();
  error MalformedSignature();

  // px := public key x-coord, where the public key has a parity of KEY_PARITY
  // message := 32-byte hash of the message
  // c := schnorr signature challenge
  // s := schnorr signature
  function verify(
    bytes32 px,
    bytes memory message,
    bytes32 c,
    bytes32 s
  ) internal pure returns (bool) {
    // ecrecover = (m, v, r, s) -> key
    // We instead pass the following to obtain the nonce (not the key)
    // Then we hash it and verify it matches the challenge
    bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
    bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));

    // For safety, we want each input to ecrecover to be non-zero (sa, px, ca)
    // The ecrecover precompile checks `r` and `s` (`px` and `ca`) are non-zero
    // That leaves us to check `sa` is non-zero
    if (sa == 0) revert InvalidSOrA();

    address R = ecrecover(sa, KEY_PARITY, px, ca);
    if (R == address(0)) revert MalformedSignature();

    // Check the signature is correct by rebuilding the challenge
    return c == keccak256(abi.encodePacked(R, px, message));
  }
}

+++ added:
pragma solidity ^0.8.0;

// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
  // secp256k1 group order
  uint256 constant public Q =
    0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;

  // parity := public key y-coord parity (27 or 28)
  // px := public key x-coord
  // message := 32-byte message
  // s := schnorr signature
  // e := schnorr signature challenge
  function verify(
    uint8 parity,
    bytes32 px,
    bytes32 message,
    bytes32 s,
    bytes32 e
  ) public view returns (bool) {
    // ecrecover = (m, v, r, s);
    bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
    bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));

    require(sp != 0);
    // the ecrecover precompile implementation checks that the `r` and `s`
    // inputs are non-zero (in this case, `px` and `ep`), thus we don't need
    // to check if they're zero
    address R = ecrecover(sp, parity, px, ep);
    require(R != address(0), "ecrecover failed");
    return e == keccak256(
      abi.encodePacked(R, uint8(parity), px, block.chainid, message)
    );
  }
}
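The removed library's trick reads cleanly as math: ecrecover(h, v, r, s) returns the address of $r^{-1}(sR' - hG)$, where $R'$ is the curve point recovered from $(r, v)$. Substituting $h = -s \cdot p_x$, $r = p_x$ (so $R' = A$ under the fixed even parity), and the s-parameter $-c \cdot p_x$:

$$\mathrm{addr}\left(p_x^{-1}\left((-c\,p_x)\,A - (-s\,p_x)\,G\right)\right) = \mathrm{addr}(s\,G - c\,A) = \mathrm{addr}(R)$$

for any Schnorr signature satisfying $s\,G = R + c\,A$. Rebuilding the challenge as keccak256(addr(R), px, message) and comparing it against $c$ therefore verifies the signature with a single precompile call.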

View File

@@ -1,30 +0,0 @@
[package]
name = "serai-ethereum-relayer"
version = "0.1.0"
description = "A relayer for Serai's Ethereum transactions"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/relayer"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
tokio = { version = "1", default-features = false, features = ["rt", "time", "io-util", "net", "macros"] }
serai-env = { path = "../../../common/env" }
serai-db = { path = "../../../common/db" }
[features]
parity-db = ["serai-db/parity-db"]
rocksdb = ["serai-db/rocksdb"]

View File

@@ -1,15 +0,0 @@
AGPL-3.0-only license
Copyright (c) 2023-2024 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@@ -1,4 +0,0 @@
# Ethereum Transaction Relayer
This server collects Ethereum router commands to be published, offering an RPC
to fetch them.

View File

@@ -1,100 +0,0 @@
pub(crate) use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
net::TcpListener,
};
use serai_db::{Get, DbTxn, Db as DbTrait};
#[tokio::main(flavor = "current_thread")]
async fn main() {
// Override the panic handler with one which will panic if any tokio task panics
{
let existing = std::panic::take_hook();
std::panic::set_hook(Box::new(move |panic| {
existing(panic);
const MSG: &str = "exiting the process due to a task panicking";
println!("{MSG}");
log::error!("{MSG}");
std::process::exit(1);
}));
}
if std::env::var("RUST_LOG").is_err() {
std::env::set_var("RUST_LOG", serai_env::var("RUST_LOG").unwrap_or_else(|| "info".to_string()));
}
env_logger::init();
log::info!("Starting Ethereum relayer server...");
// Open the DB
#[allow(unused_variables, unreachable_code)]
let db = {
#[cfg(all(feature = "parity-db", feature = "rocksdb"))]
panic!("built with parity-db and rocksdb");
#[cfg(all(feature = "parity-db", not(feature = "rocksdb")))]
let db =
serai_db::new_parity_db(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
#[cfg(feature = "rocksdb")]
let db =
serai_db::new_rocksdb(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
db
};
// Start command recipience server
// This should not be publicly exposed
// TODO: Add auth
tokio::spawn({
let db = db.clone();
async move {
// 5132 ^ ((b'E' << 8) | b'R')
let server = TcpListener::bind("0.0.0.0:20830").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let mut db = db.clone();
loop {
let Ok(msg_len) = socket.read_u32_le().await else { break };
let mut buf = vec![0; usize::try_from(msg_len).unwrap()];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
if buf.len() < 5 {
break;
}
let nonce = u32::from_le_bytes(buf[.. 4].try_into().unwrap());
let mut txn = db.txn();
txn.put(nonce.to_le_bytes(), &buf[4 ..]);
txn.commit();
let Ok(()) = socket.write_all(&[1]).await else { break };
log::info!("received signed command #{nonce}");
}
});
}
}
});
// Start command fetch server
// 5132 ^ ((b'E' << 8) | b'R') + 1
let server = TcpListener::bind("0.0.0.0:20831").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let db = db.clone();
loop {
// Nonce to get the router command for
let mut buf = vec![0; 4];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
let command = db.get(&buf[.. 4]).unwrap_or(vec![]);
let Ok(()) = socket.write_all(&u32::try_from(command.len()).unwrap().to_le_bytes()).await
else {
break;
};
let Ok(()) = socket.write_all(&command).await else { break };
}
});
}
}
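For reference, a minimal sketch of a client for the fetch server above, assuming only the framing visible in this file (a 4-byte little-endian nonce in, a u32 little-endian length-prefixed command out); `fetch_command` is a hypothetical helper, not part of this codebase:

use tokio::{
  io::{AsyncReadExt, AsyncWriteExt},
  net::TcpStream,
};

// Fetch the signed command for `nonce`, returning None if the relayer
// doesn't have it yet (it responds with a zero-length command)
async fn fetch_command(relayer: &str, nonce: u32) -> std::io::Result<Option<Vec<u8>>> {
  let mut socket = TcpStream::connect((relayer, 20831)).await?;
  // The fetch server reads exactly four bytes as a little-endian nonce...
  socket.write_all(&nonce.to_le_bytes()).await?;
  // ... and responds with a u32 little-endian length prefix, then the command
  let len = socket.read_u32_le().await?;
  let mut command = vec![0; usize::try_from(len).unwrap()];
  socket.read_exact(&mut command).await?;
  Ok(if command.is_empty() { None } else { Some(command) })
}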

View File

@@ -1,37 +0,0 @@
use alloy_sol_types::sol;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod erc20_container {
use super::*;
sol!("contracts/IERC20.sol");
}
pub use erc20_container::IERC20 as erc20;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod deployer_container {
use super::*;
sol!("contracts/Deployer.sol");
}
pub use deployer_container::Deployer as deployer;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod router_container {
use super::*;
sol!(Router, "artifacts/Router.abi");
}
pub use router_container::Router as router;

View File

@@ -0,0 +1,36 @@
use thiserror::Error;
use eyre::{eyre, Result};
use ethers_providers::{Provider, Http};
use ethers_contract::abigen;
use crate::crypto::ProcessedSignature;
#[derive(Error, Debug)]
pub enum EthereumError {
#[error("failed to verify Schnorr signature")]
VerificationError,
}
abigen!(Schnorr, "./artifacts/Schnorr.abi");
pub async fn call_verify(
contract: &Schnorr<Provider<Http>>,
params: &ProcessedSignature,
) -> Result<()> {
if contract
.verify(
params.parity + 27,
params.px.to_bytes().into(),
params.message,
params.s.to_bytes().into(),
params.e.to_bytes().into(),
)
.call()
.await?
{
Ok(())
} else {
Err(eyre!(EthereumError::VerificationError))
}
}

View File

@@ -1,188 +1,107 @@
--- removed:
use group::ff::PrimeField;

use k256::{
  elliptic_curve::{ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint},
  ProjectivePoint, Scalar, U256 as KU256,
};
#[cfg(test)]
use k256::{elliptic_curve::point::DecompressPoint, AffinePoint};

use frost::{
  algorithm::{Hram, SchnorrSignature},
  curve::{Ciphersuite, Secp256k1},
};

use alloy_core::primitives::{Parity, Signature as AlloySignature};
use alloy_consensus::{SignableTransaction, Signed, TxLegacy};

use crate::abi::router::{Signature as AbiSignature};

pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
  alloy_core::primitives::keccak256(data).into()
}

pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  <Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(data).into())
}

pub fn address(point: &ProjectivePoint) -> [u8; 20] {
  let encoded_point = point.to_encoded_point(false);
  // Last 20 bytes of the hash of the concatenated x and y coordinates
  // We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
  keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
}

/// Deterministically sign a transaction.
///
/// This function panics if passed a transaction with a non-None chain ID.
pub fn deterministically_sign(tx: &TxLegacy) -> Signed<TxLegacy> {
  assert!(
    tx.chain_id.is_none(),
    "chain ID was Some when deterministically signing a TX (causing a non-deterministic signer)"
  );

  let sig_hash = tx.signature_hash().0;
  let mut r = hash_to_scalar(&[sig_hash.as_slice(), b"r"].concat());
  let mut s = hash_to_scalar(&[sig_hash.as_slice(), b"s"].concat());
  loop {
    let r_bytes: [u8; 32] = r.to_repr().into();
    let s_bytes: [u8; 32] = s.to_repr().into();
    let v = Parity::NonEip155(false);
    let signature =
      AlloySignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), v).unwrap();
    let tx = tx.clone().into_signed(signature);
    if tx.recover_signer().is_ok() {
      return tx;
    }

    // Re-hash until valid
    r = hash_to_scalar(r_bytes.as_ref());
    s = hash_to_scalar(s_bytes.as_ref());
  }
}

/// The public key for a Schnorr-signing account.
#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct PublicKey {
  pub(crate) A: ProjectivePoint,
  pub(crate) px: Scalar,
}

impl PublicKey {
  /// Construct a new `PublicKey`.
  ///
  /// This will return None if the provided point isn't eligible to be a public key (due to
  /// bounds such as parity).
  #[allow(non_snake_case)]
  pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
    let affine = A.to_affine();
    // Only allow even keys to save a word within Ethereum
    let is_odd = bool::from(affine.y_is_odd());
    if is_odd {
      None?;
    }

    let x_coord = affine.x();
    let x_coord_scalar = <Scalar as Reduce<KU256>>::reduce_bytes(&x_coord);
    // Return None if a reduction would occur
    // Reductions would be incredibly unlikely and shouldn't be an issue, yet it's one less
    // headache/concern to have
    // This does ban a trivial amount of public keys
    if x_coord_scalar.to_repr() != x_coord {
      None?;
    }

    Some(PublicKey { A, px: x_coord_scalar })
  }

  pub fn point(&self) -> ProjectivePoint {
    self.A
  }

  pub(crate) fn eth_repr(&self) -> [u8; 32] {
    self.px.to_repr().into()
  }

  #[cfg(test)]
  pub(crate) fn from_eth_repr(repr: [u8; 32]) -> Option<Self> {
    #[allow(non_snake_case)]
    let A = Option::<AffinePoint>::from(AffinePoint::decompress(&repr.into(), 0.into()))?.into();
    Option::from(Scalar::from_repr(repr.into())).map(|px| PublicKey { A, px })
  }
}

/// The HRAm to use for the Schnorr contract.
#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
  #[allow(non_snake_case)]
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let x_coord = A.to_affine().x();

    let mut data = address(R).to_vec();
    data.extend(x_coord.as_slice());
    data.extend(m);

    <Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(&data).into())
  }
}

/// A signature for the Schnorr contract.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Signature {
  pub(crate) c: Scalar,
  pub(crate) s: Scalar,
}
impl Signature {
  pub fn verify(&self, public_key: &PublicKey, message: &[u8]) -> bool {
    #[allow(non_snake_case)]
    let R = (Secp256k1::generator() * self.s) - (public_key.A * self.c);
    EthereumHram::hram(&R, &public_key.A, message) == self.c
  }

  /// Construct a new `Signature`.
  ///
  /// This will return None if the signature is invalid.
  pub fn new(
    public_key: &PublicKey,
    message: &[u8],
    signature: SchnorrSignature<Secp256k1>,
  ) -> Option<Signature> {
    let c = EthereumHram::hram(&signature.R, &public_key.A, message);
    if !signature.verify(public_key.A, c) {
      None?;
    }

    let res = Signature { c, s: signature.s };
    assert!(res.verify(public_key, message));
    Some(res)
  }

  pub fn c(&self) -> Scalar {
    self.c
  }
  pub fn s(&self) -> Scalar {
    self.s
  }

  pub fn to_bytes(&self) -> [u8; 64] {
    let mut res = [0; 64];
    res[.. 32].copy_from_slice(self.c.to_repr().as_ref());
    res[32 ..].copy_from_slice(self.s.to_repr().as_ref());
    res
  }

  pub fn from_bytes(bytes: [u8; 64]) -> std::io::Result<Self> {
    let mut reader = bytes.as_slice();
    let c = Secp256k1::read_F(&mut reader)?;
    let s = Secp256k1::read_F(&mut reader)?;
    Ok(Signature { c, s })
  }
}
impl From<&Signature> for AbiSignature {
  fn from(sig: &Signature) -> AbiSignature {
    let c: [u8; 32] = sig.c.to_repr().into();
    let s: [u8; 32] = sig.s.to_repr().into();
    AbiSignature { c: c.into(), s: s.into() }
  }
}

+++ added:
use sha3::{Digest, Keccak256};

use group::Group;
use k256::{
  elliptic_curve::{
    bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
  },
  AffinePoint, ProjectivePoint, Scalar, U256,
};

use frost::{algorithm::Hram, curve::Secp256k1};

pub fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}

pub fn address(point: &ProjectivePoint) -> [u8; 20] {
  let encoded_point = point.to_encoded_point(false);
  keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}

pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
  if r.is_zero().into() || s.is_zero().into() {
    return None;
  }

  #[allow(non_snake_case)]
  let R = AffinePoint::decompress(&r.to_bytes(), v.into());
  #[allow(non_snake_case)]
  if let Some(R) = Option::<AffinePoint>::from(R) {
    #[allow(non_snake_case)]
    let R = ProjectivePoint::from(R);

    let r = r.invert().unwrap();
    let u1 = ProjectivePoint::GENERATOR * (-message * r);
    let u2 = R * (s * r);
    let key: ProjectivePoint = u1 + u2;
    if !bool::from(key.is_identity()) {
      return Some(address(&key));
    }
  }

  None
}

#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
  #[allow(non_snake_case)]
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let a_encoded_point = A.to_encoded_point(true);
    let mut a_encoded = a_encoded_point.as_ref().to_owned();
    a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
    let mut data = address(R).to_vec();
    data.append(&mut a_encoded);
    data.append(&mut m.to_vec());
    Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
  }
}

pub struct ProcessedSignature {
  pub s: Scalar,
  pub px: Scalar,
  pub parity: u8,
  pub message: [u8; 32],
  pub e: Scalar,
}

#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> (Scalar, Scalar) {
  let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
  let sr = processed_sig.s.mul(&processed_sig.px).negate();
  let er = processed_sig.e.mul(&processed_sig.px).negate();
  (sr, er)
}

#[allow(non_snake_case)]
pub fn process_signature_for_contract(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> ProcessedSignature {
  let encoded_pk = A.to_encoded_point(true);
  let px = &encoded_pk.as_ref()[1 .. 33];
  let px_scalar = Scalar::reduce(U256::from_be_slice(px));
  let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
  ProcessedSignature {
    s,
    px: px_scalar,
    parity: &encoded_pk.as_ref()[0] - 2,
    message: m,
    e,
  }
}

View File

@@ -1,113 +0,0 @@
use std::sync::Arc;
use alloy_core::primitives::{hex::FromHex, Address, B256, U256, Bytes, TxKind};
use alloy_consensus::{Signed, TxLegacy};
use alloy_sol_types::{SolCall, SolEvent};
use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::{
Error,
crypto::{self, keccak256, PublicKey},
router::Router,
};
pub use crate::abi::deployer as abi;
/// The Deployer contract for the Router contract.
///
/// This Deployer has a deterministic address, letting it be immediately identified on any
/// compatible chain. It then supports retrieving the Router contract's address (which isn't
/// deterministic) using a single log query.
#[derive(Clone, Debug)]
pub struct Deployer;
impl Deployer {
/// Obtain the transaction to deploy this contract, already signed.
///
/// The account this transaction is sent from (which is populated in `from`) must be sufficiently
/// funded for this transaction to be submitted. This account has no known private key to anyone,
/// so ETH sent can be neither misappropriated nor returned.
pub fn deployment_tx() -> Signed<TxLegacy> {
let bytecode = include_str!("../artifacts/Deployer.bin");
let bytecode =
Bytes::from_hex(bytecode).expect("compiled-in Deployer bytecode wasn't valid hex");
let tx = TxLegacy {
chain_id: None,
nonce: 0,
gas_price: 100_000_000_000u128,
// TODO: Use a more accurate gas limit
gas_limit: 1_000_000u128,
to: TxKind::Create,
value: U256::ZERO,
input: bytecode,
};
crypto::deterministically_sign(&tx)
}
/// Obtain the deterministic address for this contract.
pub fn address() -> [u8; 20] {
let deployer_deployer =
Self::deployment_tx().recover_signer().expect("deployment_tx didn't have a valid signature");
**Address::create(&deployer_deployer, 0)
}
/// Construct a new view of the `Deployer`.
pub async fn new(provider: Arc<RootProvider<SimpleRequest>>) -> Result<Option<Self>, Error> {
let address = Self::address();
let code = provider.get_code_at(address.into()).await.map_err(|_| Error::ConnectionError)?;
// Contract has yet to be deployed
if code.is_empty() {
return Ok(None);
}
Ok(Some(Self))
}
/// Yield the `ContractCall` necessary to deploy the Router.
pub fn deploy_router(&self, key: &PublicKey) -> TxLegacy {
TxLegacy {
to: TxKind::Call(Self::address().into()),
input: abi::deployCall::new((Router::init_code(key).into(),)).abi_encode().into(),
gas_limit: 1_000_000,
..Default::default()
}
}
/// Find the first Router deployed with the specified key as its first key.
///
/// This is the Router Serai will use, and is the only way to construct a `Router`.
pub async fn find_router(
&self,
provider: Arc<RootProvider<SimpleRequest>>,
key: &PublicKey,
) -> Result<Option<Router>, Error> {
let init_code = Router::init_code(key);
let init_code_hash = keccak256(&init_code);
#[cfg(not(test))]
let to_block = BlockNumberOrTag::Finalized;
#[cfg(test)]
let to_block = BlockNumberOrTag::Latest;
// Find the first log using this init code (where the init code is binding to the key)
// TODO: Make an abstraction for event filtering (de-duplicating common code)
let filter =
Filter::new().from_block(0).to_block(to_block).address(Address::from(Self::address()));
let filter = filter.event_signature(abi::Deployment::SIGNATURE_HASH);
let filter = filter.topic1(B256::from(init_code_hash));
let logs = provider.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let Some(first_log) = logs.first() else { return Ok(None) };
let router = first_log
.log_decode::<abi::Deployment>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.created;
Ok(Some(Router::new(provider, router)))
}
}
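A sketch of the keyless deployment flow this enables, mirroring the test setup later in this diff (the `deploy_deployer` helper is hypothetical; the recovered signer must already hold gas_limit * gas_price wei):

use std::sync::Arc;

use alloy_core::primitives::U256;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};

use ethereum_serai::deployer::Deployer;

// Broadcast the pre-signed deployment TX, assuming its signer was funded
async fn deploy_deployer(provider: Arc<RootProvider<SimpleRequest>>) {
  let tx = Deployer::deployment_tx();
  // The signer is recovered from the deterministic signature; no one knows
  // its private key, so funds sent there can only pay for this deployment
  let _signer = tx.recover_signer().expect("deployment TX's signature was invalid");
  let _required = U256::from(tx.tx().gas_limit) * U256::from(tx.tx().gas_price);
  let (tx, sig, _) = tx.into_parts();
  let mut bytes = vec![];
  tx.encode_with_signature_fields(&sig, &mut bytes);
  let pending = provider.send_raw_transaction(&bytes).await.expect("couldn't broadcast");
  assert!(pending.get_receipt().await.expect("no receipt").status());
}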

View File

@@ -1,105 +0,0 @@
use std::{sync::Arc, collections::HashSet};
use alloy_core::primitives::{Address, B256, U256};
use alloy_sol_types::{SolInterface, SolEvent};
use alloy_rpc_types_eth::Filter;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::Error;
pub use crate::abi::erc20 as abi;
use abi::{IERC20Calls, Transfer, transferCall, transferFromCall};
#[derive(Clone, Debug)]
pub struct TopLevelErc20Transfer {
pub id: [u8; 32],
pub from: [u8; 20],
pub amount: U256,
pub data: Vec<u8>,
}
/// A view for an ERC20 contract.
#[derive(Clone, Debug)]
pub struct Erc20(Arc<RootProvider<SimpleRequest>>, Address);
impl Erc20 {
/// Construct a new view of the specified ERC20 contract.
pub fn new(provider: Arc<RootProvider<SimpleRequest>>, address: [u8; 20]) -> Self {
Self(provider, Address::from(&address))
}
pub async fn top_level_transfers(
&self,
block: u64,
to: [u8; 20],
) -> Result<Vec<TopLevelErc20Transfer>, Error> {
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(Transfer::SIGNATURE_HASH);
let mut to_topic = [0; 32];
to_topic[12 ..].copy_from_slice(&to);
let filter = filter.topic2(B256::from(to_topic));
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let mut handled = HashSet::new();
let mut top_level_transfers = vec![];
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?;
let tx =
self.0.get_transaction_by_hash(tx_id).await.ok().flatten().ok_or(Error::ConnectionError)?;
// If this is a top-level call...
if tx.to == Some(self.1) {
// And we recognize the call...
// Don't validate the encoding as this can't be re-encoded to an identical bytestring due
// to the InInstruction appended
if let Ok(call) = IERC20Calls::abi_decode(&tx.input, false) {
// Extract the top-level call's from/to/value
let (from, call_to, value) = match call {
IERC20Calls::transfer(transferCall { to: call_to, value }) => (tx.from, call_to, value),
IERC20Calls::transferFrom(transferFromCall { from, to: call_to, value }) => {
(from, call_to, value)
}
// Treat any other function selectors as unrecognized
_ => continue,
};
let log = log.log_decode::<Transfer>().map_err(|_| Error::ConnectionError)?.inner.data;
// Ensure the top-level transfer is equivalent, and this presumably isn't a log for an
// internal transfer
if (log.from != from) || (call_to != to) || (value != log.value) {
continue;
}
// Now that the top-level transfer is confirmed to be equivalent to the log, ensure it's
// the only log we handle
if handled.contains(&tx_id) {
continue;
}
handled.insert(tx_id);
// Read the data appended after
let encoded = call.abi_encode();
let data = tx.input.as_ref()[encoded.len() ..].to_vec();
// Push the transfer
top_level_transfers.push(TopLevelErc20Transfer {
// Since we'll only handle one log for this TX, set the ID to the TX ID
id: *tx_id,
from: *log.from.0,
amount: log.value,
data,
});
}
}
}
Ok(top_level_transfers)
}
}
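For illustration, a sketch of how a depositor forms the calldata this parses: the ABI-encoded transfer call with the InInstruction's bytes appended, so the decode above succeeds on the prefix and the suffix is read back as the instruction (the `transfer_with_instruction` helper is hypothetical, and `instruction` is whatever bytes the depositor wishes to attach):

use alloy_core::primitives::{Address, U256};
use alloy_sol_types::SolCall;

use ethereum_serai::erc20::abi::transferCall;

// Build `tx.input` for a top-level transfer carrying an appended instruction
fn transfer_with_instruction(to: [u8; 20], value: U256, instruction: &[u8]) -> Vec<u8> {
  let mut input = transferCall::new((Address::from(&to), value)).abi_encode();
  // The suffix past the ABI-encoded call is the data read back above
  input.extend(instruction);
  input
}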

View File

@@ -1,35 +1,2 @@
--- removed:
use thiserror::Error;

pub mod alloy {
  pub use alloy_core::primitives;
  pub use alloy_core as core;
  pub use alloy_sol_types as sol_types;

  pub use alloy_consensus as consensus;
  pub use alloy_network as network;
  pub use alloy_rpc_types_eth as rpc_types;
  pub use alloy_simple_request_transport as simple_request_transport;
  pub use alloy_rpc_client as rpc_client;
  pub use alloy_provider as provider;
}

pub mod crypto;

pub(crate) mod abi;

pub mod erc20;
pub mod deployer;
pub mod router;

pub mod machine;

#[cfg(any(test, feature = "tests"))]
pub mod tests;

#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub enum Error {
  #[error("failed to verify Schnorr signature")]
  InvalidSignature,
  #[error("couldn't make call/send TX")]
  ConnectionError,
}

+++ added:
pub mod contract;
pub mod crypto;

View File

@@ -1,414 +0,0 @@
use std::{
io::{self, Read},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use transcript::{Transcript, RecommendedTranscript};
use group::GroupEncoding;
use frost::{
curve::{Ciphersuite, Secp256k1},
Participant, ThresholdKeys, FrostError,
algorithm::Schnorr,
sign::*,
};
use alloy_core::primitives::U256;
use crate::{
crypto::{PublicKey, EthereumHram, Signature},
router::{
abi::{Call as AbiCall, OutInstruction as AbiOutInstruction},
Router,
},
};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Call {
pub to: [u8; 20],
pub value: U256,
pub data: Vec<u8>,
}
impl Call {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut to = [0; 20];
reader.read_exact(&mut to)?;
let value = {
let mut value_bytes = [0; 32];
reader.read_exact(&mut value_bytes)?;
U256::from_le_slice(&value_bytes)
};
let mut data_len = {
let mut data_len = [0; 4];
reader.read_exact(&mut data_len)?;
usize::try_from(u32::from_le_bytes(data_len)).expect("u32 couldn't fit within a usize")
};
// A valid DoS would be to claim a 4 GB data is present for only 4 bytes
// We read this in 1 KB chunks to only read data actually present (with a max DoS of 1 KB)
let mut data = vec![];
while data_len > 0 {
let chunk_len = data_len.min(1024);
let mut chunk = vec![0; chunk_len];
reader.read_exact(&mut chunk)?;
data.extend(&chunk);
data_len -= chunk_len;
}
Ok(Call { to, value, data })
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.to)?;
writer.write_all(&self.value.as_le_bytes())?;
let data_len = u32::try_from(self.data.len())
.map_err(|_| io::Error::other("call data length exceeded 2**32"))?;
writer.write_all(&data_len.to_le_bytes())?;
writer.write_all(&self.data)
}
}
impl From<Call> for AbiCall {
fn from(call: Call) -> AbiCall {
AbiCall { to: call.to.into(), value: call.value, data: call.data.into() }
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum OutInstructionTarget {
Direct([u8; 20]),
Calls(Vec<Call>),
}
impl OutInstructionTarget {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
match kind[0] {
0 => {
let mut addr = [0; 20];
reader.read_exact(&mut addr)?;
Ok(OutInstructionTarget::Direct(addr))
}
1 => {
let mut calls_len = [0; 4];
reader.read_exact(&mut calls_len)?;
let calls_len = u32::from_le_bytes(calls_len);
let mut calls = vec![];
for _ in 0 .. calls_len {
calls.push(Call::read(reader)?);
}
Ok(OutInstructionTarget::Calls(calls))
}
_ => Err(io::Error::other("unrecognized OutInstructionTarget"))?,
}
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
OutInstructionTarget::Direct(addr) => {
writer.write_all(&[0])?;
writer.write_all(addr)?;
}
OutInstructionTarget::Calls(calls) => {
writer.write_all(&[1])?;
let call_len = u32::try_from(calls.len())
.map_err(|_| io::Error::other("amount of calls exceeded 2**32"))?;
writer.write_all(&call_len.to_le_bytes())?;
for call in calls {
call.write(writer)?;
}
}
}
Ok(())
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OutInstruction {
pub target: OutInstructionTarget,
pub value: U256,
}
impl OutInstruction {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let target = OutInstructionTarget::read(reader)?;
let value = {
let mut value_bytes = [0; 32];
reader.read_exact(&mut value_bytes)?;
U256::from_le_slice(&value_bytes)
};
Ok(OutInstruction { target, value })
}
fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
self.target.write(writer)?;
writer.write_all(&self.value.as_le_bytes())
}
}
impl From<OutInstruction> for AbiOutInstruction {
fn from(instruction: OutInstruction) -> AbiOutInstruction {
match instruction.target {
OutInstructionTarget::Direct(addr) => {
AbiOutInstruction { to: addr.into(), calls: vec![], value: instruction.value }
}
OutInstructionTarget::Calls(calls) => AbiOutInstruction {
to: [0; 20].into(),
calls: calls.into_iter().map(Into::into).collect(),
value: instruction.value,
},
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum RouterCommand {
UpdateSeraiKey { chain_id: U256, nonce: U256, key: PublicKey },
Execute { chain_id: U256, nonce: U256, outs: Vec<OutInstruction> },
}
impl RouterCommand {
pub fn msg(&self) -> Vec<u8> {
match self {
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
Router::update_serai_key_message(*chain_id, *nonce, key)
}
RouterCommand::Execute { chain_id, nonce, outs } => Router::execute_message(
*chain_id,
*nonce,
outs.iter().map(|out| out.clone().into()).collect(),
),
}
}
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
match kind[0] {
0 => {
let mut chain_id = [0; 32];
reader.read_exact(&mut chain_id)?;
let mut nonce = [0; 32];
reader.read_exact(&mut nonce)?;
let key = PublicKey::new(Secp256k1::read_G(reader)?)
.ok_or(io::Error::other("key for RouterCommand doesn't have an eth representation"))?;
Ok(RouterCommand::UpdateSeraiKey {
chain_id: U256::from_le_slice(&chain_id),
nonce: U256::from_le_slice(&nonce),
key,
})
}
1 => {
let mut chain_id = [0; 32];
reader.read_exact(&mut chain_id)?;
let chain_id = U256::from_le_slice(&chain_id);
let mut nonce = [0; 32];
reader.read_exact(&mut nonce)?;
let nonce = U256::from_le_slice(&nonce);
let mut outs_len = [0; 4];
reader.read_exact(&mut outs_len)?;
let outs_len = u32::from_le_bytes(outs_len);
let mut outs = vec![];
for _ in 0 .. outs_len {
outs.push(OutInstruction::read(reader)?);
}
Ok(RouterCommand::Execute { chain_id, nonce, outs })
}
_ => Err(io::Error::other("reading unknown type of RouterCommand"))?,
}
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
writer.write_all(&[0])?;
writer.write_all(&chain_id.as_le_bytes())?;
writer.write_all(&nonce.as_le_bytes())?;
writer.write_all(&key.A.to_bytes())
}
RouterCommand::Execute { chain_id, nonce, outs } => {
writer.write_all(&[1])?;
writer.write_all(&chain_id.as_le_bytes())?;
writer.write_all(&nonce.as_le_bytes())?;
writer.write_all(&u32::try_from(outs.len()).unwrap().to_le_bytes())?;
for out in outs {
out.write(writer)?;
}
Ok(())
}
}
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SignedRouterCommand {
command: RouterCommand,
signature: Signature,
}
impl SignedRouterCommand {
pub fn new(key: &PublicKey, command: RouterCommand, signature: &[u8; 64]) -> Option<Self> {
let c = Secp256k1::read_F(&mut &signature[.. 32]).ok()?;
let s = Secp256k1::read_F(&mut &signature[32 ..]).ok()?;
let signature = Signature { c, s };
if !signature.verify(key, &command.msg()) {
None?
}
Some(SignedRouterCommand { command, signature })
}
pub fn command(&self) -> &RouterCommand {
&self.command
}
pub fn signature(&self) -> &Signature {
&self.signature
}
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let command = RouterCommand::read(reader)?;
let mut sig = [0; 64];
reader.read_exact(&mut sig)?;
let signature = Signature::from_bytes(sig)?;
Ok(SignedRouterCommand { command, signature })
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
self.command.write(writer)?;
writer.write_all(&self.signature.to_bytes())
}
}
pub struct RouterCommandMachine {
key: PublicKey,
command: RouterCommand,
machine: AlgorithmMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl RouterCommandMachine {
pub fn new(keys: ThresholdKeys<Secp256k1>, command: RouterCommand) -> Option<Self> {
// The Schnorr algorithm should be fine without this, even when using the IETF variant
// Yet as this is better and more comprehensive, we do it even if not strictly necessary
let mut transcript = RecommendedTranscript::new(b"ethereum-serai RouterCommandMachine v0.1");
let key = keys.group_key();
transcript.append_message(b"key", key.to_bytes());
transcript.append_message(b"command", command.serialize());
Some(Self {
key: PublicKey::new(key)?,
command,
machine: AlgorithmMachine::new(Schnorr::new(transcript), keys),
})
}
}
impl PreprocessMachine for RouterCommandMachine {
type Preprocess = Preprocess<Secp256k1, ()>;
type Signature = SignedRouterCommand;
type SignMachine = RouterCommandSignMachine;
fn preprocess<R: RngCore + CryptoRng>(
self,
rng: &mut R,
) -> (Self::SignMachine, Self::Preprocess) {
let (machine, preprocess) = self.machine.preprocess(rng);
(RouterCommandSignMachine { key: self.key, command: self.command, machine }, preprocess)
}
}
pub struct RouterCommandSignMachine {
key: PublicKey,
command: RouterCommand,
machine: AlgorithmSignMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignMachine<SignedRouterCommand> for RouterCommandSignMachine {
type Params = ();
type Keys = ThresholdKeys<Secp256k1>;
type Preprocess = Preprocess<Secp256k1, ()>;
type SignatureShare = SignatureShare<Secp256k1>;
type SignatureMachine = RouterCommandSignatureMachine;
fn cache(self) -> CachedPreprocess {
unimplemented!(
"RouterCommand machines don't support caching their preprocesses due to {}",
"being already bound to a specific command"
);
}
fn from_cache(
(): (),
_: ThresholdKeys<Secp256k1>,
_: CachedPreprocess,
) -> (Self, Self::Preprocess) {
unimplemented!(
"RouterCommand machines don't support caching their preprocesses due to {}",
"being already bound to a specific command"
);
}
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.machine.read_preprocess(reader)
}
fn sign(
self,
commitments: HashMap<Participant, Self::Preprocess>,
msg: &[u8],
) -> Result<(RouterCommandSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
panic!("message was passed to a RouterCommand machine when it generates its own");
}
let (machine, share) = self.machine.sign(commitments, &self.command.msg())?;
Ok((RouterCommandSignatureMachine { key: self.key, command: self.command, machine }, share))
}
}
pub struct RouterCommandSignatureMachine {
key: PublicKey,
command: RouterCommand,
machine:
AlgorithmSignatureMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignatureMachine<SignedRouterCommand> for RouterCommandSignatureMachine {
type SignatureShare = SignatureShare<Secp256k1>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.machine.read_share(reader)
}
fn complete(
self,
shares: HashMap<Participant, Self::SignatureShare>,
) -> Result<SignedRouterCommand, FrostError> {
let sig = self.machine.complete(shares)?;
let signature = Signature::new(&self.key, &self.command.msg(), sig)
.expect("machine produced an invalid signature");
Ok(SignedRouterCommand { command: self.command, signature })
}
}
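A sketch of the full signing flow these machines implement, following the standard FROST machine API used above. This drives every signer in one process for illustration (in practice, each participant runs their own machine and the preprocesses/shares cross the network), and it assumes `Preprocess` and `SignatureShare` implement `Clone` (they're wire-serializable types); the `sign_command` helper is hypothetical:

use std::collections::HashMap;

use rand_core::OsRng;

use frost::{curve::Secp256k1, Participant, ThresholdKeys, sign::*};

use ethereum_serai::machine::{RouterCommand, RouterCommandMachine, SignedRouterCommand};

fn sign_command(
  keys: HashMap<Participant, ThresholdKeys<Secp256k1>>,
  command: RouterCommand,
) -> SignedRouterCommand {
  // Round 1: each signer generates and broadcasts a preprocess
  let mut machines = HashMap::new();
  let mut preprocesses = HashMap::new();
  for (i, keys) in keys {
    let (machine, preprocess) =
      RouterCommandMachine::new(keys, command.clone()).unwrap().preprocess(&mut OsRng);
    machines.insert(i, machine);
    preprocesses.insert(i, preprocess);
  }

  // Round 2: each signer consumes the others' preprocesses, yielding a share
  // The message is empty as the machine signs the command's own message
  let mut signature_machines = HashMap::new();
  let mut shares = HashMap::new();
  for (i, machine) in machines {
    let mut others = preprocesses.clone();
    others.remove(&i);
    let (machine, share) = machine.sign(others, &[]).unwrap();
    signature_machines.insert(i, machine);
    shares.insert(i, share);
  }

  // Round 3: any signer completes the signature from the others' shares
  let (i, machine) = signature_machines.into_iter().next().unwrap();
  let mut others = shares;
  others.remove(&i);
  machine.complete(others).unwrap()
}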

View File

@@ -1,443 +0,0 @@
use std::{sync::Arc, io, collections::HashSet};
use k256::{
elliptic_curve::{group::GroupEncoding, sec1},
ProjectivePoint,
};
use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
#[cfg(test)]
use alloy_core::primitives::B256;
use alloy_consensus::TxLegacy;
use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
use alloy_rpc_types_eth::Filter;
#[cfg(test)]
use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
pub use crate::{
Error,
crypto::{PublicKey, Signature},
abi::{erc20::Transfer, router as abi},
};
use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
Ether,
Erc20([u8; 20]),
}
impl Coin {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
Ok(match kind[0] {
0 => Coin::Ether,
1 => {
let mut address = [0; 20];
reader.read_exact(&mut address)?;
Coin::Erc20(address)
}
_ => Err(io::Error::other("unrecognized Coin type"))?,
})
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
Coin::Ether => writer.write_all(&[0]),
Coin::Erc20(token) => {
writer.write_all(&[1])?;
writer.write_all(token)
}
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InInstruction {
pub id: ([u8; 32], u64),
pub from: [u8; 20],
pub coin: Coin,
pub amount: U256,
pub data: Vec<u8>,
pub key_at_end_of_block: ProjectivePoint,
}
impl InInstruction {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let id = {
let mut id_hash = [0; 32];
reader.read_exact(&mut id_hash)?;
let mut id_pos = [0; 8];
reader.read_exact(&mut id_pos)?;
let id_pos = u64::from_le_bytes(id_pos);
(id_hash, id_pos)
};
let mut from = [0; 20];
reader.read_exact(&mut from)?;
let coin = Coin::read(reader)?;
let mut amount = [0; 32];
reader.read_exact(&mut amount)?;
let amount = U256::from_le_slice(&amount);
let mut data_len = [0; 4];
reader.read_exact(&mut data_len)?;
let data_len = usize::try_from(u32::from_le_bytes(data_len))
.map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
let mut data = vec![0; data_len];
reader.read_exact(&mut data)?;
let mut key_at_end_of_block = <ProjectivePoint as GroupEncoding>::Repr::default();
reader.read_exact(&mut key_at_end_of_block)?;
let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block))
.ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?;
Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block })
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.id.0)?;
writer.write_all(&self.id.1.to_le_bytes())?;
writer.write_all(&self.from)?;
self.coin.write(writer)?;
writer.write_all(&self.amount.as_le_bytes())?;
writer.write_all(
&u32::try_from(self.data.len())
.map_err(|_| {
io::Error::other("InInstruction being written had data exceeding 2**32 in length")
})?
.to_le_bytes(),
)?;
writer.write_all(&self.data)?;
writer.write_all(&self.key_at_end_of_block.to_bytes())
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Executed {
pub tx_id: [u8; 32],
pub nonce: u64,
pub signature: [u8; 64],
}
/// The contract Serai uses to manage its state.
#[derive(Clone, Debug)]
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
impl Router {
pub(crate) fn code() -> Vec<u8> {
let bytecode = include_str!("../artifacts/Router.bin");
Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
}
pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
let mut bytecode = Self::code();
// Append the constructor arguments
bytecode.extend((abi::constructorCall { _seraiKey: key.eth_repr().into() }).abi_encode());
bytecode
}
// This isn't pub in order to force users to use `Deployer::find_router`.
pub(crate) fn new(provider: Arc<RootProvider<SimpleRequest>>, address: Address) -> Self {
Self(provider, address)
}
pub fn address(&self) -> [u8; 20] {
**self.1
}
/// Get the key for Serai at the specified block.
#[cfg(test)]
pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError)
}
/// Get the message to be signed in order to update the key for Serai.
pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
let mut buffer = b"updateSeraiKey".to_vec();
buffer.extend(&chain_id.to_be_bytes::<32>());
buffer.extend(&nonce.to_be_bytes::<32>());
buffer.extend(&key.eth_repr());
buffer
}
/// Update the key representing Serai.
pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
// TODO: Set a more accurate gas
TxLegacy {
to: TxKind::Call(self.1),
input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
.abi_encode()
.into(),
gas_limit: 100_000,
..Default::default()
}
}
/// Get the current nonce for the published batches.
#[cfg(test)]
pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
Ok(res._0)
}
/// Get the message to be signed in order to execute a batch of `OutInstruction`s.
pub(crate) fn execute_message(
chain_id: U256,
nonce: U256,
outs: Vec<abi::OutInstruction>,
) -> Vec<u8> {
("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
}
/// Execute a batch of `OutInstruction`s.
pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy {
TxLegacy {
to: TxKind::Call(self.1),
input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
// TODO
gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
..Default::default()
}
}
pub async fn key_at_end_of_block(&self, block: u64) -> Result<Option<ProjectivePoint>, Error> {
let filter = Filter::new().from_block(0).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
if all_keys.is_empty() {
return Ok(None);
};
let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?;
let last_key_x_coordinate = last_key_x_coordinate_log
.log_decode::<SeraiKeyUpdated>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.key;
let mut compressed_point = <ProjectivePoint as GroupEncoding>::Repr::default();
compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY);
compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice());
let key =
Option::from(ProjectivePoint::from_bytes(&compressed_point)).ok_or(Error::ConnectionError)?;
Ok(Some(key))
}
pub async fn in_instructions(
&self,
block: u64,
allowed_tokens: &HashSet<[u8; 20]>,
) -> Result<Vec<InInstruction>, Error> {
let Some(key_at_end_of_block) = self.key_at_end_of_block(block).await? else {
return Ok(vec![]);
};
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let mut transfer_check = HashSet::new();
let mut in_instructions = vec![];
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let id = (
log.block_hash.ok_or(Error::ConnectionError)?.into(),
log.log_index.ok_or(Error::ConnectionError)?,
);
let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?;
let tx = self
.0
.get_transaction_by_hash(tx_hash)
.await
.ok()
.flatten()
.ok_or(Error::ConnectionError)?;
let log =
log.log_decode::<InInstructionEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let coin = if log.coin.0 == [0; 20] {
Coin::Ether
} else {
let token = *log.coin.0;
if !allowed_tokens.contains(&token) {
continue;
}
// If this also counts as a top-level transfer via the token, drop it
//
// Necessary in order to handle a potential edge case with some theoretical token
// implementations
//
// This will either let it be handled by the top-level transfer hook or will drop it
// entirely on the side of caution
if tx.to == Some(token.into()) {
continue;
}
// Get all logs for this TX
let receipt = self
.0
.get_transaction_receipt(tx_hash)
.await
.map_err(|_| Error::ConnectionError)?
.ok_or(Error::ConnectionError)?;
let tx_logs = receipt.inner.logs();
// Find a matching transfer log
let mut found_transfer = false;
for tx_log in tx_logs {
let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?;
// Ensure we didn't already use this transfer to check a distinct InInstruction event
if transfer_check.contains(&log_index) {
continue;
}
// Check if this log is from the token we expected to be transferred
if tx_log.address().0 != token {
continue;
}
// Check if this is a transfer log
// https://github.com/alloy-rs/core/issues/589
if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
continue;
}
let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
// Check if this is a transfer to us for the expected amount
if (transfer.to == self.1) && (transfer.value == log.amount) {
transfer_check.insert(log_index);
found_transfer = true;
break;
}
}
if !found_transfer {
// This shouldn't be a ConnectionError
// This is an exploit, a non-conforming ERC20, or an invalid connection
// This should halt the process which is sufficient, yet this is sub-optimal
// TODO
Err(Error::ConnectionError)?;
}
Coin::Erc20(token)
};
in_instructions.push(InInstruction {
id,
from: *log.from.0,
coin,
amount: log.amount,
data: log.instruction.as_ref().to_vec(),
key_at_end_of_block,
});
}
Ok(in_instructions)
}
pub async fn executed_commands(&self, block: u64) -> Result<Vec<Executed>, Error> {
let mut res = vec![];
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log =
log.log_decode::<SeraiKeyUpdated>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log = log.log_decode::<ExecutedEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
Ok(res)
}
#[cfg(feature = "tests")]
pub fn key_updated_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
}
#[cfg(feature = "tests")]
pub fn executed_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
}
}

View File

@@ -1,13 +0,0 @@
use alloy_sol_types::sol;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod schnorr_container {
use super::*;
sol!("src/tests/contracts/Schnorr.sol");
}
pub(crate) use schnorr_container::TestSchnorr as schnorr;

View File

@@ -1,51 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
contract TestERC20 {
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
function name() public pure returns (string memory) {
return "Test ERC20";
}
function symbol() public pure returns (string memory) {
return "TEST";
}
function decimals() public pure returns (uint8) {
return 18;
}
function totalSupply() public pure returns (uint256) {
return 1_000_000 * 10e18;
}
mapping(address => uint256) balances;
mapping(address => mapping(address => uint256)) allowances;
constructor() {
balances[msg.sender] = totalSupply();
}
function balanceOf(address owner) public view returns (uint256) {
return balances[owner];
}
function transfer(address to, uint256 value) public returns (bool) {
balances[msg.sender] -= value;
balances[to] += value;
return true;
}
function transferFrom(address from, address to, uint256 value) public returns (bool) {
allowances[from][msg.sender] -= value;
balances[from] -= value;
balances[to] += value;
return true;
}
function approve(address spender, uint256 value) public returns (bool) {
allowances[msg.sender][spender] = value;
return true;
}
function allowance(address owner, address spender) public view returns (uint256) {
return allowances[owner][spender];
}
}

View File

@@ -1,15 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "../../../contracts/Schnorr.sol";
contract TestSchnorr {
function verify(
bytes32 px,
bytes calldata message,
bytes32 c,
bytes32 s
) external pure returns (bool) {
return Schnorr.verify(px, message, c, s);
}
}

View File

@@ -1,105 +0,0 @@
use rand_core::OsRng;
use group::ff::{Field, PrimeField};
use k256::{
ecdsa::{
self, hazmat::SignPrimitive, signature::hazmat::PrehashVerifier, SigningKey, VerifyingKey,
},
Scalar, ProjectivePoint,
};
use frost::{
curve::{Ciphersuite, Secp256k1},
algorithm::{Hram, IetfSchnorr},
tests::{algorithm_machines, sign},
};
use crate::{crypto::*, tests::key_gen};
// The ecrecover opcode, yet with parity replacing v
pub(crate) fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
let sig = ecdsa::Signature::from_scalars(r, s).ok()?;
let message: [u8; 32] = message.to_repr().into();
alloy_core::primitives::Signature::from_signature_and_parity(
sig,
alloy_core::primitives::Parity::Parity(odd_y),
)
.ok()?
.recover_address_from_prehash(&alloy_core::primitives::B256::from(message))
.ok()
.map(Into::into)
}
#[test]
fn test_ecrecover() {
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
// Sign the signature
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed(
<Secp256k1 as Ciphersuite>::F::random(&mut OsRng),
&keccak256(MESSAGE).into(),
)
.unwrap();
// Sanity check the signature verifies
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_prehash(&keccak256(MESSAGE), &sig).unwrap(), ());
}
// Perform the ecrecover
assert_eq!(
ecrecover(
hash_to_scalar(MESSAGE),
u8::from(recovery_id.unwrap().is_y_odd()) == 1,
*sig.r(),
*sig.s()
)
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
// Run the sign test with the EthereumHram
#[test]
fn test_signing() {
let (keys, _) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
}
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
R: ProjectivePoint,
public_key: &PublicKey,
m: &[u8],
s: Scalar,
) -> (Scalar, Scalar) {
let c = EthereumHram::hram(&R, &public_key.A, m);
let sa = -(s * public_key.px);
let ca = -(c * public_key.px);
(sa, ca)
}
#[test]
fn test_ecrecover_hack() {
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let (sa, ca) = preprocess_signature_for_ecrecover(sig.R, &public_key, MESSAGE, sig.s);
let q = ecrecover(sa, false, public_key.px, ca).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,131 +0,0 @@
use std::{sync::Arc, collections::HashMap};
use rand_core::OsRng;
use k256::{Scalar, ProjectivePoint};
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
use alloy_core::{
primitives::{Address, U256, Bytes, TxKind},
hex::FromHex,
};
use alloy_consensus::{SignableTransaction, TxLegacy};
use alloy_rpc_types_eth::TransactionReceipt;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::crypto::{address, deterministically_sign, PublicKey};
#[cfg(test)]
mod crypto;
#[cfg(test)]
mod abi;
#[cfg(test)]
mod schnorr;
#[cfg(test)]
mod router;
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
let mut keys = frost_key_gen::<_, Secp256k1>(&mut OsRng);
let mut group_key = keys[&Participant::new(1).unwrap()].group_key();
let mut offset = Scalar::ZERO;
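// Offset the keys until the group key meets PublicKey::new's bounds
// (an even y-coordinate, with an x-coordinate within the scalar field)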
while PublicKey::new(group_key).is_none() {
offset += Scalar::ONE;
group_key += ProjectivePoint::GENERATOR;
}
for keys in keys.values_mut() {
*keys = keys.offset(offset);
}
let public_key = PublicKey::new(group_key).unwrap();
(keys, public_key)
}
// TODO: Use a proper error here
pub async fn send(
provider: &RootProvider<SimpleRequest>,
wallet: &k256::ecdsa::SigningKey,
mut tx: TxLegacy,
) -> Option<TransactionReceipt> {
let verifying_key = *wallet.verifying_key().as_affine();
let address = Address::from(address(&verifying_key.into()));
// https://github.com/alloy-rs/alloy/issues/539
// let chain_id = provider.get_chain_id().await.unwrap();
// tx.chain_id = Some(chain_id);
tx.chain_id = None;
tx.nonce = provider.get_transaction_count(address).await.unwrap();
// 100 gwei
tx.gas_price = 100_000_000_000u128;
let sig = wallet.sign_prehash_recoverable(tx.signature_hash().as_ref()).unwrap();
assert_eq!(address, tx.clone().into_signed(sig.into()).recover_signer().unwrap());
assert!(
provider.get_balance(address).await.unwrap() >
((U256::from(tx.gas_price) * U256::from(tx.gas_limit)) + tx.value)
);
let mut bytes = vec![];
tx.encode_with_signature_fields(&sig.into(), &mut bytes);
let pending_tx = provider.send_raw_transaction(&bytes).await.ok()?;
pending_tx.get_receipt().await.ok()
}
pub async fn fund_account(
provider: &RootProvider<SimpleRequest>,
wallet: &k256::ecdsa::SigningKey,
to_fund: Address,
value: U256,
) -> Option<()> {
let funding_tx =
TxLegacy { to: TxKind::Call(to_fund), gas_limit: 21_000, value, ..Default::default() };
assert!(send(provider, wallet, funding_tx).await.unwrap().status());
Some(())
}
// TODO: Use a proper error here
pub async fn deploy_contract(
client: Arc<RootProvider<SimpleRequest>>,
wallet: &k256::ecdsa::SigningKey,
name: &str,
) -> Option<Address> {
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
let hex_bin =
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
let bin = Bytes::from_hex(hex_bin).unwrap();
let deployment_tx = TxLegacy {
chain_id: None,
nonce: 0,
// 100 gwei
gas_price: 100_000_000_000u128,
gas_limit: 1_000_000,
to: TxKind::Create,
value: U256::ZERO,
input: bin,
};
let deployment_tx = deterministically_sign(&deployment_tx);
// Fund the deployer address
fund_account(
&client,
wallet,
deployment_tx.recover_signer().unwrap(),
U256::from(deployment_tx.tx().gas_limit) * U256::from(deployment_tx.tx().gas_price),
)
.await?;
let (deployment_tx, sig, _) = deployment_tx.into_parts();
let mut bytes = vec![];
deployment_tx.encode_with_signature_fields(&sig, &mut bytes);
let pending_tx = client.send_raw_transaction(&bytes).await.ok()?;
let receipt = pending_tx.get_receipt().await.ok()?;
assert!(receipt.status());
Some(receipt.contract_address.unwrap())
}

View File

@@ -1,184 +0,0 @@
use std::{convert::TryFrom, sync::Arc, collections::HashMap};
use rand_core::OsRng;
use group::Group;
use k256::ProjectivePoint;
use frost::{
curve::Secp256k1,
Participant, ThresholdKeys,
algorithm::IetfSchnorr,
tests::{algorithm_machines, sign},
};
use alloy_core::primitives::{Address, U256};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_types_eth::BlockTransactionsKind;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::{Anvil, AnvilInstance};
use crate::{
crypto::*,
deployer::Deployer,
router::{Router, abi as router},
tests::{key_gen, send, fund_account},
};
async fn setup_test() -> (
AnvilInstance,
Arc<RootProvider<SimpleRequest>>,
u64,
Router,
HashMap<Participant, ThresholdKeys<Secp256k1>>,
PublicKey,
) {
let anvil = Anvil::new().spawn();
let provider = RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
);
let chain_id = provider.get_chain_id().await.unwrap();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
// Make sure the Deployer constructor returns None, as it doesn't exist yet
assert!(Deployer::new(client.clone()).await.unwrap().is_none());
// Deploy the Deployer
let tx = Deployer::deployment_tx();
fund_account(
&client,
&wallet,
tx.recover_signer().unwrap(),
U256::from(tx.tx().gas_limit) * U256::from(tx.tx().gas_price),
)
.await
.unwrap();
let (tx, sig, _) = tx.into_parts();
let mut bytes = vec![];
tx.encode_with_signature_fields(&sig, &mut bytes);
let pending_tx = client.send_raw_transaction(&bytes).await.unwrap();
let receipt = pending_tx.get_receipt().await.unwrap();
assert!(receipt.status());
let deployer =
Deployer::new(client.clone()).await.expect("network error").expect("deployer wasn't deployed");
let (keys, public_key) = key_gen();
// Verify the Router constructor returns None, as it doesn't exist yet
assert!(deployer.find_router(client.clone(), &public_key).await.unwrap().is_none());
// Deploy the router
let receipt = send(&client, &anvil.keys()[0].clone().into(), deployer.deploy_router(&public_key))
.await
.unwrap();
assert!(receipt.status());
let contract = deployer.find_router(client.clone(), &public_key).await.unwrap().unwrap();
(anvil, client, chain_id, contract, keys, public_key)
}
async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
client
.get_block(client.get_block_number().await.unwrap().into(), BlockTransactionsKind::Hashes)
.await
.unwrap()
.unwrap()
.header
.hash
.unwrap()
.0
}
#[tokio::test]
async fn test_deploy_contract() {
let (_anvil, client, _, router, _, public_key) = setup_test().await;
let block_hash = latest_block_hash(&client).await;
assert_eq!(router.serai_key(block_hash).await.unwrap(), public_key);
assert_eq!(router.nonce(block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
// TODO: Check it emitted SeraiKeyUpdated(public_key) at its genesis
}
pub fn hash_and_sign(
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
public_key: &PublicKey,
message: &[u8],
) -> Signature {
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, keys), message);
Signature::new(public_key, message, sig).unwrap()
}
#[tokio::test]
async fn test_router_update_serai_key() {
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
let next_key = loop {
let point = ProjectivePoint::random(&mut OsRng);
let Some(next_key) = PublicKey::new(point) else { continue };
break next_key;
};
let message = Router::update_serai_key_message(
U256::try_from(chain_id).unwrap(),
U256::try_from(1u64).unwrap(),
&next_key,
);
let sig = hash_and_sign(&keys, &public_key, &message);
let first_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
let receipt =
send(&client, &anvil.keys()[0].clone().into(), contract.update_serai_key(&next_key, &sig))
.await
.unwrap();
assert!(receipt.status());
let second_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.serai_key(second_block_hash).await.unwrap(), next_key);
// Check this still serves the historical state
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
// TODO: Check logs
println!("gas used: {:?}", receipt.gas_used);
// println!("logs: {:?}", receipt.logs);
}
#[tokio::test]
async fn test_router_execute() {
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
let to = Address::from([0; 20]);
let value = U256::ZERO;
let tx = router::OutInstruction { to, value, calls: vec![] };
let txs = vec![tx];
let first_block_hash = latest_block_hash(&client).await;
let nonce = contract.nonce(first_block_hash).await.unwrap();
assert_eq!(nonce, U256::try_from(1u64).unwrap());
let message = Router::execute_message(U256::try_from(chain_id).unwrap(), nonce, txs.clone());
let sig = hash_and_sign(&keys, &public_key, &message);
let receipt =
send(&client, &anvil.keys()[0].clone().into(), contract.execute(&txs, &sig)).await.unwrap();
assert!(receipt.status());
let second_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.nonce(second_block_hash).await.unwrap(), U256::try_from(2u64).unwrap());
// Check this still serves the historical state
assert_eq!(contract.nonce(first_block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
// TODO: Check logs
println!("gas used: {:?}", receipt.gas_used);
// println!("logs: {:?}", receipt.logs);
}

View File

@@ -1,93 +0,0 @@
use std::sync::Arc;
use rand_core::OsRng;
use group::ff::PrimeField;
use k256::Scalar;
use frost::{
curve::Secp256k1,
algorithm::IetfSchnorr,
tests::{algorithm_machines, sign},
};
use alloy_core::primitives::Address;
use alloy_sol_types::SolCall;
use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::{Anvil, AnvilInstance};
use crate::{
Error,
crypto::*,
tests::{key_gen, deploy_contract, abi::schnorr as abi},
};
async fn setup_test() -> (AnvilInstance, Arc<RootProvider<SimpleRequest>>, Address) {
let anvil = Anvil::new().spawn();
let provider = RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
);
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
let address = deploy_contract(client.clone(), &wallet, "TestSchnorr").await.unwrap();
(anvil, client, address)
}
#[tokio::test]
async fn test_deploy_contract() {
setup_test().await;
}
pub async fn call_verify(
provider: &RootProvider<SimpleRequest>,
contract: Address,
public_key: &PublicKey,
message: &[u8],
signature: &Signature,
) -> Result<(), Error> {
let px: [u8; 32] = public_key.px.to_repr().into();
let c_bytes: [u8; 32] = signature.c.to_repr().into();
let s_bytes: [u8; 32] = signature.s.to_repr().into();
let call = TransactionRequest::default().to(contract).input(TransactionInput::new(
abi::verifyCall::new((px.into(), message.to_vec().into(), c_bytes.into(), s_bytes.into()))
.abi_encode()
.into(),
));
let bytes = provider.call(&call).await.map_err(|_| Error::ConnectionError)?;
let res =
abi::verifyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
if res._0 {
Ok(())
} else {
Err(Error::InvalidSignature)
}
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (_anvil, client, contract) = setup_test().await;
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let sig = Signature::new(&public_key, MESSAGE, sig).unwrap();
call_verify(&client, contract, &public_key, MESSAGE, &sig).await.unwrap();
// Test an invalid signature fails
let mut sig = sig;
sig.s += Scalar::ONE;
assert!(call_verify(&client, contract, &public_key, MESSAGE, &sig).await.is_err());
}

View File

@@ -0,0 +1,128 @@
use std::{convert::TryFrom, sync::Arc, time::Duration, fs::File};
use rand_core::OsRng;
use ::k256::{
elliptic_curve::{bigint::ArrayEncoding, PrimeField},
U256,
};
use ethers_core::{
types::Signature,
abi::Abi,
utils::{keccak256, Anvil, AnvilInstance},
};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};
use frost::{
curve::Secp256k1,
Participant,
algorithm::IetfSchnorr,
tests::{key_gen, algorithm_machines, sign},
};
use ethereum_serai::{
crypto,
contract::{Schnorr, call_verify},
};
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
pub async fn deploy_schnorr_verifier_contract(
chain_id: u32,
client: Arc<Provider<Http>>,
wallet: &k256::ecdsa::SigningKey,
) -> eyre::Result<Schnorr<Provider<Http>>> {
let abi: Abi = serde_json::from_reader(File::open("./artifacts/Schnorr.abi").unwrap()).unwrap();
let hex_bin_buf = std::fs::read_to_string("./artifacts/Schnorr.bin").unwrap();
let hex_bin =
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
let bin = hex::decode(hex_bin).unwrap();
let factory = ContractFactory::new(abi, bin.into(), client.clone());
let mut deployment_tx = factory.deploy(())?.tx;
deployment_tx.set_chain_id(chain_id);
deployment_tx.set_gas(500_000);
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
let sig_hash = deployment_tx.sighash();
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
// EIP-155: v = recovery_id + (2 * chain_id) + 35
let mut v = u64::from(rid.to_byte());
assert!((v == 0) || (v == 1));
v += u64::from((chain_id * 2) + 35);
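// e.g. with Anvil's default chain ID of 31337, v becomes 62709 or 62710
// ({0, 1} + (31337 * 2) + 35)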
let r = sig.r().to_repr();
let r_ref: &[u8] = r.as_ref();
let s = sig.s().to_repr();
let s_ref: &[u8] = s.as_ref();
let deployment_tx = deployment_tx.rlp_signed(&Signature { r: r_ref.into(), s: s_ref.into(), v });
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
let mut receipt;
while {
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
receipt.is_none()
} {
tokio::time::sleep(Duration::from_secs(6)).await;
}
let receipt = receipt.unwrap();
assert!(receipt.status == Some(1.into()));
let contract = Schnorr::new(receipt.contract_address.unwrap(), client.clone());
Ok(contract)
}
async fn deploy_test_contract() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
let anvil = Anvil::new().spawn();
let provider =
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
(chain_id, anvil, deploy_schnorr_verifier_contract(chain_id, client, &wallet).await.unwrap())
}
#[tokio::test]
async fn test_deploy_contract() {
deploy_test_contract().await;
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (chain_id, _anvil, contract) = deploy_test_contract().await;
let chain_id = U256::from(chain_id);
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let mut processed_sig =
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
call_verify(&contract, &processed_sig).await.unwrap();
// Test an invalid signature fails
processed_sig.message[0] = 0;
assert!(call_verify(&contract, &processed_sig).await.is_err());
}

View File

@@ -0,0 +1,87 @@
use k256::{
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256,
};
use frost::{curve::Secp256k1, Participant};
use ethereum_serai::crypto::*;
#[test]
fn test_ecrecover() {
use rand_core::OsRng;
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
.unwrap();
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
}
assert_eq!(
ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
#[test]
fn test_signing() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let _group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
}
#[test]
fn test_ecrecover_hack() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
let group_key_encoded = group_key.to_encoded_point(true);
let group_key_compressed = group_key_encoded.as_ref();
let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let (sr, er) =
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
assert_eq!(q, address(&sig.R));
}
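
For reference, the algebra behind this trick (a sketch; the exact sign conventions are fixed inside `preprocess_signature_for_ecrecover`). `ecrecover(z, v, r, s)` returns the address of the ECDSA-recovered point, and choosing its parameters from a Schnorr signature turns it into a Schnorr verifier:

\mathrm{ecrecover}(z, v, r, s) = \mathrm{addr}\big(r^{-1}(s R_{pt} - z G)\big), \quad R_{pt} = \text{the point with x-coordinate } r \text{ and y-parity } v

Substituting r = x_P (so R_{pt} = P), s = -e x_P, and z = -s_\sigma x_P gives

\mathrm{addr}\big(x_P^{-1}(-e x_P \cdot P + s_\sigma x_P \cdot G)\big) = \mathrm{addr}(s_\sigma G - e P)

which equals \mathrm{addr}(R_\sigma) exactly when the Schnorr equation s_\sigma G = R_\sigma + e P holds; that equality is what `assert_eq!(q, address(&sig.R))` checks above.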

View File

@@ -0,0 +1,2 @@
mod contract;
mod crypto;

View File

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
-rust-version = "1.79"
+rust-version = "1.74"

[package.metadata.docs.rs]
all-features = true
@@ -18,35 +18,96 @@ workspace = true
[dependencies]
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
+async-trait = { version = "0.1", default-features = false }
+thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
+subtle = { version = "^2.4", default-features = false }
-curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
+rand_core = { version = "0.6", default-features = false }
+# Used to send transactions
+rand = { version = "0.8", default-features = false }
+rand_chacha = { version = "0.3", default-features = false }
+# Used to select decoys
+rand_distr = { version = "0.4", default-features = false }
+sha3 = { version = "0.10", default-features = false }
+pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
+curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
+# Used for the hash to curve, along with the more complicated proofs
+group = { version = "0.13", default-features = false }
+dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
+multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }
+# Needed for multisig
+transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
+dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
+frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }
-monero-io = { path = "io", version = "0.1", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }
-monero-primitives = { path = "primitives", version = "0.1", default-features = false }
-monero-mlsag = { path = "ringct/mlsag", version = "0.1", default-features = false }
+async-lock = { version = "3", default-features = false, optional = true }
-monero-clsag = { path = "ringct/clsag", version = "0.1", default-features = false }
-monero-borromean = { path = "ringct/borromean", version = "0.1", default-features = false }
-monero-bulletproofs = { path = "ringct/bulletproofs", version = "0.1", default-features = false }
hex-literal = "0.4"
+hex = { version = "0.4", default-features = false, features = ["alloc"] }
+serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
+serde_json = { version = "1", default-features = false, features = ["alloc"] }
+base58-monero = { version = "2", default-features = false, features = ["check"] }
+# Used for the provided HTTP RPC
+digest_auth = { version = "0.3", default-features = false, optional = true }
+simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
+tokio = { version = "1", default-features = false, optional = true }

+[build-dependencies]
+dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
+monero-generators = { path = "generators", version = "0.4", default-features = false }

+[dev-dependencies]
+tokio = { version = "1", features = ["sync", "macros"] }
+frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }

[features]
std = [
  "std-shims/std",
-  "zeroize/std",
+  "thiserror",
+  "zeroize/std",
+  "subtle/std",
+  "rand_core/std",
+  "rand/std",
+  "rand_chacha/std",
+  "rand_distr/std",
+  "sha3/std",
+  "pbkdf2/std",
+  "multiexp/std",
+  "transcript/std",
+  "dleq/std",
-  "monero-io/std",
  "monero-generators/std",
-  "monero-primitives/std",
-  "monero-mlsag/std",
+  "async-lock?/std",
-  "monero-clsag/std",
-  "monero-borromean/std",
+  "hex/std",
-  "monero-bulletproofs/std",
+  "serde/std",
+  "serde_json/std",
+  "base58-monero/std",
]
-compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"]
+cache-distribution = ["async-lock"]
-multisig = ["monero-clsag/multisig", "std"]
+http-rpc = ["digest_auth", "simple-request", "tokio"]
-default = ["std", "compile-time-generators"]
+multisig = ["transcript", "frost", "dleq", "std"]
+binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
+experimental = []
+default = ["std", "http-rpc"]

View File

@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2022-2024 Luke Parker
+Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,28 +1,49 @@
# monero-serai
-A modern Monero transaction library. It provides a modern, Rust-friendly view of
-the Monero protocol.
+A modern Monero transaction library intended for usage in wallets. It prides
+itself on accuracy, correctness, and removing common pitfalls developers may
+face.
-This library is usable under no-std when the `std` feature (on by default) is
-disabled.
+monero-serai also offers the following features:
-### Wallet Functionality
+- Featured Addresses
+- A FROST-based multisig orders of magnitude more performant than Monero's
-monero-serai originally included wallet functionality. That has been moved to
-monero-wallet.
-### Purpose and Support
+### Purpose and support
monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
-yet does not include any functionality specific to Serai.
+yet will not deprive functionality from other users.
-### Cargo Features
-- `std` (on by default): Enables `std` (and with it, more efficient internal
-  implementations).
-- `compile-time-generators` (on by default): Derives the generators at
-  compile-time so they don't need to be derived at runtime. This is recommended
-  if program size doesn't need to be kept minimal.
-- `multisig`: Enables the `multisig` feature for all dependencies.
+Various legacy transaction formats are not currently implemented, yet we are
+willing to add support for them. There aren't active development efforts around
+them however.
+### Caveats
+This library DOES attempt to do the following:
+- Create on-chain transactions identical to how wallet2 would (unless told not
+  to)
+- Not be detectable as monero-serai when scanning outputs
+- Not reveal spent outputs to the connected RPC node
+This library DOES NOT attempt to do the following:
+- Have identical RPC behavior when creating transactions
+- Be a wallet
+This means that monero-serai shouldn't be fingerprintable on-chain. It also
+shouldn't be fingerprintable if a targeted attack occurs to detect if the
+receiving wallet is monero-serai or wallet2. It also should be generally safe
+for usage with remote nodes.
+It won't hide from remote nodes it's monero-serai however, potentially
+allowing a remote node to profile you. The implications of this are left to the
+user to consider.
+It also won't act as a wallet, just as a transaction library. wallet2 has
+several *non-transaction-level* policies, such as always attempting to use two
+inputs to create transactions. These are considered out of scope to
+monero-serai.

coins/monero/build.rs (new file, 67 lines)
View File

@@ -0,0 +1,67 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
use dalek_ff_group::EdwardsPoint;
use monero_generators::bulletproofs_generators;
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
for generator in points {
generators_string.extend(
format!(
"
dalek_ff_group::EdwardsPoint(
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
),
",
generator.compress().to_bytes()
)
.chars(),
);
}
}
fn generators(prefix: &'static str, path: &str) {
let generators = bulletproofs_generators(prefix.as_bytes());
#[allow(non_snake_case)]
let mut G_str = String::new();
serialize(&mut G_str, &generators.G);
#[allow(non_snake_case)]
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.write_all(
format!(
"
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| Generators {{
G: vec![
{G_str}
],
H: vec![
{H_str}
],
}})
}}
",
)
.as_bytes(),
)
.unwrap();
}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
generators("bulletproof", "generators.rs");
generators("bulletproof_plus", "generators_plus.rs");
}
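
The generated files are then presumably pulled into the library at compile time via the standard build-script pattern (a sketch, not shown in this diff):

include!(concat!(env!("OUT_DIR"), "/generators.rs"));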

View File

@@ -1,7 +1,7 @@
[package]
name = "monero-generators"
version = "0.4.0"
-description = "Monero's hash to point function and generators"
+description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
@@ -20,27 +20,15 @@ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
subtle = { version = "^2.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
-curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
+curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
-monero-io = { path = "../io", version = "0.1", default-features = false }

[dev-dependencies]
hex = "0.4"

[features]
-std = [
-  "std-shims/std",
-  "subtle/std",
-  "sha3/std",
-  "group/alloc",
-  "dalek-ff-group/std",
-  "monero-io/std"
-]
+std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
default = ["std"]

View File

@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2022-2024 Luke Parker
+Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,13 +1,7 @@
# Monero Generators
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
-An implementation of Monero's `hash_to_ec` is included, as needed to generate
-the generators.
+An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
+`hash_to_point` here, is included, as needed to generate generators.
-This library is usable under no-std when the `std` feature (on by default) is
-disabled.
+This library is usable under no-std when the `std` feature is disabled.
-### Cargo Features
-- `std` (on by default): Enables `std` (and with it, more efficient internal
-  implementations).

View File

@@ -1,20 +1,27 @@
use subtle::ConditionallySelectable;
-use curve25519_dalek::edwards::EdwardsPoint;
+use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use group::ff::{Field, PrimeField};
use dalek_ff_group::FieldElement;
-use monero_io::decompress_point;
-use crate::keccak256;
+use crate::hash;

+/// Decompress canonically encoded ed25519 point
+/// It does not check if the point is in the prime order subgroup
+pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
+  CompressedEdwardsY(bytes)
+    .decompress()
+    // Ban points which are either unreduced or -0
+    .filter(|point| point.compress().to_bytes() == bytes)
+}

-/// Monero's `hash_to_ec` function.
+/// Monero's hash to point function, as named `hash_to_ec`.
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
  #[allow(non_snake_case)]
  let A = FieldElement::from(486662u64);
-  let v = FieldElement::from_square(keccak256(&bytes)).double();
+  let v = FieldElement::from_square(hash(&bytes)).double();
  let w = v + FieldElement::ONE;
  let x = w.square() + (-A.square() * v);

View File

@@ -1,46 +1,45 @@
-#![cfg_attr(docsrs, feature(doc_auto_cfg))]
-#![doc = include_str!("../README.md")]
-#![deny(missing_docs)]
+//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
+//!
+//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
+//! `hash_to_point` here, is included, as needed to generate generators.
#![cfg_attr(not(feature = "std"), no_std)]

use std_shims::{sync::OnceLock, vec::Vec};
use sha3::{Digest, Keccak256};
-use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, edwards::EdwardsPoint};
-use monero_io::{write_varint, decompress_point};
+use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
+use group::{Group, GroupEncoding};
+use dalek_ff_group::EdwardsPoint;

+mod varint;
+use varint::write_varint;

mod hash_to_point;
-pub use hash_to_point::hash_to_point;
+pub use hash_to_point::{hash_to_point, decompress_point};

#[cfg(test)]
mod tests;

-fn keccak256(data: &[u8]) -> [u8; 32] {
+fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

-static H_CELL: OnceLock<EdwardsPoint> = OnceLock::new();
+static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
-/// Monero's `H` generator.
-///
-/// Contrary to convention (`G` for values, `H` for randomness), `H` is used by Monero for amounts
-/// within Pedersen commitments.
+/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
-pub fn H() -> EdwardsPoint {
+pub fn H() -> DalekPoint {
  *H_CELL.get_or_init(|| {
-    decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
-      .unwrap()
-      .mul_by_cofactor()
+    decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
  })
}

-static H_POW_2_CELL: OnceLock<[EdwardsPoint; 64]> = OnceLock::new();
+static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
-/// Monero's `H` generator, multiplied by 2**i for i in 1 ..= 64.
-///
-/// This table is useful when working with amounts, which are u64s.
+/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
#[allow(non_snake_case)]
-pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
+pub fn H_pow_2() -> &'static [DalekPoint; 64] {
  H_POW_2_CELL.get_or_init(|| {
    let mut res = [H(); 64];
    for i in 1 .. 64 {
@@ -50,45 +49,31 @@ pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
  })
}

-/// The maximum amount of commitments provable for within a single range proof.
-pub const MAX_COMMITMENTS: usize = 16;
-/// The amount of bits a value within a commitment may use.
-pub const COMMITMENT_BITS: usize = 64;
-/// The logarithm (over 2) of the amount of bits a value within a commitment may use.
-pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N
+const MAX_M: usize = 16;
+const N: usize = 64;
+const MAX_MN: usize = MAX_M * N;

/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
-  /// The G (bold) vector of generators.
  pub G: Vec<EdwardsPoint>,
-  /// The H (bold) vector of generators.
  pub H: Vec<EdwardsPoint>,
}

/// Generate generators as needed for Bulletproofs(+), as Monero does.
-///
-/// Consumers should not call this function ad-hoc, yet call it within a build script or use a
-/// once-initialized static.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
-  // The maximum amount of bits used within a single range proof.
-  const MAX_MN: usize = MAX_COMMITMENTS * COMMITMENT_BITS;
-  let mut preimage = H().compress().to_bytes().to_vec();
-  preimage.extend(dst);
  let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
  for i in 0 .. MAX_MN {
-    // We generate a pair of generators per iteration
    let i = 2 * i;
-    let mut even = preimage.clone();
-    write_varint(&i, &mut even).unwrap();
-    res.H.push(hash_to_point(keccak256(&even)));
-    let mut odd = preimage.clone();
-    write_varint(&(i + 1), &mut odd).unwrap();
-    res.G.push(hash_to_point(keccak256(&odd)));
+    let mut even = H().compress().to_bytes().to_vec();
+    even.extend(dst);
+    let mut odd = even.clone();
+    write_varint(&i.try_into().unwrap(), &mut even).unwrap();
+    write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
+    res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
+    res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
  }
  res
}

View File

@@ -0,0 +1,38 @@
use crate::{decompress_point, hash_to_point};
#[test]
fn crypto_tests() {
// tests.txt file copied from monero repo
// https://github.com/monero-project/monero/
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
let reader = include_str!("./tests.txt");
for line in reader.lines() {
let mut words = line.split_whitespace();
let command = words.next().unwrap();
match command {
"check_key" => {
let key = words.next().unwrap();
let expected = match words.next().unwrap() {
"true" => true,
"false" => false,
_ => unreachable!("invalid result"),
};
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
assert_eq!(actual.is_some(), expected);
}
"hash_to_ec" => {
let bytes = words.next().unwrap();
let expected = words.next().unwrap();
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
}
_ => unreachable!("unknown command"),
}
}
}

View File

@@ -1,36 +1 @@
-use crate::{decompress_point, hash_to_point};
-#[test]
-fn test_vectors() {
-  // tests.txt file copied from monero repo
-  // https://github.com/monero-project/monero/
-  // blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
-  let reader = include_str!("./tests.txt");
-  for line in reader.lines() {
-    let mut words = line.split_whitespace();
-    let command = words.next().unwrap();
-    match command {
-      "check_key" => {
-        let key = words.next().unwrap();
-        let expected = match words.next().unwrap() {
-          "true" => true,
-          "false" => false,
-          _ => unreachable!("invalid result"),
-        };
-        let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
-        assert_eq!(actual.is_some(), expected);
-      }
-      "hash_to_ec" => {
-        let bytes = words.next().unwrap();
-        let expected = words.next().unwrap();
-        let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
-        assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
-      }
-      _ => unreachable!("unknown command"),
-    }
-  }
-}
+mod hash_to_point;

View File

@@ -0,0 +1,16 @@
use std_shims::io::{self, Write};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}
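
A worked example of this encoding (a hypothetical test, not part of the file; assumes std for `vec!`): each byte carries 7 bits, least-significant group first, with the high bit flagging continuation.

#[test]
fn varint_encoding_example() {
  // 300 = 0b1_0010_1100: low 7 bits 0101100 (0x2C) with the continuation bit set -> 0xAC,
  // remaining bits 10 -> 0x02 with no continuation
  let mut buf = vec![];
  write_varint(&300, &mut buf).unwrap();
  assert_eq!(buf, [0xAC, 0x02]);
}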

View File

@@ -1,24 +0,0 @@
[package]
name = "monero-io"
version = "0.1.0"
description = "Serialization functions, as within the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/io"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc"] }
[features]
std = ["std-shims/std"]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,11 +0,0 @@
# Monero IO
Serialization functions, as within the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,219 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::Debug;
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
mod sealed {
/// A trait for a number readable/writable as a VarInt.
///
/// This is sealed to prevent unintended implementations.
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
const BITS: usize;
}
impl VarInt for u8 {
const BITS: usize = 8;
}
impl VarInt for u32 {
const BITS: usize = 32;
}
impl VarInt for u64 {
const BITS: usize = 64;
}
impl VarInt for usize {
const BITS: usize = core::mem::size_of::<usize>() * 8;
}
}
/// The amount of bytes this number will take when serialized as a VarInt.
///
/// This function will panic if the VarInt exceeds u64::MAX.
pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}
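// For example: varint_len(0u64) == 1, varint_len(127u64) == 1, varint_len(128u64) == 2, and
// varint_len(300u64) == 2 (300 occupies 9 bits, and ceil(9 / 7) == 2 bytes)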
/// Write a byte.
///
/// This is used as a building block within generic functions.
pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}
/// Write a number, VarInt-encoded.
///
/// This will panic if the VarInt exceeds u64::MAX.
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
write_byte(&b, w)?;
varint != 0
} {}
Ok(())
}
/// Write a scalar.
pub fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
w.write_all(&scalar.to_bytes())
}
/// Write a point.
pub fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
w.write_all(&point.compress().to_bytes())
}
/// Write a list of elements, without length-prefixing.
pub fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
/// Write a list of elements, with length-prefixing.
pub fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
write_varint(&values.len(), w)?;
write_raw_vec(f, values, w)
}
/// Read a constant amount of bytes.
pub fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
let mut res = [0; N];
r.read_exact(&mut res)?;
Ok(res)
}
/// Read a single byte.
pub fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
Ok(read_bytes::<_, 1>(r)?[0])
}
/// Read a u16, little-endian encoded.
pub fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
read_bytes(r).map(u16::from_le_bytes)
}
/// Read a u32, little-endian encoded.
pub fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
read_bytes(r).map(u32::from_le_bytes)
}
/// Read a u64, little-endian encoded.
pub fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}
/// Read a canonically-encoded VarInt.
pub fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
if (bits != 0) && (b == 0) {
Err(io::Error::other("non-canonical varint"))?;
}
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
Err(io::Error::other("varint overflow"))?;
}
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
}
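// For example, [0x80, 0x00] is rejected as a non-canonical varint (a redundant trailing zero
// byte), while [0x80, 0x02], the canonical encoding of 256, overflows when read into a u8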
/// Read a canonically-encoded scalar.
///
/// Some scalars within the Monero protocol are not enforced to be canonically encoded. Such
/// scalars should be represented as `[u8; 32]` and later converted to scalars as relevant.
pub fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
.ok_or_else(|| io::Error::other("unreduced scalar"))
}
/// Decompress a canonically-encoded Ed25519 point.
///
/// Ed25519 is of order `8 * l`. This function ensures each of those `8 * l` points have a singular
/// encoding by checking points aren't encoded with an unreduced field element, and aren't negative
/// when the negative is equivalent (0 == -0).
///
/// Since this decodes an Ed25519 point, it does not check the point is in the prime-order
/// subgroup. Torsioned points do have a canonical encoding, and only aren't canonical when
/// considered in relation to the prime-order subgroup.
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
}
/// Read a canonically-encoded Ed25519 point.
///
/// This internally calls `decompress_point` and has the same definition of canonicity. This
/// function does not check the resulting point is within the prime-order subgroup.
pub fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
let bytes = read_bytes(r)?;
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
}
/// Read a canonically-encoded Ed25519 point, within the prime-order subgroup.
pub fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
.filter(EdwardsPoint::is_torsion_free)
.ok_or_else(|| io::Error::other("invalid point"))
}
/// Read a variable-length list of elements, without length-prefixing.
pub fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = vec![];
for _ in 0 .. len {
res.push(f(r)?);
}
Ok(res)
}
/// Read a constant-length list of elements.
pub fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
f: F,
r: &mut R,
) -> io::Result<[T; N]> {
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
}
/// Read a length-prefixed variable-length list of elements.
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?, r)
}

View File

@@ -1,44 +0,0 @@
[package]
name = "monero-primitives"
version = "0.1.0"
description = "Primitives for the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/primitives"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../io", version = "0.1", default-features = false }
monero-generators = { path = "../generators", version = "0.4", default-features = false }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["alloc"] }
[features]
std = [
"std-shims/std",
"zeroize/std",
"sha3/std",
"monero-generators/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,11 +0,0 @@
# Monero Primitives
Primitive structures and functions for the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,248 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use std_shims::{io, vec::Vec};
#[cfg(feature = "std")]
use std_shims::sync::OnceLock;
use zeroize::{Zeroize, ZeroizeOnDrop};
use sha3::{Digest, Keccak256};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
traits::VartimePrecomputedMultiscalarMul,
scalar::Scalar,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};
use monero_io::*;
use monero_generators::H;
mod unreduced_scalar;
pub use unreduced_scalar::UnreducedScalar;
#[cfg(test)]
mod tests;
// On std, we cache some variables in statics.
#[cfg(feature = "std")]
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
/// The inverse of 8 over l.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
pub fn INV_EIGHT() -> Scalar {
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}
// In no-std environments, we prefer the reduced memory use and calculate it ad-hoc.
/// The inverse of 8 over l.
#[cfg(not(feature = "std"))]
#[allow(non_snake_case)]
pub fn INV_EIGHT() -> Scalar {
Scalar::from(8u8).invert()
}
#[cfg(feature = "std")]
static G_PRECOMP_CELL: OnceLock<VartimeEdwardsPrecomputation> = OnceLock::new();
/// A cached (if std) pre-computation of the Ed25519 generator, G.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
pub fn G_PRECOMP() -> &'static VartimeEdwardsPrecomputation {
G_PRECOMP_CELL.get_or_init(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]))
}
/// A cached (if std) pre-computation of the Ed25519 generator, G.
#[cfg(not(feature = "std"))]
#[allow(non_snake_case)]
pub fn G_PRECOMP() -> VartimeEdwardsPrecomputation {
VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT])
}
/// The Keccak-256 hash function.
pub fn keccak256(data: impl AsRef<[u8]>) -> [u8; 32] {
Keccak256::digest(data.as_ref()).into()
}
/// Hash the provided data to a scalar via keccak256(data) % l.
///
/// This function panics if it finds the Keccak-256 preimage for [0; 32].
pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar {
let scalar = Scalar::from_bytes_mod_order(keccak256(data.as_ref()));
// Monero will explicitly error in this case
// This library acknowledges the practical impossibility of it occurring, and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref());
scalar
}
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
/// The mask for this commitment.
pub mask: Scalar,
/// The amount committed to by this commitment.
pub amount: u64,
}
impl core::fmt::Debug for Commitment {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
}
}
impl Commitment {
/// A commitment to zero, defined with a mask of 1 (so as not to be the identity).
pub fn zero() -> Commitment {
Commitment { mask: Scalar::ONE, amount: 0 }
}
/// Create a new Commitment.
pub fn new(mask: Scalar, amount: u64) -> Commitment {
Commitment { mask, amount }
}
/// Calculate the Pedersen commitment, as a point, from this transparent structure.
pub fn calculate(&self) -> EdwardsPoint {
EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H(), &self.mask)
}
/// Write the Commitment.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.mask.to_bytes())?;
w.write_all(&self.amount.to_le_bytes())
}
/// Serialize the Commitment to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 8);
self.write(&mut res).unwrap();
res
}
/// Read a Commitment.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Commitment> {
Ok(Commitment::new(read_scalar(r)?, read_u64(r)?))
}
}
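// A hypothetical sanity check (not part of this file) that `calculate` evaluates
// C = mask * G + amount * H:
#[test]
fn commitment_matches_manual_evaluation() {
  let commitment = Commitment::new(Scalar::from(3u8), 5);
  let manual = (ED25519_BASEPOINT_POINT * Scalar::from(3u8)) + (H() * Scalar::from(5u8));
  assert_eq!(commitment.calculate(), manual);
}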
/// Decoy data, as used for producing Monero's ring signatures.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
offsets: Vec<u64>,
signer_index: u8,
ring: Vec<[EdwardsPoint; 2]>,
}
impl core::fmt::Debug for Decoys {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("Decoys")
.field("offsets", &self.offsets)
.field("ring", &self.ring)
.finish_non_exhaustive()
}
}
#[allow(clippy::len_without_is_empty)]
impl Decoys {
/// Create a new instance of decoy data.
///
/// `offsets` are the positions of each ring member within the Monero blockchain, offset from the
/// prior member's position (with the initial ring member offset from 0).
pub fn new(offsets: Vec<u64>, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option<Self> {
if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) {
None?;
}
Some(Decoys { offsets, signer_index, ring })
}
/// The length of the ring.
pub fn len(&self) -> usize {
self.offsets.len()
}
/// The positions of the ring members within the Monero blockchain, as their offsets.
///
/// The list is formatted as the position of the first ring member, then the offset from each
/// ring member to its prior.
pub fn offsets(&self) -> &[u64] {
&self.offsets
}
/// The positions of the ring members within the Monero blockchain.
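///
/// For example, offsets of `[50, 25, 25]` represent the positions `[50, 75, 100]`.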
pub fn positions(&self) -> Vec<u64> {
let mut res = Vec::with_capacity(self.len());
res.push(self.offsets[0]);
for m in 1 .. self.len() {
res.push(res[m - 1] + self.offsets[m]);
}
res
}
/// The index of the signer within the ring.
pub fn signer_index(&self) -> u8 {
self.signer_index
}
/// The ring.
pub fn ring(&self) -> &[[EdwardsPoint; 2]] {
&self.ring
}
/// The [key, commitment] pair of the signer.
pub fn signer_ring_members(&self) -> [EdwardsPoint; 2] {
self.ring[usize::from(self.signer_index)]
}
/// Write the Decoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> {
write_vec(write_varint, &self.offsets, w)?;
w.write_all(&[self.signer_index])?;
write_vec(
|pair, w| {
write_point(&pair[0], w)?;
write_point(&pair[1], w)
},
&self.ring,
w,
)
}
/// Serialize the Decoys to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res =
Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
self.write(&mut res).unwrap();
res
}
/// Read a set of Decoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read(r: &mut impl io::Read) -> io::Result<Decoys> {
Decoys::new(
read_vec(read_varint, r)?,
read_byte(r)?,
read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?,
)
.ok_or_else(|| io::Error::other("invalid Decoys"))
}
}
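// A hypothetical roundtrip sketch (not part of this file): the serialization is
// self-describing, so reading back what was written yields the original Decoys
#[test]
fn decoys_roundtrip() {
  let g = ED25519_BASEPOINT_POINT;
  let decoys = Decoys::new(vec![50, 25, 25], 1, vec![[g, g], [g, g], [g, g]]).unwrap();
  assert_eq!(Decoys::read(&mut decoys.serialize().as_slice()).unwrap(), decoys);
}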

View File

@@ -1,41 +0,0 @@
[package]
name = "monero-borromean"
version = "0.1.0"
description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/borromean"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[features]
std = [
"std-shims/std",
"zeroize/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,12 +0,0 @@
# Monero Borromean
Borromean ring signatures arranged into a range proof, as done by the Monero
protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,55 +0,0 @@
[package]
name = "monero-bulletproofs"
version = "0.1.0"
description = "Bulletproofs(+) range proofs, as defined by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/bulletproofs"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[build-dependencies]
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
[dev-dependencies]
hex-literal = "0.4"
[features]
std = [
"std-shims/std",
"thiserror",
"rand_core/std",
"zeroize/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
compile-time-generators = ["curve25519-dalek/precomputed-tables"]
default = ["std", "compile-time-generators"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,14 +0,0 @@
# Monero Bulletproofs(+)
Bulletproofs(+) range proofs, as defined by the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `compile-time-generators` (on by default): Derives the generators at
compile-time so they don't need to be derived at runtime. This is recommended
if program size doesn't need to be kept minimal.
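### Example

A minimal sketch of proving and verifying an aggregate range proof, assuming the `Bulletproof`
API from this crate, `Commitment` from `monero-primitives`, and `rand_core`'s `OsRng`:

```rust
use rand_core::{RngCore, OsRng};
use curve25519_dalek::scalar::Scalar;
use monero_primitives::Commitment;
use monero_bulletproofs::Bulletproof;

fn main() {
  // Commit to an amount with a random mask
  let commitments = vec![Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64())];
  // Prove the committed amount lies within [0 .. 2^64)
  let proof = Bulletproof::prove_plus(&mut OsRng, commitments.clone()).unwrap();
  // Verification is performed against the commitments' points
  let points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
  assert!(proof.verify(&mut OsRng, &points));
}
```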

View File

@@ -1,88 +0,0 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
#[cfg(feature = "compile-time-generators")]
fn generators(prefix: &'static str, path: &str) {
use curve25519_dalek::EdwardsPoint;
use monero_generators::bulletproofs_generators;
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
for generator in points {
generators_string.extend(
format!(
"
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
",
generator.compress().to_bytes()
)
.chars(),
);
}
}
let generators = bulletproofs_generators(prefix.as_bytes());
#[allow(non_snake_case)]
let mut G_str = String::new();
serialize(&mut G_str, &generators.G);
#[allow(non_snake_case)]
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.write_all(
format!(
"
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub(crate) fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| Generators {{
G: std_shims::vec![
{G_str}
],
H: std_shims::vec![
{H_str}
],
}})
}}
",
)
.as_bytes(),
)
.unwrap();
}
#[cfg(not(feature = "compile-time-generators"))]
fn generators(prefix: &'static str, path: &str) {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.write_all(
format!(
r#"
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub(crate) fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| {{
monero_generators::bulletproofs_generators(b"{prefix}")
}})
}}
"#,
)
.as_bytes(),
)
.unwrap();
}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
generators("bulletproof", "generators.rs");
generators("bulletproof_plus", "generators_plus.rs");
}
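// The generated files are pulled in by the `original` and `plus` modules via
// `include!(concat!(env!("OUT_DIR"), "/generators.rs"))` (and "/generators_plus.rs" respectively)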

View File

@@ -1,101 +0,0 @@
use std_shims::vec::Vec;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
traits::{IsIdentity, VartimeMultiscalarMul},
scalar::Scalar,
edwards::EdwardsPoint,
};
use monero_generators::{H, Generators};
use crate::{original, plus};
#[derive(Default)]
pub(crate) struct InternalBatchVerifier {
pub(crate) g: Scalar,
pub(crate) h: Scalar,
pub(crate) g_bold: Vec<Scalar>,
pub(crate) h_bold: Vec<Scalar>,
pub(crate) other: Vec<(Scalar, EdwardsPoint)>,
}
impl InternalBatchVerifier {
#[must_use]
fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool {
let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len();
let mut scalars = Vec::with_capacity(capacity);
let mut points = Vec::with_capacity(capacity);
scalars.push(self.g);
points.push(G);
scalars.push(self.h);
points.push(H);
for (i, g_bold) in self.g_bold.into_iter().enumerate() {
scalars.push(g_bold);
points.push(generators.G[i]);
}
for (i, h_bold) in self.h_bold.into_iter().enumerate() {
scalars.push(h_bold);
points.push(generators.H[i]);
}
for (scalar, point) in self.other {
scalars.push(scalar);
points.push(point);
}
EdwardsPoint::vartime_multiscalar_mul(scalars, points).is_identity()
}
}
#[derive(Default)]
pub(crate) struct BulletproofsBatchVerifier(pub(crate) InternalBatchVerifier);
impl BulletproofsBatchVerifier {
#[must_use]
pub(crate) fn verify(self) -> bool {
self.0.verify(ED25519_BASEPOINT_POINT, H(), original::GENERATORS())
}
}
#[derive(Default)]
pub(crate) struct BulletproofsPlusBatchVerifier(pub(crate) InternalBatchVerifier);
impl BulletproofsPlusBatchVerifier {
#[must_use]
pub(crate) fn verify(self) -> bool {
// Bulletproofs+ is written as per the paper, with G for the value and H for the mask
// Monero uses H for the value and G for the mask
self.0.verify(H(), ED25519_BASEPOINT_POINT, plus::GENERATORS())
}
}
/// A batch verifier for Bulletproofs(+).
///
/// This uses a fixed layout such that all fixed points only incur a single point scaling,
/// regardless of the amounts of proofs verified. For all variable points (commitments), they're
/// accumulated with the fixed points into a single multiscalar multiplication.
#[derive(Default)]
pub struct BatchVerifier {
pub(crate) original: BulletproofsBatchVerifier,
pub(crate) plus: BulletproofsPlusBatchVerifier,
}
impl BatchVerifier {
/// Create a new batch verifier.
pub fn new() -> Self {
Self {
original: BulletproofsBatchVerifier(InternalBatchVerifier::default()),
plus: BulletproofsPlusBatchVerifier(InternalBatchVerifier::default()),
}
}
/// Verify all of the proofs queued within this batch verifier.
///
/// This uses a variable-time multiscalar multiplication internally.
#[must_use]
pub fn verify(self) -> bool {
self.original.verify() && self.plus.verify()
}
}
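// Usage sketch, with proofs queued via `Bulletproof::batch_verify` from this crate's lib.rs:
//   let mut verifier = BatchVerifier::new();
//   for (bp, commitments) in &proofs {
//     assert!(bp.batch_verify(&mut OsRng, &mut verifier, commitments));
//   }
//   assert!(verifier.verify());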

View File

@@ -1,74 +0,0 @@
use std_shims::{vec, vec::Vec};
use curve25519_dalek::{
traits::{MultiscalarMul, VartimeMultiscalarMul},
scalar::Scalar,
edwards::EdwardsPoint,
};
pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS};
pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
let mut buf_scalars = Vec::with_capacity(pairs.len());
let mut buf_points = Vec::with_capacity(pairs.len());
for (scalar, point) in pairs {
buf_scalars.push(scalar);
buf_points.push(point);
}
EdwardsPoint::multiscalar_mul(buf_scalars, buf_points)
}
pub(crate) fn multiexp_vartime(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
let mut buf_scalars = Vec::with_capacity(pairs.len());
let mut buf_points = Vec::with_capacity(pairs.len());
for (scalar, point) in pairs {
buf_scalars.push(scalar);
buf_points.push(point);
}
EdwardsPoint::vartime_multiscalar_mul(buf_scalars, buf_points)
}
/*
This has room for optimization worth investigating further. It currently takes
an iterative approach. It can be optimized further via divide and conquer.
Assume there are 4 challenges.
Iterative approach (current):
1. Do the optimal multiplications across challenge column 0 and 1.
2. Do the optimal multiplications across that result and column 2.
3. Do the optimal multiplications across that result and column 3.
Divide and conquer (worth investigating further):
1. Do the optimal multiplications across challenge column 0 and 1.
2. Do the optimal multiplications across challenge column 2 and 3.
3. Multiply both results together.
When there are 4 challenges (n=16), the iterative approach does 28 multiplications
versus divide and conquer's 24.
*/
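// Returns a Vec of 2^n scalars where, reading index i as n bits, entry i is the product over
// all challenges of challenge.0 (where the bit of i is set) or challenge.1 (where it's unset),
// with the first challenge corresponding to the most significant bit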
pub(crate) fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
let mut products = vec![Scalar::ONE; 1 << challenges.len()];
if !challenges.is_empty() {
products[0] = challenges[0].1;
products[1] = challenges[0].0;
for (j, challenge) in challenges.iter().enumerate().skip(1) {
let mut slots = (1 << (j + 1)) - 1;
while slots > 0 {
products[slots] = products[slots / 2] * challenge.0;
products[slots - 1] = products[slots / 2] * challenge.1;
slots = slots.saturating_sub(2);
}
}
// Sanity check since if the above failed to populate, it'd be critical
for product in &products {
debug_assert!(*product != Scalar::ZERO);
}
}
products
}

View File

@@ -1,292 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(non_snake_case)]
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroizing;
use curve25519_dalek::edwards::EdwardsPoint;
use monero_io::*;
pub use monero_generators::MAX_COMMITMENTS;
use monero_primitives::Commitment;
pub(crate) mod scalar_vector;
pub(crate) mod point_vector;
pub(crate) mod core;
use crate::core::LOG_COMMITMENT_BITS;
pub(crate) mod batch_verifier;
use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};
pub use batch_verifier::BatchVerifier;
pub(crate) mod original;
use crate::original::{
IpProof, AggregateRangeStatement as OriginalStatement, AggregateRangeWitness as OriginalWitness,
AggregateRangeProof as OriginalProof,
};
pub(crate) mod plus;
use crate::plus::{
WipProof, AggregateRangeStatement as PlusStatement, AggregateRangeWitness as PlusWitness,
AggregateRangeProof as PlusProof,
};
#[cfg(test)]
mod tests;
/// An error from proving/verifying Bulletproofs(+).
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum BulletproofError {
/// Proving/verifying a Bulletproof(+) range proof with no commitments.
#[cfg_attr(feature = "std", error("no commitments to prove the range for"))]
NoCommitments,
/// Proving/verifying a Bulletproof(+) range proof with more commitments than supported.
#[cfg_attr(feature = "std", error("too many commitments to prove the range for"))]
TooManyCommitments,
}
/// A Bulletproof(+).
///
/// This encapsulates either a Bulletproof or a Bulletproof+.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproof {
/// A Bulletproof.
Original(OriginalProof),
/// A Bulletproof+.
Plus(PlusProof),
}
impl Bulletproof {
fn bp_fields(plus: bool) -> usize {
if plus {
6
} else {
9
}
}
/// Calculate the weight penalty for the Bulletproof(+).
///
/// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone
/// accordingly doesn't properly represent the burden of the proof. To compensate, Monero
/// 'claws back' some of the weight a proof's small size would otherwise save.
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
#[allow(non_snake_case)]
let mut LR_len = 0;
let mut n_padded_outputs = 1;
while n_padded_outputs < n_outputs {
LR_len += 1;
n_padded_outputs = 1 << LR_len;
}
LR_len += LOG_COMMITMENT_BITS;
let mut bp_clawback = 0;
if n_padded_outputs > 2 {
let fields = Bulletproof::bp_fields(plus);
let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2;
let size = (fields + (2 * LR_len)) * 32;
bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
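// As a worked example, with 64-bit amounts (LOG_COMMITMENT_BITS = 6), three Bulletproof+
// outputs pad to four: LR_len = 2 + 6 = 8, base = ((6 + 14) * 32) / 2 = 320,
// size = (6 + 16) * 32 = 704, and bp_clawback = ((320 * 4) - 704) * 4 / 5 = 460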
}
(bp_clawback, LR_len)
}
/// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
pub fn prove<R: RngCore + CryptoRng>(
rng: &mut R,
outputs: Vec<Commitment>,
) -> Result<Bulletproof, BulletproofError> {
if outputs.is_empty() {
Err(BulletproofError::NoCommitments)?;
}
if outputs.len() > MAX_COMMITMENTS {
Err(BulletproofError::TooManyCommitments)?;
}
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Original(
OriginalStatement::new(&commitments)
.unwrap()
.prove(rng, OriginalWitness::new(outputs).unwrap())
.unwrap(),
))
}
/// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof+.
pub fn prove_plus<R: RngCore + CryptoRng>(
rng: &mut R,
outputs: Vec<Commitment>,
) -> Result<Bulletproof, BulletproofError> {
if outputs.is_empty() {
Err(BulletproofError::NoCommitments)?;
}
if outputs.len() > MAX_COMMITMENTS {
Err(BulletproofError::TooManyCommitments)?;
}
let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
Ok(Bulletproof::Plus(
PlusStatement::new(&commitments)
.unwrap()
.prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
.unwrap(),
))
}
/// Verify the given Bulletproof(+).
#[must_use]
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
match self {
Bulletproof::Original(bp) => {
let mut verifier = BulletproofsBatchVerifier::default();
let Some(statement) = OriginalStatement::new(commitments) else {
return false;
};
if !statement.verify(rng, &mut verifier, bp.clone()) {
return false;
}
verifier.verify()
}
Bulletproof::Plus(bp) => {
let mut verifier = BulletproofsPlusBatchVerifier::default();
let Some(statement) = PlusStatement::new(commitments) else {
return false;
};
if !statement.verify(rng, &mut verifier, bp.clone()) {
return false;
}
verifier.verify()
}
}
}
/// Accumulate the verification for the given Bulletproof(+) into the specified BatchVerifier.
///
/// Returns false if the Bulletproof(+) isn't sane, leaving the BatchVerifier in an undefined
/// state.
///
/// Returns true if the Bulletproof(+) is sane, regardless of its validity.
///
/// The BatchVerifier must have its verification function executed to actually verify this proof.
#[must_use]
pub fn batch_verify<R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier,
commitments: &[EdwardsPoint],
) -> bool {
match self {
Bulletproof::Original(bp) => {
let Some(statement) = OriginalStatement::new(commitments) else {
return false;
};
statement.verify(rng, &mut verifier.original, bp.clone())
}
Bulletproof::Plus(bp) => {
let Some(statement) = PlusStatement::new(commitments) else {
return false;
};
statement.verify(rng, &mut verifier.plus, bp.clone())
}
}
}
fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
&self,
w: &mut W,
specific_write_vec: F,
) -> io::Result<()> {
match self {
Bulletproof::Original(bp) => {
write_point(&bp.A, w)?;
write_point(&bp.S, w)?;
write_point(&bp.T1, w)?;
write_point(&bp.T2, w)?;
write_scalar(&bp.tau_x, w)?;
write_scalar(&bp.mu, w)?;
specific_write_vec(&bp.ip.L, w)?;
specific_write_vec(&bp.ip.R, w)?;
write_scalar(&bp.ip.a, w)?;
write_scalar(&bp.ip.b, w)?;
write_scalar(&bp.t_hat, w)
}
Bulletproof::Plus(bp) => {
write_point(&bp.A, w)?;
write_point(&bp.wip.A, w)?;
write_point(&bp.wip.B, w)?;
write_scalar(&bp.wip.r_answer, w)?;
write_scalar(&bp.wip.s_answer, w)?;
write_scalar(&bp.wip.delta_answer, w)?;
specific_write_vec(&bp.wip.L, w)?;
specific_write_vec(&bp.wip.R, w)
}
}
}
/// Write a Bulletproof(+) for the message signed by a transaction's signature.
///
/// This has a distinct encoding from the standard encoding.
pub fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
}
/// Write a Bulletproof(+).
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.write_core(w, |points, w| write_vec(write_point, points, w))
}
/// Serialize a Bulletproof(+) to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
/// Read a Bulletproof.
pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
Ok(Bulletproof::Original(OriginalProof {
A: read_point(r)?,
S: read_point(r)?,
T1: read_point(r)?,
T2: read_point(r)?,
tau_x: read_scalar(r)?,
mu: read_scalar(r)?,
ip: IpProof {
L: read_vec(read_point, r)?,
R: read_vec(read_point, r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
},
t_hat: read_scalar(r)?,
}))
}
/// Read a Bulletproof+.
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
Ok(Bulletproof::Plus(PlusProof {
A: read_point(r)?,
wip: WipProof {
A: read_point(r)?,
B: read_point(r)?,
r_answer: read_scalar(r)?,
s_answer: read_scalar(r)?,
delta_answer: read_scalar(r)?,
L: read_vec(read_point, r)?.into_iter().collect(),
R: read_vec(read_point, r)?.into_iter().collect(),
},
}))
}
}
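// Serialization round-trip sketch (assuming a `bp: Bulletproof` in scope):
//   let bytes = bp.serialize();
//   let parsed = Bulletproof::read(&mut bytes.as_slice())?;
// Note the reader must know the variant in advance; `read` parses an original Bulletproof while
// `read_plus` parses a Bulletproof+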

View File

@@ -1,303 +0,0 @@
use std_shims::{vec, vec::Vec};
use zeroize::Zeroize;
use curve25519_dalek::{Scalar, EdwardsPoint};
use monero_generators::H;
use monero_primitives::{INV_EIGHT, keccak256_to_scalar};
use crate::{
core::{multiexp_vartime, challenge_products},
scalar_vector::ScalarVector,
point_vector::PointVector,
BulletproofsBatchVerifier,
};
/// An error from proving/verifying Inner-Product statements.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub(crate) enum IpError {
IncorrectAmountOfGenerators,
DifferingLrLengths,
}
/// The Bulletproofs Inner-Product statement.
///
/// This is for usage with Protocol 2 from the Bulletproofs paper.
#[derive(Clone, Debug)]
pub(crate) struct IpStatement {
// Weights for h_bold
h_bold_weights: ScalarVector,
// u as the discrete logarithm of G
u: Scalar,
}
/// The witness for the Bulletproofs Inner-Product statement.
#[derive(Clone, Debug)]
pub(crate) struct IpWitness {
// a
a: ScalarVector,
// b
b: ScalarVector,
}
impl IpWitness {
/// Construct a new witness for an Inner-Product statement.
///
/// This function returns None if the lengths of a, b are mismatched, aren't a power of two, or
/// are empty.
pub(crate) fn new(a: ScalarVector, b: ScalarVector) -> Option<Self> {
if a.0.is_empty() || (a.len() != b.len()) {
None?;
}
let mut power_of_2 = 1;
while power_of_2 < a.len() {
power_of_2 <<= 1;
}
if power_of_2 != a.len() {
None?;
}
Some(Self { a, b })
}
}
/// A proof for the Bulletproofs Inner-Product statement.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct IpProof {
pub(crate) L: Vec<EdwardsPoint>,
pub(crate) R: Vec<EdwardsPoint>,
pub(crate) a: Scalar,
pub(crate) b: Scalar,
}
impl IpStatement {
/// Create a new Inner-Product statement which won't transcript P.
///
/// This MUST only be called when P is deterministic to already transcripted elements.
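/// The Fiat-Shamir transform is only sound when the statement (here, P) is bound, at least
/// transitively, by the transcript.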
pub(crate) fn new_without_P_transcript(h_bold_weights: ScalarVector, u: Scalar) -> Self {
Self { h_bold_weights, u }
}
// Transcript a round of the protocol
fn transcript_L_R(transcript: Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
let mut transcript = transcript.to_bytes().to_vec();
transcript.extend(L.compress().to_bytes());
transcript.extend(R.compress().to_bytes());
keccak256_to_scalar(transcript)
}
/// Prove for this Inner-Product statement.
///
/// Returns an error if this statement couldn't be proven for (such as if the witness isn't
/// consistent).
pub(crate) fn prove(
self,
mut transcript: Scalar,
witness: IpWitness,
) -> Result<IpProof, IpError> {
let generators = crate::original::GENERATORS();
let g_bold_slice = &generators.G[.. witness.a.len()];
let h_bold_slice = &generators.H[.. witness.a.len()];
let (mut g_bold, mut h_bold, u, mut a, mut b) = {
let IpStatement { h_bold_weights, u } = self;
let u = H() * u;
// Ensure we have the exact amount of weights
if h_bold_weights.len() != g_bold_slice.len() {
Err(IpError::IncorrectAmountOfGenerators)?;
}
// Acquire a local copy of the generators
let g_bold = PointVector(g_bold_slice.to_vec());
let h_bold = PointVector(h_bold_slice.to_vec()).mul_vec(&h_bold_weights);
let IpWitness { a, b } = witness;
(g_bold, h_bold, u, a, b)
};
let mut L_vec = vec![];
let mut R_vec = vec![];
// `else: (n > 1)` case, lines 18-35 of the Bulletproofs paper
// This interprets `g_bold.len()` as `n`
while g_bold.len() > 1 {
// Split a, b, g_bold, h_bold as needed for lines 20-24
let (a1, a2) = a.clone().split();
let (b1, b2) = b.clone().split();
let (g_bold1, g_bold2) = g_bold.split();
let (h_bold1, h_bold2) = h_bold.split();
let n_hat = g_bold1.len();
// Sanity
debug_assert_eq!(a1.len(), n_hat);
debug_assert_eq!(a2.len(), n_hat);
debug_assert_eq!(b1.len(), n_hat);
debug_assert_eq!(b2.len(), n_hat);
debug_assert_eq!(g_bold1.len(), n_hat);
debug_assert_eq!(g_bold2.len(), n_hat);
debug_assert_eq!(h_bold1.len(), n_hat);
debug_assert_eq!(h_bold2.len(), n_hat);
// cl, cr, lines 21-22
let cl = a1.clone().inner_product(&b2);
let cr = a2.clone().inner_product(&b1);
let L = {
let mut L_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
for (a, g) in a1.0.iter().zip(g_bold2.0.iter()) {
L_terms.push((*a, *g));
}
for (b, h) in b2.0.iter().zip(h_bold1.0.iter()) {
L_terms.push((*b, *h));
}
L_terms.push((cl, u));
// Uses vartime since this isn't a ZK proof
multiexp_vartime(&L_terms)
};
L_vec.push(L * INV_EIGHT());
let R = {
let mut R_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
for (a, g) in a2.0.iter().zip(g_bold1.0.iter()) {
R_terms.push((*a, *g));
}
for (b, h) in b1.0.iter().zip(h_bold2.0.iter()) {
R_terms.push((*b, *h));
}
R_terms.push((cr, u));
multiexp_vartime(&R_terms)
};
R_vec.push(R * INV_EIGHT());
// Now that we've calculated L, R, transcript them to receive x (26-27)
transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
let x = transcript;
let x_inv = x.invert();
// The prover and verifier now calculate the following (28-31)
g_bold = PointVector(Vec::with_capacity(g_bold1.len()));
for (a, b) in g_bold1.0.into_iter().zip(g_bold2.0.into_iter()) {
g_bold.0.push(multiexp_vartime(&[(x_inv, a), (x, b)]));
}
h_bold = PointVector(Vec::with_capacity(h_bold1.len()));
for (a, b) in h_bold1.0.into_iter().zip(h_bold2.0.into_iter()) {
h_bold.0.push(multiexp_vartime(&[(x, a), (x_inv, b)]));
}
// 32-34
a = (a1 * x) + &(a2 * x_inv);
b = (b1 * x_inv) + &(b2 * x);
}
// `if n = 1` case from line 14-17
// Sanity
debug_assert_eq!(g_bold.len(), 1);
debug_assert_eq!(h_bold.len(), 1);
debug_assert_eq!(a.len(), 1);
debug_assert_eq!(b.len(), 1);
// We simply send a/b
Ok(IpProof { L: L_vec, R: R_vec, a: a[0], b: b[0] })
}
/// Queue an Inner-Product proof for batch verification.
///
/// This will return Err if the proof's dimensions are inconsistent with the statement. It will
/// return Ok if the proof was successfully queued for batch verification. The caller is still
/// required to verify the batch in order to ensure the proof is actually correct.
pub(crate) fn verify(
self,
verifier: &mut BulletproofsBatchVerifier,
ip_rows: usize,
mut transcript: Scalar,
verifier_weight: Scalar,
proof: IpProof,
) -> Result<(), IpError> {
let generators = crate::original::GENERATORS();
let g_bold_slice = &generators.G[.. ip_rows];
let h_bold_slice = &generators.H[.. ip_rows];
let IpStatement { h_bold_weights, u } = self;
// Verify the L/R lengths
{
// Calculate the discrete log w.r.t. 2 for the amount of generators present
let mut lr_len = 0;
while (1 << lr_len) < g_bold_slice.len() {
lr_len += 1;
}
// This proof has fewer/more terms than the passed-in generators support
if proof.L.len() != lr_len {
Err(IpError::IncorrectAmountOfGenerators)?;
}
if proof.L.len() != proof.R.len() {
Err(IpError::DifferingLrLengths)?;
}
}
// Again, we start with the `else: (n > 1)` case
// We need x, x_inv per lines 25-27 for lines 28-31
let mut xs = Vec::with_capacity(proof.L.len());
for (L, R) in proof.L.iter().zip(proof.R.iter()) {
transcript = Self::transcript_L_R(transcript, *L, *R);
xs.push(transcript);
}
// We calculate their inverse in batch
let mut x_invs = xs.clone();
Scalar::batch_invert(&mut x_invs);
// Now, with x and x_inv, we need to calculate g_bold', h_bold', P'
//
// For the sake of performance, we solely want to calculate all of these in terms of scalings
// for g_bold, h_bold, P, and don't want to actually perform intermediary scalings of the
// points
//
// L and R are easy, as it's simply x**2, x**-2
//
// For the series of g_bold, h_bold, we use the `challenge_products` function
// For how that works, please see its own documentation
let product_cache = {
let mut challenges = Vec::with_capacity(proof.L.len());
let x_iter = xs.into_iter().zip(x_invs);
let lr_iter = proof.L.into_iter().zip(proof.R);
for ((x, x_inv), (L, R)) in x_iter.zip(lr_iter) {
challenges.push((x, x_inv));
verifier.0.other.push((verifier_weight * (x * x), L.mul_by_cofactor()));
verifier.0.other.push((verifier_weight * (x_inv * x_inv), R.mul_by_cofactor()));
}
challenge_products(&challenges)
};
// And now for the `if n = 1` case
let c = proof.a * proof.b;
// The multiexp of these terms equate to the final permutation of P
// We now add terms for a * g_bold' + b * h_bold' + c * u, with the scalars negative such
// that the terms sum to 0 for an honest prover
// The g_bold * a term case from line 16
#[allow(clippy::needless_range_loop)]
for i in 0 .. g_bold_slice.len() {
verifier.0.g_bold[i] -= verifier_weight * product_cache[i] * proof.a;
}
// The h_bold * b term case from line 16
for i in 0 .. h_bold_slice.len() {
verifier.0.h_bold[i] -=
verifier_weight * product_cache[product_cache.len() - 1 - i] * proof.b * h_bold_weights[i];
}
// The c * u term case from line 16
verifier.0.h -= verifier_weight * c * u;
Ok(())
}
}

View File

@@ -1,339 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, Scalar, EdwardsPoint};
use monero_generators::{H, Generators, MAX_COMMITMENTS, COMMITMENT_BITS};
use monero_primitives::{Commitment, INV_EIGHT, keccak256_to_scalar};
use crate::{core::multiexp, scalar_vector::ScalarVector, BulletproofsBatchVerifier};
pub(crate) mod inner_product;
use inner_product::*;
pub(crate) use inner_product::IpProof;
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement<'a> {
commitments: &'a [EdwardsPoint],
}
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeWitness {
commitments: Vec<Commitment>,
}
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
pub(crate) A: EdwardsPoint,
pub(crate) S: EdwardsPoint,
pub(crate) T1: EdwardsPoint,
pub(crate) T2: EdwardsPoint,
pub(crate) tau_x: Scalar,
pub(crate) mu: Scalar,
pub(crate) t_hat: Scalar,
pub(crate) ip: IpProof,
}
impl<'a> AggregateRangeStatement<'a> {
pub(crate) fn new(commitments: &'a [EdwardsPoint]) -> Option<Self> {
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
None?;
}
Some(Self { commitments })
}
}
impl AggregateRangeWitness {
pub(crate) fn new(commitments: Vec<Commitment>) -> Option<Self> {
if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
None?;
}
Some(Self { commitments })
}
}
impl<'a> AggregateRangeStatement<'a> {
fn initial_transcript(&self) -> (Scalar, Vec<EdwardsPoint>) {
let V = self.commitments.iter().map(|c| c * INV_EIGHT()).collect::<Vec<_>>();
(keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}
fn transcript_A_S(transcript: Scalar, A: EdwardsPoint, S: EdwardsPoint) -> (Scalar, Scalar) {
let mut buf = Vec::with_capacity(96);
buf.extend(transcript.to_bytes());
buf.extend(A.compress().to_bytes());
buf.extend(S.compress().to_bytes());
let y = keccak256_to_scalar(buf);
let z = keccak256_to_scalar(y.to_bytes());
(y, z)
}
fn transcript_T12(transcript: Scalar, T1: EdwardsPoint, T2: EdwardsPoint) -> Scalar {
let mut buf = Vec::with_capacity(128);
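// The prior challenge is hashed in twice, mirroring Monero's hash_cache_mash construction,
// which receives it both as the running hash and as an explicit argument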
buf.extend(transcript.to_bytes());
buf.extend(transcript.to_bytes());
buf.extend(T1.compress().to_bytes());
buf.extend(T2.compress().to_bytes());
keccak256_to_scalar(buf)
}
fn transcript_tau_x_mu_t_hat(
transcript: Scalar,
tau_x: Scalar,
mu: Scalar,
t_hat: Scalar,
) -> Scalar {
let mut buf = Vec::with_capacity(128);
buf.extend(transcript.to_bytes());
buf.extend(transcript.to_bytes());
buf.extend(tau_x.to_bytes());
buf.extend(mu.to_bytes());
buf.extend(t_hat.to_bytes());
keccak256_to_scalar(buf)
}
#[allow(clippy::needless_pass_by_value)]
pub(crate) fn prove(
self,
rng: &mut (impl RngCore + CryptoRng),
witness: AggregateRangeWitness,
) -> Option<AggregateRangeProof> {
if self.commitments != witness.commitments.iter().map(Commitment::calculate).collect::<Vec<_>>()
{
None?
};
let generators = GENERATORS();
let (mut transcript, _) = self.initial_transcript();
// Find out the padded amount of commitments
let mut padded_pow_of_2 = 1;
while padded_pow_of_2 < witness.commitments.len() {
padded_pow_of_2 <<= 1;
}
let mut aL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
for (i, commitment) in witness.commitments.iter().enumerate() {
let mut amount = commitment.amount;
for j in 0 .. COMMITMENT_BITS {
aL[(i * COMMITMENT_BITS) + j] = Scalar::from(amount & 1);
amount >>= 1;
}
}
let aR = aL.clone() - Scalar::ONE;
let alpha = Scalar::random(&mut *rng);
let A = {
let mut terms = Vec::with_capacity(1 + (2 * aL.len()));
terms.push((alpha, ED25519_BASEPOINT_POINT));
for (aL, G) in aL.0.iter().zip(&generators.G) {
terms.push((*aL, *G));
}
for (aR, H) in aR.0.iter().zip(&generators.H) {
terms.push((*aR, *H));
}
let res = multiexp(&terms) * INV_EIGHT();
terms.zeroize();
res
};
let mut sL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
let mut sR = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
for i in 0 .. (padded_pow_of_2 * COMMITMENT_BITS) {
sL[i] = Scalar::random(&mut *rng);
sR[i] = Scalar::random(&mut *rng);
}
let rho = Scalar::random(&mut *rng);
let S = {
let mut terms = Vec::with_capacity(1 + (2 * sL.len()));
terms.push((rho, ED25519_BASEPOINT_POINT));
for (sL, G) in sL.0.iter().zip(&generators.G) {
terms.push((*sL, *G));
}
for (sR, H) in sR.0.iter().zip(&generators.H) {
terms.push((*sR, *H));
}
let res = multiexp(&terms) * INV_EIGHT();
terms.zeroize();
res
};
let (y, z) = Self::transcript_A_S(transcript, A, S);
transcript = z;
let z = ScalarVector::powers(z, 3 + padded_pow_of_2);
let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);
let l = [aL - z[1], sL];
let y_pow_n = ScalarVector::powers(y, aR.len());
let mut r = [((aR + z[1]) * &y_pow_n), sR * &y_pow_n];
{
for j in 0 .. padded_pow_of_2 {
for i in 0 .. COMMITMENT_BITS {
r[0].0[(j * COMMITMENT_BITS) + i] += z[2 + j] * twos[i];
}
}
}
let t1 = (l[0].clone().inner_product(&r[1])) + (r[0].clone().inner_product(&l[1]));
let t2 = l[1].clone().inner_product(&r[1]);
let tau_1 = Scalar::random(&mut *rng);
let T1 = {
let mut T1_terms = [(t1, H()), (tau_1, ED25519_BASEPOINT_POINT)];
for term in &mut T1_terms {
term.0 *= INV_EIGHT();
}
let T1 = multiexp(&T1_terms);
T1_terms.zeroize();
T1
};
let tau_2 = Scalar::random(&mut *rng);
let T2 = {
let mut T2_terms = [(t2, H()), (tau_2, ED25519_BASEPOINT_POINT)];
for term in &mut T2_terms {
term.0 *= INV_EIGHT();
}
let T2 = multiexp(&T2_terms);
T2_terms.zeroize();
T2
};
transcript = Self::transcript_T12(transcript, T1, T2);
let x = transcript;
let [l0, l1] = l;
let l = l0 + &(l1 * x);
let [r0, r1] = r;
let r = r0 + &(r1 * x);
let t_hat = l.clone().inner_product(&r);
let mut tau_x = ((tau_2 * x) + tau_1) * x;
{
for (i, commitment) in witness.commitments.iter().enumerate() {
tau_x += z[2 + i] * commitment.mask;
}
}
let mu = alpha + (rho * x);
let y_inv_pow_n = ScalarVector::powers(y.invert(), l.len());
transcript = Self::transcript_tau_x_mu_t_hat(transcript, tau_x, mu, t_hat);
let x_ip = transcript;
let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
.prove(transcript, IpWitness::new(l, r).unwrap())
.unwrap();
let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
#[cfg(debug_assertions)]
{
let mut verifier = BulletproofsBatchVerifier::default();
debug_assert!(self.verify(rng, &mut verifier, res.clone()));
debug_assert!(verifier.verify());
}
Some(res)
}
#[must_use]
pub(crate) fn verify(
self,
rng: &mut (impl RngCore + CryptoRng),
verifier: &mut BulletproofsBatchVerifier,
mut proof: AggregateRangeProof,
) -> bool {
let mut padded_pow_of_2 = 1;
while padded_pow_of_2 < self.commitments.len() {
padded_pow_of_2 <<= 1;
}
let ip_rows = padded_pow_of_2 * COMMITMENT_BITS;
while verifier.0.g_bold.len() < ip_rows {
verifier.0.g_bold.push(Scalar::ZERO);
verifier.0.h_bold.push(Scalar::ZERO);
}
let (mut transcript, mut commitments) = self.initial_transcript();
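// Proof elements and commitments are transmitted * INV_EIGHT; multiplying by the cofactor
// (eight) recovers the intended points while ensuring they lie in the prime-order subgroup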
for commitment in &mut commitments {
*commitment = commitment.mul_by_cofactor();
}
let (y, z) = Self::transcript_A_S(transcript, proof.A, proof.S);
transcript = z;
let z = ScalarVector::powers(z, 3 + padded_pow_of_2);
transcript = Self::transcript_T12(transcript, proof.T1, proof.T2);
let x = transcript;
transcript = Self::transcript_tau_x_mu_t_hat(transcript, proof.tau_x, proof.mu, proof.t_hat);
let x_ip = transcript;
proof.A = proof.A.mul_by_cofactor();
proof.S = proof.S.mul_by_cofactor();
proof.T1 = proof.T1.mul_by_cofactor();
proof.T2 = proof.T2.mul_by_cofactor();
let y_pow_n = ScalarVector::powers(y, ip_rows);
let y_inv_pow_n = ScalarVector::powers(y.invert(), ip_rows);
let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);
// 65
{
let weight = Scalar::random(&mut *rng);
verifier.0.h += weight * proof.t_hat;
verifier.0.g += weight * proof.tau_x;
// Now that we've accumulated the lhs, negate the weight and accumulate the rhs
// These will now sum to 0 if equal
let weight = -weight;
verifier.0.h += weight * (z[1] - (z[2])) * y_pow_n.sum();
for (i, commitment) in commitments.iter().enumerate() {
verifier.0.other.push((weight * z[2 + i], *commitment));
}
for i in 0 .. padded_pow_of_2 {
verifier.0.h -= weight * z[3 + i] * twos.clone().sum();
}
verifier.0.other.push((weight * x, proof.T1));
verifier.0.other.push((weight * (x * x), proof.T2));
}
let ip_weight = Scalar::random(&mut *rng);
// 66
verifier.0.other.push((ip_weight, proof.A));
verifier.0.other.push((ip_weight * x, proof.S));
// We can replace these with a g_sum, h_sum scalar in the batch verifier
// It'd trade `2 * ip_rows` scalar additions (per proof) for one scalar addition and an
// additional term in the MSM
let ip_z = ip_weight * z[1];
for i in 0 .. ip_rows {
verifier.0.h_bold[i] += ip_z;
}
let neg_ip_z = -ip_z;
for i in 0 .. ip_rows {
verifier.0.g_bold[i] += neg_ip_z;
}
{
for j in 0 .. padded_pow_of_2 {
for i in 0 .. COMMITMENT_BITS {
let full_i = (j * COMMITMENT_BITS) + i;
verifier.0.h_bold[full_i] += ip_weight * y_inv_pow_n[full_i] * z[2 + j] * twos[i];
}
}
}
verifier.0.h += ip_weight * x_ip * proof.t_hat;
// 67, 68
verifier.0.g += ip_weight * -proof.mu;
let res = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
.verify(verifier, ip_rows, transcript, ip_weight, proof.ip);
res.is_ok()
}
}

View File

@@ -1,20 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use monero_generators::hash_to_point;
use monero_primitives::{keccak256, keccak256_to_scalar};
// Monero starts BP+ transcripts with the following constant.
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
// Why this uses a hash_to_point is completely unknown.
*TRANSCRIPT_CELL
.get_or_init(|| hash_to_point(keccak256(b"bulletproof_plus_transcript")).compress().to_bytes())
}
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
let commitments_hash =
keccak256_to_scalar(commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
keccak256_to_scalar([TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
}

View File

@@ -1,138 +0,0 @@
use core::{
borrow::Borrow,
ops::{Index, IndexMut, Add, Sub, Mul},
};
use std_shims::{vec, vec::Vec};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::core::multiexp;
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
impl Index<usize> for ScalarVector {
type Output = Scalar;
fn index(&self, index: usize) -> &Scalar {
&self.0[index]
}
}
impl IndexMut<usize> for ScalarVector {
fn index_mut(&mut self, index: usize) -> &mut Scalar {
&mut self.0[index]
}
}
impl<S: Borrow<Scalar>> Add<S> for ScalarVector {
type Output = ScalarVector;
fn add(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s += scalar.borrow();
}
self
}
}
impl<S: Borrow<Scalar>> Sub<S> for ScalarVector {
type Output = ScalarVector;
fn sub(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s -= scalar.borrow();
}
self
}
}
impl<S: Borrow<Scalar>> Mul<S> for ScalarVector {
type Output = ScalarVector;
fn mul(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s *= scalar.borrow();
}
self
}
}
impl Add<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn add(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s += o;
}
self
}
}
impl Sub<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn sub(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s -= o;
}
self
}
}
impl Mul<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn mul(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s *= o;
}
self
}
}
impl Mul<&[EdwardsPoint]> for &ScalarVector {
type Output = EdwardsPoint;
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
debug_assert_eq!(self.len(), b.len());
let mut multiexp_args = self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>();
let res = multiexp(&multiexp_args);
multiexp_args.zeroize();
res
}
}
impl ScalarVector {
pub(crate) fn new(len: usize) -> Self {
ScalarVector(vec![Scalar::ZERO; len])
}
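// Returns the powers of x: [x**0, x**1, .. x**(len - 1)]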
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
debug_assert!(len != 0);
let mut res = Vec::with_capacity(len);
res.push(Scalar::ONE);
res.push(x);
for i in 2 .. len {
res.push(res[i - 1] * x);
}
res.truncate(len);
ScalarVector(res)
}
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn sum(mut self) -> Scalar {
self.0.drain(..).sum()
}
pub(crate) fn inner_product(self, vector: &Self) -> Scalar {
(self * vector).sum()
}
pub(crate) fn weighted_inner_product(self, vector: &Self, y: &Self) -> Scalar {
(self * vector * y).sum()
}
pub(crate) fn split(mut self) -> (Self, Self) {
debug_assert!(self.len() > 1);
let r = self.0.split_off(self.0.len() / 2);
debug_assert_eq!(self.len(), r.len());
(self, ScalarVector(r))
}
}

View File

@@ -1,56 +0,0 @@
use rand_core::{RngCore, OsRng};
use curve25519_dalek::scalar::Scalar;
use monero_primitives::Commitment;
use crate::{batch_verifier::BatchVerifier, Bulletproof, BulletproofError};
mod original;
mod plus;
macro_rules! bulletproofs_tests {
($name: ident, $max: ident, $plus: literal) => {
#[test]
fn $name() {
// Create Bulletproofs for all possible output quantities
let mut verifier = BatchVerifier::new();
for i in 1 ..= 16 {
let commitments = (1 ..= i)
.map(|_| Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()))
.collect::<Vec<_>>();
let bp = if $plus {
Bulletproof::prove_plus(&mut OsRng, commitments.clone()).unwrap()
} else {
Bulletproof::prove(&mut OsRng, commitments.clone()).unwrap()
};
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
assert!(bp.verify(&mut OsRng, &commitments));
assert!(bp.batch_verify(&mut OsRng, &mut verifier, &commitments));
}
assert!(verifier.verify());
}
#[test]
fn $max() {
// Check Bulletproofs errors if we try to prove for too many outputs
let mut commitments = vec![];
for _ in 0 .. 17 {
commitments.push(Commitment::new(Scalar::ZERO, 0));
}
assert_eq!(
(if $plus {
Bulletproof::prove_plus(&mut OsRng, commitments)
} else {
Bulletproof::prove(&mut OsRng, commitments)
})
.unwrap_err(),
BulletproofError::TooManyCommitments,
);
}
};
}
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);

View File

@@ -1,75 +0,0 @@
// The inner product relation is P = sum(g_bold * a, h_bold * b, g * <a, b>)
use rand_core::OsRng;
use curve25519_dalek::Scalar;
use monero_generators::H;
use crate::{
scalar_vector::ScalarVector,
point_vector::PointVector,
original::{
GENERATORS,
inner_product::{IpStatement, IpWitness},
},
BulletproofsBatchVerifier,
};
#[test]
fn test_zero_inner_product() {
let statement =
IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; 1]), Scalar::ONE);
let witness = IpWitness::new(ScalarVector::new(1), ScalarVector::new(1)).unwrap();
let transcript = Scalar::random(&mut OsRng);
let proof = statement.clone().prove(transcript, witness).unwrap();
let mut verifier = BulletproofsBatchVerifier::default();
verifier.0.g_bold = vec![Scalar::ZERO; 1];
verifier.0.h_bold = vec![Scalar::ZERO; 1];
statement.verify(&mut verifier, 1, transcript, Scalar::random(&mut OsRng), proof).unwrap();
assert!(verifier.verify());
}
#[test]
fn test_inner_product() {
// P = sum(g_bold * a, h_bold * b, g * u * <a, b>)
let generators = GENERATORS();
let mut verifier = BulletproofsBatchVerifier::default();
verifier.0.g_bold = vec![Scalar::ZERO; 32];
verifier.0.h_bold = vec![Scalar::ZERO; 32];
for i in [1, 2, 4, 8, 16, 32] {
let g = H();
let mut g_bold = vec![];
let mut h_bold = vec![];
for i in 0 .. i {
g_bold.push(generators.G[i]);
h_bold.push(generators.H[i]);
}
let g_bold = PointVector(g_bold);
let h_bold = PointVector(h_bold);
let mut a = ScalarVector::new(i);
let mut b = ScalarVector::new(i);
for i in 0 .. i {
a[i] = Scalar::random(&mut OsRng);
b[i] = Scalar::random(&mut OsRng);
}
let P = g_bold.multiexp(&a) + h_bold.multiexp(&b) + (g * a.clone().inner_product(&b));
let statement =
IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; i]), Scalar::ONE);
let witness = IpWitness::new(a, b).unwrap();
let transcript = Scalar::random(&mut OsRng);
let proof = statement.clone().prove(transcript, witness).unwrap();
let weight = Scalar::random(&mut OsRng);
verifier.0.other.push((weight, P));
statement.verify(&mut verifier, i, transcript, weight, proof).unwrap();
}
assert!(verifier.verify());
}

View File

@@ -1,62 +0,0 @@
use hex_literal::hex;
use rand_core::OsRng;
use curve25519_dalek::scalar::Scalar;
use monero_io::decompress_point;
use crate::{
original::{IpProof, AggregateRangeProof as OriginalProof},
Bulletproof,
};
mod inner_product;
#[test]
fn bulletproofs_vector() {
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
let point = |point| decompress_point(point).unwrap();
// Generated from Monero
assert!(Bulletproof::Original(OriginalProof {
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
tau_x: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
ip: IpProof {
L: vec![
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
],
R: vec![
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
],
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
},
t_hat: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
})
.verify(
&mut OsRng,
&[
// For some reason, these vectors are * INV_EIGHT
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
.mul_by_cofactor(),
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
.mul_by_cofactor(),
]
));
}

View File

@@ -1,28 +0,0 @@
use rand_core::{RngCore, OsRng};
use curve25519_dalek::Scalar;
use monero_primitives::Commitment;
use crate::{
batch_verifier::BulletproofsPlusBatchVerifier,
plus::aggregate_range_proof::{AggregateRangeStatement, AggregateRangeWitness},
};
#[test]
fn test_aggregate_range_proof() {
let mut verifier = BulletproofsPlusBatchVerifier::default();
for m in 1 ..= 16 {
let mut commitments = vec![];
for _ in 0 .. m {
commitments.push(Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()));
}
let commitment_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
let statement = AggregateRangeStatement::new(&commitment_points).unwrap();
let witness = AggregateRangeWitness::new(commitments).unwrap();
let proof = statement.clone().prove(&mut OsRng, &witness).unwrap();
statement.verify(&mut OsRng, &mut verifier, proof);
}
assert!(verifier.verify());
}

View File

@@ -1,65 +0,0 @@
[package]
name = "monero-clsag"
version = "0.1.0"
description = "The CLSAG linkable ring signature, as defined by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/clsag"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Multisig dependencies
rand_chacha = { version = "0.3", default-features = false, optional = true }
transcript = { package = "flexible-transcript", path = "../../../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
group = { version = "0.13", default-features = false, optional = true }
dalek-ff-group = { path = "../../../../crypto/dalek-ff-group", version = "0.4", default-features = false, optional = true }
frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519"], optional = true }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[dev-dependencies]
frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519", "tests"] }
[features]
std = [
"std-shims/std",
"thiserror",
"rand_core/std",
"zeroize/std",
"subtle/std",
"rand_chacha?/std",
"transcript?/std",
"group?/alloc",
"dalek-ff-group?/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
multisig = ["rand_chacha", "transcript", "group", "dalek-ff-group", "frost", "std"]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,15 +0,0 @@
# Monero CLSAG
The CLSAG linkable ring signature, as defined by the Monero protocol.
Additionally included is a FROST-inspired threshold multisignature algorithm.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `multisig`: Provides a FROST-inspired threshold multisignature algorithm for
use.

Some files were not shown because too many files have changed in this diff