1 Commits

Author: Luke Parker
SHA1: f029471f9f
Message: Initial work on a tables lib
Incomplete lib intended to offer tables to all cryptographic libraries 
expecting them. Right now, it creates the tables at runtime. While that 
would still offer improvements, ideally large tables are built at 
compile time and simply loaded into memory at runtime.

My best idea for that was a linked list in the binary itself, where this 
app (on first call) reads a table stored in a data section, then grabs 
the reference to the next one and continues reading. The main issue 
with this idea, besides not yet having researched how to encode data into 
the binary, is that the same table would be saved to disk multiple 
times under this simplistic model. An O(n) iteration when writing to the 
bin could potentially solve that.

Related https://github.com/serai-dex/serai/issues/41.
Date: 2022-07-16 16:25:55 -04:00
459 changed files with 6928 additions and 75219 deletions
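As an aside, a minimal sketch of the compile-time-table idea described in the commit message. This is illustrative only (stand-in values and names, not the actual tables crate): a const fn fills a table which is then embedded in the binary's read-only data, so first use reads it straight from memory instead of rebuilding it.

const TABLE_SIZE: usize = 16;

const fn build_table() -> [u64; TABLE_SIZE] {
  let mut table = [0u64; TABLE_SIZE];
  let mut i = 0;
  while i < TABLE_SIZE {
    // Stand-in for "i * generator"; actual group arithmetic isn't const-evaluable,
    // which is why a build script (or runtime construction) would be needed.
    table[i] = (i as u64).wrapping_mul(0x9E37_79B9_7F4A_7C15);
    i += 1;
  }
  table
}

// Lives in the binary itself; nothing is computed at runtime.
static TABLE: [u64; TABLE_SIZE] = build_table();

fn lookup(i: usize) -> u64 {
  TABLE[i]
}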

.gitattributes (vendored): 5 lines changed

@@ -1,5 +0,0 @@
# Auto detect text files and perform LF normalization
* text=auto
* text eol=lf
*.pdf binary


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,47 +0,0 @@
name: bitcoin-regtest
description: Spawns a regtest Bitcoin daemon
inputs:
version:
description: "Version to download and run"
required: false
default: 24.0.1
runs:
using: "composite"
steps:
- name: Bitcoin Daemon Cache
id: cache-bitcoind
uses: actions/cache@v3
with:
path: bitcoin.tar.gz
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Bitcoin Daemon
if: steps.cache-bitcoind.outputs.cache-hit != 'true'
shell: bash
run: |
RUNNER_OS=linux
RUNNER_ARCH=x86_64
FILE=bitcoin-${{ inputs.version }}-$RUNNER_ARCH-$RUNNER_OS-gnu.tar.gz
wget https://bitcoincore.org/bin/bitcoin-core-${{ inputs.version }}/$FILE
mv $FILE bitcoin.tar.gz
- name: Extract the Bitcoin Daemon
shell: bash
run: |
tar xzvf bitcoin.tar.gz
cd bitcoin-${{ inputs.version }}
sudo mv bin/* /bin && sudo mv lib/* /lib
- name: Bitcoin Regtest Daemon
shell: bash
run: |
RPC_USER=serai
RPC_PASS=seraidex
bitcoind -txindex -regtest \
-rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
-rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
-daemon


@@ -1,43 +0,0 @@
name: build-dependencies
description: Installs build dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
required: true
default:
rust-toolchain:
description: "Rust toolchain to install"
required: false
default: stable
rust-components:
description: "Rust components to install"
required: false
default:
runs:
using: "composite"
steps:
- name: Install Protobuf
uses: arduino/setup-protoc@v2.0.0
with:
repo-token: ${{ inputs.github-token }}
- name: Install solc
shell: bash
run: |
pip3 install solc-select==0.2.1
solc-select install 0.8.16
solc-select use 0.8.16
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ inputs.rust-toolchain }}
components: ${{ inputs.rust-components }}
targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
# - name: Cache Rust
# uses: Swatinem/rust-cache@v2


@@ -1,44 +0,0 @@
name: monero-wallet-rpc
description: Spawns a Monero Wallet-RPC.
inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.2.0
runs:
using: "composite"
steps:
- name: Monero Wallet RPC Cache
id: cache-monero-wallet-rpc
uses: actions/cache@v3
with:
path: monero-wallet-rpc
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Monero Wallet RPC
if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
# to the contained folder not following the same naming scheme and
# requiring further expansion not worth doing right now
shell: bash
run: |
RUNNER_OS=${{ runner.os }}
RUNNER_ARCH=${{ runner.arch }}
RUNNER_OS=${RUNNER_OS,,}
RUNNER_ARCH=${RUNNER_ARCH,,}
RUNNER_OS=linux
RUNNER_ARCH=x64
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
wget https://downloads.getmonero.org/cli/$FILE
tar -xvf $FILE
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc
- name: Monero Wallet RPC
shell: bash
run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach


@@ -1,44 +0,0 @@
name: monero-regtest
description: Spawns a regtest Monero daemon
inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.2.0
runs:
using: "composite"
steps:
- name: Monero Daemon Cache
id: cache-monerod
uses: actions/cache@v3
with:
path: monerod
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Monero Daemon
if: steps.cache-monerod.outputs.cache-hit != 'true'
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
# to the contained folder not following the same naming scheme and
# requiring further expansion not worth doing right now
shell: bash
run: |
RUNNER_OS=${{ runner.os }}
RUNNER_ARCH=${{ runner.arch }}
RUNNER_OS=${RUNNER_OS,,}
RUNNER_ARCH=${RUNNER_ARCH,,}
RUNNER_OS=linux
RUNNER_ARCH=x64
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
wget https://downloads.getmonero.org/cli/$FILE
tar -xvf $FILE
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
- name: Monero Regtest Daemon
shell: bash
run: ./monerod --regtest --offline --fixed-difficulty=1 --detach


@@ -1,44 +0,0 @@
name: test-dependencies
description: Installs test dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
required: true
default:
monero-version:
description: "Monero version to download and run as a regtest node"
required: false
default: v0.18.2.0
bitcoin-version:
description: "Bitcoin version to download and run as a regtest node"
required: false
default: 24.0.1
runs:
using: "composite"
steps:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@v1
with:
version: nightly
- name: Run a Monero Regtest Node
uses: ./.github/actions/monero
with:
version: ${{ inputs.monero-version }}
- name: Run a Bitcoin Regtest Node
uses: ./.github/actions/bitcoin
with:
version: ${{ inputs.bitcoin-version }}
- name: Run a Monero Wallet-RPC
uses: ./.github/actions/monero-wallet-rpc


@@ -1 +0,0 @@
nightly-2023-07-01


@@ -1,27 +0,0 @@
name: Daily Deny Check
on:
schedule:
- cron: "0 0 * * *"
jobs:
deny:
name: Run cargo deny
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Advisory Cache
uses: actions/cache@v3
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo
uses: dtolnay/rust-toolchain@stable
- name: Install cargo deny
run: cargo install --locked cargo-deny
- name: Run cargo deny
run: cargo deny -L error --all-features check


@@ -1,59 +0,0 @@
name: Monero Tests
on:
push:
branches:
- develop
paths:
- "coins/monero/**"
- "processor/**"
pull_request:
paths:
- "coins/monero/**"
- "processor/**"
jobs:
# Only run these once since they will be consistent regardless of any node
unit-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Unit Tests Without Features
run: cargo test --package monero-serai --lib
# Doesn't run unit tests with features as the tests workflow will
integration-tests:
runs-on: ubuntu-latest
# Test against all supported protocol versions
strategy:
matrix:
version: [v0.17.3.2, v0.18.2.0]
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features
# Runs with the binaries feature so the binaries build
# https://github.com/rust-lang/cargo/issues/8396
run: cargo test --package monero-serai --features binaries --test '*'
- name: Run Integration Tests
# Don't run if the tests workflow also will
if: ${{ matrix.version != 'v0.18.2.0' }}
run: |
cargo test --package monero-serai --all-features --test '*'
cargo test --package serai-processor --all-features monero


@@ -1,53 +0,0 @@
name: Monthly Nightly Update
on:
schedule:
- cron: "0 0 1 * *"
jobs:
update:
name: Update nightly
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: "recursive"
- name: Write nightly version
run: echo $(date +"nightly-%Y-%m"-01) > .github/nightly-version
- name: Create the commit
run: |
git config user.name "GitHub Actions"
git config user.email "<>"
git checkout -b $(date +"nightly-%Y-%m")
git add .github/nightly-version
git commit -m "Update nightly"
git push -u origin $(date +"nightly-%Y-%m")
- name: Pull Request
uses: actions/github-script@v6
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: (new Date()).toLocaleString(
false,
{ month: "long", year: "numeric" }
) + " - Rust Nightly Update",
owner,
repo,
head: "nightly-" + (new Date()).toISOString().split("-").splice(0, 2).join("-"),
base: "develop",
body: "PR auto-generated by a GitHub workflow."
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ["improvement"]
});


@@ -1,21 +0,0 @@
name: no-std build
on:
push:
branches:
- develop
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Verify no-std builds
run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf


@@ -1,85 +0,0 @@
name: Tests
on:
push:
branches:
- develop
pull_request:
workflow_dispatch:
jobs:
clippy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get nightly version to use
id: nightly
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
rust-toolchain: ${{ steps.nightly.outputs.version }}
rust-components: clippy
- name: Run Clippy
# Allow dbg_macro when run locally, yet not when pushed
run: cargo clippy --all-features --all-targets -- -D clippy::dbg_macro $(grep "\S" ../../clippy-config | grep -v "#")
deny:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Advisory Cache
uses: actions/cache@v3
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo
uses: dtolnay/rust-toolchain@stable
- name: Install cargo deny
run: cargo install --locked cargo-deny
- name: Run cargo deny
run: cargo deny -L error --all-features check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build node
run: |
cd substrate/node
cargo build
- name: Run Tests
run: GITHUB_CI=true cargo test --all-features
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get nightly version to use
id: nightly
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Install rustfmt
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ steps.nightly.outputs.version }}
components: rustfmt
- name: Run rustfmt
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check

.gitignore (vendored): 2 lines changed

@@ -1,2 +1,2 @@
target
.vscode
Cargo.lock

.gitmodules (vendored, new file): 3 lines changed

@@ -0,0 +1,3 @@
[submodule "coins/monero/c/monero"]
path = coins/monero/c/monero
url = https://github.com/monero-project/monero


@@ -1,16 +0,0 @@
tab_spaces = 2
max_width = 100
# Let the developer decide based on the 100 char line limit
use_small_heuristics = "Max"
error_on_line_overflow = true
error_on_unformatted = true
imports_granularity = "Crate"
reorder_imports = false
reorder_modules = false
unstable_features = true
spaces_around_ranges = true
binop_separator = "Back"


@@ -1,37 +0,0 @@
# Contributing
Contributions come in a variety of forms. Developing Serai, helping document it,
using its libraries in another project, using and testing it, and simply sharing
it are all valuable ways of contributing.
This document will specifically focus on contributions to this repository in the
form of code and documentation.
### Rules
- Stable native Rust, nightly wasm and tools.
- `cargo fmt` must be used.
- `cargo clippy` must pass, except for the ignored rules (`type_complexity` and
`dead_code`).
- The CI must pass.
- Only use uppercase variable names when relevant to cryptography.
- Use a two-space indent when possible.
- Put a space after comment markers.
- Don't use multiple newlines between sections of code.
- Have a newline before EOF.
### Guidelines
- Sort imports as core, std, third party, and then Serai (see the example after this list).
- Comment code reasonably.
- Include tests for new features.
- Sign commits.
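For example, a minimal sketch of the import ordering described above, mirroring the style used throughout this repository:

use core::fmt::Debug;

use std::collections::HashMap;

use rand_core::{RngCore, CryptoRng};
use k256::{Scalar, ProjectivePoint};

use frost::{curve::Secp256k1, ThresholdKeys};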
### Submission
All submissions should be through GitHub. Contributions to a crate will be
licensed according to the crate's existing license, with the crate's copyright
holders (distinct from authors) having the right to re-license the crate via a
unanimous decision.

Cargo.lock (generated): 12192 lines changed. Diff suppressed because it is too large.


@@ -1,72 +1,22 @@
[workspace]
members = [
"common/std-shims",
"common/zalloc",
"common/db",
"crypto/transcript",
"crypto/ff-group-tests",
"crypto/dalek-ff-group",
"crypto/ed448",
"crypto/ciphersuite",
"crypto/tables",
"crypto/multiexp",
"crypto/schnorr",
"crypto/dleq",
"crypto/dkg",
"crypto/frost",
"crypto/schnorrkel",
"coins/ethereum",
"coins/monero/generators",
"coins/monero",
"message-queue",
"processor/messages",
"processor",
"coordinator/tributary/tendermint",
"coordinator/tributary",
"coordinator",
"substrate/primitives",
"substrate/tokens/primitives",
"substrate/tokens/pallet",
"substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",
"substrate/validator-sets/primitives",
"substrate/validator-sets/pallet",
"substrate/runtime",
"substrate/node",
"substrate/client",
"tests/no-std",
"substrate/consensus",
"substrate/node"
]
# Always compile Monero (and a variety of dependencies) with optimizations due
# to the extensive operations required for Bulletproofs
[profile.dev.package]
subtle = { opt-level = 3 }
curve25519-dalek = { opt-level = 3 }
ff = { opt-level = 3 }
group = { opt-level = 3 }
crypto-bigint = { opt-level = 3 }
dalek-ff-group = { opt-level = 3 }
minimal-ed448 = { opt-level = 3 }
multiexp = { opt-level = 3 }
monero-serai = { opt-level = 3 }
[profile.release]
panic = "unwind"


@@ -1,8 +0,0 @@
Serai crates are licensed under one of two licenses, either MIT or AGPL-3.0,
depending on the crate in question. Each crate declares their license in their
`Cargo.toml` and includes a `LICENSE` file detailing its status. Additionally,
a full copy of the AGPL-3.0 License is included in the root of this repository
as a reference text. This copy should be provided with any distribution of a
crate licensed under the AGPL-3.0, as per its terms.
The GitHub actions (`.github/actions`) are licensed under the MIT license.


@@ -1,44 +1,22 @@
# Serai
Serai is a new DEX, built from the ground up, initially planning on listing
Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig
Bitcoin, Ethereum, Monero, DAI, and USDC, offering a liquidity pool trading
experience. Funds are stored in an economically secured threshold multisig
wallet.
[Getting Started](docs/Getting%20Started.md)
### Layout
- `audits`: Audits for various parts of Serai.
- `docs` - Documentation on the Serai protocol.
- `docs`: Documentation on the Serai protocol.
- `coins` - Various coin libraries intended for usage in Serai yet also by the
wider community. This means they will always support the functionality Serai
needs, yet won't disadvantage other use cases when possible.
- `common`: Crates containing utilities common to a variety of areas under
Serai, none neatly fitting under another category.
- `crypto`: A series of composable cryptographic libraries built around the
- `crypto` - A series of composable cryptographic libraries built around the
`ff`/`group` APIs achieving a variety of tasks. These range from generic
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
needed for Bitcoin-Monero atomic swaps.
- `coins`: Various coin libraries intended for usage in Serai yet also by the
wider community. This means they will always support the functionality Serai
needs, yet won't disadvantage other use cases when possible.
- `processor`: A generic chain processor to process data for Serai and process
- `processor` - A generic chain processor to process data for Serai and process
events from Serai, executing transactions as expected and needed.
- `coordinator`: A service to manage processors and communicate over a P2P
network with other validators.
- `substrate`: Substrate crates used to instantiate the Serai network.
- `deploy`: Scripts to deploy a Serai node/test environment.
### Links
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
- [Matrix](https://matrix.to/#/#serai:matrix.org):
https://matrix.to/#/#serai:matrix.org


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2023 Cypher Stack
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,7 +0,0 @@
# Cypher Stack /crypto Audit, March 2023
This audit was over the /crypto folder, excluding the ed448 crate, the `Ed448`
ciphersuite in the ciphersuite crate, and the `dleq/experimental` feature. It
encompasses everything up to commit 669d2dbffc1dafb82a09d9419ea182667115df06.
Please see https://github.com/cypherstack/serai-audit for provenance.


@@ -1,51 +0,0 @@
# No warnings allowed
-D warnings
# nursery
-D clippy::nursery
# Erratic and unhelpful
-A clippy::missing_const_for_fn
# Too many false/irrelevant positives
-A clippy::redundant_pub_crate
# Flags on any debug_assert using an RNG
-A clippy::debug_assert_with_mut_call
# Stylistic preference
-A clippy::option_if_let_else
# pedantic
-D clippy::unnecessary_wraps
-D clippy::unused_async
-D clippy::unused_self
# restrictions
# Safety
-D clippy::as_conversions
-D clippy::disallowed_script_idents
-D clippy::wildcard_enum_match_arm
# Clarity
-D clippy::assertions_on_result_states
-D clippy::deref_by_slicing
-D clippy::empty_structs_with_brackets
-D clippy::get_unwrap
-D clippy::rest_pat_in_fully_bound_structs
-D clippy::semicolon_inside_block
-D clippy::tests_outside_test_module
# Quality
-D clippy::format_push_string
-D clippy::string_to_string
# These potentially should be enabled in the future
# -D clippy::missing_errors_doc
# -D clippy::missing_panics_doc
# -D clippy::doc_markdown
# TODO: Enable this
# -D clippy::cargo
# Not in nightly yet
# -D clippy::redundant_type_annotations
# -D clippy::big_endian_bytes
# -D clippy::host_endian_bytes


@@ -1,37 +0,0 @@
[package]
name = "bitcoin-serai"
version = "0.2.0"
description = "A Bitcoin library for FROST-signing transactions"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021"
[dependencies]
lazy_static = "1"
thiserror = "1"
zeroize = "^1.5"
rand_core = "0.6"
sha2 = "0.10"
secp256k1 = { version = "0.27", features = ["global-context"] }
bitcoin = { version = "0.30", features = ["serde"] }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"] }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["secp256k1"] }
hex = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.11", features = ["json"] }
[dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
tokio = { version = "1", features = ["full"] }
[features]
hazmat = []


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,4 +0,0 @@
# bitcoin-serai
An application of [modular-frost](https://docs.rs/modular-frost) to Bitcoin
transactions, enabling extremely-efficient multisigs.


@@ -1,160 +0,0 @@
use core::fmt::Debug;
use std::io;
use lazy_static::lazy_static;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use sha2::{Digest, Sha256};
use transcript::Transcript;
use secp256k1::schnorr::Signature;
use k256::{
elliptic_curve::{
ops::Reduce,
sec1::{Tag, ToEncodedPoint},
},
U256, Scalar, ProjectivePoint,
};
use frost::{
curve::{Ciphersuite, Secp256k1},
Participant, ThresholdKeys, ThresholdView, FrostError,
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
};
use bitcoin::key::XOnlyPublicKey;
/// Get the x coordinate of a non-infinity, even point. Panics on invalid input.
pub fn x(key: &ProjectivePoint) -> [u8; 32] {
let encoded = key.to_encoded_point(true);
assert_eq!(encoded.tag(), Tag::CompressedEvenY, "x coordinate of odd key");
(*encoded.x().expect("point at infinity")).into()
}
/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
XOnlyPublicKey::from_slice(&x(key)).unwrap()
}
/// Make a point even by adding the generator until it is even. Returns the even point and the
/// amount of additions required.
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
let mut c = 0;
while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
key += ProjectivePoint::GENERATOR;
c += 1;
}
(key, c)
}
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
///
/// If passed an odd nonce, it will have the generator added until it is even.
#[derive(Clone, Copy, Debug)]
pub struct Hram {}
lazy_static! {
static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
}
#[allow(non_snake_case)]
impl HramTrait<Secp256k1> for Hram {
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
// Convert the nonce to be even
let (R, _) = make_even(*R);
let mut data = Sha256::new();
data.update(*TAG_HASH);
data.update(*TAG_HASH);
data.update(x(&R));
data.update(x(A));
data.update(m);
Scalar::reduce(U256::from_be_slice(&data.finalize()))
}
}
/// BIP-340 Schnorr signature algorithm.
///
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
#[derive(Clone)]
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
/// Construct a Schnorr algorithm continuing the specified transcript.
pub fn new(transcript: T) -> Schnorr<T> {
Schnorr(FrostSchnorr::new(transcript))
}
}
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
type Transcript = T;
type Addendum = ();
type Signature = Signature;
fn transcript(&mut self) -> &mut Self::Transcript {
self.0.transcript()
}
fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
self.0.nonces()
}
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
keys: &ThresholdKeys<Secp256k1>,
) {
self.0.preprocess_addendum(rng, keys)
}
fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
self.0.read_addendum(reader)
}
fn process_addendum(
&mut self,
view: &ThresholdView<Secp256k1>,
i: Participant,
addendum: (),
) -> Result<(), FrostError> {
self.0.process_addendum(view, i, addendum)
}
fn sign_share(
&mut self,
params: &ThresholdView<Secp256k1>,
nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
msg: &[u8],
) -> <Secp256k1 as Ciphersuite>::F {
self.0.sign_share(params, nonce_sums, nonces, msg)
}
#[must_use]
fn verify(
&self,
group_key: ProjectivePoint,
nonces: &[Vec<ProjectivePoint>],
sum: Scalar,
) -> Option<Self::Signature> {
self.0.verify(group_key, nonces, sum).map(|mut sig| {
// Make the R of the final signature even
let offset;
(sig.R, offset) = make_even(sig.R);
// s = r + cx. Since we added to the r, add to s
sig.s += Scalar::from(offset);
// Convert to a secp256k1 signature
Signature::from_slice(&sig.serialize()[1 ..]).unwrap()
})
}
fn verify_share(
&self,
verification_share: ProjectivePoint,
nonces: &[Vec<ProjectivePoint>],
share: Scalar,
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
self.0.verify_share(verification_share, nonces, share)
}
}
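A brief usage sketch of the helpers above (an illustration, not part of this diff; it assumes the items defined in this file are in scope):

use rand_core::OsRng;
use k256::{elliptic_curve::group::Group, ProjectivePoint};

fn example() {
  // Start from an arbitrary point, which may have an odd y coordinate.
  let point = ProjectivePoint::random(&mut OsRng);
  // make_even adds the generator until the point is even, returning how many
  // additions were performed so a matching scalar offset can be tracked.
  let (even, additions) = make_even(point);
  // x and x_only may only be called on even, non-infinity points.
  let _x_coordinate = x(&even);
  let _x_only_key = x_only(&even);
  let _ = additions;
}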


@@ -1,19 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
/// The bitcoin Rust library.
pub use bitcoin;
/// Cryptographic helpers.
#[cfg(feature = "hazmat")]
pub mod crypto;
#[cfg(not(feature = "hazmat"))]
pub(crate) mod crypto;
/// Wallet functionality to create transactions.
pub mod wallet;
/// A minimal asynchronous Bitcoin RPC client.
pub mod rpc;
#[cfg(test)]
mod tests;


@@ -1,145 +0,0 @@
use core::fmt::Debug;
use thiserror::Error;
use serde::{Deserialize, de::DeserializeOwned};
use serde_json::json;
use bitcoin::{
hashes::{Hash, hex::FromHex},
consensus::encode,
Txid, Transaction, BlockHash, Block,
};
#[derive(Clone, PartialEq, Eq, Debug, Deserialize)]
pub struct Error {
code: isize,
message: String,
}
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
enum RpcResponse<T> {
Ok { result: T },
Err { error: Error },
}
/// A minimal asynchronous Bitcoin RPC client.
#[derive(Clone, Debug)]
pub struct Rpc(String);
#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
#[error("couldn't connect to node")]
ConnectionError,
#[error("request had an error: {0:?}")]
RequestError(Error),
#[error("node sent an invalid response")]
InvalidResponse,
}
impl Rpc {
pub async fn new(url: String) -> Result<Rpc, RpcError> {
let rpc = Rpc(url);
// Make an RPC request to verify the node is reachable and sane
rpc.get_latest_block_number().await?;
Ok(rpc)
}
/// Perform an arbitrary RPC call.
pub async fn rpc_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: serde_json::Value,
) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let res = client
.post(&self.0)
.json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
.send()
.await
.map_err(|_| RpcError::ConnectionError)?
.text()
.await
.map_err(|_| RpcError::ConnectionError)?;
let res: RpcResponse<Response> =
serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
match res {
RpcResponse::Ok { result } => Ok(result),
RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
}
}
/// Get the latest block's number.
///
/// The genesis block's 'number' is zero. They increment from there.
pub async fn get_latest_block_number(&self) -> Result<usize, RpcError> {
// getblockcount doesn't return the amount of blocks on the current chain, yet the "height"
// of the current chain. The "height" of the current chain is defined as the "height" of the
// tip block of the current chain. The "height" of a block is defined as the amount of blocks
// present when the block was created. Accordingly, the genesis block has height 0, and
// getblockcount will return 0 when the genesis block is the only block, despite there being
// one block.
self.rpc_call("getblockcount", json!([])).await
}
/// Get the hash of a block by the block's number.
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
let mut hash = *self
.rpc_call::<BlockHash>("getblockhash", json!([number]))
.await?
.as_raw_hash()
.as_byte_array();
// bitcoin stores the inner bytes in reverse order.
hash.reverse();
Ok(hash)
}
/// Get a block's number by its hash.
pub async fn get_block_number(&self, hash: &[u8; 32]) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct Number {
height: usize,
}
Ok(self.rpc_call::<Number>("getblockheader", json!([hex::encode(hash)])).await?.height)
}
/// Get a block by its hash.
pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
let block: Block = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
block_hash.reverse();
if hash != &block_hash {
Err(RpcError::InvalidResponse)?;
}
Ok(block)
}
/// Publish a transaction.
pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
if txid != tx.txid() {
Err(RpcError::InvalidResponse)?;
}
Ok(txid)
}
/// Get a transaction by its hash.
pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
let tx: Transaction = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
tx_hash.reverse();
if hash != &tx_hash {
Err(RpcError::InvalidResponse)?;
}
Ok(tx)
}
}
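A minimal usage sketch of the RPC client above (illustrative only; the URL and credentials are the ones the regtest test runner later in this diff uses):

async fn example() -> Result<(), RpcError> {
  let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await?;
  // Round trip the chain tip: number -> hash -> number.
  let tip = rpc.get_latest_block_number().await?;
  let hash = rpc.get_block_hash(tip).await?;
  assert_eq!(rpc.get_block_number(&hash).await?, tip);
  // Fetch the full block for the tip.
  let _block = rpc.get_block(&hash).await?;
  Ok(())
}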


@@ -1,47 +0,0 @@
use rand_core::OsRng;
use sha2::{Digest, Sha256};
use secp256k1::{SECP256K1, Message};
use k256::Scalar;
use transcript::{Transcript, RecommendedTranscript};
use frost::{
curve::Secp256k1,
Participant,
tests::{algorithm_machines, key_gen, sign},
};
use crate::{
bitcoin::hashes::{Hash as HashTrait, sha256::Hash},
crypto::{x_only, make_even, Schnorr},
};
#[test]
fn test_algorithm() {
let mut keys = key_gen::<_, Secp256k1>(&mut OsRng);
const MESSAGE: &[u8] = b"Hello, World!";
for (_, keys) in keys.iter_mut() {
let (_, offset) = make_even(keys.group_key());
*keys = keys.offset(Scalar::from(offset));
}
let algo =
Schnorr::<RecommendedTranscript>::new(RecommendedTranscript::new(b"bitcoin-serai sign test"));
let sig = sign(
&mut OsRng,
algo.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algo, &keys),
&Sha256::digest(MESSAGE),
);
SECP256K1
.verify_schnorr(
&sig,
&Message::from(Hash::hash(MESSAGE)),
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
)
.unwrap()
}


@@ -1 +0,0 @@
mod crypto;


@@ -1,160 +0,0 @@
use std::{
io::{self, Read, Write},
collections::HashMap,
};
use k256::{
elliptic_curve::sec1::{Tag, ToEncodedPoint},
Scalar, ProjectivePoint,
};
use frost::{
curve::{Ciphersuite, Secp256k1},
ThresholdKeys,
};
use bitcoin::{
consensus::encode::{Decodable, serialize},
key::TweakedPublicKey,
OutPoint, ScriptBuf, TxOut, Transaction, Block, Network, Address,
};
use crate::crypto::{x_only, make_even};
mod send;
pub use send::*;
/// Tweak keys to ensure they're usable with Bitcoin.
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
let (_, offset) = make_even(keys.group_key());
keys.offset(Scalar::from(offset))
}
/// Return the Taproot address for a public key.
pub fn address(network: Network, key: ProjectivePoint) -> Option<Address> {
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
return None;
}
Some(Address::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)), network))
}
/// A spendable output.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ReceivedOutput {
// The scalar offset to obtain the key usable to spend this output.
offset: Scalar,
// The output to spend.
output: TxOut,
// The TX ID and vout of the output to spend.
outpoint: OutPoint,
}
impl ReceivedOutput {
/// The offset for this output.
pub fn offset(&self) -> Scalar {
self.offset
}
/// The outpoint for this output.
pub fn outpoint(&self) -> &OutPoint {
&self.outpoint
}
/// The value of this output.
pub fn value(&self) -> u64 {
self.output.value
}
/// Read a ReceivedOutput from a generic satisfying Read.
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
Ok(ReceivedOutput {
offset: Secp256k1::read_F(r)?,
output: TxOut::consensus_decode(r)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
outpoint: OutPoint::consensus_decode(r)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
})
}
/// Write a ReceivedOutput to a generic satisfying Write.
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.offset.to_bytes())?;
w.write_all(&serialize(&self.output))?;
w.write_all(&serialize(&self.outpoint))
}
/// Serialize a ReceivedOutput to a Vec<u8>.
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
}
/// A transaction scanner capable of being used with HDKD schemes.
#[derive(Clone, Debug)]
pub struct Scanner {
key: ProjectivePoint,
scripts: HashMap<ScriptBuf, Scalar>,
}
impl Scanner {
/// Construct a Scanner for a key.
///
/// Returns None if this key can't be scanned for.
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
let mut scripts = HashMap::new();
// Uses Network::Bitcoin since network is irrelevant here
scripts.insert(address(Network::Bitcoin, key)?.script_pubkey(), Scalar::ZERO);
Some(Scanner { key, scripts })
}
/// Register an offset to scan for.
///
/// Due to Bitcoin's requirement that points are even, not every offset may be used.
/// If an offset isn't usable, it will be incremented until it is. If this offset is already
/// present, None is returned. Else, Some(offset) will be, with the used offset.
pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
loop {
match address(Network::Bitcoin, self.key + (ProjectivePoint::GENERATOR * offset)) {
Some(address) => {
let script = address.script_pubkey();
if self.scripts.contains_key(&script) {
None?;
}
self.scripts.insert(script, offset);
return Some(offset);
}
None => offset += Scalar::ONE,
}
}
}
/// Scan a transaction.
pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
let mut res = vec![];
for (vout, output) in tx.output.iter().enumerate() {
if let Some(offset) = self.scripts.get(&output.script_pubkey) {
res.push(ReceivedOutput {
offset: *offset,
output: output.clone(),
outpoint: OutPoint::new(tx.txid(), u32::try_from(vout).unwrap()),
});
}
}
res
}
/// Scan a block.
///
/// This will also scan the coinbase transaction which is bound by maturity. If received outputs
/// must be immediately spendable, a post-processing pass is needed to remove those outputs.
/// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
let mut res = vec![];
for tx in &block.txdata {
res.extend(self.scan_transaction(tx));
}
res
}
}
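A short illustrative sketch of the Scanner above (not part of this diff; it assumes this module's imports):

use rand_core::OsRng;
use k256::elliptic_curve::group::ff::Field;

fn example(key: ProjectivePoint, block: &Block) {
  // Returns None if the key isn't even and accordingly can't be scanned for.
  let mut scanner = Scanner::new(key).unwrap();
  // The registered offset may differ from the requested one, as it's incremented
  // until the resulting key is even.
  let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
  let _ = offset;
  for output in scanner.scan_block(block) {
    // offset() is the scalar to tweak the keys by in order to spend this output.
    let _ = (output.offset(), output.outpoint(), output.value());
  }
}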


@@ -1,382 +0,0 @@
use std::{
io::{self, Read},
collections::HashMap,
};
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use transcript::{Transcript, RecommendedTranscript};
use k256::{elliptic_curve::sec1::ToEncodedPoint, Scalar};
use frost::{curve::Secp256k1, Participant, ThresholdKeys, FrostError, sign::*};
use bitcoin::{
sighash::{TapSighashType, SighashCache, Prevouts},
absolute::LockTime,
script::{PushBytesBuf, ScriptBuf},
OutPoint, Sequence, Witness, TxIn, TxOut, Transaction, Network, Address,
};
use crate::{
crypto::Schnorr,
wallet::{address, ReceivedOutput},
};
#[rustfmt::skip]
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.h#L27
const MAX_STANDARD_TX_WEIGHT: u64 = 400_000;
#[rustfmt::skip]
// https://github.com/bitcoin/bitcoin/blob/a245429d680eb95cf4c0c78e58e63e3f0f5d979a/src/test/transaction_tests.cpp#L815-L816
const DUST: u64 = 674;
#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum TransactionError {
#[error("no inputs were specified")]
NoInputs,
#[error("no outputs were created")]
NoOutputs,
#[error("a specified payment's amount was less than bitcoin's required minimum")]
DustPayment,
#[error("too much data was specified")]
TooMuchData,
#[error("not enough funds for these payments")]
NotEnoughFunds,
#[error("transaction was too large")]
TooLargeTransaction,
}
/// A signable transaction, clone-able across attempts.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SignableTransaction {
tx: Transaction,
offsets: Vec<Scalar>,
prevouts: Vec<TxOut>,
needed_fee: u64,
}
impl SignableTransaction {
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
// Expand this to a full transaction in order to use the bitcoin library's weight function
let mut tx = Transaction {
version: 2,
lock_time: LockTime::ZERO,
input: vec![
TxIn {
// This is a fixed size
// See https://developer.bitcoin.org/reference/transactions.html#raw-transaction-format
previous_output: OutPoint::default(),
// This is empty for a Taproot spend
script_sig: ScriptBuf::new(),
// This is fixed size, yet we do use Sequence::MAX
sequence: Sequence::MAX,
// Our witness contains a single 64-byte signature
witness: Witness::from_slice(&[vec![0; 64]])
};
inputs
],
output: payments
.iter()
// The payment is a fixed size so we don't have to use it here
// The script pub key is not of a fixed size and does have to be used here
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
.collect(),
};
if let Some(change) = change {
// Use a 0 value since we're currently unsure what the change amount will be, and since
// the value is fixed size (so any value could be used here)
tx.output.push(TxOut { value: 0, script_pubkey: change.script_pubkey() });
}
u64::try_from(tx.weight()).unwrap()
}
/// Returns the fee necessary for this transaction to achieve the fee rate specified at
/// construction.
///
/// The actual fee this transaction will use is `sum(inputs) - sum(outputs)`.
pub fn needed_fee(&self) -> u64 {
self.needed_fee
}
/// Create a new SignableTransaction.
///
/// If a change address is specified, any leftover funds will be sent to it if the leftover funds
/// exceed the minimum output amount. If a change address isn't specified, all leftover funds
/// will become part of the paid fee.
///
/// If data is specified, an OP_RETURN output will be added with it.
pub fn new(
mut inputs: Vec<ReceivedOutput>,
payments: &[(Address, u64)],
change: Option<Address>,
data: Option<Vec<u8>>,
fee_per_weight: u64,
) -> Result<SignableTransaction, TransactionError> {
if inputs.is_empty() {
Err(TransactionError::NoInputs)?;
}
if payments.is_empty() && change.is_none() && data.is_none() {
Err(TransactionError::NoOutputs)?;
}
for (_, amount) in payments {
if *amount < DUST {
Err(TransactionError::DustPayment)?;
}
}
if data.as_ref().map(|data| data.len()).unwrap_or(0) > 80 {
Err(TransactionError::TooMuchData)?;
}
let input_sat = inputs.iter().map(|input| input.output.value).sum::<u64>();
let offsets = inputs.iter().map(|input| input.offset).collect();
let tx_ins = inputs
.iter()
.map(|input| TxIn {
previous_output: input.outpoint,
script_sig: ScriptBuf::new(),
sequence: Sequence::MAX,
witness: Witness::new(),
})
.collect::<Vec<_>>();
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
let mut tx_outs = payments
.iter()
.map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
.collect::<Vec<_>>();
// Add the OP_RETURN output
if let Some(data) = data {
tx_outs.push(TxOut {
value: 0,
script_pubkey: ScriptBuf::new_op_return(
&PushBytesBuf::try_from(data)
.expect("data didn't fit into PushBytes despite being checked"),
),
})
}
let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
let mut needed_fee = fee_per_weight * weight;
if input_sat < (payment_sat + needed_fee) {
Err(TransactionError::NotEnoughFunds)?;
}
// If there's a change address, check if there's change to give it
if let Some(change) = change.as_ref() {
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
let fee_with_change = fee_per_weight * weight_with_change;
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
if value >= DUST {
tx_outs.push(TxOut { value, script_pubkey: change.script_pubkey() });
weight = weight_with_change;
needed_fee = fee_with_change;
}
}
}
if tx_outs.is_empty() {
Err(TransactionError::NoOutputs)?;
}
if weight > MAX_STANDARD_TX_WEIGHT {
Err(TransactionError::TooLargeTransaction)?;
}
Ok(SignableTransaction {
tx: Transaction { version: 2, lock_time: LockTime::ZERO, input: tx_ins, output: tx_outs },
offsets,
prevouts: inputs.drain(..).map(|input| input.output).collect(),
needed_fee,
})
}
/// Create a multisig machine for this transaction.
///
/// Returns None if the wrong keys are used.
pub fn multisig(
self,
keys: ThresholdKeys<Secp256k1>,
mut transcript: RecommendedTranscript,
) -> Option<TransactionMachine> {
transcript.domain_separate(b"bitcoin_transaction");
transcript.append_message(b"root_key", keys.group_key().to_encoded_point(true).as_bytes());
// Transcript the inputs and outputs
let tx = &self.tx;
for input in &tx.input {
transcript.append_message(b"input_hash", input.previous_output.txid);
transcript.append_message(b"input_output_index", input.previous_output.vout.to_le_bytes());
}
for payment in &tx.output {
transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
transcript.append_message(b"output_amount", payment.value.to_le_bytes());
}
let mut sigs = vec![];
for i in 0 .. tx.input.len() {
let mut transcript = transcript.clone();
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
let offset = keys.clone().offset(self.offsets[i]);
if address(Network::Bitcoin, offset.group_key())?.script_pubkey() !=
self.prevouts[i].script_pubkey
{
None?;
}
sigs.push(AlgorithmMachine::new(
Schnorr::new(transcript),
keys.clone().offset(self.offsets[i]),
));
}
Some(TransactionMachine { tx: self, sigs })
}
}
/// A FROST signing machine to produce a Bitcoin transaction.
///
/// This does not support caching its preprocess. When sign is called, the message must be empty.
/// This will panic if it isn't.
pub struct TransactionMachine {
tx: SignableTransaction,
sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}
impl PreprocessMachine for TransactionMachine {
type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
type Signature = Transaction;
type SignMachine = TransactionSignMachine;
fn preprocess<R: RngCore + CryptoRng>(
mut self,
rng: &mut R,
) -> (Self::SignMachine, Self::Preprocess) {
let mut preprocesses = Vec::with_capacity(self.sigs.len());
let sigs = self
.sigs
.drain(..)
.map(|sig| {
let (sig, preprocess) = sig.preprocess(rng);
preprocesses.push(preprocess);
sig
})
.collect();
(TransactionSignMachine { tx: self.tx, sigs }, preprocesses)
}
}
pub struct TransactionSignMachine {
tx: SignableTransaction,
sigs: Vec<AlgorithmSignMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}
impl SignMachine<Transaction> for TransactionSignMachine {
type Params = ();
type Keys = ThresholdKeys<Secp256k1>;
type Preprocess = Vec<Preprocess<Secp256k1, ()>>;
type SignatureShare = Vec<SignatureShare<Secp256k1>>;
type SignatureMachine = TransactionSignatureMachine;
fn cache(self) -> CachedPreprocess {
unimplemented!(
"Bitcoin transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn from_cache(
_: (),
_: ThresholdKeys<Secp256k1>,
_: CachedPreprocess,
) -> Result<Self, FrostError> {
unimplemented!(
"Bitcoin transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.sigs.iter().map(|sig| sig.read_preprocess(reader)).collect()
}
fn sign(
mut self,
commitments: HashMap<Participant, Self::Preprocess>,
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
panic!("message was passed to the TransactionMachine when it generates its own");
}
let commitments = (0 .. self.sigs.len())
.map(|c| {
commitments
.iter()
.map(|(l, commitments)| (*l, commitments[c].clone()))
.collect::<HashMap<_, _>>()
})
.collect::<Vec<_>>();
let mut cache = SighashCache::new(&self.tx.tx);
// Sign committing to all inputs
let prevouts = Prevouts::All(&self.tx.prevouts);
let mut shares = Vec::with_capacity(self.sigs.len());
let sigs = self
.sigs
.drain(..)
.enumerate()
.map(|(i, sig)| {
let (sig, share) = sig.sign(
commitments[i].clone(),
cache
.taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
.unwrap()
.as_ref(),
)?;
shares.push(share);
Ok(sig)
})
.collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx: self.tx.tx, sigs }, shares))
}
}
pub struct TransactionSignatureMachine {
tx: Transaction,
sigs: Vec<AlgorithmSignatureMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
}
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
type SignatureShare = Vec<SignatureShare<Secp256k1>>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.sigs.iter().map(|sig| sig.read_share(reader)).collect()
}
fn complete(
mut self,
mut shares: HashMap<Participant, Self::SignatureShare>,
) -> Result<Transaction, FrostError> {
for (input, schnorr) in self.tx.input.iter_mut().zip(self.sigs.drain(..)) {
let sig = schnorr.complete(
shares.iter_mut().map(|(l, shares)| (*l, shares.remove(0))).collect::<HashMap<_, _>>(),
)?;
let mut witness = Witness::new();
witness.push(sig.as_ref());
input.witness = witness;
}
Ok(self.tx)
}
}
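An illustrative sketch of driving the machines above with the frost test utilities, as the integration tests later in this diff do. Real signers would exchange preprocesses and signature shares over a network rather than sign locally:

use std::collections::HashMap;

use rand_core::OsRng;

use transcript::{Transcript, RecommendedTranscript};
use frost::{
  curve::Secp256k1,
  Participant, ThresholdKeys,
  tests::{THRESHOLD, sign_without_caching},
};

fn sign_example(
  keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
  tx: SignableTransaction,
) -> Transaction {
  let mut machines = HashMap::new();
  for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
    // multisig returns None if these keys can't spend this transaction's inputs.
    machines.insert(
      i,
      tx.clone()
        .multisig(keys[&i].clone(), RecommendedTranscript::new(b"Example Transaction"))
        .unwrap(),
    );
  }
  // The message is empty since the TransactionMachine derives its own sighashes.
  sign_without_caching(&mut OsRng, machines, &[])
}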


@@ -1,25 +0,0 @@
use bitcoin_serai::{bitcoin::hashes::Hash as HashTrait, rpc::RpcError};
mod runner;
use runner::rpc;
async_sequential! {
async fn test_rpc() {
let rpc = rpc().await;
// Test get_latest_block_number and get_block_hash by round tripping them
let latest = rpc.get_latest_block_number().await.unwrap();
let hash = rpc.get_block_hash(latest).await.unwrap();
assert_eq!(rpc.get_block_number(&hash).await.unwrap(), latest);
// Test this actually is the latest block number by checking asking for the next block's errors
assert!(matches!(rpc.get_block_hash(latest + 1).await, Err(RpcError::RequestError(_))));
// Test get_block by checking the received block's hash matches the request
let block = rpc.get_block(&hash).await.unwrap();
// Hashes are stored in reverse. It's bs from Satoshi
let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
block_hash.reverse();
assert_eq!(hash, block_hash);
}
}


@@ -1,44 +0,0 @@
use bitcoin_serai::rpc::Rpc;
use tokio::sync::Mutex;
lazy_static::lazy_static! {
pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}
#[allow(dead_code)]
pub(crate) async fn rpc() -> Rpc {
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await.unwrap();
// If this node has already been interacted with, clear its chain
if rpc.get_latest_block_number().await.unwrap() > 0 {
rpc
.rpc_call(
"invalidateblock",
serde_json::json!([hex::encode(rpc.get_block_hash(1).await.unwrap())]),
)
.await
.unwrap()
}
rpc
}
#[macro_export]
macro_rules! async_sequential {
($(async fn $name: ident() $body: block)*) => {
$(
#[tokio::test]
async fn $name() {
let guard = runner::SEQUENTIAL.lock().await;
let local = tokio::task::LocalSet::new();
local.run_until(async move {
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
drop(guard);
Err(err).unwrap()
}
}).await;
}
)*
}
}


@@ -1,348 +0,0 @@
use std::collections::HashMap;
use rand_core::{RngCore, OsRng};
use transcript::{Transcript, RecommendedTranscript};
use k256::{
elliptic_curve::{
group::{ff::Field, Group},
sec1::{Tag, ToEncodedPoint},
},
Scalar, ProjectivePoint,
};
use frost::{
curve::Secp256k1,
Participant, ThresholdKeys,
tests::{THRESHOLD, key_gen, sign_without_caching},
};
use bitcoin_serai::{
bitcoin::{
hashes::Hash as HashTrait,
blockdata::opcodes::all::OP_RETURN,
script::{PushBytesBuf, Instruction, Instructions, Script},
OutPoint, TxOut, Transaction, Network, Address,
},
wallet::{tweak_keys, address, ReceivedOutput, Scanner, TransactionError, SignableTransaction},
rpc::Rpc,
};
mod runner;
use runner::rpc;
const FEE: u64 = 20;
fn is_even(key: ProjectivePoint) -> bool {
key.to_encoded_point(true).tag() == Tag::CompressedEvenY
}
async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint) -> ReceivedOutput {
let block_number = rpc.get_latest_block_number().await.unwrap() + 1;
rpc
.rpc_call::<Vec<String>>(
"generatetoaddress",
serde_json::json!([1, address(Network::Regtest, key).unwrap()]),
)
.await
.unwrap();
// Mine until maturity
rpc
.rpc_call::<Vec<String>>(
"generatetoaddress",
serde_json::json!([100, Address::p2sh(Script::empty(), Network::Regtest).unwrap()]),
)
.await
.unwrap();
let block = rpc.get_block(&rpc.get_block_hash(block_number).await.unwrap()).await.unwrap();
let mut outputs = scanner.scan_block(&block);
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value);
assert_eq!(
ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
outputs[0]
);
outputs.swap_remove(0)
}
fn keys() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, ProjectivePoint) {
let mut keys = key_gen(&mut OsRng);
for (_, keys) in keys.iter_mut() {
*keys = tweak_keys(keys);
}
let key = keys.values().next().unwrap().group_key();
(keys, key)
}
fn sign(
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
tx: SignableTransaction,
) -> Transaction {
let mut machines = HashMap::new();
for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
machines.insert(
i,
tx.clone()
.multisig(keys[&i].clone(), RecommendedTranscript::new(b"bitcoin-serai Test Transaction"))
.unwrap(),
);
}
sign_without_caching(&mut OsRng, machines, &[])
}
#[test]
fn test_tweak_keys() {
let mut even = false;
let mut odd = false;
// Generate keys until we get an even set and an odd set
while !(even && odd) {
let mut keys = key_gen(&mut OsRng).drain().next().unwrap().1;
if is_even(keys.group_key()) {
// Tweaking should do nothing
assert_eq!(tweak_keys(&keys).group_key(), keys.group_key());
even = true;
} else {
let tweaked = tweak_keys(&keys).group_key();
assert_ne!(tweaked, keys.group_key());
// Tweaking should produce an even key
assert!(is_even(tweaked));
// Verify it uses the smallest possible offset
while keys.group_key().to_encoded_point(true).tag() == Tag::CompressedOddY {
keys = keys.offset(Scalar::ONE);
}
assert_eq!(tweaked, keys.group_key());
odd = true;
}
}
}
async_sequential! {
async fn test_scanner() {
// Test Scanners are creatable for even keys.
for _ in 0 .. 128 {
let key = ProjectivePoint::random(&mut OsRng);
assert_eq!(Scanner::new(key).is_some(), is_even(key));
}
let mut key = ProjectivePoint::random(&mut OsRng);
while !is_even(key) {
key += ProjectivePoint::GENERATOR;
}
{
let mut scanner = Scanner::new(key).unwrap();
for _ in 0 .. 128 {
let mut offset = Scalar::random(&mut OsRng);
let registered = scanner.register_offset(offset).unwrap();
// Registering this again should return None
assert!(scanner.register_offset(offset).is_none());
// We can only register offsets resulting in even keys
// Make this even
while !is_even(key + (ProjectivePoint::GENERATOR * offset)) {
offset += Scalar::ONE;
}
// Ensure it matches the registered offset
assert_eq!(registered, offset);
// Assert registering this again fails
assert!(scanner.register_offset(offset).is_none());
}
}
let rpc = rpc().await;
let mut scanner = Scanner::new(key).unwrap();
assert_eq!(send_and_get_output(&rpc, &scanner, key).await.offset(), Scalar::ZERO);
// Register an offset and test receiving to it
let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
assert_eq!(
send_and_get_output(&rpc, &scanner, key + (ProjectivePoint::GENERATOR * offset))
.await
.offset(),
offset
);
}
async fn test_transaction_errors() {
let (_, key) = keys();
let rpc = rpc().await;
let scanner = Scanner::new(key).unwrap();
let output = send_and_get_output(&rpc, &scanner, key).await;
assert_eq!(output.offset(), Scalar::ZERO);
let inputs = vec![output];
let addr = || address(Network::Regtest, key).unwrap();
let payments = vec![(addr(), 1000)];
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
assert_eq!(
SignableTransaction::new(vec![], &payments, None, None, FEE),
Err(TransactionError::NoInputs)
);
// No change
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
// Consolidation TX
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
// Data
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
// No outputs
assert_eq!(
SignableTransaction::new(inputs.clone(), &[], None, None, FEE),
Err(TransactionError::NoOutputs),
);
assert_eq!(
SignableTransaction::new(inputs.clone(), &[(addr(), 1)], None, None, FEE),
Err(TransactionError::DustPayment),
);
assert!(
SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 80]), FEE).is_ok()
);
assert_eq!(
SignableTransaction::new(inputs.clone(), &payments, None, Some(vec![0; 81]), FEE),
Err(TransactionError::TooMuchData),
);
assert_eq!(
SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
Err(TransactionError::NotEnoughFunds),
);
assert_eq!(
SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, 0),
Err(TransactionError::TooLargeTransaction),
);
}
async fn test_send() {
let (keys, key) = keys();
let rpc = rpc().await;
let mut scanner = Scanner::new(key).unwrap();
// Get inputs, one not offset and one offset
let output = send_and_get_output(&rpc, &scanner, key).await;
assert_eq!(output.offset(), Scalar::ZERO);
let offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
let offset_key = key + (ProjectivePoint::GENERATOR * offset);
let offset_output = send_and_get_output(&rpc, &scanner, offset_key).await;
assert_eq!(offset_output.offset(), offset);
// Declare payments, change, fee
let payments = [
(address(Network::Regtest, key).unwrap(), 1005),
(address(Network::Regtest, offset_key).unwrap(), 1007)
];
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
let change_addr = address(Network::Regtest, change_key).unwrap();
// Create and sign the TX
let tx = SignableTransaction::new(
vec![output.clone(), offset_output.clone()],
&payments,
Some(change_addr.clone()),
None,
FEE
).unwrap();
let needed_fee = tx.needed_fee();
let tx = sign(&keys, tx);
assert_eq!(tx.output.len(), 3);
// Ensure we can scan it
let outputs = scanner.scan_transaction(&tx);
for (o, output) in outputs.iter().enumerate() {
assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
}
assert_eq!(outputs[0].offset(), Scalar::ZERO);
assert_eq!(outputs[1].offset(), offset);
assert_eq!(outputs[2].offset(), change_offset);
// Make sure the payments were properly created
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
assert_eq!(output, &TxOut { script_pubkey: payment.0.script_pubkey(), value: payment.1 });
assert_eq!(scanned.value(), payment.1);
}
// Make sure the change is correct
assert_eq!(needed_fee, u64::try_from(tx.weight()).unwrap() * FEE);
let input_value = output.value() + offset_output.value();
let output_value = tx.output.iter().map(|output| output.value).sum::<u64>();
assert_eq!(input_value - output_value, needed_fee);
let change_amount =
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
assert_eq!(
tx.output[2],
TxOut { script_pubkey: change_addr.script_pubkey(), value: change_amount },
);
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
// effectively test
rpc.send_raw_transaction(&tx).await.unwrap();
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
hash.reverse();
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
}
async fn test_data() {
let (keys, key) = keys();
let rpc = rpc().await;
let scanner = Scanner::new(key).unwrap();
let output = send_and_get_output(&rpc, &scanner, key).await;
assert_eq!(output.offset(), Scalar::ZERO);
let data_len = 60 + usize::try_from(OsRng.next_u64() % 21).unwrap();
let mut data = vec![0; data_len];
OsRng.fill_bytes(&mut data);
let tx = sign(
&keys,
SignableTransaction::new(
vec![output],
&[],
address(Network::Regtest, key),
Some(data.clone()),
FEE
).unwrap()
);
assert!(tx.output[0].script_pubkey.is_op_return());
let check = |mut instructions: Instructions| {
assert_eq!(instructions.next().unwrap().unwrap(), Instruction::Op(OP_RETURN));
assert_eq!(
instructions.next().unwrap().unwrap(),
Instruction::PushBytes(&PushBytesBuf::try_from(data.clone()).unwrap()),
);
assert!(instructions.next().is_none());
};
check(tx.output[0].script_pubkey.instructions());
check(tx.output[0].script_pubkey.instructions_minimal());
}
}

View File

@@ -1,3 +0,0 @@
# solidity build outputs
cache
artifacts

View File

@@ -1,37 +0,0 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
thiserror = "1"
rand_core = "0.6"
serde_json = "1"
serde = "1"
sha2 = "0.10"
sha3 = "0.10"
group = "0.13"
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
eyre = "0.6"
ethers = { version = "2", default-features = false, features = ["abigen", "ethers-solc"] }
[build-dependencies]
ethers-solc = "2"
[dev-dependencies]
tokio = { version = "1", features = ["macros"] }

View File

@@ -1,15 +0,0 @@
AGPL-3.0-only license
Copyright (c) 2022-2023 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@@ -1,9 +0,0 @@
# Ethereum
This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.
### Dependencies
- solc
- [Foundry](https://github.com/foundry-rs/foundry)

View File

@@ -1,16 +0,0 @@
use ethers_solc::{Project, ProjectPathsConfig};
fn main() {
println!("cargo:rerun-if-changed=contracts");
println!("cargo:rerun-if-changed=artifacts");
// configure the project with all its paths, solc, cache etc.
let project = Project::builder()
.paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
.build()
.unwrap();
project.compile().unwrap();
// Tell Cargo that if a source file changes, to rerun this build script.
project.rerun_if_sources_changed();
}

View File

@@ -1,36 +0,0 @@
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
// secp256k1 group order
uint256 constant public Q =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
// parity := public key y-coord parity (27 or 28)
// px := public key x-coord
// message := 32-byte message
// s := schnorr signature
// e := schnorr signature challenge
function verify(
uint8 parity,
bytes32 px,
bytes32 message,
bytes32 s,
bytes32 e
) public view returns (bool) {
// ecrecover = (m, v, r, s);
bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));
require(sp != 0);
// the ecrecover precompile implementation checks that the `r` and `s`
// inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
// check if they're zero.
address R = ecrecover(sp, parity, px, ep);
require(R != address(0), "ecrecover failed");
return e == keccak256(
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
);
}
}
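A sketch of the algebra behind this trick, assuming the standard ecrecover semantics of
returning the address of r^{-1}(s'·P_r − h·G) for a signature (r, s') over hash h, where
P_r is the curve point with x-coordinate r and the given parity: the contract passes
h = −s·px, r = px, s' = −e·px, and v = parity, so P_r is the signer's key A and the
recovered point is

$$p_x^{-1}\big((-e\,p_x)\,A - (-s\,p_x)\,G\big) = s\,G - e\,A = R$$

for any Schnorr signature satisfying s·G = R + e·A. ecrecover therefore returns addr(R),
and recomputing e = keccak256(addr(R) || parity || px || chainid || message) against the
supplied challenge completes verification.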

View File

@@ -1,52 +0,0 @@
use crate::crypto::ProcessedSignature;
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::{eyre, Result};
use std::fs::File;
use std::sync::Arc;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum EthereumError {
#[error("failed to verify Schnorr signature")]
VerificationError,
}
abigen!(
Schnorr,
"./artifacts/Schnorr.sol/Schnorr.json",
event_derives(serde::Deserialize, serde::Serialize),
);
pub async fn deploy_schnorr_verifier_contract(
client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
let path = "./artifacts/Schnorr.sol/Schnorr.json";
let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
let abi = artifact.abi.unwrap();
let bin = artifact.bytecode.unwrap().object;
let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
let contract = factory.deploy(())?.send().await?;
let contract = Schnorr::new(contract.address(), client);
Ok(contract)
}
pub async fn call_verify(
contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
params: &ProcessedSignature,
) -> Result<()> {
if contract
.verify(
params.parity + 27,
params.px.to_bytes().into(),
params.message,
params.s.to_bytes().into(),
params.e.to_bytes().into(),
)
.call()
.await?
{
Ok(())
} else {
Err(eyre!(EthereumError::VerificationError))
}
}

View File

@@ -1,107 +0,0 @@
use sha3::{Digest, Keccak256};
use group::Group;
use k256::{
elliptic_curve::{
bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
},
AffinePoint, ProjectivePoint, Scalar, U256,
};
use frost::{algorithm::Hram, curve::Secp256k1};
pub fn keccak256(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).try_into().unwrap()
}
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
let encoded_point = point.to_encoded_point(false);
keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}
pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
if r.is_zero().into() || s.is_zero().into() {
return None;
}
#[allow(non_snake_case)]
let R = AffinePoint::decompress(&r.to_bytes(), v.into());
#[allow(non_snake_case)]
if let Some(R) = Option::<AffinePoint>::from(R) {
#[allow(non_snake_case)]
let R = ProjectivePoint::from(R);
let r = r.invert().unwrap();
let u1 = ProjectivePoint::GENERATOR * (-message * r);
let u2 = R * (s * r);
let key: ProjectivePoint = u1 + u2;
if !bool::from(key.is_identity()) {
return Some(address(&key));
}
}
None
}
#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
#[allow(non_snake_case)]
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
let a_encoded_point = A.to_encoded_point(true);
let mut a_encoded = a_encoded_point.as_ref().to_owned();
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
let mut data = address(R).to_vec();
data.append(&mut a_encoded);
data.append(&mut m.to_vec());
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
}
}
pub struct ProcessedSignature {
pub s: Scalar,
pub px: Scalar,
pub parity: u8,
pub message: [u8; 32],
pub e: Scalar,
}
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
) -> (Scalar, Scalar) {
let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
let sr = processed_sig.s.mul(&processed_sig.px).negate();
let er = processed_sig.e.mul(&processed_sig.px).negate();
(sr, er)
}
#[allow(non_snake_case)]
pub fn process_signature_for_contract(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
) -> ProcessedSignature {
let encoded_pk = A.to_encoded_point(true);
let px = &encoded_pk.as_ref()[1 .. 33];
let px_scalar = Scalar::reduce(U256::from_be_slice(px));
let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
ProcessedSignature {
s,
px: px_scalar,
parity: &encoded_pk.as_ref()[0] - 2,
#[allow(non_snake_case)]
message: m,
e,
}
}

View File

@@ -1,2 +0,0 @@
pub mod contract;
pub mod crypto;

View File

@@ -1,71 +0,0 @@
use std::{convert::TryFrom, sync::Arc, time::Duration};
use rand_core::OsRng;
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256};
use ethers::{
prelude::*,
utils::{keccak256, Anvil, AnvilInstance},
};
use frost::{
curve::Secp256k1,
Participant,
algorithm::IetfSchnorr,
tests::{key_gen, algorithm_machines, sign},
};
use ethereum_serai::{
crypto,
contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
};
async fn deploy_test_contract(
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
let anvil = Anvil::new().spawn();
let wallet: LocalWallet = anvil.keys()[0].clone().into();
let provider =
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());
(chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
}
#[tokio::test]
async fn test_deploy_contract() {
deploy_test_contract().await;
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (chain_id, _anvil, contract) = deploy_test_contract().await;
let chain_id = U256::from(chain_id);
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
algo.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algo, &keys),
full_message,
);
let mut processed_sig =
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
call_verify(&contract, &processed_sig).await.unwrap();
// test invalid signature fails
processed_sig.message[0] = 0;
assert!(call_verify(&contract, &processed_sig).await.is_err());
}

View File

@@ -1,92 +0,0 @@
use k256::{
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256,
};
use frost::{curve::Secp256k1, Participant};
use ethereum_serai::crypto::*;
#[test]
fn test_ecrecover() {
use rand_core::OsRng;
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
.unwrap();
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
}
assert_eq!(
ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
#[test]
fn test_signing() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let _group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig = sign(
&mut OsRng,
algo,
keys.clone(),
algorithm_machines(&mut OsRng, IetfSchnorr::<Secp256k1, EthereumHram>::ietf(), &keys),
MESSAGE,
);
}
#[test]
fn test_ecrecover_hack() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
let group_key_encoded = group_key.to_encoded_point(true);
let group_key_compressed = group_key_encoded.as_ref();
let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
algo.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algo, &keys),
full_message,
);
let (sr, er) =
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,2 +0,0 @@
mod contract;
mod crypto;

1
coins/monero/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
c/.build

View File

@@ -1,107 +1,52 @@
[package]
name = "monero-serai"
version = "0.1.4-alpha"
description = "A modern Monero transaction library"
version = "0.1.0"
description = "A modern Monero wallet library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[build-dependencies]
cc = "1.0"
[dependencies]
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }
hex-literal = "0.3"
lazy_static = "1"
thiserror = "1"
async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", optional = true }
rand_core = "0.6"
rand_chacha = { version = "0.3", optional = true }
rand = "0.8"
rand_distr = "0.4"
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
subtle = "2.4"
rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }
tiny-keccak = { version = "2", features = ["keccak"] }
blake2 = { version = "0.10", optional = true }
crc = { version = "3", default-features = false }
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "3", features = ["std"] }
curve25519-dalek = { version = "^3.2", default-features = false }
group = { version = "0.12" }
dalek-ff-group = { path = "../../crypto/dalek-ff-group" }
# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["ed25519"], optional = true }
dleq = { package = "dleq-serai", path = "../../crypto/dleq", features = ["serialize"], optional = true }
# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
hex = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
monero-generators = { path = "generators", version = "0.3", default-features = false }
base58-monero = "1"
monero-epee-bin-serde = "1.0"
monero = "0.16"
futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }
hex-literal = "0.4"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }
# Used for the provided RPC
digest_auth = { version = "0.3", optional = true }
reqwest = { version = "0.11", features = ["json"], optional = true }
# Used for the binaries
tokio = { version = "1", features = ["full"], optional = true }
[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
monero-generators = { path = "generators", version = "0.3", default-features = false }
[dev-dependencies]
tokio = { version = "1", features = ["full"] }
monero-rpc = "0.3"
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
reqwest = { version = "0.11", features = ["json"] }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"subtle/std",
"rand_core/std",
"rand_chacha/std",
"rand/std",
"rand_distr/std",
"sha3/std",
"curve25519-dalek/std",
"multiexp/std",
"monero-generators/std",
"futures/std",
"hex/std",
"serde/std",
"serde_json/std",
]
http_rpc = ["digest_auth", "reqwest"]
multisig = ["transcript", "frost", "dleq", "std"]
binaries = ["tokio"]
experimental = []
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]
default = ["std", "http_rpc"]
[dev-dependencies]
sha2 = "0.10"
tokio = { version = "1", features = ["full"] }

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Copyright (c) 2022 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -4,46 +4,4 @@ A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pitfalls developers may
face.
monero-serai also offers the following features:
- Featured Addresses
- A FROST-based multisig orders of magnitude more performant than Monero's
### Purpose and support
monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet will not deprive functionality from other users.
Various legacy transaction formats are not currently implemented, yet we are
willing to add support for them. There aren't active development efforts around
them however.
### Caveats
This library DOES attempt to do the following:
- Create on-chain transactions identical to how wallet2 would (unless told not
to)
- Not be detectable as monero-serai when scanning outputs
- Not reveal spent outputs to the connected RPC node
This library DOES NOT attempt to do the following:
- Have identical RPC behavior when creating transactions
- Be a wallet
This means that monero-serai shouldn't be fingerprintable on-chain. It also
shouldn't be fingerprintable if a targeted attack occurs to detect if the
receiving wallet is monero-serai or wallet2. It also should be generally safe
for usage with remote nodes.
It won't, however, hide from remote nodes that it's monero-serai, potentially
allowing a remote node to profile you. The implications of this are left to the
user to consider.
It also won't act as a wallet, just as a transaction library. wallet2 has
several *non-transaction-level* policies, such as always attempting to use two
inputs to create transactions. These are considered out of scope to
monero-serai.
Threshold multisignature support is available via the `multisig` feature.

View File

@@ -1,67 +1,72 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
use dalek_ff_group::EdwardsPoint;
use monero_generators::bulletproofs_generators;
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
for generator in points {
generators_string.extend(
format!(
"
dalek_ff_group::EdwardsPoint(
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
),
",
generator.compress().to_bytes()
)
.chars(),
);
}
}
fn generators(prefix: &'static str, path: &str) {
let generators = bulletproofs_generators(prefix.as_bytes());
#[allow(non_snake_case)]
let mut G_str = String::new();
serialize(&mut G_str, &generators.G);
#[allow(non_snake_case)]
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.write_all(
format!(
"
pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| Generators {{
G: [
{G_str}
],
H: [
{H_str}
],
}})
}}
",
)
.as_bytes(),
)
.unwrap();
}
use std::{env, path::Path, process::Command};
fn main() {
println!("cargo:rerun-if-changed=build.rs");
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
panic!("git failed to init submodules");
}
generators("bulletproof", "generators.rs");
generators("bulletproof_plus", "generators_plus.rs");
if !Command::new("mkdir").args(&["-p", ".build"])
.current_dir(&Path::new("c")).status().unwrap().success() {
panic!("failed to create a directory to track build progress");
}
let out_dir = &env::var("OUT_DIR").unwrap();
// Use a file to signal if Monero was already built, as that should never be rebuilt
// If the signaling file was deleted, run this script again to rebuild Monero though
println!("cargo:rerun-if-changed=c/.build/monero");
if !Path::new("c/.build/monero").exists() {
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
panic!("make failed to build Monero. Please check your dependencies");
}
if !Command::new("touch").arg("monero")
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
panic!("failed to create a file to label Monero as built");
}
}
println!("cargo:rerun-if-changed=c/wrapper.cpp");
cc::Build::new()
.static_flag(true)
.warnings(false)
.extra_warnings(false)
.flag("-Wno-deprecated-declarations")
.include("c/monero/external/supercop/include")
.include("c/monero/contrib/epee/include")
.include("c/monero/src")
.include("c/monero/build/release/generated_include")
.define("AUTO_INITIALIZE_EASYLOGGINGPP", None)
.include("c/monero/external/easylogging++")
.file("c/monero/external/easylogging++/easylogging++.cc")
.file("c/monero/src/common/aligned.c")
.file("c/monero/src/common/perf_timer.cpp")
.include("c/monero/src/crypto")
.file("c/monero/src/crypto/crypto-ops-data.c")
.file("c/monero/src/crypto/crypto-ops.c")
.file("c/monero/src/crypto/keccak.c")
.file("c/monero/src/crypto/hash.c")
.include("c/monero/src/device")
.file("c/monero/src/device/device_default.cpp")
.include("c/monero/src/ringct")
.file("c/monero/src/ringct/rctCryptoOps.c")
.file("c/monero/src/ringct/rctTypes.cpp")
.file("c/monero/src/ringct/rctOps.cpp")
.file("c/monero/src/ringct/multiexp.cc")
.file("c/monero/src/ringct/bulletproofs.cc")
.file("c/monero/src/ringct/rctSigs.cpp")
.file("c/wrapper.cpp")
.compile("wrapper");
println!("cargo:rustc-link-search={}", out_dir);
println!("cargo:rustc-link-lib=wrapper");
println!("cargo:rustc-link-lib=stdc++");
}

1
coins/monero/c/monero Submodule

Submodule coins/monero/c/monero added at 424e4de16b

158
coins/monero/c/wrapper.cpp Normal file
View File

@@ -0,0 +1,158 @@
#include <mutex>
#include "device/device_default.hpp"
#include "ringct/bulletproofs.h"
#include "ringct/rctSigs.h"
typedef std::lock_guard<std::mutex> lock;
std::mutex rng_mutex;
uint8_t rng_entropy[64];
extern "C" {
void rng(uint8_t* seed) {
// Set the first half to the seed
memcpy(rng_entropy, seed, 32);
// Set the second half to the hash of a DST to ensure a lack of collisions
crypto::cn_fast_hash("RNG_entropy_seed", 16, (char*) &rng_entropy[32]);
}
}
extern "C" void monero_wide_reduce(uint8_t* value);
namespace crypto {
void generate_random_bytes_not_thread_safe(size_t n, void* value) {
size_t written = 0;
while (written != n) {
uint8_t hash[32];
crypto::cn_fast_hash(rng_entropy, 64, (char*) hash);
// Step the RNG by setting the latter half to the most recent result
// Does not leak the RNG, even if the values are leaked (which they are
// expected to be) due to the first half remaining constant and
// undisclosed
memcpy(&rng_entropy[32], hash, 32);
size_t next = n - written;
if (next > 32) {
next = 32;
}
memcpy(&((uint8_t*) value)[written], hash, next);
written += next;
}
}
void random32_unbiased(unsigned char *bytes) {
uint8_t value[64];
generate_random_bytes_not_thread_safe(64, value);
monero_wide_reduce(value);
memcpy(bytes, value, 32);
}
}
extern "C" {
void c_hash_to_point(uint8_t* point) {
rct::key key_point;
ge_p3 e_p3;
memcpy(key_point.bytes, point, 32);
rct::hash_to_p3(e_p3, key_point);
ge_p3_tobytes(point, &e_p3);
}
uint8_t* c_generate_bp(uint8_t* seed, uint8_t len, uint64_t* a, uint8_t* m) {
lock guard(rng_mutex);
rng(seed);
rct::keyV masks;
std::vector<uint64_t> amounts;
masks.resize(len);
amounts.resize(len);
for (uint8_t i = 0; i < len; i++) {
memcpy(masks[i].bytes, m + (i * 32), 32);
amounts[i] = a[i];
}
rct::Bulletproof bp = rct::bulletproof_PROVE(amounts, masks);
std::stringstream ss;
binary_archive<true> ba(ss);
::serialization::serialize(ba, bp);
uint8_t* res = (uint8_t*) calloc(ss.str().size(), 1);
memcpy(res, ss.str().data(), ss.str().size());
return res;
}
bool c_verify_bp(
uint8_t* seed,
uint s_len,
uint8_t* s,
uint8_t c_len,
uint8_t* c
) {
// BPs are batch verified which use RNG based weights to ensure individual
// integrity
// That's why this must also have control over RNG, to prevent interrupting
// multisig signing while not using known seeds. Considering this doesn't
// actually define a batch, and it's only verifying a single BP,
// it'd probably be fine, but...
lock guard(rng_mutex);
rng(seed);
rct::Bulletproof bp;
std::stringstream ss;
std::string str;
str.assign((char*) s, (size_t) s_len);
ss << str;
binary_archive<false> ba(ss);
::serialization::serialize(ba, bp);
if (!ss.good()) {
return false;
}
bp.V.resize(c_len);
for (uint8_t i = 0; i < c_len; i++) {
memcpy(bp.V[i].bytes, &c[i * 32], 32);
}
try { return rct::bulletproof_VERIFY(bp); } catch(...) { return false; }
}
bool c_verify_clsag(
uint s_len,
uint8_t* s,
uint8_t k_len,
uint8_t* k,
uint8_t* I,
uint8_t* p,
uint8_t* m
) {
rct::clsag clsag;
std::stringstream ss;
std::string str;
str.assign((char*) s, (size_t) s_len);
ss << str;
binary_archive<false> ba(ss);
::serialization::serialize(ba, clsag);
if (!ss.good()) {
return false;
}
rct::ctkeyV keys;
keys.resize(k_len);
for (uint8_t i = 0; i < k_len; i++) {
memcpy(keys[i].dest.bytes, &k[(i * 2) * 32], 32);
memcpy(keys[i].mask.bytes, &k[((i * 2) + 1) * 32], 32);
}
memcpy(clsag.I.bytes, I, 32);
rct::key pseudo_out;
memcpy(pseudo_out.bytes, p, 32);
rct::key msg;
memcpy(msg.bytes, m, 32);
try {
return verRctCLSAGSimple(msg, clsag, keys, pseudo_out);
} catch(...) { return false; }
}
}

View File

@@ -1,28 +0,0 @@
[package]
name = "monero-generators"
version = "0.3.0"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
subtle = { version = "^2.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "3", default-features = false }
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }
[features]
std = ["std-shims/std"]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,7 +0,0 @@
# Monero Generators
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.
This library is usable under no_std when the `alloc` feature is enabled.
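A minimal usage sketch of these exports (names as defined in this crate's lib.rs, shown
further below; the zeroed hash_to_point input is purely illustrative):

use monero_generators::{H, H_pow_2, bulletproofs_generators, hash_to_point};

fn demo() {
  // Monero's alternate generator H, used for amounts in Pedersen commitments
  let h = H();
  // H multiplied by successive powers of two, one entry per amount bit
  let h_pow_2 = H_pow_2();
  // The Bulletproofs(+) generator vectors, domain-separated by the provided prefix
  let gens = bulletproofs_generators(b"bulletproof");
  // Monero's ge_fromfe_frombytes_vartime, exposed as hash_to_point
  let point = hash_to_point([0u8; 32]);
  let _ = (h, h_pow_2, gens, point);
}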

View File

@@ -1,52 +0,0 @@
use subtle::ConditionallySelectable;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use group::ff::{Field, PrimeField};
use dalek_ff_group::FieldElement;
use crate::hash;
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
#[allow(clippy::many_single_char_names)]
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
#[allow(non_snake_case, clippy::unreadable_literal)]
let A = FieldElement::from(486662u64);
let v = FieldElement::from_square(hash(&bytes)).double();
let w = v + FieldElement::ONE;
let x = w.square() + (-A.square() * v);
// This isn't the complete X, merely its initial value
// We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
// While inefficient, it solves API boundaries and reduces the amount of work done here
#[allow(non_snake_case)]
let X = {
let u = w;
let v = x;
let v3 = v * v * v;
let uv3 = u * v3;
let v7 = v3 * v3 * v;
let uv7 = u * v7;
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
};
let x = X.square() * x;
let y = w - x;
let non_zero_0 = !y.is_zero();
let y_if_non_zero_0 = w + x;
let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());
let mut z = -A;
z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
#[allow(non_snake_case)]
let Z = z + w;
#[allow(non_snake_case)]
let mut Y = z - w;
Y *= Z.invert().unwrap();
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}

View File

@@ -1,80 +0,0 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//!
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.
#![cfg_attr(not(feature = "std"), no_std)]
use std_shims::sync::OnceLock;
use sha3::{Digest, Keccak256};
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};
use group::{Group, GroupEncoding};
use dalek_ff_group::EdwardsPoint;
mod varint;
use varint::write_varint;
mod hash_to_point;
pub use hash_to_point::hash_to_point;
fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
}
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
pub fn H() -> DalekPoint {
*H_CELL.get_or_init(|| {
CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
.decompress()
.unwrap()
.mul_by_cofactor()
})
}
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
/// Monero's alternate generator `H`, multiplied by 2**i for i in 0 .. 64.
#[allow(non_snake_case)]
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
H_POW_2_CELL.get_or_init(|| {
let mut res = [H(); 64];
for i in 1 .. 64 {
res[i] = res[i - 1] + res[i - 1];
}
res
})
}
const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;
/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
pub G: [EdwardsPoint; MAX_MN],
pub H: [EdwardsPoint; MAX_MN],
}
/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
let mut res =
Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
for i in 0 .. MAX_MN {
let i = 2 * i;
let mut even = H().compress().to_bytes().to_vec();
even.extend(dst);
let mut odd = even.clone();
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
}
res
}

View File

@@ -1,18 +0,0 @@
use std_shims::io::{self, Write};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
#[allow(clippy::trivially_copy_pass_by_ref)] // &u64 is needed for API consistency
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}
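For reference, a minimal sketch of a matching decoder for this format, seven payload bits
per byte with the least-significant group first (a hypothetical helper for illustration;
the crate's actual read_varint lives in its serialize module and may differ, and no strict
canonicity checking is attempted here):

use std_shims::io::Read;

pub(crate) fn read_varint_sketch<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut bits = 0;
  let mut res = 0;
  loop {
    let mut b = [0; 1];
    r.read_exact(&mut b)?;
    if bits >= 64 {
      Err(io::Error::new(io::ErrorKind::Other, "varint does not fit in a u64"))?;
    }
    // Accumulate the low seven bits of this byte at the current position
    res |= u64::from(b[0] & !VARINT_CONTINUATION_MASK) << bits;
    // The top bit signals whether another byte follows
    if (b[0] & VARINT_CONTINUATION_MASK) == 0 {
      break;
    }
    bits += 7;
  }
  Ok(res)
}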

View File

@@ -1,155 +0,0 @@
use std::sync::Arc;
use serde::Deserialize;
use serde_json::json;
use monero_serai::{
transaction::Transaction,
block::Block,
rpc::{Rpc, HttpRpc},
};
use tokio::task::JoinHandle;
async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
let hash = rpc.get_block_hash(block_i).await.expect("couldn't get block {block_i}'s hash");
// TODO: Grab the JSON to also check it was deserialized correctly
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String,
}
let res: BlockResponse = rpc
.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) })))
.await
.expect("couldn't get block {block} via block.hash()");
let blob = hex::decode(res.blob).expect("node returned non-hex block");
let block = Block::read(&mut blob.as_slice()).expect("couldn't deserialize block {block_i}");
assert_eq!(block.hash(), hash, "hash differs");
assert_eq!(block.serialize(), blob, "serialization differs");
let txs_len = 1 + block.txs.len();
if !block.txs.is_empty() {
#[derive(Deserialize, Debug)]
struct TransactionResponse {
tx_hash: String,
as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>,
}
let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
let mut all_txs = vec![];
while !hashes_hex.is_empty() {
let txs: TransactionsResponse = rpc
.rpc_call(
"get_transactions",
Some(json!({
"txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
})),
)
.await
.expect("couldn't call get_transactions");
assert!(txs.missed_tx.is_empty());
all_txs.extend(txs.txs);
}
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs.into_iter()) {
assert_eq!(
tx_res.tx_hash,
hex::encode(tx_hash),
"node returned a transaction with different hash"
);
let tx = Transaction::read(
&mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
)
.expect("couldn't deserialize transaction");
assert_eq!(
hex::encode(tx.serialize()),
tx_res.as_hex,
"Transaction serialization was different"
);
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
}
}
println!("Deserialized, hashed, and reserialized {block_i} with {} TXs", txs_len);
}
#[tokio::main]
async fn main() {
let args = std::env::args().collect::<Vec<String>>();
// Read start block as the first arg
let mut block_i = args[1].parse::<usize>().expect("invalid start block");
// How many blocks to work on at once
let async_parallelism: usize =
args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");
// Read further args as RPC URLs
let default_nodes = vec![
"http://xmr-node.cakewallet.com:18081".to_string(),
"https://node.sethforprivacy.com".to_string(),
];
let mut specified_nodes = vec![];
{
let mut i = 0;
loop {
let Some(node) = args.get(3 + i) else { break };
specified_nodes.push(node.clone());
i += 1;
}
}
let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };
let rpc = |url: String| {
HttpRpc::new(url.clone())
.unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
};
let main_rpc = rpc(nodes[0].clone());
let mut rpcs = vec![];
for i in 0 .. async_parallelism {
rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone())));
}
let mut rpc_i = 0;
let mut handles: Vec<JoinHandle<()>> = vec![];
let mut height = 0;
loop {
let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
if new_height == height {
break;
}
height = new_height;
while block_i < height {
if handles.len() >= async_parallelism {
// Guarantee one handle is complete
handles.swap_remove(0).await.unwrap();
// Remove all of the finished handles
let mut i = 0;
while i < handles.len() {
if handles[i].is_finished() {
handles.swap_remove(i).await.unwrap();
continue;
}
i += 1;
}
}
handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
rpc_i = (rpc_i + 1) % rpcs.len();
block_i += 1;
}
}
}

View File

@@ -1,31 +1,19 @@
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use crate::{
hash,
merkle::merkle_root,
serialize::*,
transaction::{Input, Transaction},
transaction::Transaction
};
const CORRECT_BLOCK_HASH_202612: [u8; 32] =
hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a");
const EXISTING_BLOCK_HASH_202612: [u8; 32] =
hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698");
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct BlockHeader {
pub major_version: u64,
pub minor_version: u64,
pub timestamp: u64,
pub previous: [u8; 32],
pub nonce: u32,
pub nonce: u32
}
impl BlockHeader {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.major_version, w)?;
write_varint(&self.minor_version, w)?;
write_varint(&self.timestamp, w)?;
@@ -33,41 +21,30 @@ impl BlockHeader {
w.write_all(&self.nonce.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: read_bytes(r)?,
nonce: read_bytes(r).map(u32::from_le_bytes)?,
})
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
Ok(
BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
}
)
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Block {
pub header: BlockHeader,
pub miner_tx: Transaction,
pub txs: Vec<[u8; 32]>,
pub txs: Vec<[u8; 32]>
}
impl Block {
pub fn number(&self) -> usize {
match self.miner_tx.prefix.inputs.get(0) {
Some(Input::Gen(number)) => (*number).try_into().unwrap(),
_ => panic!("invalid block, miner TX didn't have a Input::Gen"),
}
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.header.write(w)?;
self.miner_tx.write(w)?;
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.header.serialize(w)?;
self.miner_tx.serialize(w)?;
write_varint(&self.txs.len().try_into().unwrap(), w)?;
for tx in &self.txs {
w.write_all(tx)?;
@@ -75,42 +52,15 @@ impl Block {
Ok(())
}
fn tx_merkle_root(&self) -> [u8; 32] {
merkle_root(self.miner_tx.hash(), &self.txs)
}
fn serialize_hashable(&self) -> Vec<u8> {
let mut blob = self.header.serialize();
blob.extend_from_slice(&self.tx_merkle_root());
write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
let mut out = Vec::with_capacity(8 + blob.len());
write_varint(&u64::try_from(blob.len()).unwrap(), &mut out).unwrap();
out.append(&mut blob);
out
}
pub fn hash(&self) -> [u8; 32] {
let hash = hash(&self.serialize_hashable());
if hash == CORRECT_BLOCK_HASH_202612 {
return EXISTING_BLOCK_HASH_202612;
};
hash
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self {
header: BlockHeader::read(r)?,
miner_tx: Transaction::read(r)?,
txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
})
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
Ok(
Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?).map(
|_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
).collect::<Result<_, _>>()?
}
)
}
}

76
coins/monero/src/frost.rs Normal file
View File

@@ -0,0 +1,76 @@
use std::io::Read;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
#[derive(Clone, Error, Debug)]
pub enum MultisigError {
#[error("internal error ({0})")]
InternalError(String),
#[error("invalid discrete log equality proof")]
InvalidDLEqProof(u16),
#[error("invalid key image {0}")]
InvalidKeyImage(u16)
}
fn transcript() -> RecommendedTranscript {
RecommendedTranscript::new(b"monero_key_image_dleq")
}
#[allow(non_snake_case)]
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
rng: &mut R,
H: EdwardsPoint,
x: Scalar
) -> Vec<u8> {
let mut res = Vec::with_capacity(64);
DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
// merge later in some form, when it should instead just merge xH (as it does)
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
dfg::Scalar(x)
).serialize(&mut res).unwrap();
res
}
#[allow(non_snake_case)]
pub(crate) fn read_dleq<Re: Read>(
serialized: &mut Re,
H: EdwardsPoint,
l: u16,
xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
let mut bytes = [0; 32];
serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(
dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
)?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(MultisigError::InvalidDLEqProof(l))?;
}
DLEqProof::<dfg::EdwardsPoint>::deserialize(
serialized
).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
&mut transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
&[xG, xH]
).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
Ok(xH)
}
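For context, a sketch of what this proof binds (a reading of the code above rather than
additional documented behavior): the prover demonstrates knowledge of a single scalar x
such that both points passed to verification share it as their discrete logarithm over
the respective generators,

$$\log_G(xG) = \log_H(xH) = x,$$

which, per the transcript label, is what ties a participant's key image share over H to
their already-known verification share xG.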

View File

@@ -1,178 +1,100 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
use std_shims::{sync::OnceLock, io};
use std::slice;
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use subtle::ConstantTimeEq;
use sha3::{Digest, Keccak256};
use tiny_keccak::{Hasher, Keccak};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
};
pub use monero_generators::H;
mod merkle;
#[cfg(feature = "multisig")]
pub mod frost;
mod serialize;
use serialize::{read_byte, read_u16};
/// RingCT structs and functionality.
pub mod ringct;
use ringct::RctType;
/// Transaction structs.
pub mod transaction;
/// Block structs.
pub mod block;
/// Monero daemon RPC interface.
pub mod rpc;
/// Wallet functionality, enabling scanning and sending transactions.
pub mod wallet;
#[cfg(test)]
mod tests;
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
lazy_static! {
static ref H: EdwardsPoint = CompressedEdwardsY(
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
).decompress().unwrap();
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}
// Function from libsodium our subsection of Monero relies on. Implementing it here means we don't
// need to link against libsodium
#[no_mangle]
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
isize::from(
slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
) - 1
}
// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
// generation function, and in order to not use Monero code (which would require propagating its
// license), the function was rewritten. It was rewritten with wide reduction, instead of rejection
// sampling however, hence the need for this function
#[no_mangle]
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
let res = Scalar::from_bytes_mod_order_wide(
std::slice::from_raw_parts(value, 64).try_into().unwrap()
);
for (i, b) in res.to_bytes().iter().enumerate() {
value.add(i).write(*b);
}
}
#[allow(non_snake_case)]
pub(crate) fn INV_EIGHT() -> Scalar {
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}
/// Monero protocol version.
///
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
/// transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
v14,
v16,
Custom { ring_len: usize, bp_plus: bool, optimal_rct_type: RctType },
}
impl Protocol {
/// Amount of ring members under this protocol version.
pub const fn ring_len(&self) -> usize {
match self {
Self::v14 => 11,
Self::v16 => 16,
Self::Custom { ring_len, .. } => *ring_len,
}
}
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
///
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
pub const fn bp_plus(&self) -> bool {
match self {
Self::v14 => false,
Self::v16 => true,
Self::Custom { bp_plus, .. } => *bp_plus,
}
}
// TODO: Make this an Option when we support pre-RCT protocols
pub const fn optimal_rct_type(&self) -> RctType {
match self {
Self::v14 => RctType::Clsag,
Self::v16 => RctType::BulletproofsPlus,
Self::Custom { optimal_rct_type, .. } => *optimal_rct_type,
}
}
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
match self {
Self::v14 => w.write_all(&[0, 14]),
Self::v16 => w.write_all(&[0, 16]),
Self::Custom { ring_len, bp_plus, optimal_rct_type } => {
// Custom, version 0
w.write_all(&[1, 0])?;
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
w.write_all(&[u8::from(*bp_plus)])?;
w.write_all(&[optimal_rct_type.to_byte()])
}
}
}
pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
Ok(match read_byte(r)? {
// Monero protocol
0 => match read_byte(r)? {
14 => Self::v14,
16 => Self::v16,
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized monero protocol"))?,
},
// Custom
1 => match read_byte(r)? {
0 => Self::Custom {
ring_len: read_u16(r)?.into(),
bp_plus: match read_byte(r)? {
0 => false,
1 => true,
_ => Err(io::Error::new(io::ErrorKind::Other, "invalid bool serialization"))?,
},
optimal_rct_type: RctType::from_byte(read_byte(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RctType serialization"))?,
},
_ => {
Err(io::Error::new(io::ErrorKind::Other, "unrecognized custom protocol serialization"))?
}
},
_ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized protocol serialization"))?,
})
}
}
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Commitment {
pub mask: Scalar,
pub amount: u64,
pub amount: u64
}
impl Commitment {
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
pub fn zero() -> Self {
Self { mask: Scalar::one(), amount: 0 }
pub fn zero() -> Commitment {
Commitment { mask: Scalar::one(), amount: 0}
}
pub fn new(mask: Scalar, amount: u64) -> Self {
Self { mask, amount }
pub fn new(mask: Scalar, amount: u64) -> Commitment {
Commitment { mask, amount }
}
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
pub fn calculate(&self) -> EdwardsPoint {
(&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
(&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
}
}
/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
// Allows using a modern rand as dalek's is notoriously dated
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
let mut r = [0; 64];
rng.fill_bytes(&mut r);
Scalar::from_bytes_mod_order_wide(&r)
}
pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
pub fn hash(data: &[u8]) -> [u8; 32] {
let mut keccak = Keccak::v256();
keccak.update(data);
let mut res = [0; 32];
keccak.finalize(&mut res);
res
}
/// Hash the provided data to a scalar via keccak256(data) % l.
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
let scalar = Scalar::from_bytes_mod_order(hash(data));
// Monero will explicitly error in this case
// This library acknowledges the practical impossibility of it occurring and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
scalar
Scalar::from_bytes_mod_order(hash(&data))
}

View File

@@ -1,55 +0,0 @@
use std_shims::vec::Vec;
use crate::hash;
pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
match leafs.len() {
0 => root,
1 => hash(&[root, leafs[0]].concat()),
_ => {
let mut hashes = Vec::with_capacity(1 + leafs.len());
hashes.push(root);
hashes.extend(leafs);
// Monero preprocesses this so the length is a power of 2
let mut high_pow_2 = 4; // 4 is the lowest value this can be
while high_pow_2 < hashes.len() {
high_pow_2 *= 2;
}
let low_pow_2 = high_pow_2 / 2;
// Merge right-most hashes until we're at the low_pow_2
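// e.g. a root and 5 leafs yield 6 hashes: high_pow_2 = 8, low_pow_2 = 4, so the 4
// right-most hashes are paired into 2, leaving 4 hashes for the standard pairing below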
{
let overage = hashes.len() - low_pow_2;
let mut rightmost = hashes.drain((low_pow_2 - overage) ..);
// This is true since we took overage from beneath and above low_pow_2, taking twice as
// many elements as overage
debug_assert_eq!(rightmost.len() % 2, 0);
let mut paired_hashes = Vec::with_capacity(overage);
while let Some(left) = rightmost.next() {
let right = rightmost.next().unwrap();
paired_hashes.push(hash(&[left.as_ref(), &right].concat()));
}
drop(rightmost);
hashes.extend(paired_hashes);
assert_eq!(hashes.len(), low_pow_2);
}
// Do a traditional pairing off
let mut new_hashes = Vec::with_capacity(hashes.len() / 2);
while hashes.len() > 1 {
let mut i = 0;
while i < hashes.len() {
new_hashes.push(hash(&[hashes[i], hashes[i + 1]].concat()));
i += 2;
}
hashes = new_hashes;
new_hashes = Vec::with_capacity(hashes.len() / 2);
}
hashes[0]
}
}
}

View File

@@ -1,102 +0,0 @@
use core::fmt::Debug;
use std_shims::io::{self, Read, Write};
use curve25519_dalek::edwards::EdwardsPoint;
#[cfg(feature = "experimental")]
use curve25519_dalek::{traits::Identity, scalar::Scalar};
#[cfg(feature = "experimental")]
use monero_generators::H_pow_2;
#[cfg(feature = "experimental")]
use crate::hash_to_scalar;
use crate::serialize::*;
/// 64 Borromean ring signatures.
///
/// This type keeps the data as raw bytes as Monero has some transactions with unreduced scalars in
/// this field. While we could use `from_bytes_mod_order`, we'd then not be able to encode this
/// back into its original form.
///
/// Those scalars also have a custom reduction algorithm...
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanSignatures {
pub s0: [[u8; 32]; 64],
pub s1: [[u8; 32]; 64],
pub ee: [u8; 32],
}
impl BorromeanSignatures {
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self { s0: read_array(read_bytes, r)?, s1: read_array(read_bytes, r)?, ee: read_bytes(r)? })
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
for s0 in &self.s0 {
w.write_all(s0)?;
}
for s1 in &self.s1 {
w.write_all(s1)?;
}
w.write_all(&self.ee)
}
#[cfg(feature = "experimental")]
fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
let mut transcript = [0; 2048];
for i in 0 .. 64 {
// TODO: These aren't the correct reduction
// TODO: Can either of these be tightened?
#[allow(non_snake_case)]
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&Scalar::from_bytes_mod_order(self.ee),
&keys_a[i],
&Scalar::from_bytes_mod_order(self.s0[i]),
);
#[allow(non_snake_case)]
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
&hash_to_scalar(LL.compress().as_bytes()),
&keys_b[i],
&Scalar::from_bytes_mod_order(self.s1[i]),
);
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
}
// TODO: This isn't the correct reduction
// TODO: Can this be tightened to from_canonical_bytes?
hash_to_scalar(&transcript) == Scalar::from_bytes_mod_order(self.ee)
}
}
/// A range proof premised on Borromean ring signatures.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanRange {
pub sigs: BorromeanSignatures,
pub bit_commitments: [EdwardsPoint; 64],
}
impl BorromeanRange {
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self { sigs: BorromeanSignatures::read(r)?, bit_commitments: read_array(read_point, r)? })
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.sigs.write(w)?;
write_raw_vec(write_point, &self.bit_commitments, w)
}
#[cfg(feature = "experimental")]
#[must_use]
pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
return false;
}
#[allow(non_snake_case)]
let H_pow_2 = H_pow_2();
let mut commitments_sub_one = [EdwardsPoint::identity(); 64];
for i in 0 .. 64 {
commitments_sub_one[i] = self.bit_commitments[i] - H_pow_2[i];
}
self.sigs.verify(&self.bit_commitments, &commitments_sub_one)
}
}
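A sketch of the relation this verification relies on (illustrative, not part of the diff):
// Each bit commitment is C_i = x_i*G + (b_i * 2^i)*H, where b_i is bit i of the amount and
// the masks x_i sum to the aggregate commitment's mask. Hence sum(C_i) must equal the
// commitment, and each ring signature is over the key pair (C_i, C_i - 2^i*H), i.e.
// `commitments_sub_one` above, one of which is a commitment to zero.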

View File

@@ -0,0 +1,161 @@
#![allow(non_snake_case)]
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{Commitment, wallet::TransactionError, serialize::*};
pub(crate) const MAX_OUTPUTS: usize = 16;
#[derive(Clone, PartialEq, Debug)]
pub struct Bulletproofs {
pub A: EdwardsPoint,
pub S: EdwardsPoint,
pub T1: EdwardsPoint,
pub T2: EdwardsPoint,
pub taux: Scalar,
pub mu: Scalar,
pub L: Vec<EdwardsPoint>,
pub R: Vec<EdwardsPoint>,
pub a: Scalar,
pub b: Scalar,
pub t: Scalar
}
impl Bulletproofs {
pub(crate) fn fee_weight(outputs: usize) -> usize {
let proofs = 6 + usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
let len = (9 + (2 * proofs)) * 32;
let mut clawback = 0;
let padded = 1 << (proofs - 6);
if padded > 2 {
const BP_BASE: usize = 368;
clawback = ((BP_BASE * padded) - len) * 4 / 5;
}
len + clawback
}
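// Worked example for the fee_weight above (illustrative): with 4 outputs, proofs = 8, so
// len = (9 + 16) * 32 = 800. padded = 1 << 2 = 4 > 2, so clawback = ((368 * 4) - 800) * 4 / 5
// = 537, for a weight of 1337 bytes. With 1 or 2 outputs, padded <= 2 and no clawback applies.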
pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
if outputs.len() > MAX_OUTPUTS {
return Err(TransactionError::TooManyOutputs)?;
}
let mut seed = [0; 32];
rng.fill_bytes(&mut seed);
let masks = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect::<Vec<_>>();
let amounts = outputs.iter().map(|commitment| commitment.amount).collect::<Vec<_>>();
let res;
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn free(ptr: *const u8);
fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
}
let ptr = c_generate_bp(
seed.as_ptr(),
u8::try_from(outputs.len()).unwrap(),
amounts.as_ptr(),
masks.as_ptr()
);
let mut len = 6 * 32;
len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
res = Bulletproofs::deserialize(
// Wrap in a cursor to provide a mutable Reader
&mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
).expect("Couldn't deserialize Bulletproofs from Monero");
free(ptr);
};
Ok(res)
}
#[must_use]
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
if commitments.len() > 16 {
return false;
}
let mut seed = [0; 32];
rng.fill_bytes(&mut seed);
let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
self.serialize(&mut serialized).unwrap();
let commitments: Vec<[u8; 32]> = commitments.iter().map(
|commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
).collect();
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn c_verify_bp(
seed: *const u8,
serialized_len: usize,
serialized: *const u8,
commitments_len: u8,
commitments: *const [u8; 32]
) -> bool;
}
c_verify_bp(
seed.as_ptr(),
serialized.len(),
serialized.as_ptr(),
u8::try_from(commitments.len()).unwrap(),
commitments.as_ptr()
)
}
}
fn serialize_core<
W: std::io::Write,
F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
>(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
write_point(&self.A, w)?;
write_point(&self.S, w)?;
write_point(&self.T1, w)?;
write_point(&self.T2, w)?;
write_scalar(&self.taux, w)?;
write_scalar(&self.mu, w)?;
specific_write_vec(&self.L, w)?;
specific_write_vec(&self.R, w)?;
write_scalar(&self.a, w)?;
write_scalar(&self.b, w)?;
write_scalar(&self.t, w)
}
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.serialize_core(w, |points, w| write_vec(write_point, points, w))
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
let bp = Bulletproofs {
A: read_point(r)?,
S: read_point(r)?,
T1: read_point(r)?,
T2: read_point(r)?,
taux: read_scalar(r)?,
mu: read_scalar(r)?,
L: read_vec(read_point, r)?,
R: read_vec(read_point, r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
t: read_scalar(r)?
};
if bp.L.len() != bp.R.len() {
Err(std::io::Error::new(std::io::ErrorKind::Other, "mismatched L/R len"))?;
}
Ok(bp)
}
}

View File

@@ -1,151 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use subtle::{Choice, ConditionallySelectable};
use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;
use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp as multiexp_const;
pub(crate) use monero_generators::Generators;
use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;
#[inline]
pub(crate) fn INV_EIGHT() -> Scalar {
Scalar(DALEK_INV_EIGHT())
}
#[inline]
pub(crate) fn H() -> EdwardsPoint {
EdwardsPoint(DALEK_H())
}
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar(dalek_hash(data))
}
// Components common between variants
pub(crate) const MAX_M: usize = 16;
pub(crate) const LOG_N: usize = 6; // 2 << 6 == N
pub(crate) const N: usize = 64;
pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
multiexp_const(pairs) * INV_EIGHT()
}
pub(crate) fn vector_exponent(
generators: &Generators,
a: &ScalarVector,
b: &ScalarVector,
) -> EdwardsPoint {
debug_assert_eq!(a.len(), b.len());
(a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}
pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
let slice =
&[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
.concat();
*cache = hash_to_scalar(slice);
*cache
}
pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
let mut logM = 0;
let mut M;
while {
M = 1 << logM;
(M <= MAX_M) && (M < outputs)
} {
logM += 1;
}
(logM + LOG_N, M, M * N)
}
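// Worked example for MN above (illustrative): MN(3) stops at M == 4, the smallest power of
// two covering 3 outputs, returning (logMN, M, MN) == (8, 4, 256) given N == 64.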
pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
let (_, M, MN) = MN(commitments.len());
let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
let mut aL = ScalarVector::new(MN);
let mut aR = ScalarVector::new(MN);
for j in 0 .. M {
for i in (0 .. N).rev() {
let bit =
if j < sv.len() { Choice::from((sv[j][i / 8] >> (i % 8)) & 1) } else { Choice::from(0) };
aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
}
}
(aL, aR)
}
pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
(hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}
pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
rng: &mut R,
generators: &Generators,
aL: &ScalarVector,
aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
let ar = Scalar::random(rng);
(ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
}
pub(crate) fn LR_statements(
a: &ScalarVector,
G_i: &[EdwardsPoint],
b: &ScalarVector,
H_i: &[EdwardsPoint],
cL: Scalar,
U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
let mut res = a
.0
.iter()
.copied()
.zip(G_i.iter().copied())
.chain(b.0.iter().copied().zip(H_i.iter().copied()))
.collect::<Vec<_>>();
res.push((cL, U));
res
}
static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
pub(crate) fn TWO_N() -> &'static ScalarVector {
TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
}
pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
let mut products = vec![Scalar::ZERO; 1 << w.len()];
products[0] = winv[0];
products[1] = w[0];
for j in 1 .. w.len() {
let mut slots = (1 << (j + 1)) - 1;
while slots > 0 {
products[slots] = products[slots / 2] * w[j];
products[slots - 1] = products[slots / 2] * winv[j];
slots = slots.saturating_sub(2);
}
}
// Sanity check as if the above failed to populate, it'd be critical
for w in &products {
debug_assert!(!bool::from(w.is_zero()));
}
products
}
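For intuition, a hand expansion of challenge_products (illustrative, not part of the diff):
// For two folding challenges w = [w0, w1], the cache indexed by generator position i is
// [w0^-1 * w1^-1, w0^-1 * w1, w0 * w1^-1, w0 * w1], i.e. bit k of i (most significant first)
// selects w[k] when set and winv[k] otherwise. This is the per-generator product of folding
// challenges the verifiers multiply against a and b.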

View File

@@ -1,179 +0,0 @@
#![allow(non_snake_case)]
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;
use crate::{Commitment, wallet::TransactionError, serialize::*};
pub(crate) mod scalar_vector;
pub(crate) mod core;
use self::core::LOG_N;
pub(crate) mod original;
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
pub(crate) mod plus;
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;
pub(crate) use self::original::OriginalStruct;
pub(crate) use self::plus::PlusStruct;
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
/// Bulletproofs enum, supporting the original and plus formulations.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
Original(OriginalStruct),
Plus(PlusStruct),
}
impl Bulletproofs {
pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
let fields = if plus { 6 } else { 9 };
// TODO: Shouldn't this use u32/u64?
#[allow(non_snake_case)]
let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
let padded_outputs = 1 << LR_len;
LR_len += LOG_N;
let len = (fields + (2 * LR_len)) * 32;
len +
if padded_outputs <= 2 {
0
} else {
let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
let size = (fields + (2 * LR_len)) * 32;
((base * padded_outputs) - size) * 4 / 5
}
}
/// Prove the list of commitments are within [0 .. 2^64).
pub fn prove<R: RngCore + CryptoRng>(
rng: &mut R,
outputs: &[Commitment],
plus: bool,
) -> Result<Self, TransactionError> {
if outputs.len() > MAX_OUTPUTS {
return Err(TransactionError::TooManyOutputs)?;
}
Ok(if !plus {
Self::Original(OriginalStruct::prove(rng, outputs))
} else {
Self::Plus(PlusStruct::prove(rng, outputs))
})
}
/// Verify the given Bulletproofs.
#[must_use]
pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
match self {
Self::Original(bp) => bp.verify(rng, commitments),
Self::Plus(bp) => bp.verify(rng, commitments),
}
}
/// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
/// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
/// Returns true if the Bulletproofs are sane, regardless of their validity.
#[must_use]
pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
id: ID,
commitments: &[EdwardsPoint],
) -> bool {
match self {
Self::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
Self::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
}
}
fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
&self,
w: &mut W,
specific_write_vec: F,
) -> io::Result<()> {
match self {
Self::Original(bp) => {
write_point(&bp.A, w)?;
write_point(&bp.S, w)?;
write_point(&bp.T1, w)?;
write_point(&bp.T2, w)?;
write_scalar(&bp.taux, w)?;
write_scalar(&bp.mu, w)?;
specific_write_vec(&bp.L, w)?;
specific_write_vec(&bp.R, w)?;
write_scalar(&bp.a, w)?;
write_scalar(&bp.b, w)?;
write_scalar(&bp.t, w)
}
Self::Plus(bp) => {
write_point(&bp.A, w)?;
write_point(&bp.A1, w)?;
write_point(&bp.B, w)?;
write_scalar(&bp.r1, w)?;
write_scalar(&bp.s1, w)?;
write_scalar(&bp.d1, w)?;
specific_write_vec(&bp.L, w)?;
specific_write_vec(&bp.R, w)
}
}
}
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.write_core(w, |points, w| write_vec(write_point, points, w))
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
/// Read Bulletproofs.
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self::Original(OriginalStruct {
A: read_point(r)?,
S: read_point(r)?,
T1: read_point(r)?,
T2: read_point(r)?,
taux: read_scalar(r)?,
mu: read_scalar(r)?,
L: read_vec(read_point, r)?,
R: read_vec(read_point, r)?,
a: read_scalar(r)?,
b: read_scalar(r)?,
t: read_scalar(r)?,
}))
}
/// Read Bulletproofs+.
pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self::Plus(PlusStruct {
A: read_point(r)?,
A1: read_point(r)?,
B: read_point(r)?,
r1: read_scalar(r)?,
s1: read_scalar(r)?,
d1: read_scalar(r)?,
L: read_vec(read_point, r)?,
R: read_vec(read_point, r)?,
}))
}
}
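A hedged usage sketch of this API (illustrative; `rng`, `commitments`, and `commitment_points`
are assumed bindings, error handling elided):
// Prove the amounts are within range, using the Bulletproofs+ formulation
let bp = Bulletproofs::prove(&mut rng, &commitments, true)?;
// Verify against the corresponding commitment points
assert!(bp.verify(&mut rng, &commitment_points));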

View File

@@ -1,308 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
use multiexp::BatchVerifier;
use crate::{Commitment, ringct::bulletproofs::core::*};
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
pub(crate) fn IP12() -> Scalar {
*IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
pub(crate) A: DalekPoint,
pub(crate) S: DalekPoint,
pub(crate) T1: DalekPoint,
pub(crate) T2: DalekPoint,
pub(crate) taux: DalekScalar,
pub(crate) mu: DalekScalar,
pub(crate) L: Vec<DalekPoint>,
pub(crate) R: Vec<DalekPoint>,
pub(crate) a: DalekScalar,
pub(crate) b: DalekScalar,
pub(crate) t: DalekScalar,
}
impl OriginalStruct {
#[allow(clippy::many_single_char_names)]
pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
let (logMN, M, MN) = MN(commitments.len());
let (aL, aR) = bit_decompose(commitments);
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
let (mut cache, _) = hash_commitments(commitments_points.clone());
let (sL, sR) =
ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();
let generators = GENERATORS();
let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
let mut cache = hash_to_scalar(&y.to_bytes());
let z = cache;
let l0 = &aL - z;
let l1 = sL;
let mut zero_twos = Vec::with_capacity(MN);
let zpow = ScalarVector::powers(z, M + 2);
for j in 0 .. M {
for i in 0 .. N {
zero_twos.push(zpow[j + 2] * TWO_N()[i]);
}
}
let yMN = ScalarVector::powers(y, MN);
let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
let r1 = yMN * sR;
let (T1, T2, x, mut taux) = {
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
let t2 = inner_product(&l1, &r1);
let mut tau1 = Scalar::random(&mut *rng);
let mut tau2 = Scalar::random(&mut *rng);
let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);
let x =
hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);
let taux = (tau2 * (x * x)) + (tau1 * x);
tau1.zeroize();
tau2.zeroize();
(T1, T2, x, taux)
};
let mu = (x * rho) + alpha;
alpha.zeroize();
rho.zeroize();
for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
taux += zpow[i + 2] * gamma;
}
let l = &l0 + &(l1 * x);
let r = &r0 + &(r1 * x);
let t = inner_product(&l, &r);
let x_ip =
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
let mut a = l;
let mut b = r;
let yinv = y.invert().unwrap();
let yinvpow = ScalarVector::powers(yinv, MN);
let mut G_proof = generators.G[.. a.len()].to_vec();
let mut H_proof = generators.H[.. a.len()].to_vec();
H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
let U = H() * x_ip;
let mut L = Vec::with_capacity(logMN);
let mut R = Vec::with_capacity(logMN);
while a.len() != 1 {
let (aL, aR) = a.split();
let (bL, bR) = b.split();
let cL = inner_product(&aL, &bR);
let cR = inner_product(&aR, &bL);
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
L.push(*L_i);
R.push(*R_i);
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
let winv = w.invert().unwrap();
a = (aL * w) + (aR * winv);
b = (bL * winv) + (bR * w);
if a.len() != 1 {
G_proof = hadamard_fold(G_L, G_R, winv, w);
H_proof = hadamard_fold(H_L, H_R, w, winv);
}
}
let res = Self {
A: *A,
S: *S,
T1: *T1,
T2: *T2,
taux: *taux,
mu: *mu,
L,
R,
a: *a[0],
b: *b[0],
t: *t,
};
debug_assert!(res.verify(rng, &commitments_points));
res
}
#[allow(clippy::many_single_char_names)]
#[must_use]
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
id: ID,
commitments: &[DalekPoint],
) -> bool {
// Verify commitments are valid
if commitments.is_empty() || (commitments.len() > MAX_M) {
return false;
}
// Verify L and R are properly sized
if self.L.len() != self.R.len() {
return false;
}
let (logMN, M, MN) = MN(commitments.len());
if self.L.len() != logMN {
return false;
}
// Rebuild all challenges
let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);
let z = hash_to_scalar(&y.to_bytes());
cache = z;
let x = hash_cache(
&mut cache,
&[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
);
let x_ip = hash_cache(
&mut cache,
&[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
);
let mut w = Vec::with_capacity(logMN);
let mut winv = Vec::with_capacity(logMN);
for (L, R) in self.L.iter().zip(&self.R) {
w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
winv.push(cache.invert().unwrap());
}
// Convert the proof from * INV_EIGHT to its actual form
let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
let T1 = normalize(&self.T1);
let T2 = normalize(&self.T2);
let A = normalize(&self.A);
let S = normalize(&self.S);
let commitments = commitments.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();
// Verify it
let mut proof = Vec::with_capacity(4 + commitments.len());
let zpow = ScalarVector::powers(z, M + 3);
let ip1y = ScalarVector::powers(y, M * N).sum();
let mut k = -(zpow[2] * ip1y);
for j in 1 ..= M {
k -= zpow[j + 2] * IP12();
}
let y1 = Scalar(self.t) - ((z * ip1y) + k);
proof.push((-y1, H()));
proof.push((-Scalar(self.taux), G));
for (j, commitment) in commitments.iter().enumerate() {
proof.push((zpow[j + 2], *commitment));
}
proof.push((x, T1));
proof.push((x * x, T2));
verifier.queue(&mut *rng, id, proof);
proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
proof.push((z3, H()));
proof.push((-Scalar(self.mu), G));
proof.push((Scalar::ONE, A));
proof.push((x, S));
{
let ypow = ScalarVector::powers(y, MN);
let yinv = y.invert().unwrap();
let yinvpow = ScalarVector::powers(yinv, MN);
let w_cache = challenge_products(&w, &winv);
let generators = GENERATORS();
for i in 0 .. MN {
let g = (Scalar(self.a) * w_cache[i]) + z;
proof.push((-g, generators.G[i]));
let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
proof.push((-h, generators.H[i]));
}
}
for i in 0 .. logMN {
proof.push((w[i] * w[i], L[i]));
proof.push((winv[i] * winv[i], R[i]));
}
verifier.queue(rng, id, proof);
true
}
#[must_use]
pub(crate) fn verify<R: RngCore + CryptoRng>(
&self,
rng: &mut R,
commitments: &[DalekPoint],
) -> bool {
let mut verifier = BatchVerifier::new(1);
if self.verify_core(rng, &mut verifier, (), commitments) {
verifier.verify_vartime()
} else {
false
}
}
#[must_use]
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
id: ID,
commitments: &[DalekPoint],
) -> bool {
self.verify_core(rng, verifier, id, commitments)
}
}

View File

@@ -1,300 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
use multiexp::BatchVerifier;
use crate::{
Commitment, hash,
ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
};
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
*TRANSCRIPT_CELL.get_or_init(|| {
EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
})
}
// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
let (cache, commitments) = hash_commitments(commitments);
(hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
}
// d[j*N+i] = z**(2*(j+1)) * 2**i
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
let zpow = ScalarVector::even_powers(z, 2 * M);
let mut d = vec![Scalar::ZERO; MN];
for j in 0 .. M {
for i in 0 .. N {
d[(j * N) + i] = zpow[j] * TWO_N()[i];
}
}
(zpow, ScalarVector(d))
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PlusStruct {
pub(crate) A: DalekPoint,
pub(crate) A1: DalekPoint,
pub(crate) B: DalekPoint,
pub(crate) r1: DalekScalar,
pub(crate) s1: DalekScalar,
pub(crate) d1: DalekScalar,
pub(crate) L: Vec<DalekPoint>,
pub(crate) R: Vec<DalekPoint>,
}
impl PlusStruct {
#[allow(clippy::many_single_char_names)]
pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
let generators = GENERATORS();
let (logMN, M, MN) = MN(commitments.len());
let (aL, aR) = bit_decompose(commitments);
let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
let (mut cache, _) = hash_plus(commitments_points.clone());
let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
let mut cache = hash_to_scalar(&y.to_bytes());
let z = cache;
let (zpow, d) = d(z, M, MN);
let aL1 = aL - z;
let ypow = ScalarVector::powers(y, MN + 2);
let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
y_for_d.0.reverse();
let aR1 = (aR + z) + (y_for_d * d);
for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
alpha1 += zpow[j] * ypow[MN + 1] * gamma;
}
let mut a = aL1;
let mut b = aR1;
let yinv = y.invert().unwrap();
let yinvpow = ScalarVector::powers(yinv, MN);
let mut G_proof = generators.G[.. a.len()].to_vec();
let mut H_proof = generators.H[.. a.len()].to_vec();
let mut L = Vec::with_capacity(logMN);
let mut R = Vec::with_capacity(logMN);
while a.len() != 1 {
let (aL, aR) = a.split();
let (bL, bR) = b.split();
let cL = weighted_inner_product(&aL, &bR, y);
let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);
let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
L_i.push((dL, G));
let L_i = prove_multiexp(&L_i);
L.push(*L_i);
let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
R_i.push((dR, G));
let R_i = prove_multiexp(&R_i);
R.push(*R_i);
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
let winv = w.invert().unwrap();
G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
H_proof = hadamard_fold(H_L, H_R, w, winv);
a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
b = (bL * winv) + (bR * w);
alpha1 += (dL * (w * w)) + (dR * (winv * winv));
dL.zeroize();
dR.zeroize();
}
let mut r = Scalar::random(&mut *rng);
let mut s = Scalar::random(&mut *rng);
let mut d = Scalar::random(&mut *rng);
let mut eta = Scalar::random(&mut *rng);
let A1 = prove_multiexp(&[
(r, G_proof[0]),
(s, H_proof[0]),
(d, G),
((r * y * b[0]) + (s * y * a[0]), H()),
]);
let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);
let r1 = (a[0] * e) + r;
r.zeroize();
let s1 = (b[0] * e) + s;
s.zeroize();
let d1 = ((d * e) + eta) + (alpha1 * (e * e));
d.zeroize();
eta.zeroize();
alpha1.zeroize();
let res = Self { A: *A, A1: *A1, B: *B, r1: *r1, s1: *s1, d1: *d1, L, R };
debug_assert!(res.verify(rng, &commitments_points));
res
}
#[allow(clippy::many_single_char_names)]
#[must_use]
fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
id: ID,
commitments: &[DalekPoint],
) -> bool {
// Verify commitments are valid
if commitments.is_empty() || (commitments.len() > MAX_M) {
return false;
}
// Verify L and R are properly sized
if self.L.len() != self.R.len() {
return false;
}
let (logMN, M, MN) = MN(commitments.len());
if self.L.len() != logMN {
return false;
}
// Rebuild all challenges
let (mut cache, commitments) = hash_plus(commitments.iter().copied());
let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
let yinv = y.invert().unwrap();
let z = hash_to_scalar(&y.to_bytes());
cache = z;
let mut w = Vec::with_capacity(logMN);
let mut winv = Vec::with_capacity(logMN);
for (L, R) in self.L.iter().zip(&self.R) {
w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
winv.push(cache.invert().unwrap());
}
let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);
// Convert the proof from * INV_EIGHT to its actual form
let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());
let L = self.L.iter().map(normalize).collect::<Vec<_>>();
let R = self.R.iter().map(normalize).collect::<Vec<_>>();
let A = normalize(&self.A);
let A1 = normalize(&self.A1);
let B = normalize(&self.B);
// Verify it
let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));
let mut yMN = y;
for _ in 0 .. logMN {
yMN *= yMN;
}
let yMNy = yMN * y;
let (zpow, d) = d(z, M, MN);
let zsq = zpow[0];
let esq = e * e;
let minus_esq = -esq;
let commitment_weight = minus_esq * yMNy;
for (i, commitment) in commitments.iter().map(EdwardsPoint::mul_by_cofactor).enumerate() {
proof.push((commitment_weight * zpow[i], commitment));
}
// Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
// to a single addition under vartime for the first BP verified in the batch, which is expected
// to be much more significant
proof.push((Scalar::ONE, -B));
proof.push((-e, A1));
proof.push((minus_esq, A));
proof.push((Scalar(self.d1), G));
let d_sum = zpow.sum() * Scalar::from(u64::MAX);
let y_sum = weighted_powers(y, MN).sum();
proof.push((
Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
H(),
));
let w_cache = challenge_products(&w, &winv);
let mut e_r1_y = e * Scalar(self.r1);
let e_s1 = e * Scalar(self.s1);
let esq_z = esq * z;
let minus_esq_z = -esq_z;
let mut minus_esq_y = minus_esq * yMN;
let generators = GENERATORS();
for i in 0 .. MN {
proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
proof.push((
(e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
generators.H[i],
));
e_r1_y *= yinv;
minus_esq_y *= yinv;
}
for i in 0 .. logMN {
proof.push((minus_esq * w[i] * w[i], L[i]));
proof.push((minus_esq * winv[i] * winv[i], R[i]));
}
verifier.queue(rng, id, proof);
true
}
#[must_use]
pub(crate) fn verify<R: RngCore + CryptoRng>(
&self,
rng: &mut R,
commitments: &[DalekPoint],
) -> bool {
let mut verifier = BatchVerifier::new(1);
if self.verify_core(rng, &mut verifier, (), commitments) {
verifier.verify_vartime()
} else {
false
}
}
#[must_use]
pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
&self,
rng: &mut R,
verifier: &mut BatchVerifier<ID, EdwardsPoint>,
id: ID,
commitments: &[DalekPoint],
) -> bool {
self.verify_core(rng, verifier, id, commitments)
}
}

View File

@@ -1,137 +0,0 @@
use core::ops::{Add, Sub, Mul, Index};
use std_shims::vec::Vec;
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp;
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
($Op: ident, $op: ident, $f: expr) => {
impl $Op<Scalar> for ScalarVector {
type Output = Self;
fn $op(self, b: Scalar) -> Self {
Self(self.0.iter().map(|a| $f((a, &b))).collect())
}
}
impl $Op<Scalar> for &ScalarVector {
type Output = ScalarVector;
fn $op(self, b: Scalar) -> ScalarVector {
ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
}
}
impl $Op<ScalarVector> for ScalarVector {
type Output = Self;
fn $op(self, b: Self) -> Self {
debug_assert_eq!(self.len(), b.len());
Self(self.0.iter().zip(b.0.iter()).map($f).collect())
}
}
impl $Op<Self> for &ScalarVector {
type Output = ScalarVector;
fn $op(self, b: Self) -> ScalarVector {
debug_assert_eq!(self.len(), b.len());
ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
}
}
};
}
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);
impl ScalarVector {
pub(crate) fn new(len: usize) -> Self {
Self(vec![Scalar::ZERO; len])
}
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
debug_assert!(len != 0);
let mut res = Vec::with_capacity(len);
res.push(Scalar::ONE);
for i in 1 .. len {
res.push(res[i - 1] * x);
}
Self(res)
}
pub(crate) fn even_powers(x: Scalar, pow: usize) -> Self {
debug_assert!(pow != 0);
// Verify pow is a power of two
debug_assert_eq!(((pow - 1) & pow), 0);
let xsq = x * x;
let mut res = Self(Vec::with_capacity(pow / 2));
res.0.push(xsq);
let mut prev = 2;
while prev < pow {
res.0.push(res[res.len() - 1] * xsq);
prev += 2;
}
res
}
pub(crate) fn sum(mut self) -> Scalar {
self.0.drain(..).sum()
}
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn split(self) -> (Self, Self) {
let (l, r) = self.0.split_at(self.0.len() / 2);
(Self(l.to_vec()), Self(r.to_vec()))
}
}
impl Index<usize> for ScalarVector {
type Output = Scalar;
fn index(&self, index: usize) -> &Scalar {
&self.0[index]
}
}
pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
(a * b).sum()
}
pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
}
pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
// y ** 0 is not used as a power
(a * b * weighted_powers(y, a.len())).sum()
}
impl Mul<&[EdwardsPoint]> for &ScalarVector {
type Output = EdwardsPoint;
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
debug_assert_eq!(self.len(), b.len());
multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
}
}
pub(crate) fn hadamard_fold(
l: &[EdwardsPoint],
r: &[EdwardsPoint],
a: Scalar,
b: Scalar,
) -> Vec<EdwardsPoint> {
let mut res = Vec::with_capacity(l.len() / 2);
for i in 0 .. l.len() {
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
}
res
}
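A small illustrative check of the helpers above (nothing assumed beyond this file):
// powers(2, 4) == [1, 2, 4, 8]; an inner product against a vector of ones sums the powers
let a = ScalarVector::powers(Scalar::from(2u8), 4);
let b = ScalarVector(vec![Scalar::ONE; 4]);
assert_eq!(inner_product(&a, &b), Scalar::from(15u8));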

View File

@@ -1,70 +1,65 @@
#![allow(non_snake_case)]
use core::ops::Deref;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, Choice, CtOption};
use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
traits::VartimePrecomputedMultiscalarMul,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
};
use crate::{
INV_EIGHT, Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys,
ringct::hash_to_point, serialize::*,
Commitment, random_scalar, hash_to_scalar,
transaction::RING_LEN,
wallet::decoys::Decoys,
ringct::hash_to_point,
serialize::*
};
#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
#[cfg(feature = "multisig")]
pub(crate) use multisig::add_key_image_share;
pub use multisig::{ClsagDetails, ClsagMultisig};
/// Errors returned when CLSAG signing fails.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ClsagError {
#[cfg_attr(feature = "std", error("internal error ({0})"))]
InternalError(&'static str),
#[cfg_attr(feature = "std", error("invalid ring"))]
InvalidRing,
#[cfg_attr(feature = "std", error("invalid ring member (member {0}, ring size {1})"))]
InvalidRingMember(u8, u8),
#[cfg_attr(feature = "std", error("invalid commitment"))]
InvalidCommitment,
#[cfg_attr(feature = "std", error("invalid key image"))]
InvalidImage,
#[cfg_attr(feature = "std", error("invalid D"))]
InvalidD,
#[cfg_attr(feature = "std", error("invalid s"))]
InvalidS,
#[cfg_attr(feature = "std", error("invalid c1"))]
InvalidC1,
lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();
}
/// Input being signed for.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, Error, Debug)]
pub enum ClsagError {
#[error("internal error ({0})")]
InternalError(String),
#[error("invalid ring member (member {0}, ring size {1})")]
InvalidRingMember(u8, u8),
#[error("invalid commitment")]
InvalidCommitment,
#[error("invalid D")]
InvalidD,
#[error("invalid s")]
InvalidS,
#[error("invalid c1")]
InvalidC1
}
#[derive(Clone, PartialEq, Debug)]
pub struct ClsagInput {
// The actual commitment for the true spend
pub(crate) commitment: Commitment,
pub commitment: Commitment,
// True spend index, offsets, and ring
pub(crate) decoys: Decoys,
pub decoys: Decoys
}
impl ClsagInput {
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<Self, ClsagError> {
pub fn new(
commitment: Commitment,
decoys: Decoys
) -> Result<ClsagInput, ClsagError> {
let n = decoys.len();
if n > u8::MAX.into() {
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
}
let n = u8::try_from(n).unwrap();
if decoys.i >= n {
@@ -76,14 +71,14 @@ impl ClsagInput {
Err(ClsagError::InvalidCommitment)?;
}
Ok(Self { commitment, decoys })
Ok(ClsagInput { commitment, decoys })
}
}
#[allow(clippy::large_enum_variant)]
enum Mode {
Sign(usize, EdwardsPoint, EdwardsPoint),
Verify(Scalar),
#[cfg(feature = "experimental")]
Verify(Scalar)
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
@@ -95,26 +90,22 @@ fn core(
msg: &[u8; 32],
D: &EdwardsPoint,
s: &[Scalar],
A_c1: Mode,
A_c1: Mode
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
let D = D * INV_EIGHT();
let D = D * *INV_EIGHT;
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
const PREFIX: &[u8] = b"CLSAG_";
#[rustfmt::skip]
const AGG_0: &[u8] = b"agg_0";
#[rustfmt::skip]
const ROUND: &[u8] = b"round";
const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();
let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
to_hash.extend(PREFIX);
let mut to_hash = vec![];
to_hash.reserve_exact(((2 * n) + 5) * 32);
const PREFIX: &[u8] = "CLSAG_".as_bytes();
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
const ROUND: &[u8] = "round".as_bytes();
to_hash.extend(AGG_0);
to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);
to_hash.extend([0; 32 - AGG_0.len()]);
let mut P = Vec::with_capacity(n);
for member in ring {
@@ -134,7 +125,7 @@ fn core(
// mu_P with agg_0
let mu_P = hash_to_scalar(&to_hash);
// mu_C with agg_1
to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
to_hash[AGG_0.len() - 1] = b'1';
let mu_C = hash_to_scalar(&to_hash);
// Truncate it for the round transcript, altering the DST as needed
@@ -158,8 +149,9 @@ fn core(
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = hash_to_scalar(&to_hash);
}
},
#[cfg(feature = "experimental")]
Mode::Verify(c1) => {
start = 0;
end = n;
@@ -168,12 +160,11 @@ fn core(
}
// Perform the core loop
let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
let mut c1 = None;
for i in (start .. end).map(|i| i % n) {
// This will only execute once and shouldn't need to be constant time. Making it constant time
// removes the risk of branch prediction creating timing differences depending on ring index
// however
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
if i == 0 {
c1 = Some(c);
}
let c_p = mu_P * c;
let c_c = mu_C * c;
@@ -181,7 +172,7 @@ fn core(
let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
let PH = hash_to_point(P[i]);
// Shouldn't be an issue as all of the variables in this vartime statement are public
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);
let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul(&[c_p, c_c]);
to_hash.truncate(((2 * n) + 3) * 32);
to_hash.extend(L.compress().to_bytes());
@@ -193,18 +184,16 @@ fn core(
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}
/// CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Clsag {
pub D: EdwardsPoint,
pub s: Vec<Scalar>,
pub c1: Scalar,
pub c1: Scalar
}
impl Clsag {
// Sign core is the extension of core as needed for signing, yet is shared between single signer
// and multisig, hence why it's still core
#[allow(clippy::many_single_char_names)]
pub(crate) fn sign_core<R: RngCore + CryptoRng>(
rng: &mut R,
I: &EdwardsPoint,
@@ -212,8 +201,8 @@ impl Clsag {
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint,
) -> (Self, EdwardsPoint, Scalar, Scalar) {
AH: EdwardsPoint
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
@@ -225,51 +214,49 @@ impl Clsag {
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D, p, c), c1) =
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(Self { D, s, c1 }, pseudo_out, p, c * z)
(
Clsag { D, s, c1 },
pseudo_out,
p,
c * z
)
}
/// Generate CLSAG signatures for the given inputs.
/// inputs is of the form (private key, key image, input).
/// sum_outputs is for the sum of the outputs' commitment masks.
// Single signer CLSAG
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
sum_outputs: Scalar,
msg: [u8; 32],
) -> Vec<(Self, EdwardsPoint)> {
msg: [u8; 32]
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let mut res = Vec::with_capacity(inputs.len());
let mut sum_pseudo_outs = Scalar::zero();
for i in 0 .. inputs.len() {
let mask = if i == (inputs.len() - 1) {
sum_outputs - sum_pseudo_outs
let mut mask = random_scalar(rng);
if i == (inputs.len() - 1) {
mask = sum_outputs - sum_pseudo_outs;
} else {
let mask = random_scalar(rng);
sum_pseudo_outs += mask;
mask
};
}
let mut nonce = Zeroizing::new(random_scalar(rng));
let (mut clsag, pseudo_out, p, c) = Self::sign_core(
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
rng,
&inputs[i].1,
&inputs[i].2,
mask,
&msg,
nonce.deref() * &ED25519_BASEPOINT_TABLE,
nonce.deref() *
hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
);
clsag.s[usize::from(inputs[i].2.decoys.i)] =
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
inputs[i].0.zeroize();
nonce.zeroize();
debug_assert!(clsag
.verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
.is_ok());
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
res.push((clsag, pseudo_out));
}
@@ -277,49 +264,97 @@ impl Clsag {
res
}
/// Verify the CLSAG signature against the given Transaction data.
pub fn verify(
// Not extensively tested nor guaranteed to have expected parity with Monero
#[cfg(feature = "experimental")]
pub fn rust_verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32],
msg: &[u8; 32]
) -> Result<(), ClsagError> {
// Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
// here
if ring.is_empty() {
Err(ClsagError::InvalidRing)?;
}
if ring.len() != self.s.len() {
Err(ClsagError::InvalidS)?;
}
if I.is_identity() {
Err(ClsagError::InvalidImage)?;
}
let D = self.D.mul_by_cofactor();
if D.is_identity() {
Err(ClsagError::InvalidD)?;
}
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
let (_, c1) = core(
ring,
I,
pseudo_out,
msg,
&self.D.mul_by_cofactor(),
&self.s,
Mode::Verify(self.c1)
);
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
Ok(())
}
pub(crate) fn fee_weight(ring_len: usize) -> usize {
(ring_len * 32) + 32 + 32
pub(crate) fn fee_weight() -> usize {
(RING_LEN * 32) + 32 + 32
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_raw_vec(write_scalar, &self.s, w)?;
w.write_all(&self.c1.to_bytes())?;
write_point(&self.D, w)
}
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Self> {
Ok(Self { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
Ok(
Clsag {
s: read_raw_vec(read_scalar, decoys, r)?,
c1: read_scalar(r)?,
D: read_point(r)?
}
)
}
pub fn verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), ClsagError> {
// Serialize it to pass the struct to Monero without extensive FFI
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
write_varint(&self.s.len().try_into().unwrap(), &mut serialized).unwrap();
self.serialize(&mut serialized).unwrap();
let I_bytes = I.compress().to_bytes();
let mut ring_bytes = vec![];
for member in ring {
ring_bytes.extend(&member[0].compress().to_bytes());
ring_bytes.extend(&member[1].compress().to_bytes());
}
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
unsafe {
// Uses Monero's C verification function to ensure compatibility with Monero
#[link(name = "wrapper")]
extern "C" {
pub(crate) fn c_verify_clsag(
serialized_len: usize,
serialized: *const u8,
ring_size: u8,
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
) -> bool;
}
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
u8::try_from(ring.len()).map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
) {
Ok(())
} else {
Err(ClsagError::InvalidC1)
}
}
}
}
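For orientation, the per-member equations rebuilt by `core` above (a sketch, not new logic):
// With challenge c at ring position i, aggregation scalars mu_P / mu_C, key image I, and
// commitment-difference image D, where C_i is the i-th ring commitment offset by the
// pseudo-out:
//   L_i = s_i*G + (c*mu_P)*P_i + (c*mu_C)*C_i
//   R_i = s_i*Hp(P_i) + (c*mu_P)*I + (c*mu_C)*D
// Hashing the transcript with (L_i, R_i) yields the next challenge; the signature is valid
// when the chain closes back to c1.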

View File

@@ -1,58 +1,44 @@
use core::{ops::Deref, fmt::Debug};
use std_shims::{
sync::Arc,
io::{self, Read, Write},
};
use std::sync::RwLock;
use core::fmt::Debug;
use std::{io::Read, sync::{Arc, RwLock}};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
traits::{Identity, IsIdentity},
scalar::Scalar,
edwards::EdwardsPoint,
edwards::EdwardsPoint
};
use group::{ff::Field, Group, GroupEncoding};
use group::Group;
use transcript::{Transcript, RecommendedTranscript};
use frost::{curve::Ed25519, FrostError, FrostView, algorithm::Algorithm};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
use frost::{
dkg::lagrange,
curve::Ed25519,
Participant, FrostError, ThresholdKeys, ThresholdView,
algorithm::{WriteAddendum, Algorithm},
};
use crate::ringct::{
hash_to_point,
clsag::{ClsagInput, Clsag},
use crate::{
frost::{MultisigError, write_dleq, read_dleq},
ringct::{hash_to_point, clsag::{ClsagInput, Clsag}}
};
fn dleq_transcript() -> RecommendedTranscript {
RecommendedTranscript::new(b"monero_key_image_dleq")
}
impl ClsagInput {
fn transcript<T: Transcript>(&self, transcript: &mut T) {
// Doesn't domain separate as this is considered part of the larger CLSAG proof
// Ring index
transcript.append_message(b"real_spend", [self.decoys.i]);
transcript.append_message(b"ring_index", &[self.decoys.i]);
// Ring
for (i, pair) in self.decoys.ring.iter().enumerate() {
let mut ring = vec![];
for pair in &self.decoys.ring {
// Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
// They're just an unreliable reference to this data which will be included in the message
// if in use
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
transcript.append_message(b"key", pair[0].compress().to_bytes());
transcript.append_message(b"commitment", pair[1].compress().to_bytes());
ring.extend(&pair[0].compress().to_bytes());
ring.extend(&pair[1].compress().to_bytes());
}
transcript.append_message(b"ring", &ring);
// Doesn't include the commitment's parts as the above ring + index includes the commitment
// The only potential malleability would be if the G/H relationship is known breaking the
@@ -60,77 +46,66 @@ impl ClsagInput {
}
}
/// CLSAG input and the mask to use for it.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, Debug)]
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar,
mask: Scalar
}
impl ClsagDetails {
pub fn new(input: ClsagInput, mask: Scalar) -> Self {
Self { input, mask }
}
}
/// Addendum produced during the FROST signing process with relevant data.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
pub(crate) key_image: dfg::EdwardsPoint,
dleq: DLEqProof<dfg::EdwardsPoint>,
}
impl WriteAddendum for ClsagAddendum {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
self.dleq.write(writer)
pub fn new(input: ClsagInput, mask: Scalar) -> ClsagDetails {
ClsagDetails { input, mask }
}
}
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
struct Interim {
p: Scalar,
c: Scalar,
clsag: Clsag,
pseudo_out: EdwardsPoint,
pseudo_out: EdwardsPoint
}
/// FROST algorithm for producing a CLSAG signature.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct ClsagMultisig {
transcript: RecommendedTranscript,
pub(crate) H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
// an extra round
H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
image: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
msg: Option<[u8; 32]>,
interim: Option<Interim>,
interim: Option<Interim>
}
impl ClsagMultisig {
pub fn new(
transcript: RecommendedTranscript,
output_key: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
) -> Self {
Self {
transcript,
details: Arc<RwLock<Option<ClsagDetails>>>
) -> Result<ClsagMultisig, MultisigError> {
Ok(
ClsagMultisig {
transcript,
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
details,
details,
msg: None,
interim: None,
}
msg: None,
interim: None
}
)
}
pub const fn serialized_len() -> usize {
32 + (2 * 32)
}
fn input(&self) -> ClsagInput {
@@ -142,23 +117,8 @@ impl ClsagMultisig {
}
}
pub(crate) fn add_key_image_share(
image: &mut EdwardsPoint,
generator: EdwardsPoint,
offset: Scalar,
included: &[Participant],
participant: Participant,
share: EdwardsPoint,
) {
if image.is_identity() {
*image = generator * offset;
}
*image += share * lagrange::<dfg::Scalar>(participant, included).0;
}
impl Algorithm<Ed25519> for ClsagMultisig {
type Transcript = RecommendedTranscript;
type Addendum = ClsagAddendum;
type Signature = (Clsag, EdwardsPoint);
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
@@ -168,70 +128,35 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
keys: &ThresholdKeys<Ed25519>,
) -> ClsagAddendum {
ClsagAddendum {
key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
dleq: DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
// try to merge later in some form, when it should instead just merge xH (as it does)
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
keys.secret_share(),
),
}
view: &FrostView<Ed25519>
) -> Vec<u8> {
let mut serialized = Vec::with_capacity(Self::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
serialized.extend(write_dleq(rng, self.H, view.secret_share().0));
serialized
}
fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
let mut bytes = [0; 32];
reader.read_exact(&mut bytes)?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
}
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
}
fn process_addendum(
fn process_addendum<Re: Read>(
&mut self,
view: &ThresholdView<Ed25519>,
l: Participant,
addendum: ClsagAddendum,
view: &FrostView<Ed25519>,
l: u16,
serialized: &mut Re
) -> Result<(), FrostError> {
if self.image.is_identity() {
if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG");
self.input().transcript(&mut self.transcript);
self.transcript.append_message(b"mask", self.mask().to_bytes());
self.transcript.append_message(b"mask", &self.mask().to_bytes());
}
self.transcript.append_message(b"participant", l.to_bytes());
addendum
.dleq
.verify(
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
&[view.original_verification_share(l), addendum.key_image],
)
.map_err(|_| FrostError::InvalidPreprocess(l))?;
self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
add_key_image_share(
&mut self.image,
self.transcript.append_message(b"participant", &l.to_be_bytes());
let image = read_dleq(
serialized,
self.H,
view.offset().0,
view.included(),
l,
addendum.key_image.0,
);
view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
self.image += image;
Ok(())
}
@@ -242,17 +167,17 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn sign_share(
&mut self,
view: &ThresholdView<Ed25519>,
view: &FrostView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: Vec<Zeroizing<dfg::Scalar>>,
msg: &[u8],
nonces: &[dfg::Scalar],
msg: &[u8]
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
// The transcript contains private data, preventing passive adversaries from recreating this
// process even if they have access to commitments (specifically, the ring index being signed
// for, along with the mask, which should require knowing not only the shared keys but also the
// input commitment masks)
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
@@ -262,13 +187,15 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self.image,
&self.input(),
self.mask(),
self.msg.as_ref().unwrap(),
&self.msg.as_ref().unwrap(),
nonce_sums[0][0].0,
nonce_sums[0][1].0,
nonce_sums[0][1].0
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
(-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
let share = dfg::Scalar(nonces[0].0 - (p * view.secret_share().0));
share
}
#[must_use]
@@ -276,36 +203,32 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self,
_: dfg::EdwardsPoint,
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar,
sum: dfg::Scalar
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let mut clsag = interim.clsag.clone();
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
if clsag
.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
self.msg.as_ref().unwrap(),
)
.is_ok()
{
if clsag.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap()
).is_ok() {
return Some((clsag, interim.pseudo_out));
}
None
return None;
}
#[must_use]
fn verify_share(
&self,
verification_share: dfg::EdwardsPoint,
nonces: &[Vec<dfg::EdwardsPoint>],
share: dfg::Scalar,
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
) -> bool {
let interim = self.interim.as_ref().unwrap();
Ok(vec![
(share, dfg::EdwardsPoint::generator()),
(dfg::Scalar(interim.p), verification_share),
(-dfg::Scalar::ONE, nonces[0][0]),
])
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
nonces[0][0].0 - (interim.p * verification_share.0)
);
}
}

View File

@@ -1,8 +1,67 @@
use curve25519_dalek::edwards::EdwardsPoint;
use subtle::ConditionallySelectable;
pub use monero_generators::{hash_to_point as raw_hash_to_point};
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
raw_hash_to_point(key.compress().to_bytes())
use group::ff::{Field, PrimeField};
use dalek_ff_group::field::FieldElement;
use crate::hash;
pub fn hash_to_point(point: EdwardsPoint) -> EdwardsPoint {
let mut bytes = point.compress().to_bytes();
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn c_hash_to_point(point: *const u8);
}
c_hash_to_point(bytes.as_mut_ptr());
}
CompressedEdwardsY::from_slice(&bytes).decompress().unwrap()
}
// This works without issue. It's also 140 times slower (at 3.5ms), and despite checking that it
// passes for all branches, there could still be *some* discrepancy somewhere. There's no reason
// to use it unless we're trying to purge that section of the C static library, which we aren't
// right now
#[allow(dead_code)]
pub(crate) fn rust_hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
#[allow(non_snake_case)]
let A = FieldElement::from(486662u64);
let v = FieldElement::from_square(hash(&key.compress().to_bytes())).double();
let w = v + FieldElement::one();
let x = w.square() + (-A.square() * v);
// This isn't the complete X, merely its initial value
// We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
// While inefficient, it stays within the API boundaries and reduces the amount of work done here
#[allow(non_snake_case)]
let X = {
let u = w;
let v = x;
let v3 = v * v * v;
let uv3 = u * v3;
let v7 = v3 * v3 * v;
let uv7 = u * v7;
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
};
let x = X.square() * x;
let y = w - x;
let non_zero_0 = !y.is_zero();
let y_if_non_zero_0 = w + x;
let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());
let mut z = -A;
z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
#[allow(non_snake_case)]
let Z = z + w;
#[allow(non_snake_case)]
let mut Y = z - w;
Y = Y * Z.invert().unwrap();
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}
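Given the comment above that rust_hash_to_point exists mainly as a cross-check of the C-backed path, a consistency test along the following lines seems natural. This is a sketch only, assuming the crate's random_scalar helper and that both functions remain reachable from a sibling test module (the module name is illustrative):

#[cfg(test)]
mod hash_to_point_consistency {
  use rand_core::OsRng;
  use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
  use crate::random_scalar;
  use super::{hash_to_point, rust_hash_to_point};

  #[test]
  fn c_and_rust_implementations_agree() {
    for _ in 0 .. 64 {
      // Hash an arbitrary, well-formed point with both implementations and compare the results
      let point = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
      assert_eq!(hash_to_point(point), rust_hash_to_point(point));
    }
  }
}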

View File

@@ -1,72 +0,0 @@
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use curve25519_dalek::scalar::Scalar;
#[cfg(feature = "experimental")]
use curve25519_dalek::edwards::EdwardsPoint;
use crate::serialize::*;
#[cfg(feature = "experimental")]
use crate::{hash_to_scalar, ringct::hash_to_point};
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Mlsag {
pub ss: Vec<[Scalar; 2]>,
pub cc: Scalar,
}
impl Mlsag {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
for ss in &self.ss {
write_raw_vec(write_scalar, ss, w)?;
}
write_scalar(&self.cc, w)
}
pub fn read<R: Read>(mixins: usize, r: &mut R) -> io::Result<Self> {
Ok(Self {
ss: (0 .. mixins).map(|_| read_array(read_scalar, r)).collect::<Result<_, _>>()?,
cc: read_scalar(r)?,
})
}
#[cfg(feature = "experimental")]
#[must_use]
pub fn verify(
&self,
msg: &[u8; 32],
ring: &[[EdwardsPoint; 2]],
key_image: &EdwardsPoint,
) -> bool {
if ring.is_empty() {
return false;
}
let mut buf = Vec::with_capacity(6 * 32);
let mut ci = self.cc;
for (i, ring_member) in ring.iter().enumerate() {
buf.extend_from_slice(msg);
#[allow(non_snake_case)]
let L =
|r| EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, &ring_member[r], &self.ss[i][r]);
buf.extend_from_slice(ring_member[0].compress().as_bytes());
buf.extend_from_slice(L(0).compress().as_bytes());
#[allow(non_snake_case)]
let R = (self.ss[i][0] * hash_to_point(ring_member[0])) + (ci * key_image);
buf.extend_from_slice(R.compress().as_bytes());
buf.extend_from_slice(ring_member[1].compress().as_bytes());
buf.extend_from_slice(L(1).compress().as_bytes());
ci = hash_to_scalar(&buf);
buf.clear();
}
ci == self.cc
}
}

View File

@@ -1,121 +1,25 @@
use core::ops::Deref;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::{Zeroize, Zeroizing};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
pub(crate) mod hash_to_point;
pub use hash_to_point::{raw_hash_to_point, hash_to_point};
pub use hash_to_point::hash_to_point;
/// MLSAG struct, along with verifying functionality.
pub mod mlsag;
/// CLSAG struct, along with signing and verifying functionality.
pub mod clsag;
/// BorromeanRange struct, along with verifying functionality.
pub mod borromean;
/// Bulletproofs(+) structs, along with proving and verifying functionality.
pub mod bulletproofs;
use crate::{
Protocol,
serialize::*,
ringct::{mlsag::Mlsag, clsag::Clsag, borromean::BorromeanRange, bulletproofs::Bulletproofs},
ringct::{clsag::Clsag, bulletproofs::Bulletproofs}
};
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE)
}
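As a quick illustration of the definition above (x * hash_to_point(xG)), the key image is a deterministic function of the secret key, which is what makes reuse of a key linkable. A small sketch, assuming the Scalar-taking form shown directly above and the crate's random_scalar helper (the module name is illustrative):

#[cfg(test)]
mod key_image_sketch {
  use rand_core::OsRng;
  use crate::{random_scalar, ringct::generate_key_image};

  #[test]
  fn key_image_is_deterministic() {
    // The same secret always yields the same key image, so spending an output twice is detectable
    let secret = random_scalar(&mut OsRng);
    assert_eq!(generate_key_image(secret), generate_key_image(secret));
  }
}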
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EncryptedAmount {
Original { mask: [u8; 32], amount: [u8; 32] },
Compact { amount: [u8; 8] },
}
impl EncryptedAmount {
pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<Self> {
Ok(if compact {
Self::Compact { amount: read_bytes(r)? }
} else {
Self::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
})
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
Self::Original { mask, amount } => {
w.write_all(mask)?;
w.write_all(amount)
}
Self::Compact { amount } => w.write_all(amount),
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum RctType {
/// No RCT proofs.
Null,
/// One MLSAG for a single input and a Borromean range proof (RCTTypeFull).
MlsagAggregate,
/// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
MlsagIndividual,
/// One MLSAG for each input and a Bulletproof (RCTTypeBulletproof).
Bulletproofs,
/// One MLSAG for each input and a Bulletproof, yet starting to use EncryptedAmount::Compact
/// (RCTTypeBulletproof2).
BulletproofsCompactAmount,
/// One CLSAG for each input and a Bulletproof (RCTTypeCLSAG).
Clsag,
/// One CLSAG for each input and a Bulletproof+ (RCTTypeBulletproofPlus).
BulletproofsPlus,
}
impl RctType {
pub fn to_byte(self) -> u8 {
match self {
Self::Null => 0,
Self::MlsagAggregate => 1,
Self::MlsagIndividual => 2,
Self::Bulletproofs => 3,
Self::BulletproofsCompactAmount => 4,
Self::Clsag => 5,
Self::BulletproofsPlus => 6,
}
}
pub fn from_byte(byte: u8) -> Option<Self> {
Some(match byte {
0 => Self::Null,
1 => Self::MlsagAggregate,
2 => Self::MlsagIndividual,
3 => Self::Bulletproofs,
4 => Self::BulletproofsCompactAmount,
5 => Self::Clsag,
6 => Self::BulletproofsPlus,
_ => None?,
})
}
pub fn compact_encrypted_amounts(&self) -> bool {
match self {
Self::Null | Self::MlsagAggregate | Self::MlsagIndividual | Self::Bulletproofs => false,
Self::BulletproofsCompactAmount | Self::Clsag | Self::BulletproofsPlus => true,
}
}
}
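A small sanity sketch for the byte mapping above, checking that from_byte inverts to_byte for every defined type (module name illustrative):

#[cfg(test)]
mod rct_type_sketch {
  use super::RctType;

  #[test]
  fn rct_type_byte_round_trip() {
    for ty in [
      RctType::Null,
      RctType::MlsagAggregate,
      RctType::MlsagIndividual,
      RctType::Bulletproofs,
      RctType::BulletproofsCompactAmount,
      RctType::Clsag,
      RctType::BulletproofsPlus,
    ] {
      assert_eq!(RctType::from_byte(ty.to_byte()), Some(ty));
    }
  }
}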
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct RctBase {
pub fee: u64,
pub pseudo_outs: Vec<EdwardsPoint>,
pub encrypted_amounts: Vec<EncryptedAmount>,
pub commitments: Vec<EdwardsPoint>,
pub ecdh_info: Vec<[u8; 8]>,
pub commitments: Vec<EdwardsPoint>
}
impl RctBase {
@@ -123,261 +27,119 @@ impl RctBase {
1 + 8 + (outputs * (8 + 32))
}
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
w.write_all(&[rct_type.to_byte()])?;
pub fn serialize<W: std::io::Write>(&self, w: &mut W, rct_type: u8) -> std::io::Result<()> {
w.write_all(&[rct_type])?;
match rct_type {
RctType::Null => Ok(()),
RctType::MlsagAggregate |
RctType::MlsagIndividual |
RctType::Bulletproofs |
RctType::BulletproofsCompactAmount |
RctType::Clsag |
RctType::BulletproofsPlus => {
0 => Ok(()),
5 => {
write_varint(&self.fee, w)?;
if rct_type == RctType::MlsagIndividual {
write_raw_vec(write_point, &self.pseudo_outs, w)?;
}
for encrypted_amount in &self.encrypted_amounts {
encrypted_amount.write(w)?;
for ecdh in &self.ecdh_info {
w.write_all(ecdh)?;
}
write_raw_vec(write_point, &self.commitments, w)
}
},
_ => panic!("Serializing unknown RctType's Base")
}
}
pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(Self, RctType)> {
let rct_type = RctType::from_byte(read_byte(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RCT type"))?;
match rct_type {
RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
RctType::Bulletproofs |
RctType::BulletproofsCompactAmount |
RctType::Clsag |
RctType::BulletproofsPlus => {
if outputs == 0 {
// Because the Bulletproofs(+) layout must be canonical, there must be 1 Bulletproof if
// Bulletproofs are in use
// If there are Bulletproofs, there must be a matching number of outputs, implicitly
// banning 0 outputs
// Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
Err(io::Error::new(io::ErrorKind::Other, "RCT with Bulletproofs(+) had 0 outputs"))?;
}
}
}
pub fn deserialize<R: std::io::Read>(outputs: usize, r: &mut R) -> std::io::Result<(RctBase, u8)> {
let mut rct_type = [0];
r.read_exact(&mut rct_type)?;
Ok((
if rct_type == RctType::Null {
Self { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
if rct_type[0] == 0 {
RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] }
} else {
Self {
RctBase {
fee: read_varint(r)?,
pseudo_outs: if rct_type == RctType::MlsagIndividual {
read_raw_vec(read_point, inputs, r)?
} else {
vec![]
},
encrypted_amounts: (0 .. outputs)
.map(|_| EncryptedAmount::read(rct_type.compact_encrypted_amounts(), r))
.collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?,
ecdh_info: (0 .. outputs).map(
|_| { let mut ecdh = [0; 8]; r.read_exact(&mut ecdh).map(|_| ecdh) }
).collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?
}
},
rct_type,
rct_type[0]
))
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub enum RctPrunable {
Null,
MlsagBorromean {
borromean: Vec<BorromeanRange>,
mlsags: Vec<Mlsag>,
},
MlsagBulletproofs {
bulletproofs: Bulletproofs,
mlsags: Vec<Mlsag>,
pseudo_outs: Vec<EdwardsPoint>,
},
Clsag {
bulletproofs: Bulletproofs,
bulletproofs: Vec<Bulletproofs>,
clsags: Vec<Clsag>,
pseudo_outs: Vec<EdwardsPoint>,
},
pseudo_outs: Vec<EdwardsPoint>
}
}
impl RctPrunable {
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
pub fn rct_type(&self) -> u8 {
match self {
RctPrunable::Null => 0,
RctPrunable::Clsag { .. } => 5
}
}
pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
1 + Bulletproofs::fee_weight(outputs) + (inputs * (Clsag::fee_weight() + 32))
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
Self::Null => Ok(()),
Self::MlsagBorromean { borromean, mlsags } => {
write_raw_vec(BorromeanRange::write, borromean, w)?;
write_raw_vec(Mlsag::write, mlsags, w)
}
Self::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
if rct_type == RctType::Bulletproofs {
w.write_all(&1u32.to_le_bytes())?;
} else {
w.write_all(&[1])?;
}
bulletproofs.write(w)?;
write_raw_vec(Mlsag::write, mlsags, w)?;
write_raw_vec(write_point, pseudo_outs, w)
}
Self::Clsag { bulletproofs, clsags, pseudo_outs } => {
w.write_all(&[1])?;
bulletproofs.write(w)?;
write_raw_vec(Clsag::write, clsags, w)?;
write_raw_vec(write_point, pseudo_outs, w)
RctPrunable::Null => Ok(()),
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
write_vec(Bulletproofs::serialize, &bulletproofs, w)?;
write_raw_vec(Clsag::serialize, &clsags, w)?;
write_raw_vec(write_point, &pseudo_outs, w)
}
}
}
pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized, rct_type).unwrap();
serialized
}
pub fn read<R: Read>(
rct_type: RctType,
pub fn deserialize<R: std::io::Read>(
rct_type: u8,
decoys: &[usize],
outputs: usize,
r: &mut R,
) -> io::Result<Self> {
Ok(match rct_type {
RctType::Null => Self::Null,
RctType::MlsagAggregate | RctType::MlsagIndividual => Self::MlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
},
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => Self::MlsagBulletproofs {
bulletproofs: {
if (if rct_type == RctType::Bulletproofs {
u64::from(read_u32(r)?)
} else {
read_varint(r)?
}) != 1
{
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
}
Bulletproofs::read(r)?
r: &mut R
) -> std::io::Result<RctPrunable> {
Ok(
match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
// TODO: Can the number of outputs be calculated from the BPs for any validly formed TX?
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::deserialize(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?
},
mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
RctType::Clsag | RctType::BulletproofsPlus => Self::Clsag {
bulletproofs: {
if read_varint(r)? != 1 {
Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
}
(if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
r,
)?
},
clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
})
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?
}
)
}
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
Self::Null => panic!("Serializing RctPrunable::Null for a signature"),
Self::MlsagBorromean { borromean, .. } => borromean.iter().try_for_each(|rs| rs.write(w)),
Self::MlsagBulletproofs { bulletproofs, .. } => bulletproofs.signature_write(w),
Self::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct RctSignatures {
pub base: RctBase,
pub prunable: RctPrunable,
pub prunable: RctPrunable
}
impl RctSignatures {
/// RctType for a given RctSignatures struct.
pub fn rct_type(&self) -> RctType {
match &self.prunable {
RctPrunable::Null => RctType::Null,
RctPrunable::MlsagBorromean { .. } => {
/*
This type of RctPrunable may have no outputs, yet pseudo_outs are per input
Accordingly, this inference only holds for a RctSignatures belonging to a TX with inputs,
which is the case for any valid RctSignatures
For an invalid RctSignatures, an invalid MlsagAggregate could potentially be interpreted as
a valid MlsagIndividual (or vice versa), yet the two have incompatible deserializations
This means it's impossible to receive a MlsagAggregate over the wire and interpret it
as a MlsagIndividual (or vice versa)
That leaves only manual manipulation unsafe, which will always be true since these fields
are all pub
TODO: Consider making them private with read-only accessors?
*/
if self.base.pseudo_outs.is_empty() {
RctType::MlsagAggregate
} else {
RctType::MlsagIndividual
}
}
// RctBase ensures there's at least one output, making the following
// inferences guaranteed/expects impossible on any valid RctSignatures
RctPrunable::MlsagBulletproofs { .. } => {
if matches!(
self
.base
.encrypted_amounts
.get(0)
.expect("MLSAG with Bulletproofs didn't have any outputs"),
EncryptedAmount::Original { .. }
) {
RctType::Bulletproofs
} else {
RctType::BulletproofsCompactAmount
}
}
RctPrunable::Clsag { bulletproofs, .. } => {
if matches!(bulletproofs, Bulletproofs::Original { .. }) {
RctType::Clsag
} else {
RctType::BulletproofsPlus
}
}
}
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(inputs, outputs)
}
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.base.serialize(w, self.prunable.rct_type())?;
self.prunable.serialize(w)
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
let rct_type = self.rct_type();
self.base.write(w, rct_type)?;
self.prunable.write(w, rct_type)
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<Self> {
let base = RctBase::read(decoys.len(), outputs, r)?;
Ok(Self { base: base.0, prunable: RctPrunable::read(base.1, &decoys, outputs, r)? })
pub fn deserialize<R: std::io::Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> std::io::Result<RctSignatures> {
let base = RctBase::deserialize(outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
}
}

353
coins/monero/src/rpc.rs Normal file
View File

@@ -0,0 +1,353 @@
use std::fmt::Debug;
use thiserror::Error;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::json;
use reqwest;
use crate::{transaction::{Input, Timelock, Transaction}, block::Block, wallet::Fee};
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
result: T
}
#[derive(Clone, Error, Debug)]
pub enum RpcError {
#[error("internal error ({0})")]
InternalError(String),
#[error("connection error")]
ConnectionError,
#[error("transactions not found")]
TransactionsNotFound(Vec<[u8; 32]>),
#[error("invalid point ({0})")]
InvalidPoint(String),
#[error("pruned transaction")]
PrunedTransaction,
#[error("invalid transaction ({0:?})")]
InvalidTransaction([u8; 32])
}
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
hex::decode(value).map_err(|_| RpcError::InternalError("Monero returned invalid hex".to_string()))
}
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?
).decompress().ok_or(RpcError::InvalidPoint(point.to_string()))
}
#[derive(Clone, Debug)]
pub struct Rpc(String);
impl Rpc {
pub fn new(daemon: String) -> Rpc {
Rpc(daemon)
}
pub async fn rpc_call<
Params: Serialize + Debug,
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Option<Params>) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let mut builder = client.post(&(self.0.clone() + "/" + method));
if let Some(params) = params.as_ref() {
builder = builder.json(params);
}
self.call_tail(method, builder).await
}
pub async fn bin_call<
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Vec<u8>) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let builder = client.post(&(self.0.clone() + "/" + method)).body(params);
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
}
async fn call_tail<
Response: DeserializeOwned + Debug
>(&self, method: &str, builder: reqwest::RequestBuilder) -> Result<Response, RpcError> {
let res = builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?;
Ok(
if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
}
)
}
pub async fn get_height(&self) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct HeightResponse {
height: usize
}
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
async fn get_transactions_core(
&self,
hashes: &[[u8; 32]]
) -> Result<(Vec<Result<Transaction, RpcError>>, Vec<[u8; 32]>), RpcError> {
if hashes.len() == 0 {
return Ok((vec![], vec![]));
}
#[derive(Deserialize, Debug)]
struct TransactionResponse {
tx_hash: String,
as_hex: String,
pruned_as_hex: String
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>
}
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
}))).await?;
Ok((
txs.txs.iter().map(|res| {
let tx = Transaction::deserialize(
&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap()
)
).map_err(|_| RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap()))?;
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?
}
}
Ok(tx)
}).collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect()
))
}
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
let (txs, missed) = self.get_transactions_core(hashes).await?;
if missed.len() != 0 {
Err(RpcError::TransactionsNotFound(missed))?;
}
// This will clone several KB and is accordingly inefficient
// TODO: Optimize
txs.iter().cloned().collect::<Result<_, _>>()
}
pub async fn get_transactions_possible(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
let (txs, _) = self.get_transactions_core(hashes).await?;
Ok(txs.iter().cloned().filter_map(|tx| tx.ok()).collect())
}
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String
}
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_block",
"params": {
"height": height
}
}))).await?;
Ok(
Block::deserialize(
&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?)
).expect("Monero returned a block we couldn't deserialize")
)
}
async fn get_block_transactions_core(
&self,
height: usize,
possible: bool
) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(height).await?;
let mut res = vec![block.miner_tx];
res.extend(
if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
}
);
Ok(res)
}
pub async fn get_block_transactions(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, false).await
}
pub async fn get_block_transactions_possible(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, true).await
}
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32]
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct OIndexes {
o_indexes: Vec<u64>,
status: String,
untrusted: bool,
credits: usize,
top_hash: String
}
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
&Request {
txid: hash
}).unwrap()
).await?;
Ok(indexes.o_indexes)
}
// from and to are inclusive
pub async fn get_output_distribution(&self, from: usize, to: usize) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
pub struct Distribution {
distribution: Vec<u64>
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>
}
let mut distributions: JsonRpcResponse<Distributions> = self.rpc_call("json_rpc", Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
}))).await?;
Ok(distributions.result.distributions.swap_remove(0).distribution)
}
pub async fn get_outputs(
&self,
indexes: &[u64],
height: usize
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
pub struct Out {
key: String,
mask: String,
txid: String
}
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>
}
let outs: Outs = self.rpc_call("get_outs", Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
}))).await?;
let txs = self.get_transactions(
&outs.outs.iter().map(|out|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
.try_into().expect("Monero returned an invalid sized hash")
).collect::<Vec<_>>()
).await?;
// TODO: Support time-based lock times. These shouldn't be needed, and it may be painful to
// get the median time for the given height, yet we do need to handle them in order to be complete
outs.outs.iter().enumerate().map(
|(i, out)| Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
match txs[i].prefix.timelock {
Timelock::Block(t_height) => (t_height <= height),
_ => false
}
})
)
).collect()
}
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct FeeResponse {
fee: u64,
quantization_mask: u64
}
let res: JsonRpcResponse<FeeResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_fee_estimate"
}))).await?;
Ok(Fee { per_weight: res.result.fee, mask: res.result.quantization_mask })
}
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct SendRawResponse {
status: String,
double_spend: bool,
fee_too_low: bool,
invalid_input: bool,
invalid_output: bool,
low_mixin: bool,
not_relayed: bool,
overspend: bool,
too_big: bool,
too_few_outputs: bool,
reason: String
}
let mut buf = Vec::with_capacity(2048);
tx.serialize(&mut buf).unwrap();
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
"tx_as_hex": hex::encode(&buf)
}))).await?;
if res.status != "OK" {
Err(RpcError::InvalidTransaction(tx.hash()))?;
}
Ok(())
}
}

View File

@@ -1,91 +0,0 @@
use async_trait::async_trait;
use digest_auth::AuthContext;
use reqwest::Client;
use crate::rpc::{RpcError, RpcConnection, Rpc};
#[derive(Clone, Debug)]
pub struct HttpRpc {
client: Client,
userpass: Option<(String, String)>,
url: String,
}
impl HttpRpc {
/// Create a new HTTP(S) RPC connection.
///
/// A daemon requiring authentication can be used by including the username and password in the
/// URL (e.g. "http://username:password@node:18081").
pub fn new(mut url: String) -> Result<Rpc<Self>, RpcError> {
// Parse out the username and password
let userpass = if url.contains('@') {
let url_clone = url;
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
Err(RpcError::InvalidNode)?;
}
let mut userpass = split_url[0];
url = split_url[1].to_string();
// If there was additionally a protocol string, restore that to the daemon URL
if userpass.contains("://") {
let split_userpass = userpass.split("://").collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
url = split_userpass[0].to_string() + "://" + &url;
userpass = split_userpass[1];
}
let split_userpass = userpass.split(':').collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
} else {
None
};
Ok(Rpc(Self { client: Client::new(), userpass, url }))
}
}
#[async_trait]
impl RpcConnection for HttpRpc {
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
if let Some((user, pass)) = &self.userpass {
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
// Only provide authentication if this daemon actually expects it
if let Some(header) = req.headers().get("www-authenticate") {
builder = builder.header(
"Authorization",
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
.map_err(|_| RpcError::InvalidNode)?
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
user,
pass,
"/".to_string() + route,
None,
))
.map_err(|_| RpcError::InvalidNode)?
.to_header_string(),
);
}
}
Ok(
builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?
.bytes()
.await
.map_err(|_| RpcError::ConnectionError)?
.slice(..)
.to_vec(),
)
}
}

View File

@@ -1,617 +0,0 @@
use core::fmt::Debug;
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
use std_shims::{
vec::Vec,
io,
string::{String, ToString},
};
use async_trait::async_trait;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};
use crate::{
Protocol,
serialize::*,
transaction::{Input, Timelock, Transaction},
block::Block,
wallet::Fee,
};
#[cfg(feature = "http_rpc")]
mod http;
#[cfg(feature = "http_rpc")]
pub use http::*;
#[derive(Deserialize, Debug)]
pub struct EmptyResponse;
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
result: T,
}
#[derive(Deserialize, Debug)]
struct TransactionResponse {
tx_hash: String,
as_hex: String,
pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>,
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum RpcError {
#[cfg_attr(feature = "std", error("internal error ({0})"))]
InternalError(&'static str),
#[cfg_attr(feature = "std", error("connection error"))]
ConnectionError,
#[cfg_attr(feature = "std", error("invalid node"))]
InvalidNode,
#[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
UnsupportedProtocol(usize),
#[cfg_attr(feature = "std", error("transactions not found"))]
TransactionsNotFound(Vec<[u8; 32]>),
#[cfg_attr(feature = "std", error("invalid point ({0})"))]
InvalidPoint(String),
#[cfg_attr(feature = "std", error("pruned transaction"))]
PrunedTransaction,
#[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
InvalidTransaction([u8; 32]),
}
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
hex::decode(value).map_err(|_| RpcError::InvalidNode)
}
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
}
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
)
.decompress()
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
}
// Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol
fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
let vi_start = read_byte(reader)?;
let len = match vi_start & 0b11 {
0 => 1,
1 => 2,
2 => 4,
3 => 8,
_ => unreachable!(),
};
let mut vi = u64::from(vi_start >> 2);
for i in 1 .. len {
vi |= u64::from(read_byte(reader)?) << (((i - 1) * 8) + 6);
}
Ok(vi)
}
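A worked example of the layout read_epee_vi decodes: the two low bits of the first byte select a 1-, 2-, 4-, or 8-byte width, and the value occupies the remaining bits, little-endian. 300 therefore encodes as (300 << 2) | 1 = 0x04B1, i.e. the bytes [0xB1, 0x04]. A hedged test sketch of that case (module name illustrative):

#[cfg(test)]
mod epee_vi_sketch {
  #[test]
  fn reads_two_byte_epee_varint() {
    // (300 << 2) | 1, stored little-endian over two bytes
    let bytes = [0xB1u8, 0x04];
    let mut reader: &[u8] = &bytes;
    assert_eq!(super::read_epee_vi(&mut reader).unwrap(), 300);
  }
}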
#[async_trait]
pub trait RpcConnection: Send + Sync + Clone + Debug {
/// Perform a POST request to the specified route with the specified body.
///
/// The implementor is left to handle anything such as authentication.
async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError>;
}
// TODO: Make these provided methods for RpcConnection?
#[derive(Clone, Debug)]
pub struct Rpc<R: RpcConnection>(R);
impl<R: RpcConnection> Rpc<R> {
/// Perform a RPC call to the specified route with the provided parameters.
///
/// This is NOT a JSON-RPC call. JSON-RPC calls use a route of "json_rpc" and are available via
/// `json_rpc_call`.
pub async fn rpc_call<Params: Send + Serialize + Debug, Response: DeserializeOwned + Debug>(
&self,
route: &str,
params: Option<Params>,
) -> Result<Response, RpcError> {
serde_json::from_str(
std_shims::str::from_utf8(
&self
.0
.post(
route,
if let Some(params) = params {
serde_json::to_string(&params).unwrap().into_bytes()
} else {
vec![]
},
)
.await?,
)
.map_err(|_| RpcError::InvalidNode)?,
)
.map_err(|_| RpcError::InvalidNode)
}
/// Perform a JSON-RPC call with the specified method with the provided parameters
pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Option<Value>,
) -> Result<Response, RpcError> {
let mut req = json!({ "method": method });
if let Some(params) = params {
req.as_object_mut().unwrap().insert("params".into(), params);
}
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
}
/// Perform a binary call to the specified route with the provided parameters.
pub async fn bin_call(&self, route: &str, params: Vec<u8>) -> Result<Vec<u8>, RpcError> {
self.0.post(route, params).await
}
/// Get the active blockchain protocol version.
pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
#[derive(Deserialize, Debug)]
struct ProtocolResponse {
major_version: usize,
}
#[derive(Deserialize, Debug)]
struct LastHeaderResponse {
block_header: ProtocolResponse,
}
Ok(
match self
.json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
.await?
.block_header
.major_version
{
13 | 14 => Protocol::v14,
15 | 16 => Protocol::v16,
protocol => Err(RpcError::UnsupportedProtocol(protocol))?,
},
)
}
pub async fn get_height(&self) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct HeightResponse {
height: usize,
}
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
if hashes.is_empty() {
return Ok(vec![]);
}
let mut hashes_hex = hashes.iter().map(hex::encode).collect::<Vec<_>>();
let mut all_txs = Vec::with_capacity(hashes.len());
while !hashes_hex.is_empty() {
// Monero errors if more than 100 are requested unless using a non-restricted RPC
const TXS_PER_REQUEST: usize = 100;
let this_count = TXS_PER_REQUEST.min(hashes_hex.len());
let txs: TransactionsResponse = self
.rpc_call(
"get_transactions",
Some(json!({
"txs_hashes": hashes_hex.drain(.. this_count).collect::<Vec<_>>(),
})),
)
.await?;
if !txs.missed_tx.is_empty() {
Err(RpcError::TransactionsNotFound(
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
))?;
}
all_txs.extend(txs.txs);
}
all_txs
.iter()
.enumerate()
.map(|(i, res)| {
let tx = Transaction::read::<&[u8]>(
&mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
.as_ref(),
)
.map_err(|_| match hash_hex(&res.tx_hash) {
Ok(hash) => RpcError::InvalidTransaction(hash),
Err(err) => err,
})?;
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.is_empty() {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?,
}
}
// This does run a few keccak256 hashes, which is pointless if the node is trusted
// In exchange, this provides resilience against invalid/malicious nodes
if tx.hash() != hashes[i] {
Err(RpcError::InvalidNode)?;
}
Ok(tx)
})
.collect()
}
pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
}
/// Get the hash of a block from the node by the block's number.
/// This function does not verify the returned block hash is actually for the number in question.
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
#[derive(Deserialize, Debug)]
struct BlockHeaderResponse {
hash: String,
}
#[derive(Deserialize, Debug)]
struct BlockHeaderByHeightResponse {
block_header: BlockHeaderResponse,
}
let header: BlockHeaderByHeightResponse =
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
}
/// Get a block from the node by its hash.
/// This function does not verify the returned block actually has the hash in question.
pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String,
}
let res: BlockResponse =
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
let block =
Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)?;
if block.hash() != hash {
Err(RpcError::InvalidNode)?;
}
Ok(block)
}
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
match self.get_block(self.get_block_hash(number).await?).await {
Ok(block) => {
// Make sure this is actually the block for this number
match block.miner_tx.prefix.inputs.get(0) {
Some(Input::Gen(actual)) => {
if usize::try_from(*actual).unwrap() == number {
Ok(block)
} else {
Err(RpcError::InvalidNode)
}
}
Some(Input::ToKey { .. }) | None => Err(RpcError::InvalidNode),
}
}
e => e,
}
}
pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(hash).await?;
let mut res = vec![block.miner_tx];
res.extend(self.get_transactions(&block.txs).await?);
Ok(res)
}
pub async fn get_block_transactions_by_number(
&self,
number: usize,
) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions(self.get_block_hash(number).await?).await
}
/// Get the output indexes of the specified transaction.
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
/*
TODO: Use these when a suitable epee serde lib exists
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32],
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct OIndexes {
o_indexes: Vec<u64>,
}
*/
// Given the immaturity of Rust epee libraries, this is a homegrown one which is only validated
// to work against this specific function
// Header for EPEE, an 8-byte magic and a version
const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01";
let mut request = EPEE_HEADER.to_vec();
// Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata)
request.push(1 << 2);
// Length of field name
request.push(4);
// Field name
request.extend(b"txid");
// Type of field
request.push(10);
// Length of string, since this byte array is technically a string
request.push(32 << 2);
// The "string"
request.extend(hash);
let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?;
let mut indexes: &[u8] = indexes_buf.as_ref();
(|| {
if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
}
let read_object = |reader: &mut &[u8]| {
let fields = read_byte(reader)? >> 2;
for _ in 0 .. fields {
let name_len = read_byte(reader)?;
let name = read_raw_vec(read_byte, name_len.into(), reader)?;
let type_with_array_flag = read_byte(reader)?;
let kind = type_with_array_flag & (!0x80);
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };
if (&name == b"o_indexes") && (kind != 5) {
Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
}
let f = match kind {
// i64
1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// i32
2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
// i16
3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
// i8
4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// u64
5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// u32
6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
// u16
7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
// u8
8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// double
9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
// string, or any collection of bytes
10 => |reader: &mut &[u8]| {
let len = read_epee_vi(reader)?;
read_raw_vec(
read_byte,
len
.try_into()
.map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
reader,
)
},
// bool
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// object, errors here as it shouldn't be used on this call
12 => |_: &mut &[u8]| {
Err(io::Error::new(
io::ErrorKind::Other,
"node used object in reply to get_o_indexes",
))
},
// array, so far unused
13 => |_: &mut &[u8]| {
Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
},
_ => {
|_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
}
};
let mut res = vec![];
for _ in 0 .. iters {
res.push(f(reader)?);
}
let mut actual_res = Vec::with_capacity(res.len());
if &name == b"o_indexes" {
for o_index in res {
actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
})?));
}
return Ok(actual_res);
}
}
// Didn't return a response with o_indexes
// TODO: Check if this didn't have o_indexes because it's an error response
Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
};
read_object(&mut indexes)
})()
.map_err(|_| RpcError::InvalidNode)
}
/// Get the output distribution, from the specified start height to the specified end height
/// (both inclusive).
pub async fn get_output_distribution(
&self,
from: usize,
to: usize,
) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distribution {
distribution: Vec<u64>,
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>,
}
let mut distributions: Distributions = self
.json_rpc_call(
"get_output_distribution",
Some(json!({
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to,
})),
)
.await?;
Ok(distributions.distributions.swap_remove(0).distribution)
}
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
/// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
/// all Monero transactions.
pub async fn get_unlocked_outputs(
&self,
indexes: &[u64],
height: usize,
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
struct Out {
key: String,
mask: String,
txid: String,
}
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>,
}
let outs: Outs = self
.rpc_call(
"get_outs",
Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
})),
)
.await?;
let txs = self
.get_transactions(
&outs
.outs
.iter()
.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
.collect::<Result<Vec<_>, _>>()?,
)
.await?;
// TODO: https://github.com/serai-dex/serai/issues/104
outs
.outs
.iter()
.enumerate()
.map(|(i, out)| {
Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
.filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
)
})
.collect()
}
/// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
/// MUST be sanity checked.
// TODO: Take a sanity check argument
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct FeeResponse {
fee: u64,
quantization_mask: u64,
}
let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
}
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct SendRawResponse {
status: String,
double_spend: bool,
fee_too_low: bool,
invalid_input: bool,
invalid_output: bool,
low_mixin: bool,
not_relayed: bool,
overspend: bool,
too_big: bool,
too_few_outputs: bool,
reason: String,
}
let res: SendRawResponse = self
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(tx.serialize()) })))
.await?;
if res.status != "OK" {
Err(RpcError::InvalidTransaction(tx.hash()))?;
}
Ok(())
}
pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
self
.rpc_call::<_, EmptyResponse>(
"json_rpc",
Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": block_count
},
})),
)
.await?;
Ok(())
}
}

View File

@@ -1,25 +1,14 @@
use core::fmt::Debug;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use std::io;
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn varint_len(varint: usize) -> usize {
pub fn varint_len(varint: usize) -> usize {
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
pub fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
@@ -27,130 +16,89 @@ pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()>
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
write_byte(&b, w)?;
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}
pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
pub fn write_scalar<W: io::Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
w.write_all(&scalar.to_bytes())
}
pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
w.write_all(&point.compress().to_bytes())
}
pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
pub fn write_raw_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
pub fn write_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_raw_vec(f, values, w)
write_raw_vec(f, &values, w)
}
pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
let mut res = [0; N];
pub fn read_byte<R: io::Read>(r: &mut R) -> io::Result<u8> {
let mut res = [0; 1];
r.read_exact(&mut res)?;
Ok(res)
Ok(res[0])
}
pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
Ok(read_bytes::<_, 1>(r)?[0])
}
pub(crate) fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
read_bytes(r).map(u16::from_le_bytes)
}
pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
read_bytes(r).map(u32::from_le_bytes)
}
pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}
pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
pub fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
if (bits != 0) && (b == 0) {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
}
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
}
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
// TODO: Error if bits exceed u64
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
Ok(res)
}
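As a worked example of the 7-bit varint format handled above: 300 splits into its low seven bits (0x2C) with the continuation bit set, followed by 0x02, so it serializes as [0xAC, 0x02]. A brief round-trip sketch (module name illustrative):

#[cfg(test)]
mod varint_sketch {
  use super::{write_varint, read_varint};

  #[test]
  fn varint_round_trip() {
    let mut buf = vec![];
    write_varint(&300, &mut buf).unwrap();
    assert_eq!(buf, vec![0xAC_u8, 0x02]);
    assert_eq!(read_varint(&mut buf.as_slice()).unwrap(), 300);
  }
}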
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
// for now. There are also further edge cases, as noted by
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
// reduction applied
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(read_bytes(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
let mut res = [0; 32];
r.read_exact(&mut res)?;
Ok(res)
}
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
let bytes = read_bytes(r)?;
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
// TODO: Potentially update to Monero's parsing rules on scalars/points, which accept any arbitrary 32 bytes
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(
read_32(r)?
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
.filter(EdwardsPoint::is_torsion_free)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
CompressedEdwardsY(
read_32(r)?
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
}
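To illustrate the canonicality rule documented above for read_scalar: 32 bytes encoding a value at or above the group order are rejected rather than reduced. A short sketch (module name illustrative):

#[cfg(test)]
mod canonical_encoding_sketch {
  use super::read_scalar;

  #[test]
  fn rejects_unreduced_scalar() {
    // 2^256 - 1 is far above the Ed25519 group order, so this must error instead of being reduced
    let unreduced = [0xFF; 32];
    let mut reader: &[u8] = &unreduced;
    assert!(read_scalar(&mut reader).is_err());
  }
}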
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = vec![];
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: usize, r: &mut R) -> io::Result<Vec<T>> {
let mut res = Vec::with_capacity(
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
);
for _ in 0 .. len {
res.push(f(r)?);
}
Ok(res)
}
pub(crate) fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
f: F,
r: &mut R,
) -> io::Result<[T; N]> {
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
}
pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
r: &mut R,
) -> io::Result<Vec<T>> {
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}

View File

@@ -1,176 +1,45 @@
use hex_literal::hex;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
use crate::{
random_scalar,
wallet::address::{Network, AddressType, AddressMeta, MoneroAddress},
};
use crate::wallet::address::{Network, AddressType, Address};
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
const STANDARD: &str =
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const STANDARD: &'static str = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &str =
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
pXSn88oBX35";
const INTEGRATED: &'static str = "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6MnpXSn88oBX35";
const SUB_SPEND: [u8; 32] =
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_SPEND: [u8; 32] = hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &str =
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
const SUBADDRESS: &'static str = "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
#[test]
fn standard_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
let addr = Address::from_str(STANDARD, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Standard);
assert!(!addr.meta.kind.is_subaddress());
assert_eq!(addr.meta.kind.payment_id(), None);
assert!(!addr.meta.kind.is_guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), STANDARD);
}
#[test]
fn integrated_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
let addr = Address::from_str(INTEGRATED, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
assert!(!addr.meta.kind.is_subaddress());
assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
assert!(!addr.meta.kind.is_guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), INTEGRATED);
}
#[test]
fn subaddress() {
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
let addr = Address::from_str(SUBADDRESS, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Subaddress);
assert!(addr.meta.kind.is_subaddress());
assert_eq!(addr.meta.kind.payment_id(), None);
assert!(!addr.meta.kind.is_guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
assert_eq!(addr.to_string(), SUBADDRESS);
}
#[test]
fn featured() {
for (network, first) in
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
{
for _ in 0 .. 100 {
let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
for features in 0 .. (1 << 3) {
const SUBADDRESS_FEATURE_BIT: u8 = 1;
const INTEGRATED_FEATURE_BIT: u8 = 1 << 1;
const GUARANTEED_FEATURE_BIT: u8 = 1 << 2;
let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;
let mut payment_id = [0; 8];
OsRng.fill_bytes(&mut payment_id);
let payment_id = Some(payment_id)
.filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
let meta = AddressMeta::new(network, kind);
let addr = MoneroAddress::new(meta, spend, view);
assert_eq!(addr.to_string().chars().next().unwrap(), first);
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.is_subaddress(), subaddress);
assert_eq!(addr.payment_id(), payment_id);
assert_eq!(addr.is_guaranteed(), guaranteed);
}
}
}
}
#[test]
fn featured_vectors() {
#[derive(serde::Deserialize)]
struct Vector {
address: String,
network: String,
spend: String,
view: String,
subaddress: bool,
integrated: bool,
payment_id: Option<[u8; 8]>,
guaranteed: bool,
}
let vectors = serde_json::from_str::<Vec<Vector>>(FEATURED_JSON).unwrap();
for vector in vectors {
let first = vector.address.chars().next().unwrap();
let network = match vector.network.as_str() {
"Mainnet" => {
assert_eq!(first, 'C');
Network::Mainnet
}
"Testnet" => {
assert_eq!(first, 'K');
Network::Testnet
}
"Stagenet" => {
assert_eq!(first, 'F');
Network::Stagenet
}
_ => panic!("Unknown network"),
};
let spend =
CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
let view =
CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.is_subaddress(), vector.subaddress);
assert_eq!(vector.integrated, vector.payment_id.is_some());
assert_eq!(addr.payment_id(), vector.payment_id);
assert_eq!(addr.is_guaranteed(), vector.guaranteed);
assert_eq!(
MoneroAddress::new(
AddressMeta::new(
network,
AddressType::Featured {
subaddress: vector.subaddress,
payment_id: vector.payment_id,
guaranteed: vector.guaranteed
}
),
spend,
view
)
.to_string(),
vector.address
);
}
}

View File

@@ -1,92 +0,0 @@
use hex_literal::hex;
use rand_core::OsRng;
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;
use crate::{
Commitment, random_scalar,
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
};
#[test]
fn bulletproofs_vector() {
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
let point = |point| CompressedEdwardsY(point).decompress().unwrap();
// Generated from Monero
assert!(Bulletproofs::Original(OriginalStruct {
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
L: vec![
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
],
R: vec![
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
],
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
})
.verify(
&mut OsRng,
&[
// For some reason, these vectors are * INV_EIGHT
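// mul_by_cofactor multiplies by 8, undoing the 1/8 factor baked into these points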
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
.mul_by_cofactor(),
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
.mul_by_cofactor(),
]
));
}
macro_rules! bulletproofs_tests {
($name: ident, $max: ident, $plus: literal) => {
#[test]
fn $name() {
// Create Bulletproofs for all possible output quantities
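// Monero caps transactions at 16 outputs, so 1 ..= 16 covers every possible quantity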
let mut verifier = BatchVerifier::new(16);
for i in 1 .. 17 {
let commitments = (1 ..= i)
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
.collect::<Vec<_>>();
let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
assert!(bp.verify(&mut OsRng, &commitments));
assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
}
assert!(verifier.verify_vartime());
}
#[test]
fn $max() {
// Check Bulletproofs errors if we try to prove for too many outputs
let mut commitments = vec![];
for _ in 0 .. 17 {
commitments.push(Commitment::new(Scalar::zero(), 0));
}
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
}
};
}
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);

View File

@@ -1,11 +1,7 @@
use core::ops::Deref;
#[cfg(feature = "multisig")]
use std_shims::sync::Arc;
#[cfg(feature = "multisig")]
use std::sync::RwLock;
use std::sync::{Arc, RwLock};
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use rand::{RngCore, rngs::OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
@@ -15,21 +11,16 @@ use transcript::{Transcript, RecommendedTranscript};
use frost::curve::Ed25519;
use crate::{
Commitment, random_scalar,
Commitment,
random_scalar,
wallet::Decoys,
ringct::{
generate_key_image,
clsag::{ClsagInput, Clsag},
},
ringct::{generate_key_image, clsag::{ClsagInput, Clsag}}
};
#[cfg(feature = "multisig")]
use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};
use crate::{frost::MultisigError, ringct::clsag::{ClsagDetails, ClsagMultisig}};
#[cfg(feature = "multisig")]
use frost::{
Participant,
tests::{key_gen, algorithm_machines, sign},
};
use frost::tests::{key_gen, algorithm_machines, sign};
const RING_LEN: u64 = 11;
const AMOUNT: u64 = 1337;
@@ -42,48 +33,48 @@ fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];
let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
let mut secrets = [Scalar::zero(), Scalar::zero()];
let mut ring = vec![];
for i in 0 .. RING_LEN {
let dest = Zeroizing::new(random_scalar(&mut OsRng));
let dest = random_scalar(&mut OsRng);
let mask = random_scalar(&mut OsRng);
let amount = if i == real {
secrets = (dest.clone(), mask);
AMOUNT
let amount;
if i == u64::from(real) {
secrets = [dest, mask];
amount = AMOUNT;
} else {
OsRng.next_u64()
};
ring
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
amount = OsRng.next_u64();
}
ring.push([&dest * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
}
let image = generate_key_image(&secrets.0);
let image = generate_key_image(secrets[0]);
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
vec![(
secrets.0,
&vec![(
secrets[0],
image,
ClsagInput::new(
Commitment::new(secrets.1, AMOUNT),
Commitment::new(secrets[1], AMOUNT),
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).collect(),
ring: ring.clone(),
},
)
.unwrap(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap()
)],
random_scalar(&mut OsRng),
msg,
)
.swap_remove(0);
msg
).swap_remove(0);
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
#[cfg(feature = "experimental")]
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
}
}
#[cfg(feature = "multisig")]
#[test]
fn clsag_multisig() {
fn clsag_multisig() -> Result<(), MultisigError> {
let keys = key_gen::<_, Ed25519>(&mut OsRng);
let randomness = random_scalar(&mut OsRng);
@@ -91,37 +82,45 @@ fn clsag_multisig() {
for i in 0 .. RING_LEN {
let dest;
let mask;
let amount = if i == u64::from(RING_INDEX) {
dest = keys[&Participant::new(1).unwrap()].group_key().0;
mask = randomness;
AMOUNT
} else {
let amount;
if i != u64::from(RING_INDEX) {
dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
mask = random_scalar(&mut OsRng);
OsRng.next_u64()
};
amount = OsRng.next_u64();
} else {
dest = keys[&1].group_key().0;
mask = randomness;
amount = AMOUNT;
}
ring.push([dest, Commitment::new(mask, amount).calculate()]);
}
let mask_sum = random_scalar(&mut OsRng);
let algorithm = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&Participant::new(1).unwrap()].group_key().0,
Arc::new(RwLock::new(Some(ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys { i: RING_INDEX, offsets: (1 ..= RING_LEN).collect(), ring: ring.clone() },
)
.unwrap(),
mask_sum,
)))),
);
sign(
&mut OsRng,
algorithm.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algorithm, &keys),
&[1; 32],
algorithm_machines(
&mut OsRng,
ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&1].group_key().0,
Arc::new(RwLock::new(Some(
ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap(),
mask_sum
)
)))
).unwrap(),
&keys
),
&[1; 32]
);
Ok(())
}

View File

@@ -0,0 +1,13 @@
use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use crate::{random_scalar, ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point}};
#[test]
fn hash_to_point() {
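// Confirm the pure-Rust hash_to_point matches the reference implementation on random points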
for _ in 0 .. 50 {
let point = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
assert_eq!(rust_hash_to_point(point), c_hash_to_point(point));
}
}

View File

@@ -1,4 +1,3 @@
mod hash_to_point;
mod clsag;
mod bulletproofs;
mod address;
mod seed;

View File

@@ -1,177 +0,0 @@
use zeroize::Zeroizing;
use rand_core::OsRng;
use curve25519_dalek::scalar::Scalar;
use crate::{
hash,
wallet::seed::{Seed, Language, classic::trim_by_lang},
};
#[test]
fn test_classic_seed() {
struct Vector {
language: Language,
seed: String,
spend: String,
view: String,
}
let vectors = [
Vector {
language: Language::Chinese,
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
},
Vector {
language: Language::English,
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
abnormal memoir nylon mostly building shrugged online ember northern \
ruby woes dauntless boil family illness inroads northern"
.into(),
spend: "c0af65c0dd837e666b9d0dfed62745f4df35aed7ea619b2798a709f0fe545403".into(),
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
},
Vector {
language: Language::Dutch,
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
.into(),
spend: "e2d2873085c447c2bc7664222ac8f7d240df3aeac137f5ff2022eaa629e5b10a".into(),
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
},
Vector {
language: Language::French,
seed: "poids vaseux tarte bazar poivre effet entier nuance \
sensuel ennui pacte osselet poudre battre alibi mouton \
stade paquet pliage gibier type question position projet pliage"
.into(),
spend: "2dd39ff1a4628a94b5c2ec3e42fb3dfe15c2b2f010154dc3b3de6791e805b904".into(),
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
},
Vector {
language: Language::Spanish,
seed: "minero ocupar mirar evadir octubre cal logro miope \
opaco disco ancla litio clase cuello nasal clase \
fiar avance deseo mente grumo negro cordón croqueta clase"
.into(),
spend: "ae2c9bebdddac067d73ec0180147fc92bdf9ac7337f1bcafbbe57dd13558eb02".into(),
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
},
Vector {
language: Language::German,
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
.into(),
spend: "79801b7a1b9796856e2397d862a113862e1fdc289a205e79d8d70995b276db06".into(),
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
},
Vector {
language: Language::Italian,
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
forzare meritare litigare lezione segreto evasione votare buio \
licenza cliente dorso natale crescere vento tutelare vetta evasione"
.into(),
spend: "5e7fd774eb00fa5877e2a8b4dc9c7ffe111008a3891220b56a6e49ac816d650a".into(),
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
},
Vector {
language: Language::Portuguese,
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
.into(),
spend: "13b3115f37e35c6aa1db97428b897e584698670c1b27854568d678e729200c0f".into(),
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
},
Vector {
language: Language::Japanese,
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
.into(),
spend: "c56e895cdb13007eda8399222974cdbab493640663804b93cbef3d8c3df80b0b".into(),
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
},
Vector {
language: Language::Russian,
seed: "шатер икра нация ехать получать инерция доза реальный \
рыжий таможня лопата душа веселый клетка атлас лекция \
обгонять паек наивный лыжный дурак стать ежик задача паек"
.into(),
spend: "7cb5492df5eb2db4c84af20766391cd3e3662ab1a241c70fc881f3d02c381f05".into(),
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
},
Vector {
language: Language::Esperanto,
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
.into(),
spend: "82ebf0336d3b152701964ed41df6b6e9a035e57fc98b84039ed0bd4611c58904".into(),
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
},
Vector {
language: Language::Lojban,
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
blabi darno dembi janli blabi fenki bukpu burcu blabi"
.into(),
spend: "e4f8c6819ab6cf792cebb858caabac9307fd646901d72123e0367ebc0a79c200".into(),
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
},
Vector {
language: Language::EnglishOld,
seed: "glorious especially puff son moment add youth nowhere \
throw glide grip wrong rhythm consume very swear \
bitter heavy eventually begin reason flirt type unable"
.into(),
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
},
];
for vector in vectors {
let trim_seed = |seed: &str| {
seed
.split_whitespace()
.map(|word| trim_by_lang(word, vector.language))
.collect::<Vec<_>>()
.join(" ")
};
// Test against Monero
{
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&vector.seed))).unwrap());
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
// For classical seeds, Monero directly uses the entropy as a spend key
assert_eq!(
Scalar::from_canonical_bytes(*seed.entropy()),
Scalar::from_canonical_bytes(spend)
);
let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
// Monero then derives the view key as H(spend)
assert_eq!(
Scalar::from_bytes_mod_order(hash(&spend)),
Scalar::from_canonical_bytes(view).unwrap()
);
assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
}
// Test against ourselves
{
let seed = Seed::new(&mut OsRng, vector.language);
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&seed.to_string()))).unwrap());
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
}
}
}

View File

@@ -1,230 +0,0 @@
[
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [46, 48, 134, 34, 245, 148, 243, 195],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [153, 176, 98, 204, 151, 27, 197, 168],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [88, 37, 149, 111, 171, 108, 120, 181],
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [125, 69, 155, 152, 140, 160, 157, 186],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [92, 225, 118, 220, 39, 3, 72, 51],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [20, 120, 47, 89, 72, 165, 233, 115],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [193, 149, 123, 214, 180, 205, 195, 91],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [205, 170, 65, 0, 51, 175, 251, 184],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [173, 149, 78, 64, 215, 211, 66, 170],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [254, 159, 186, 162, 1, 8, 156, 108],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [3, 115, 230, 129, 172, 108, 116, 235],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [94, 122, 63, 167, 209, 225, 14, 180],
"guaranteed": true
}
]

View File

@@ -1,83 +1,69 @@
use core::cmp::Ordering;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use crate::{hash, serialize::*, ringct::{RctPrunable, RctSignatures}};
use crate::{
Protocol, hash,
serialize::*,
ringct::{RctBase, RctPrunable, RctSignatures},
};
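// Fixed ring size enforced by Monero consensus when this code was written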
pub const RING_LEN: usize = 11;
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub enum Input {
Gen(u64),
ToKey { amount: Option<u64>, key_offsets: Vec<u64>, key_image: EdwardsPoint },
ToKey {
amount: u64,
key_offsets: Vec<u64>,
key_image: EdwardsPoint
}
}
impl Input {
// Worst-case predictive len
pub(crate) fn fee_weight(ring_len: usize) -> usize {
pub(crate) fn fee_weight() -> usize {
// Uses 1 byte for the VarInt amount due to amount being 0
// Uses 1 byte for the VarInt encoding of the length of the ring as well
1 + 1 + 1 + (8 * ring_len) + 32
1 + 1 + 1 + (8 * RING_LEN) + 32
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
Self::Gen(height) => {
Input::Gen(height) => {
w.write_all(&[255])?;
write_varint(height, w)
}
},
Self::ToKey { amount, key_offsets, key_image } => {
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
write_varint(&amount.unwrap_or(0), w)?;
write_varint(amount, w)?;
write_vec(write_varint, key_offsets, w)?;
write_point(key_image, w)
}
}
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
Ok(match read_byte(r)? {
255 => Self::Gen(read_varint(r)?),
2 => {
let amount = read_varint(r)?;
let amount = if (amount == 0) && interpret_as_rct { None } else { Some(amount) };
Self::ToKey {
amount,
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
let mut variant = [0];
r.read_exact(&mut variant)?;
Ok(
match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_torsion_free_point(r)?,
}
key_image: read_point(r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
_ => {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
})
)
}
}
// Doesn't bother moving to an enum for the unused Script classes
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Output {
pub amount: Option<u64>,
pub key: CompressedEdwardsY,
pub view_tag: Option<u8>,
pub amount: u64,
pub key: EdwardsPoint,
pub tag: Option<u8>
}
impl Output {
@@ -85,76 +71,64 @@ impl Output {
1 + 1 + 32 + 1
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
write_varint(&self.amount.unwrap_or(0), w)?;
w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
w.write_all(&self.key.to_bytes())?;
if let Some(view_tag) = self.view_tag {
w.write_all(&[view_tag])?;
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.amount, w)?;
w.write_all(&[2 + (if self.tag.is_some() { 1 } else { 0 })])?;
write_point(&self.key, w)?;
if let Some(tag) = self.tag {
w.write_all(&[tag])?;
}
Ok(())
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(8 + 1 + 32);
self.write(&mut res).unwrap();
res
}
pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Output> {
let amount = read_varint(r)?;
let amount = if interpret_as_rct {
if amount != 0 {
Err(io::Error::new(io::ErrorKind::Other, "RCT TX output wasn't 0"))?;
let mut tag = [0];
r.read_exact(&mut tag)?;
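// Output type 2 is a plain key; type 3 additionally carries a one-byte view tag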
if (tag[0] != 2) && (tag[0] != 3) {
Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused output type"))?;
}
Ok(
Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 { r.read_exact(&mut tag)?; Some(tag[0]) } else { None }
}
None
} else {
Some(amount)
};
let view_tag = match read_byte(r)? {
2 => false,
3 => true,
_ => Err(io::Error::new(
io::ErrorKind::Other,
"Tried to deserialize unknown/unused output type",
))?,
};
Ok(Self {
amount,
key: CompressedEdwardsY(read_bytes(r)?),
view_tag: if view_tag { Some(read_byte(r)?) } else { None },
})
)
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Timelock {
None,
Block(usize),
Time(u64),
Time(u64)
}
impl Timelock {
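// Monero packs the timelock into one integer: 0 means no timelock, values under 500,000,000
// are block heights, and anything larger is a Unix timestamp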
fn from_raw(raw: u64) -> Self {
fn from_raw(raw: u64) -> Timelock {
if raw == 0 {
Self::None
Timelock::None
} else if raw < 500_000_000 {
Self::Block(usize::try_from(raw).unwrap())
Timelock::Block(usize::try_from(raw).unwrap())
} else {
Self::Time(raw)
Timelock::Time(raw)
}
}
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub(crate) fn fee_weight() -> usize {
8
}
fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(
&match self {
Self::None => 0,
Self::Block(block) => (*block).try_into().unwrap(),
Self::Time(time) => *time,
Timelock::None => 0,
Timelock::Block(block) => (*block).try_into().unwrap(),
Timelock::Time(time) => *time
},
w,
w
)
}
}
@@ -162,216 +136,138 @@ impl Timelock {
impl PartialOrd for Timelock {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match (self, other) {
(Self::None, _) => Some(Ordering::Less),
(Self::Block(a), Self::Block(b)) => a.partial_cmp(b),
(Self::Time(a), Self::Time(b)) => a.partial_cmp(b),
_ => None,
(Timelock::None, _) => Some(Ordering::Less),
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
_ => None
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct TransactionPrefix {
pub version: u64,
pub timelock: Timelock,
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
pub extra: Vec<u8>,
pub extra: Vec<u8>
}
impl TransactionPrefix {
pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
// Assumes Timelock::None since this library won't let you create a TX with a timelock
1 + 1 +
varint_len(inputs) +
(inputs * Input::fee_weight(ring_len)) +
1 +
(outputs * Output::fee_weight()) +
varint_len(extra) +
extra
varint_len(inputs) + (inputs * Input::fee_weight()) +
// Only 16 outputs are possible under transactions by this lib
1 + (outputs * Output::fee_weight()) +
varint_len(extra) + extra
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.version, w)?;
self.timelock.write(w)?;
write_vec(Input::write, &self.inputs, w)?;
write_vec(Output::write, &self.outputs, w)?;
self.timelock.serialize(w)?;
write_vec(Input::serialize, &self.inputs, w)?;
write_vec(Output::serialize, &self.outputs, w)?;
write_varint(&self.extra.len().try_into().unwrap(), w)?;
w.write_all(&self.extra)
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = vec![];
self.write(&mut res).unwrap();
res
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
let version = read_varint(r)?;
// TODO: Create an enum out of version
if (version == 0) || (version > 2) {
Err(io::Error::new(io::ErrorKind::Other, "unrecognized transaction version"))?;
}
let timelock = Timelock::from_raw(read_varint(r)?);
let inputs = read_vec(|r| Input::read(version == 2, r), r)?;
if inputs.is_empty() {
Err(io::Error::new(io::ErrorKind::Other, "transaction had no inputs"))?;
}
let is_miner_tx = matches!(inputs[0], Input::Gen { .. });
let mut prefix = Self {
version,
timelock,
inputs,
outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
extra: vec![],
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<TransactionPrefix> {
let mut prefix = TransactionPrefix {
version: read_varint(r)?,
timelock: Timelock::from_raw(read_varint(r)?),
inputs: read_vec(Input::deserialize, r)?,
outputs: read_vec(Output::deserialize, r)?,
extra: vec![]
};
prefix.extra = read_vec(read_byte, r)?;
Ok(prefix)
}
pub fn hash(&self) -> [u8; 32] {
hash(&self.serialize())
let len = read_varint(r)?;
prefix.extra.resize(len.try_into().unwrap(), 0);
r.read_exact(&mut prefix.extra)?;
Ok(prefix)
}
}
/// Monero transaction. For version 1, rct_signatures still contains an accurate fee value.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Transaction {
pub prefix: TransactionPrefix,
pub signatures: Vec<Vec<(Scalar, Scalar)>>,
pub rct_signatures: RctSignatures,
pub rct_signatures: RctSignatures
}
impl Transaction {
pub(crate) fn fee_weight(
protocol: Protocol,
inputs: usize,
outputs: usize,
extra: usize,
) -> usize {
TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
RctSignatures::fee_weight(protocol, inputs, outputs)
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
TransactionPrefix::fee_weight(inputs, outputs, extra) + RctSignatures::fee_weight(inputs, outputs)
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.prefix.write(w)?;
if self.prefix.version == 1 {
for sigs in &self.signatures {
for sig in sigs {
write_scalar(&sig.0, w)?;
write_scalar(&sig.1, w)?;
}
}
Ok(())
} else if self.prefix.version == 2 {
self.rct_signatures.write(w)
} else {
panic!("Serializing a transaction with an unknown version");
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.prefix.serialize(w)?;
self.rct_signatures.serialize(w)
}
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(2048);
self.write(&mut res).unwrap();
res
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
let prefix = TransactionPrefix::read(r)?;
let mut signatures = vec![];
let mut rct_signatures = RctSignatures {
base: RctBase { fee: 0, encrypted_amounts: vec![], pseudo_outs: vec![], commitments: vec![] },
prunable: RctPrunable::Null,
};
if prefix.version == 1 {
signatures = prefix
.inputs
.iter()
.filter_map(|input| match input {
Input::ToKey { key_offsets, .. } => Some(
key_offsets
.iter()
.map(|_| Ok((read_scalar(r)?, read_scalar(r)?)))
.collect::<Result<_, io::Error>>(),
),
_ => None,
})
.collect::<Result<_, _>>()?;
rct_signatures.base.fee = prefix
.inputs
.iter()
.map(|input| match input {
Input::Gen(..) => 0,
Input::ToKey { amount, .. } => amount.unwrap(),
})
.sum::<u64>()
.saturating_sub(prefix.outputs.iter().map(|output| output.amount.unwrap()).sum());
} else if prefix.version == 2 {
rct_signatures = RctSignatures::read(
prefix
.inputs
.iter()
.map(|input| match input {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
let prefix = TransactionPrefix::deserialize(r)?;
Ok(
Transaction {
rct_signatures: RctSignatures::deserialize(
prefix.inputs.iter().map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len(),
})
.collect(),
prefix.outputs.len(),
r,
)?;
} else {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
}
Ok(Self { prefix, signatures, rct_signatures })
Input::ToKey { key_offsets, .. } => key_offsets.len()
}).collect(),
prefix.outputs.len(),
r
)?,
prefix
}
)
}
pub fn hash(&self) -> [u8; 32] {
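// v1 transactions hash their full serialization; v2 transactions hash the hash of three
// sub-hashes (prefix, RCT base, RCT prunable)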
let mut buf = Vec::with_capacity(2048);
let mut serialized = Vec::with_capacity(2048);
if self.prefix.version == 1 {
self.write(&mut buf).unwrap();
hash(&buf)
self.serialize(&mut serialized).unwrap();
hash(&serialized)
} else {
let mut hashes = Vec::with_capacity(96);
let mut sig_hash = Vec::with_capacity(96);
hashes.extend(self.prefix.hash());
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
hashes.extend(hash(&buf));
buf.clear();
self.rct_signatures.base.serialize(
&mut serialized,
self.rct_signatures.prunable.rct_type()
).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
hashes.extend(&match self.rct_signatures.prunable {
RctPrunable::Null => [0; 32],
RctPrunable::MlsagBorromean { .. } |
RctPrunable::MlsagBulletproofs { .. } |
RctPrunable::Clsag { .. } => {
self.rct_signatures.prunable.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
hash(&buf)
match self.rct_signatures.prunable {
RctPrunable::Null => serialized.resize(32, 0),
_ => {
self.rct_signatures.prunable.serialize(&mut serialized).unwrap();
serialized = hash(&serialized).to_vec();
}
});
}
sig_hash.extend(&serialized);
hash(&hashes)
hash(&sig_hash)
}
}
/// Calculate the hash of this transaction as needed for signing it.
pub fn signature_hash(&self) -> [u8; 32] {
let mut buf = Vec::with_capacity(2048);
let mut serialized = Vec::with_capacity(2048);
let mut sig_hash = Vec::with_capacity(96);
sig_hash.extend(self.prefix.hash());
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
sig_hash.extend(hash(&buf));
buf.clear();
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
sig_hash.extend(hash(&buf));
self.rct_signatures.prunable.signature_serialize(&mut serialized).unwrap();
sig_hash.extend(&hash(&serialized));
hash(&sig_hash)
}

View File

@@ -1,311 +1,152 @@
use core::{marker::PhantomData, fmt::Debug};
use std_shims::string::{String, ToString};
use std::string::ToString;
use zeroize::Zeroize;
use thiserror::Error;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::{EdwardsPoint, CompressedEdwardsY}};
use base58_monero::base58::{encode_check, decode_check};
/// The network this address is for.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
use crate::wallet::ViewPair;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Network {
Mainnet,
Testnet,
Stagenet,
Stagenet
}
/// The address type, supporting the officially documented addresses, along with
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AddressType {
Standard,
Integrated([u8; 8]),
Subaddress,
Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct SubaddressIndex {
pub(crate) account: u32,
pub(crate) address: u32,
}
impl SubaddressIndex {
pub const fn new(account: u32, address: u32) -> Option<Self> {
if (account == 0) && (address == 0) {
return None;
}
Some(Self { account, address })
}
pub const fn account(&self) -> u32 {
self.account
}
pub const fn address(&self) -> u32 {
self.address
}
}
/// Address specification. Used internally to create addresses.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressSpec {
Standard,
Integrated([u8; 8]),
Subaddress(SubaddressIndex),
Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
Subaddress
}
impl AddressType {
pub const fn is_subaddress(&self) -> bool {
matches!(self, Self::Subaddress) || matches!(self, Self::Featured { subaddress: true, .. })
}
pub const fn payment_id(&self) -> Option<[u8; 8]> {
if let Self::Integrated(id) = self {
Some(*id)
} else if let Self::Featured { payment_id, .. } = self {
*payment_id
} else {
None
}
}
pub const fn is_guaranteed(&self) -> bool {
matches!(self, Self::Featured { guaranteed: true, .. })
}
}
/// A type which returns the byte for a given address.
pub trait AddressBytes: Clone + Copy + PartialEq + Eq + Debug {
fn network_bytes(network: Network) -> (u8, u8, u8, u8);
}
/// Address bytes for Monero.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MoneroAddressBytes;
impl AddressBytes for MoneroAddressBytes {
fn network_bytes(network: Network) -> (u8, u8, u8, u8) {
fn network_bytes(network: Network) -> (u8, u8, u8) {
match network {
Network::Mainnet => (18, 19, 42, 70),
Network::Testnet => (53, 54, 63, 111),
Network::Stagenet => (24, 25, 36, 86),
Network::Mainnet => (18, 19, 42),
Network::Testnet => (53, 54, 63),
Network::Stagenet => (24, 25, 36)
}
}
}
/// Address metadata.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct AddressMeta<B: AddressBytes> {
_bytes: PhantomData<B>,
pub struct AddressMeta {
pub network: Network,
pub kind: AddressType,
pub guaranteed: bool
}
impl<B: AddressBytes> Zeroize for AddressMeta<B> {
fn zeroize(&mut self) {
self.network.zeroize();
self.kind.zeroize();
}
}
/// Error when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
#[derive(Clone, Error, Debug)]
pub enum AddressError {
#[cfg_attr(feature = "std", error("invalid address byte"))]
#[error("invalid address byte")]
InvalidByte,
#[cfg_attr(feature = "std", error("invalid address encoding"))]
#[error("invalid address encoding")]
InvalidEncoding,
#[cfg_attr(feature = "std", error("invalid length"))]
#[error("invalid length")]
InvalidLength,
#[cfg_attr(feature = "std", error("invalid key"))]
InvalidKey,
#[cfg_attr(feature = "std", error("unknown features"))]
UnknownFeatures,
#[cfg_attr(feature = "std", error("different network than expected"))]
#[error("different network than expected")]
DifferentNetwork,
#[error("invalid key")]
InvalidKey
}
impl<B: AddressBytes> AddressMeta<B> {
#[allow(clippy::wrong_self_convention)]
impl AddressMeta {
fn to_byte(&self) -> u8 {
let bytes = B::network_bytes(self.network);
match self.kind {
let bytes = AddressType::network_bytes(self.network);
let byte = match self.kind {
AddressType::Standard => bytes.0,
AddressType::Integrated(_) => bytes.1,
AddressType::Subaddress => bytes.2,
AddressType::Featured { .. } => bytes.3,
}
AddressType::Subaddress => bytes.2
};
byte | (if self.guaranteed { 1 << 7 } else { 0 })
}
/// Create an address's metadata.
pub const fn new(network: Network, kind: AddressType) -> Self {
Self { _bytes: PhantomData, network, kind }
}
// Returns an incomplete type in the case of Integrated addresses
fn from_byte(byte: u8) -> Result<AddressMeta, AddressError> {
let actual = byte & 0b01111111;
let guaranteed = (byte >> 7) == 1;
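// In this encoding, the top bit of the network byte flags a guaranteed address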
// Returns an incomplete instantiation in the case of Integrated/Featured addresses
fn from_byte(byte: u8) -> Result<Self, AddressError> {
let mut meta = None;
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
let (standard, integrated, subaddress, featured) = B::network_bytes(network);
if let Some(kind) = match byte {
_ if byte == standard => Some(AddressType::Standard),
_ if byte == integrated => Some(AddressType::Integrated([0; 8])),
_ if byte == subaddress => Some(AddressType::Subaddress),
_ if byte == featured => {
Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
}
_ => None,
let (standard, integrated, subaddress) = AddressType::network_bytes(network);
if let Some(kind) = match actual {
_ if actual == standard => Some(AddressType::Standard),
_ if actual == integrated => Some(AddressType::Integrated([0; 8])),
_ if actual == subaddress => Some(AddressType::Subaddress),
_ => None
} {
meta = Some(Self::new(network, kind));
meta = Some(AddressMeta { network, kind, guaranteed });
break;
}
}
meta.ok_or(AddressError::InvalidByte)
}
pub const fn is_subaddress(&self) -> bool {
self.kind.is_subaddress()
}
pub const fn payment_id(&self) -> Option<[u8; 8]> {
self.kind.payment_id()
}
pub const fn is_guaranteed(&self) -> bool {
self.kind.is_guaranteed()
}
}
/// A Monero address, composed of metadata and a spend/view key.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Address<B: AddressBytes> {
pub meta: AddressMeta<B>,
pub struct Address {
pub meta: AddressMeta,
pub spend: EdwardsPoint,
pub view: EdwardsPoint,
pub view: EdwardsPoint
}
impl<B: AddressBytes> Zeroize for Address<B> {
fn zeroize(&mut self) {
self.meta.zeroize();
self.spend.zeroize();
self.view.zeroize();
impl ViewPair {
pub fn address(&self, network: Network, kind: AddressType, guaranteed: bool) -> Address {
Address {
meta: AddressMeta {
network,
kind,
guaranteed
},
spend: self.spend,
view: &self.view * &ED25519_BASEPOINT_TABLE
}
}
}
impl<B: AddressBytes> ToString for Address<B> {
impl ToString for Address {
fn to_string(&self) -> String {
let mut data = vec![self.meta.to_byte()];
data.extend(self.spend.compress().to_bytes());
data.extend(self.view.compress().to_bytes());
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
// Technically should be a VarInt, yet we don't have enough features for that to be needed
data.push(
u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),
);
}
if let Some(id) = self.meta.kind.payment_id() {
if let AddressType::Integrated(id) = self.meta.kind {
data.extend(id);
}
encode_check(&data).unwrap()
}
}
impl<B: AddressBytes> Address<B> {
pub const fn new(meta: AddressMeta<B>, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
Self { meta, spend, view }
}
pub fn from_str_raw(s: &str) -> Result<Self, AddressError> {
impl Address {
pub fn from_str(s: &str, network: Network) -> Result<Self, AddressError> {
let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
if raw.len() < (1 + 32 + 32) {
if raw.len() == 1 {
Err(AddressError::InvalidLength)?;
}
let mut meta = AddressMeta::from_byte(raw[0])?;
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let mut read = 65;
if matches!(meta.kind, AddressType::Featured { .. }) {
if raw[read] >= (2 << 3) {
Err(AddressError::UnknownFeatures)?;
}
let subaddress = (raw[read] & 1) == 1;
let integrated = ((raw[read] >> 1) & 1) == 1;
let guaranteed = ((raw[read] >> 2) & 1) == 1;
meta.kind = AddressType::Featured {
subaddress,
payment_id: Some([0; 8]).filter(|_| integrated),
guaranteed,
};
read += 1;
if meta.network != network {
Err(AddressError::DifferentNetwork)?;
}
// Update read early so we can verify the length
if meta.kind.payment_id().is_some() {
read += 8;
}
if raw.len() != read {
let len = match meta.kind {
AddressType::Standard | AddressType::Subaddress => 65,
AddressType::Integrated(_) => 73
};
if raw.len() != len {
Err(AddressError::InvalidLength)?;
}
if let AddressType::Integrated(ref mut id) = meta.kind {
id.copy_from_slice(&raw[(read - 8) .. read]);
}
if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
id.copy_from_slice(&raw[(read - 8) .. read]);
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
if let AddressType::Integrated(ref mut payment_id) = meta.kind {
payment_id.copy_from_slice(&raw[65 .. 73]);
}
Ok(Self { meta, spend, view })
}
pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
Self::from_str_raw(s).and_then(|addr| {
if addr.meta.network == network {
Ok(addr)
} else {
Err(AddressError::DifferentNetwork)?
}
})
}
pub const fn network(&self) -> Network {
self.meta.network
}
pub const fn is_subaddress(&self) -> bool {
self.meta.is_subaddress()
}
pub const fn payment_id(&self) -> Option<[u8; 8]> {
self.meta.payment_id()
}
pub const fn is_guaranteed(&self) -> bool {
self.meta.is_guaranteed()
}
}
/// Instantiation of the Address type with Monero's network bytes.
pub type MoneroAddress = Address<MoneroAddressBytes>;
// Allow re-interpreting an arbitrary address as a Monero address so it can be used with the
// rest of this library. Doesn't use From as it was conflicting with From<T> for T.
impl MoneroAddress {
pub const fn from<B: AddressBytes>(address: Address<B>) -> Self {
Self::new(
AddressMeta::new(address.meta.network, address.meta.kind),
address.spend,
address.view,
)
Ok(Address { meta, spend, view })
}
}

View File

@@ -1,76 +1,51 @@
use std_shims::{sync::OnceLock, vec::Vec, collections::HashSet};
use std::{sync::Mutex, collections::HashSet};
#[cfg(not(feature = "std"))]
use std_shims::sync::Mutex;
#[cfg(feature = "std")]
use futures::lock::Mutex;
use zeroize::{Zeroize, ZeroizeOnDrop};
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
#[cfg(not(feature = "std"))]
use rand_distr::num_traits::Float;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{
wallet::SpendableOutput,
rpc::{RpcError, RpcConnection, Rpc},
};
use crate::{transaction::RING_LEN, wallet::SpendableOutput, rpc::{RpcError, Rpc}};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
const RECENT_WINDOW: usize = 15;
const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
#[allow(clippy::as_conversions)]
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;
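// Outputs in the last LOCK_WINDOW blocks aren't spendable, so sampled ages get adjusted around this span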
// TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
// TODO: Update this when scanning a block, where possible
static DISTRIBUTION_CELL: OnceLock<Mutex<Vec<u64>>> = OnceLock::new();
#[allow(non_snake_case)]
fn DISTRIBUTION() -> &'static Mutex<Vec<u64>> {
DISTRIBUTION_CELL.get_or_init(|| Mutex::new(Vec::with_capacity(3_000_000)))
const DECOYS: usize = RING_LEN - 1;
lazy_static! {
static ref GAMMA: Gamma<f64> = Gamma::new(19.28, 1.0 / 1.61).unwrap();
static ref DISTRIBUTION: Mutex<Vec<u64>> = Mutex::new(Vec::with_capacity(3000000));
}
#[allow(clippy::too_many_arguments, clippy::as_conversions)]
async fn select_n<'a, R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
async fn select_n<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc<RPC>,
distribution: &[u64],
rpc: &Rpc,
height: usize,
high: u64,
per_second: f64,
real: &[u64],
used: &mut HashSet<u64>,
count: usize,
count: usize
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
if height >= rpc.get_height().await? {
// TODO: Don't use InternalError for the caller's failure
Err(RpcError::InternalError("decoys being requested from too young blocks"))?;
}
#[cfg(test)]
let mut iters = 0;
let mut confirmed = Vec::with_capacity(count);
// Retries on failure. Retries are obvious as decoys, yet should be minimal
while confirmed.len() != count {
let remaining = count - confirmed.len();
let mut candidates = Vec::with_capacity(remaining);
while candidates.len() != remaining {
#[cfg(test)]
{
iters += 1;
// This is cheap and, on fresh chains, a lot of rounds may be needed
if iters == 100 {
Err(RpcError::InternalError("hit decoy selection round limit"))?;
}
iters += 1;
// This is cheap and, on fresh chains, thousands of rounds may be needed
if iters == 10000 {
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
// Use a gamma distribution
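// The sample is exponentiated into an age in seconds; (19.28, 1 / 1.61) are the parameters
// Monero's wallet2 uses for decoy selection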
let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
let mut age = GAMMA.sample(rng).exp();
if age > TIP_APPLICATION {
age -= TIP_APPLICATION;
} else {
@@ -80,6 +55,7 @@ async fn select_n<'a, R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
let o = (age * per_second) as u64;
if o < high {
let distribution = DISTRIBUTION.lock().unwrap();
let i = distribution.partition_point(|s| *s < (high - 1 - o));
let prev = i.saturating_sub(1);
let n = distribution[i] - distribution[prev];
@@ -94,28 +70,9 @@ async fn select_n<'a, R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
}
}
// If this is the first time we're requesting these outputs, include the real one as well
// Prevents the node we're connected to from having a list of known decoys and then seeing a
// TX which uses all of them, with one additional output (the true spend)
let mut real_indexes = HashSet::with_capacity(real.len());
if confirmed.is_empty() {
for real in real {
candidates.push(*real);
}
// Sort candidates so the real spends aren't the ones at the end
candidates.sort();
for real in real {
real_indexes.insert(candidates.binary_search(real).unwrap());
}
}
for (i, output) in rpc.get_unlocked_outputs(&candidates, height).await?.iter_mut().enumerate() {
// Don't include the real spend as a decoy, despite requesting it
if real_indexes.contains(&i) {
continue;
}
if let Some(output) = output.take() {
let outputs = rpc.get_outputs(&candidates, height).await?;
for i in 0 .. outputs.len() {
if let Some(output) = outputs[i] {
confirmed.push((candidates[i], output));
}
}
@@ -133,12 +90,11 @@ fn offset(ring: &[u64]) -> Vec<u64> {
res
}
/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Debug)]
pub struct Decoys {
pub i: u8,
pub offsets: Vec<u64>,
pub ring: Vec<[EdwardsPoint; 2]>,
pub ring: Vec<[EdwardsPoint; 2]>
}
impl Decoys {
@@ -146,43 +102,44 @@ impl Decoys {
self.offsets.len()
}
/// Select decoys using the same distribution as Monero.
#[allow(clippy::as_conversions)]
pub async fn select<R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
pub(crate) async fn select<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc<RPC>,
ring_len: usize,
rpc: &Rpc,
height: usize,
inputs: &[SpendableOutput],
) -> Result<Vec<Self>, RpcError> {
#[cfg(not(feature = "std"))]
let mut distribution = DISTRIBUTION().lock();
#[cfg(feature = "std")]
let mut distribution = DISTRIBUTION().lock().await;
let decoy_count = ring_len - 1;
inputs: &[SpendableOutput]
) -> Result<Vec<Decoys>, RpcError> {
// Convert the inputs in question to the raw output data
let mut real = Vec::with_capacity(inputs.len());
let mut outputs = Vec::with_capacity(inputs.len());
for input in inputs {
real.push(input.global_index);
outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
outputs.push((
rpc.get_o_indexes(input.tx).await?[usize::from(input.o)],
[input.key, input.commitment.calculate()]
));
}
if distribution.len() <= height {
let extension = rpc.get_output_distribution(distribution.len(), height).await?;
distribution.extend(extension);
let distribution_len = {
let distribution = DISTRIBUTION.lock().unwrap();
distribution.len()
};
if distribution_len <= height {
let extension = rpc.get_output_distribution(distribution_len, height).await?;
DISTRIBUTION.lock().unwrap().extend(extension);
}
// If asked to use an older height than previously asked, truncate to ensure accuracy
// Should never happen, yet risks desyncing if it did
distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height
let high = distribution[distribution.len() - 1];
let per_second = {
let blocks = distribution.len().min(BLOCKS_PER_YEAR);
let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
(outputs as f64) / ((blocks * BLOCK_TIME) as f64)
let high;
let per_second;
{
let mut distribution = DISTRIBUTION.lock().unwrap();
// If asked to use an older height than previously asked, truncate to ensure accuracy
// Should never happen, yet risks desyncing if it did
distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height
high = distribution[distribution.len() - 1];
per_second = {
let blocks = distribution.len().min(BLOCKS_PER_YEAR);
let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
(outputs as f64) / ((blocks * BLOCK_TIME) as f64)
};
};
let mut used = HashSet::<u64>::new();
@@ -190,32 +147,28 @@ impl Decoys {
used.insert(o.0);
}
// TODO: Create a TX with less than the target amount, as allowed by the protocol
if (high - MATURITY) < u64::try_from(inputs.len() * ring_len).unwrap() {
Err(RpcError::InternalError("not enough decoy candidates"))?;
// TODO: Simply create a TX with less than the target amount
if (high - MATURITY) < u64::try_from(inputs.len() * RING_LEN).unwrap() {
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
// We should almost never naturally generate an insane transaction, hence why this doesn't bother
// with an overage
let mut decoys = select_n(
rng,
rpc,
&distribution,
height,
high,
per_second,
&real,
&mut used,
inputs.len() * decoy_count,
)
.await?;
real.zeroize();
inputs.len() * DECOYS
).await?;
let mut res = Vec::with_capacity(inputs.len());
for o in outputs {
// Grab the decoys for this specific output
let mut ring = decoys.drain((decoys.len() - decoy_count) ..).collect::<Vec<_>>();
let mut ring = decoys.drain((decoys.len() - DECOYS) ..).collect::<Vec<_>>();
ring.push(o);
ring.sort_by(|a, b| a.0.cmp(&b.0));
@@ -225,54 +178,40 @@ impl Decoys {
// 500 outputs since while itself not being a sufficiently mature blockchain
// Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
// fine for us to not have perfectly matching rules, especially since this code will infinite
// loop if it can't determine sanity, which is possible with sufficient inputs on
// sufficiently small chains
if high > 500 {
// Make sure the TX passes the sanity check that the median output is within the last 40%
let target_median = high * 3 / 5;
while ring[ring_len / 2].0 < target_median {
while ring[RING_LEN / 2].0 < target_median {
// If it's not, update the bottom half with new values to ensure the median only moves up
#[allow(clippy::needless_collect)] // Needed for ownership reasons
for removed in ring.drain(0 .. (ring_len / 2)).collect::<Vec<_>>() {
for removed in ring.drain(0 .. (RING_LEN / 2)).collect::<Vec<_>>() {
// If we removed the real spend, add it back
if removed.0 == o.0 {
ring.push(o);
} else {
// We could choose not to remove this, saving CPU time and keeping low values as
// possibilities, yet it'd increase the amount of decoys required to create this
// transaction, and some removed outputs may be the best option (as we drop the first
// half, not just the bottom n)
used.remove(&removed.0);
}
}
// Select new outputs until we have a full sized ring again
ring.extend(
select_n(
rng,
rpc,
&distribution,
height,
high,
per_second,
&[],
&mut used,
ring_len - ring.len(),
)
.await?,
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?
);
ring.sort_by(|a, b| a.0.cmp(&b.0));
}
// The other sanity check rule is about duplicates, yet we already enforce unique ring
// members
}
res.push(Self {
res.push(Decoys {
// Binary searches for the real spend since we don't know where it sorted to
i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
ring: ring.iter().map(|output| output.1).collect(),
ring: ring.iter().map(|output| output.1).collect()
});
}
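For intuition on the median rule enforced above, here is a minimal, hypothetical sketch of the check a sorted ring must pass once the chain has over 500 spendable outputs; the helper name and the index values are illustrative, not part of this crate:

// Hypothetical sketch: with `high` spendable outputs on-chain, the median ring
// member's global output index must land in the newest 40% of outputs
// (i.e. be at least high * 3 / 5), mirroring the loop above.
fn passes_median_check(sorted_ring_indexes: &[u64], high: u64) -> bool {
  let target_median = high * 3 / 5;
  sorted_ring_indexes[sorted_ring_indexes.len() / 2] >= target_median
}

fn main() {
  let high = 1_000_000;
  // The median member (index 5 of 11) is 601_000 >= 600_000, so this ring passes
  let ring = [1_000, 50_000, 200_000, 400_000, 550_000, 601_000, 700_000,
    800_000, 900_000, 950_000, 999_000];
  assert!(passes_median_check(&ring, high));
}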


@@ -1,218 +0,0 @@
use core::ops::BitXor;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::serialize::{
varint_len, read_byte, read_bytes, read_varint, read_point, read_vec, write_byte, write_varint,
write_point, write_vec,
};
pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;
pub const PAYMENT_ID_MARKER: u8 = 0;
pub const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1;
// Used as it's the highest value not interpretable as a continued VarInt
pub const ARBITRARY_DATA_MARKER: u8 = 127;
// 1 byte is used for the marker
pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum PaymentId {
Unencrypted([u8; 32]),
Encrypted([u8; 8]),
}
impl BitXor<[u8; 8]> for PaymentId {
type Output = Self;
fn bitxor(self, bytes: [u8; 8]) -> Self {
match self {
// Don't perform the xor since this isn't intended to be encrypted with xor
Self::Unencrypted(_) => self,
Self::Encrypted(id) => {
Self::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())
}
}
}
}
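As a hedged illustration of the BitXor impl above (arbitrary byte values, assuming this snippet is compiled within the crate so PaymentId is in scope):

#[test]
fn encrypted_payment_id_xor_roundtrip() {
  let id = PaymentId::Encrypted([1, 2, 3, 4, 5, 6, 7, 8]);
  let mask = [8, 7, 6, 5, 4, 3, 2, 1];
  // XOR is an involution, so masking twice recovers the original ID
  assert_eq!((id ^ mask) ^ mask, id);
  // Unencrypted payment IDs pass through unchanged, per the impl above
  let plain = PaymentId::Unencrypted([0; 32]);
  assert_eq!(plain ^ mask, plain);
}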
impl PaymentId {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
Self::Unencrypted(id) => {
w.write_all(&[PAYMENT_ID_MARKER])?;
w.write_all(id)?;
}
Self::Encrypted(id) => {
w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?;
w.write_all(id)?;
}
}
Ok(())
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(match read_byte(r)? {
0 => Self::Unencrypted(read_bytes(r)?),
1 => Self::Encrypted(read_bytes(r)?),
_ => Err(io::Error::new(io::ErrorKind::Other, "unknown payment ID type"))?,
})
}
}
// Doesn't bother with padding nor MinerGate
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub enum ExtraField {
PublicKey(EdwardsPoint),
Nonce(Vec<u8>),
MergeMining(usize, [u8; 32]),
PublicKeys(Vec<EdwardsPoint>),
}
impl ExtraField {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
Self::PublicKey(key) => {
w.write_all(&[1])?;
w.write_all(&key.compress().to_bytes())?;
}
Self::Nonce(data) => {
w.write_all(&[2])?;
write_vec(write_byte, data, w)?;
}
Self::MergeMining(height, merkle) => {
w.write_all(&[3])?;
write_varint(&u64::try_from(*height).unwrap(), w)?;
w.write_all(merkle)?;
}
Self::PublicKeys(keys) => {
w.write_all(&[4])?;
write_vec(write_point, keys, w)?;
}
}
Ok(())
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(match read_byte(r)? {
1 => Self::PublicKey(read_point(r)?),
2 => Self::Nonce({
let nonce = read_vec(read_byte, r)?;
if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
Err(io::Error::new(io::ErrorKind::Other, "too long nonce"))?;
}
nonce
}),
3 => Self::MergeMining(
usize::try_from(read_varint(r)?)
.map_err(|_| io::Error::new(io::ErrorKind::Other, "varint for height exceeds usize"))?,
read_bytes(r)?,
),
4 => Self::PublicKeys(read_vec(read_point, r)?),
_ => Err(io::Error::new(io::ErrorKind::Other, "unknown extra field"))?,
})
}
}
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Extra(Vec<ExtraField>);
impl Extra {
pub fn keys(&self) -> Option<(EdwardsPoint, Option<Vec<EdwardsPoint>>)> {
let mut key = None;
let mut additional = None;
for field in &self.0 {
match field.clone() {
ExtraField::PublicKey(this_key) => key = key.or(Some(this_key)),
ExtraField::PublicKeys(these_additional) => {
additional = additional.or(Some(these_additional));
}
ExtraField::Nonce(_) | ExtraField::MergeMining(..) => (),
}
}
// Don't return any keys if this was non-standard and didn't include the primary key
key.map(|key| (key, additional))
}
pub fn payment_id(&self) -> Option<PaymentId> {
for field in &self.0 {
if let ExtraField::Nonce(data) = field {
return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
}
}
None
}
pub fn data(&self) -> Vec<Vec<u8>> {
let mut res = vec![];
for field in &self.0 {
if let ExtraField::Nonce(data) = field {
if data[0] == ARBITRARY_DATA_MARKER {
res.push(data[1 ..].to_vec());
}
}
}
res
}
pub(crate) fn new(key: EdwardsPoint, additional: Vec<EdwardsPoint>) -> Self {
let mut res = Self(Vec::with_capacity(3));
res.push(ExtraField::PublicKey(key));
if !additional.is_empty() {
res.push(ExtraField::PublicKeys(additional));
}
res
}
pub(crate) fn push(&mut self, field: ExtraField) {
self.0.push(field);
}
#[rustfmt::skip]
pub(crate) fn fee_weight(
outputs: usize,
additional: bool,
payment_id: bool,
data: &[Vec<u8>]
) -> usize {
// PublicKey, key
(1 + 32) +
// PublicKeys, length, additional keys
(if additional { 1 + 1 + (outputs * 32) } else { 0 }) +
// PaymentId (Nonce), length, encrypted, ID
(if payment_id { 1 + 1 + 1 + 8 } else { 0 }) +
// Nonce, length, ARBITRARY_DATA_MARKER, data
data.iter().map(|v| 1 + varint_len(1 + v.len()) + 1 + v.len()).sum::<usize>()
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
for field in &self.0 {
field.write(w)?;
}
Ok(())
}
pub fn serialize(&self) -> Vec<u8> {
let mut buf = vec![];
self.write(&mut buf).unwrap();
buf
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
let mut res = Self(vec![]);
let mut field;
while {
field = ExtraField::read(r);
field.is_ok()
} {
res.0.push(field.unwrap());
}
Ok(res)
}
}
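To make the fee_weight arithmetic above concrete, a small worked example (values assumed: two outputs, additional keys present, an encrypted payment ID, and no arbitrary data):

// Worked example of Extra::fee_weight's byte accounting; purely illustrative.
fn extra_weight_example() -> usize {
  let outputs = 2;
  let public_key = 1 + 32;                  // tag (1) + compressed key (32)
  let public_keys = 1 + 1 + (outputs * 32); // tag + length + additional keys
  let payment_id = 1 + 1 + 1 + 8;           // tag + nonce length + marker + encrypted ID
  public_key + public_keys + payment_id     // = 110 bytes of extra
}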


@@ -1,44 +1,25 @@
use core::ops::Deref;
use std_shims::collections::{HashSet, HashMap};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{
hash, hash_to_scalar, serialize::write_varint, ringct::EncryptedAmount, transaction::Input,
hash, hash_to_scalar,
serialize::write_varint,
transaction::Input
};
pub mod extra;
pub(crate) use extra::{PaymentId, ExtraField, Extra};
/// Seed creation and parsing functionality.
pub mod seed;
/// Address encoding and decoding functionality.
pub mod address;
use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};
mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};
pub use scan::SpendableOutput;
pub(crate) mod decoys;
pub(crate) use decoys::Decoys;
mod send;
pub use send::{Fee, TransactionError, Change, SignableTransaction, Eventuality};
#[cfg(feature = "std")]
pub use send::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
pub(crate) use send::InternalPayment;
pub use send::{Fee, TransactionError, SignableTransaction};
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> std::cmp::Ordering {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}
@@ -49,43 +30,34 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
Input::Gen(height) => { write_varint(&(*height).try_into().unwrap(), &mut u).unwrap(); },
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes())
}
}
hash(&u)
}
// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
// Scalar as an option
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
ecdh: EdwardsPoint,
o: usize,
) -> (u8, Scalar, [u8; 8]) {
// 8Ra
let mut output_derivation = ecdh.mul_by_cofactor().compress().to_bytes().to_vec();
let mut payment_id_xor = [0; 8];
payment_id_xor
.copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
pub(crate) fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
shared.extend((s * P).mul_by_cofactor().compress().to_bytes().to_vec());
// || o
write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();
write_varint(&o.try_into().unwrap(), &mut shared).unwrap();
// Hs()
hash_to_scalar(&shared)
}
let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];
pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())).to_le_bytes()
}
// uniqueness ||
let shared_key = if let Some(uniqueness) = uniqueness {
[uniqueness.as_ref(), &output_derivation].concat()
} else {
output_derivation
};
(view_tag, hash_to_scalar(&shared_key), payment_id_xor)
fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
u64::from_le_bytes(amount_encryption(u64::from_le_bytes(amount), key))
}
pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
@@ -94,175 +66,8 @@ pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
hash_to_scalar(&mask)
}
pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(hash(&amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
}
// TODO: Move this under EncryptedAmount?
fn amount_decryption(amount: &EncryptedAmount, key: Scalar) -> (Scalar, u64) {
match amount {
EncryptedAmount::Original { mask, amount } => {
#[cfg(feature = "experimental")]
{
let mask_shared_sec = hash(key.as_bytes());
let mask =
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
let amount_shared_sec = hash(&mask_shared_sec);
let amount_scalar =
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
// d2b from rctTypes.cpp
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
(mask, amount)
}
#[cfg(not(feature = "experimental"))]
{
let _ = mask;
let _ = amount;
todo!("decrypting a legacy monero transaction's amount")
}
}
EncryptedAmount::Compact { amount } => (
commitment_mask(key),
u64::from_le_bytes(amount_encryption(u64::from_le_bytes(*amount), key)),
),
}
}
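A minimal sketch of the keystream behaviour of amount_encryption above (an arbitrary example scalar, assuming crate-internal access since the function is pub(crate)):

// Sketch: amount encryption XORs the amount with the first 8 bytes of
// Hs("amount" || shared_key), so applying it twice returns the original amount.
use curve25519_dalek::scalar::Scalar;

fn amount_xor_roundtrip(amount: u64, shared_key: Scalar) -> u64 {
  let once = amount_encryption(amount, shared_key);
  u64::from_le_bytes(amount_encryption(u64::from_le_bytes(once), shared_key))
}
// amount_xor_roundtrip(1_000_000, key) == 1_000_000 for any key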
/// The private view key and public spend key, enabling scanning transactions.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, Copy)]
pub struct ViewPair {
spend: EdwardsPoint,
view: Zeroizing<Scalar>,
}
impl ViewPair {
pub const fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Self {
Self { spend, view }
}
pub const fn spend(&self) -> EdwardsPoint {
self.spend
}
pub fn view(&self) -> EdwardsPoint {
self.view.deref() * &ED25519_BASEPOINT_TABLE
}
fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
hash_to_scalar(&Zeroizing::new(
[
b"SubAddr\0".as_ref(),
Zeroizing::new(self.view.to_bytes()).as_ref(),
&index.account().to_le_bytes(),
&index.address().to_le_bytes(),
]
.concat(),
))
}
fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
let scalar = self.subaddress_derivation(index);
let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
let view = self.view.deref() * spend;
(spend, view)
}
/// Returns an address with the provided specification.
pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
let mut spend = self.spend;
let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;
// construct the address meta
let meta = match spec {
AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
AddressSpec::Integrated(payment_id) => {
AddressMeta::new(network, AddressType::Integrated(payment_id))
}
AddressSpec::Subaddress(index) => {
(spend, view) = self.subaddress_keys(index);
AddressMeta::new(network, AddressType::Subaddress)
}
AddressSpec::Featured { subaddress, payment_id, guaranteed } => {
if let Some(index) = subaddress {
(spend, view) = self.subaddress_keys(index);
}
AddressMeta::new(
network,
AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed },
)
}
};
MoneroAddress::new(meta, spend, view)
}
}
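For reference, a condensed sketch of the subaddress math used above; the function name is illustrative and the hash-to-scalar step is omitted (it's the "SubAddr\0" derivation shown earlier):

// Sketch: given the subaddress scalar m = Hs("SubAddr\0" || a || account || index),
// the subaddress spend key is B + m*G and its view key is a * (B + m*G).
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

fn subaddress_keys_sketch(a: &Scalar, spend_b: EdwardsPoint, m: Scalar) -> (EdwardsPoint, EdwardsPoint) {
  let spend = spend_b + (&m * &ED25519_BASEPOINT_TABLE);
  let view = a * spend;
  (spend, view)
}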
/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
pair: ViewPair,
// Also contains the spend key as None
pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}
impl Zeroize for Scanner {
fn zeroize(&mut self) {
self.pair.zeroize();
// These may not be effective, unfortunately
for (mut key, mut value) in self.subaddresses.drain() {
key.zeroize();
value.zeroize();
}
if let Some(ref mut burning_bug) = self.burning_bug.take() {
for mut output in burning_bug.drain() {
output.zeroize();
}
}
}
}
impl Drop for Scanner {
fn drop(&mut self) {
self.zeroize();
}
}
impl ZeroizeOnDrop for Scanner {}
impl Scanner {
/// Create a Scanner from a ViewPair.
///
/// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
///
/// When an output is successfully scanned, the output key MUST be saved to disk.
///
/// When a new scanner is created, ALL saved output keys must be passed in to be secure.
///
/// If None is passed, a modified shared key derivation is used which is immune to the burning
/// bug (specifically the Guaranteed feature from Featured Addresses).
pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Self {
let mut subaddresses = HashMap::new();
subaddresses.insert(pair.spend.compress(), None);
Self { pair, subaddresses, burning_bug }
}
/// Register a subaddress.
// There used to be an address function here, yet it wasn't safe. It could generate addresses
// incompatible with the Scanner. While we could return None for that, it'd create the issue
// of runtime failures when generating an address.
// Removing that API was the simplest option.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
let (spend, _) = self.pair.subaddress_keys(subaddress);
self.subaddresses.insert(spend.compress(), Some(subaddress));
}
pub spend: EdwardsPoint,
pub view: Scalar
}
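A hedged usage sketch of constructing the Scanner described above (the persistence layer and key material are placeholders, not part of this crate):

// Illustrative only: rebuild a Scanner with every previously seen output key,
// so the burning bug protection stays effective across restarts.
use std::collections::HashSet;
use curve25519_dalek::edwards::CompressedEdwardsY;

fn rebuild_scanner(pair: ViewPair, saved_output_keys: Vec<CompressedEdwardsY>) -> Scanner {
  let mut seen = HashSet::new();
  seen.extend(saved_output_keys);
  // Passing None instead would switch to the guaranteed (burning-bug-immune) derivation
  Scanner::from_view(pair, Some(seen))
}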


@@ -1,414 +1,154 @@
use core::ops::Deref;
use std_shims::{
vec::Vec,
io::{self, Read, Write},
use std::convert::TryFrom;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
use crate::{
Commitment,
serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
transaction::{Input, Timelock, Transaction},
block::Block,
rpc::{RpcError, RpcConnection, Rpc},
wallet::{
PaymentId, Extra, address::SubaddressIndex, Scanner, uniqueness, shared_key, amount_decryption,
},
serialize::{write_varint, read_32, read_scalar, read_point},
transaction::{Timelock, Transaction},
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask}
};
/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
#[derive(Clone, PartialEq, Debug)]
pub struct SpendableOutput {
pub tx: [u8; 32],
pub o: u8,
}
impl AbsoluteId {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.tx)?;
w.write_all(&[self.o])
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(32 + 1);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self { tx: read_bytes(r)?, o: read_byte(r)? })
}
}
/// The data contained with an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
pub key: EdwardsPoint,
/// Absolute difference between the spend key and the key in this output
pub key_offset: Scalar,
pub commitment: Commitment,
pub commitment: Commitment
}
impl OutputData {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.key.compress().to_bytes())?;
w.write_all(&self.key_offset.to_bytes())?;
w.write_all(&self.commitment.mask.to_bytes())?;
w.write_all(&self.commitment.amount.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(32 + 32 + 32 + 8);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self {
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(read_scalar(r)?, read_u64(r)?),
})
}
}
/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
/// The subaddress this output was sent to.
pub subaddress: Option<SubaddressIndex>,
/// The payment ID included with this output.
/// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't included.
// Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should
// have one, making it simplest for it to be as-is.
pub payment_id: [u8; 8],
/// Arbitrary data encoded in TX extra.
pub arbitrary_data: Vec<Vec<u8>>,
}
impl Metadata {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
if let Some(subaddress) = self.subaddress {
w.write_all(&[1])?;
w.write_all(&subaddress.account().to_le_bytes())?;
w.write_all(&subaddress.address().to_le_bytes())?;
} else {
w.write_all(&[0])?;
}
w.write_all(&self.payment_id)?;
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
for part in &self.arbitrary_data {
w.write_all(&[u8::try_from(part.len()).unwrap()])?;
w.write_all(part)?;
}
Ok(())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(1 + 8 + 1);
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
#[allow(clippy::if_then_some_else_none)] // The Result usage makes this invalid
let subaddress = if read_byte(r)? == 1 {
Some(
SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
)
} else {
None
};
Ok(Self {
subaddress,
payment_id: read_bytes(r)?,
arbitrary_data: {
let mut data = vec![];
for _ in 0 .. read_u32(r)? {
let len = read_byte(r)?;
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
}
data
},
})
}
}
/// A received output, defined as its absolute ID, data, and metadata.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ReceivedOutput {
pub absolute: AbsoluteId,
pub data: OutputData,
pub metadata: Metadata,
}
impl ReceivedOutput {
pub fn key(&self) -> EdwardsPoint {
self.data.key
}
pub fn key_offset(&self) -> Scalar {
self.data.key_offset
}
pub fn commitment(&self) -> Commitment {
self.data.commitment.clone()
}
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
&self.metadata.arbitrary_data
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.absolute.write(w)?;
self.data.write(w)?;
self.metadata.write(w)
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self {
absolute: AbsoluteId::read(r)?,
data: OutputData::read(r)?,
metadata: Metadata::read(r)?,
})
}
}
/// A spendable output, defined as a received output and its index on the Monero blockchain.
/// This index is dependent on the Monero blockchain and will only be known once the output is
/// included within a block. This may change if there's a reorganization.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
pub output: ReceivedOutput,
pub global_index: u64,
}
impl SpendableOutput {
/// Update the spendable output's global index. This is intended to be called if a
/// re-organization occurred.
pub async fn refresh_global_index<RPC: RpcConnection>(
&mut self,
rpc: &Rpc<RPC>,
) -> Result<(), RpcError> {
self.global_index =
rpc.get_o_indexes(self.output.absolute.tx).await?[usize::from(self.output.absolute.o)];
Ok(())
}
pub async fn from<RPC: RpcConnection>(
rpc: &Rpc<RPC>,
output: ReceivedOutput,
) -> Result<Self, RpcError> {
let mut output = Self { output, global_index: 0 };
output.refresh_global_index(rpc).await?;
Ok(output)
}
pub fn key(&self) -> EdwardsPoint {
self.output.key()
}
pub fn key_offset(&self) -> Scalar {
self.output.key_offset()
}
pub fn commitment(&self) -> Commitment {
self.output.commitment()
}
pub fn arbitrary_data(&self) -> &[Vec<u8>] {
self.output.arbitrary_data()
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.output.write(w)?;
w.write_all(&self.global_index.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
Ok(Self { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
}
}
/// A collection of timelocked outputs, either received or spendable.
#[derive(Zeroize)]
pub struct Timelocked<O: Clone + Zeroize>(Timelock, Vec<O>);
impl<O: Clone + Zeroize> Drop for Timelocked<O> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<O: Clone + Zeroize> ZeroizeOnDrop for Timelocked<O> {}
impl<O: Clone + Zeroize> Timelocked<O> {
pub struct Timelocked(Timelock, Vec<SpendableOutput>);
impl Timelocked {
pub fn timelock(&self) -> Timelock {
self.0
}
/// Return the outputs if they're not timelocked, or an empty vector if they are.
#[must_use]
pub fn not_locked(&self) -> Vec<O> {
pub fn not_locked(&self) -> Vec<SpendableOutput> {
if self.0 == Timelock::None {
return self.1.clone();
}
vec![]
}
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
#[must_use]
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<SpendableOutput>> {
// If the Timelocks are comparable, return the outputs if they're now unlocked
if self.0 <= timelock {
Some(self.1.clone())
} else {
None
}
self.0.partial_cmp(&timelock).filter(|_| self.0 <= timelock).map(|_| self.1.clone())
}
#[must_use]
pub fn ignore_timelock(&self) -> Vec<O> {
pub fn ignore_timelock(&self) -> Vec<SpendableOutput> {
self.1.clone()
}
}
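A brief usage sketch of the comparability rule above (Timelock variants assumed from this crate's transaction module; the heights and timestamp are arbitrary):

// Illustrative only: block-based locks compare against block heights, yet a
// block-based and a time-based lock aren't comparable, so unlocked() yields None.
fn timelock_comparability_sketch() {
  let lock = Timelock::Block(1_000_000);
  assert!(lock <= Timelock::Block(1_000_001));
  assert_eq!(lock.partial_cmp(&Timelock::Time(1_700_000_000)), None);
}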
impl Scanner {
/// Scan a transaction to discover the received outputs.
pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
// Only scan RCT TXs since we can only spend RCT outputs
if tx.prefix.version != 2 {
return Timelocked(tx.prefix.timelock, vec![]);
}
impl SpendableOutput {
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 1 + 32 + 32 + 40);
res.extend(&self.tx);
res.push(self.o);
res.extend(self.key.compress().to_bytes());
res.extend(self.key_offset.to_bytes());
res.extend(self.commitment.mask.to_bytes());
res.extend(self.commitment.amount.to_le_bytes());
res
}
let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()) else {
return Timelocked(tx.prefix.timelock, vec![]);
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<SpendableOutput> {
Ok(
SpendableOutput {
tx: read_32(r)?,
o: { let mut o = [0; 1]; r.read_exact(&mut o)?; o[0] },
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(
read_scalar(r)?,
{ let mut amount = [0; 8]; r.read_exact(&mut amount)?; u64::from_le_bytes(amount) }
)
}
)
}
}
impl Transaction {
pub fn scan(
&self,
view: ViewPair,
guaranteed: bool
) -> Timelocked {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
let extra = deserialize::<ExtraField>(&extra);
let pubkeys: Vec<EdwardsPoint>;
if let Ok(extra) = extra {
let mut m_pubkeys = vec![];
if let Some(key) = extra.tx_pubkey() {
m_pubkeys.push(key);
}
if let Some(keys) = extra.tx_additional_pubkeys() {
m_pubkeys.extend(&keys);
}
pubkeys = m_pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
} else {
return Timelocked(self.prefix.timelock, vec![]);
};
let Some((tx_key, additional)) = extra.keys() else {
return Timelocked(tx.prefix.timelock, vec![]);
};
let payment_id = extra.payment_id();
let mut res = vec![];
for (o, output) in tx.prefix.outputs.iter().enumerate() {
// https://github.com/serai-dex/serai/issues/106
if let Some(burning_bug) = self.burning_bug.as_ref() {
if burning_bug.contains(&output.key) {
continue;
}
}
let output_key = output.key.decompress();
if output_key.is_none() {
continue;
}
let output_key = output_key.unwrap();
for key in [Some(Some(&tx_key)), additional.as_ref().map(|additional| additional.get(o))] {
let Some(Some(key)) = key else {
if key == Some(None) {
// This is non-standard. There were additional keys, yet not one for this output
// https://github.com/monero-project/monero/
// blob/04a1e2875d6e35e27bb21497988a6c822d319c28/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L1062
// TODO: Should this return? Where does Monero set the trap handler for this exception?
continue;
} else {
break;
}
};
let (view_tag, shared_key, payment_id_xor) = shared_key(
if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
self.pair.view.deref() * key,
o,
for (o, output) in self.prefix.outputs.iter().enumerate() {
// TODO: This may be replaceable by pubkeys[o]
for pubkey in &pubkeys {
let key_offset = shared_key(
Some(uniqueness(&self.prefix.inputs)).filter(|_| guaranteed),
view.view,
pubkey,
o
);
let payment_id =
if let Some(PaymentId::Encrypted(id)) = payment_id.map(|id| id ^ payment_id_xor) {
id
} else {
payment_id_xor
};
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != view_tag {
continue;
}
}
// P - shared == spend
let subaddress = self
.subaddresses
.get(&(output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
if subaddress.is_none() {
if (output.key - (&key_offset * &ED25519_BASEPOINT_TABLE)) != view.spend {
continue;
}
let subaddress = *subaddress.unwrap();
// If the output key has torsion, subtracting the non-torsioned shared key will yield a
// torsioned key. We will not have a torsioned key in our HashMap of keys, so we wouldn't
// identify it as ours. If we did though, it'd enable bypassing the included burning bug
// protection
assert!(output_key.is_torsion_free());
let mut key_offset = shared_key;
if let Some(subaddress) = subaddress {
key_offset += self.pair.subaddress_derivation(subaddress);
}
// Since we've found an output to us, get its amount
let mut commitment = Commitment::zero();
// Miner transaction
if let Some(amount) = output.amount {
commitment.amount = amount;
if output.amount != 0 {
commitment.amount = output.amount;
// Regular transaction
} else {
let (mask, amount) = match tx.rct_signatures.base.encrypted_amounts.get(o) {
Some(amount) => amount_decryption(amount, shared_key),
let amount = match self.rct_signatures.base.ecdh_info.get(o) {
Some(amount) => amount_decryption(*amount, key_offset),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break,
None => break
};
// Rebuild the commitment to verify it
commitment = Commitment::new(mask, amount);
commitment = Commitment::new(commitment_mask(key_offset), amount);
// If this is a malicious commitment, move to the next output
// Any other R value will calculate to a different spend key and are therefore ignorable
if Some(&commitment.calculate()) != tx.rct_signatures.base.commitments.get(o) {
if Some(&commitment.calculate()) != self.rct_signatures.base.commitments.get(o) {
break;
}
}
if commitment.amount != 0 {
res.push(ReceivedOutput {
absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata { subaddress, payment_id, arbitrary_data: extra.data() },
res.push(SpendableOutput {
tx: self.hash(),
o: o.try_into().unwrap(),
key: output.key,
key_offset,
commitment
});
if let Some(burning_bug) = self.burning_bug.as_mut() {
burning_bug.insert(output.key);
}
}
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
@@ -416,59 +156,6 @@ impl Scanner {
}
}
Timelocked(tx.prefix.timelock, res)
}
/// Scan a block to obtain its spendable outputs. It's the presence in a block that gives these
/// transactions their global index, and this must be batched, as asking for the index of specific
/// transactions is a dead giveaway for which transactions you successfully scanned. This
/// function obtains the output indexes for the miner transaction, incrementing from there
/// instead.
pub async fn scan<RPC: RpcConnection>(
&mut self,
rpc: &Rpc<RPC>,
block: &Block,
) -> Result<Vec<Timelocked<SpendableOutput>>, RpcError> {
let mut index = rpc.get_o_indexes(block.miner_tx.hash()).await?[0];
let mut txs = vec![block.miner_tx.clone()];
txs.extend(rpc.get_transactions(&block.txs).await?);
let map = |mut timelock: Timelocked<ReceivedOutput>, index| {
if timelock.1.is_empty() {
None
} else {
Some(Timelocked(
timelock.0,
timelock
.1
.drain(..)
.map(|output| SpendableOutput {
global_index: index + u64::from(output.absolute.o),
output,
})
.collect(),
))
}
};
let mut res = vec![];
for tx in txs {
if let Some(timelock) = map(self.scan_transaction(&tx), index) {
res.push(timelock);
}
index += u64::try_from(
tx.prefix
.outputs
.iter()
// Filter to v2 miner TX outputs/RCT outputs since we're tracking the RCT output index
.filter(|output| {
((tx.prefix.version == 2) && matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..)))) ||
output.amount.is_none()
})
.count(),
)
.unwrap()
}
Ok(res)
Timelocked(self.prefix.timelock, res)
}
}
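A condensed sketch of the ownership test performed while scanning above; all names are illustrative stand-ins for the values computed in scan:

// Sketch: an output with one-time key P is ours when P - Hs(derivation || o)*G
// equals one of our (sub)address spend keys.
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

fn output_is_ours(output_key: EdwardsPoint, shared_key: Scalar, spend: EdwardsPoint) -> bool {
  (output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)) == spend
}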


@@ -1,271 +0,0 @@
use core::ops::Deref;
use std_shims::{
sync::OnceLock,
vec::Vec,
string::{String, ToString},
collections::HashMap,
};
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use crc::{Crc, CRC_32_ISO_HDLC};
use curve25519_dalek::scalar::Scalar;
use crate::{
random_scalar,
wallet::seed::{SeedError, Language},
};
pub(crate) const CLASSIC_SEED_LENGTH: usize = 24;
pub(crate) const CLASSIC_SEED_LENGTH_WITH_CHECKSUM: usize = 25;
fn trim(word: &str, len: usize) -> Zeroizing<String> {
Zeroizing::new(word.chars().take(len).collect())
}
struct WordList {
word_list: Vec<&'static str>,
word_map: HashMap<&'static str, usize>,
trimmed_word_map: HashMap<String, usize>,
unique_prefix_length: usize,
}
impl WordList {
fn new(word_list: Vec<&'static str>, prefix_length: usize) -> Self {
let mut lang = Self {
word_list,
word_map: HashMap::new(),
trimmed_word_map: HashMap::new(),
unique_prefix_length: prefix_length,
};
for (i, word) in lang.word_list.iter().enumerate() {
lang.word_map.insert(word, i);
lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
}
lang
}
}
static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
LANGUAGES_CELL.get_or_init(|| {
HashMap::from([
(Language::Chinese, WordList::new(include!("./classic/zh.rs"), 1)),
(Language::English, WordList::new(include!("./classic/en.rs"), 3)),
(Language::Dutch, WordList::new(include!("./classic/nl.rs"), 4)),
(Language::French, WordList::new(include!("./classic/fr.rs"), 4)),
(Language::Spanish, WordList::new(include!("./classic/es.rs"), 4)),
(Language::German, WordList::new(include!("./classic/de.rs"), 4)),
(Language::Italian, WordList::new(include!("./classic/it.rs"), 4)),
(Language::Portuguese, WordList::new(include!("./classic/pt.rs"), 4)),
(Language::Japanese, WordList::new(include!("./classic/ja.rs"), 3)),
(Language::Russian, WordList::new(include!("./classic/ru.rs"), 4)),
(Language::Esperanto, WordList::new(include!("./classic/eo.rs"), 4)),
(Language::Lojban, WordList::new(include!("./classic/jbo.rs"), 4)),
(Language::EnglishOld, WordList::new(include!("./classic/ang.rs"), 4)),
])
})
}
#[cfg(test)]
pub(crate) fn trim_by_lang(word: &str, lang: Language) -> String {
if lang == Language::EnglishOld {
word.to_string()
} else {
word.chars().take(LANGUAGES()[&lang].unique_prefix_length).collect()
}
}
fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
let mut trimmed_words = Zeroizing::new(String::new());
for w in words {
*trimmed_words += &trim(w, lang.unique_prefix_length);
}
let crc = Crc::<u32>::new(&CRC_32_ISO_HDLC);
let mut digest = crc.digest();
digest.update(trimmed_words.as_bytes());
usize::try_from(digest.finalize()).unwrap() % words.len()
}
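A hedged illustration of the checksum selection above; the helper name is invented and only the mechanism (CRC32 over the concatenated trimmed words, modulo the word count) matters:

// Sketch: the checksum word is the existing seed word at index
// CRC32(concatenated trimmed words) % word_count.
use crc::{Crc, CRC_32_ISO_HDLC};

fn checksum_position(trimmed_words: &[&str]) -> usize {
  let crc = Crc::<u32>::new(&CRC_32_ISO_HDLC);
  let mut digest = crc.digest();
  digest.update(trimmed_words.concat().as_bytes());
  usize::try_from(digest.finalize()).unwrap() % trimmed_words.len()
}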
// Convert a private key to a seed
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
let bytes = Zeroizing::new(key.to_bytes());
// get the language words
let words = &LANGUAGES()[&lang].word_list;
let list_len = u64::try_from(words.len()).unwrap();
// To store the found words & add the checksum word later.
let mut seed = Vec::with_capacity(25);
// convert to words
// 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
let mut segment = [0; 4];
let mut indices = [0; 4];
for i in 0 .. 8 {
// convert the next 4 bytes to a u32 & get the word indices
let start = i * 4;
// convert 4 bytes to a u32
segment.copy_from_slice(&bytes[start .. (start + 4)]);
// Actually convert to a u64 so we can add without overflowing
indices[0] = u64::from(u32::from_le_bytes(segment));
indices[1] = indices[0];
indices[0] /= list_len;
indices[2] = indices[0] + indices[1];
indices[0] /= list_len;
indices[3] = indices[0] + indices[2];
// append words to seed
for i in indices.iter().skip(1) {
let word = usize::try_from(i % list_len).unwrap();
seed.push(Zeroizing::new(words[word].to_string()));
}
}
segment.zeroize();
indices.zeroize();
// create a checksum word for all languages except old english
if lang != Language::EnglishOld {
let checksum = seed[checksum_index(&seed, &LANGUAGES()[&lang])].clone();
seed.push(checksum);
}
let mut res = Zeroizing::new(String::new());
for (i, word) in seed.iter().enumerate() {
if i != 0 {
*res += " ";
}
*res += word;
}
ClassicSeed(res)
}
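A worked sketch of the 4-bytes-to-3-words conversion above, reduced to pure arithmetic (the 1626-entry word lists themselves are omitted; the function name is illustrative):

// Sketch: each little-endian u32 becomes three indices into a 1626-word list,
// matching the index arithmetic in key_to_seed above.
fn u32_to_word_indices(segment: u32) -> [usize; 3] {
  let list_len = 1626u64;
  let val = u64::from(segment);
  let w1 = val % list_len;
  let w2 = ((val / list_len) + w1) % list_len;
  let w3 = ((val / list_len / list_len) + w2) % list_len;
  [w1 as usize, w2 as usize, w3 as usize]
}
// Example: u32_to_word_indices(0) == [0, 0, 0], i.e. the first word three times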
// Convert a seed to bytes
pub(crate) fn seed_to_bytes(words: &str) -> Result<(Language, Zeroizing<[u8; 32]>), SeedError> {
// get seed words
let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
if (words.len() != CLASSIC_SEED_LENGTH) && (words.len() != CLASSIC_SEED_LENGTH_WITH_CHECKSUM) {
panic!("invalid seed passed to seed_to_bytes");
}
// find the language
let (matched_indices, lang_name, lang) = (|| {
let has_checksum = words.len() == CLASSIC_SEED_LENGTH_WITH_CHECKSUM;
let mut matched_indices = Zeroizing::new(vec![]);
// Iterate through all the languages
'language: for (lang_name, lang) in LANGUAGES().iter() {
matched_indices.zeroize();
matched_indices.clear();
// Iterate through all the words and see if they're all present
for word in &words {
let trimmed = trim(word, lang.unique_prefix_length);
let word = if has_checksum { &trimmed } else { word };
if let Some(index) = if has_checksum {
lang.trimmed_word_map.get(word.deref())
} else {
lang.word_map.get(&word.as_str())
} {
matched_indices.push(*index);
} else {
continue 'language;
}
}
if has_checksum {
if lang_name == &Language::EnglishOld {
Err(SeedError::EnglishOldWithChecksum)?;
}
// exclude the last word when calculating a checksum.
let last_word = words.last().unwrap().clone();
let checksum = words[checksum_index(&words[.. words.len() - 1], lang)].clone();
// check the trimmed checksum and trimmed last word line up
if trim(&checksum, lang.unique_prefix_length) != trim(&last_word, lang.unique_prefix_length)
{
Err(SeedError::InvalidChecksum)?;
}
}
return Ok((matched_indices, lang_name, lang));
}
Err(SeedError::UnknownLanguage)?
})()?;
// convert to bytes
let mut res = Zeroizing::new([0; 32]);
let mut indices = Zeroizing::new([0; 4]);
for i in 0 .. 8 {
// read 3 indices at a time
let i3 = i * 3;
indices[1] = matched_indices[i3];
indices[2] = matched_indices[i3 + 1];
indices[3] = matched_indices[i3 + 2];
let inner = |i| {
let mut base = (lang.word_list.len() - indices[i] + indices[i + 1]) % lang.word_list.len();
// Shift the index over
for _ in 0 .. i {
base *= lang.word_list.len();
}
base
};
// set the last index
indices[0] = indices[1] + inner(1) + inner(2);
if (indices[0] % lang.word_list.len()) != indices[1] {
Err(SeedError::InvalidSeed)?;
}
let pos = i * 4;
let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
res[pos .. (pos + 4)].copy_from_slice(&bytes);
bytes.zeroize();
}
Ok((*lang_name, res))
}
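And the inverse direction, mirroring the `inner` closure above (again pure arithmetic; `list_len` would be the 1626-entry word list size):

// Sketch: recover the u32 segment from three word indices, as seed_to_bytes does.
fn word_indices_to_u32(w: [u64; 3], list_len: u64) -> u64 {
  let step = |a: u64, b: u64| (list_len - a + b) % list_len;
  w[0] + (step(w[0], w[1]) * list_len) + (step(w[1], w[2]) * list_len * list_len)
}
// Round-trip: feeding the indices from the forward sketch back in returns the original value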
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct ClassicSeed(Zeroizing<String>);
impl ClassicSeed {
pub(crate) fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Self {
key_to_seed(lang, Zeroizing::new(random_scalar(rng)))
}
pub fn from_string(words: Zeroizing<String>) -> Result<Self, SeedError> {
let (lang, entropy) = seed_to_bytes(&words)?;
// Make sure this is a valid scalar
let mut scalar = Scalar::from_canonical_bytes(*entropy);
if scalar.is_none() {
Err(SeedError::InvalidSeed)?;
}
scalar.zeroize();
// Call from_entropy so a trimmed seed becomes a full seed
Ok(Self::from_entropy(lang, entropy).unwrap())
}
pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Self> {
Scalar::from_canonical_bytes(*entropy).map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
}
pub(crate) fn to_string(&self) -> Zeroizing<String> {
self.0.clone()
}
pub(crate) fn entropy(&self) -> Zeroizing<[u8; 32]> {
seed_to_bytes(&self.0).unwrap().1
}
}

File diff suppressed because it is too large.

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.