1 Commits

Author SHA1 Message Date
Luke Parker
f029471f9f Initial work on a tables lib
Incomplete lib intended to offer tables for all cryptographic libraries 
expecting them. Right now, it creates the tables at runtime. While that 
would still offer improvements, ideally large tables are built at 
compile time and simply entered into memory at runtime.

My best idea for that was a linked list in the binary itself, where this 
app (at first call), reads a table stored to a section of data, then 
grabs the next reference to one and continues reading. The main issue 
with this idea, besides not yet researching how to encode data into the 
binary, was the fact the same table would be saved to disk multiple 
times under this simplistic model. An O(n) iteration when writing to the 
bin could potentially solve that.

Related https://github.com/serai-dex/serai/issues/41.
2022-07-16 16:25:55 -04:00
296 changed files with 5673 additions and 32053 deletions

3
.gitattributes vendored
View File

@@ -1,3 +0,0 @@
# Auto detect text files and perform LF normalization
* text=auto
* text eol=lf

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,50 +0,0 @@
name: build-dependencies
description: Installs build dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
require: true
default:
rust-toolchain:
description: "Rust toolchain to install"
required: false
default: stable
rust-components:
description: "Rust components to install"
required: false
default:
runs:
using: "composite"
steps:
- name: Install Protobuf
uses: arduino/setup-protoc@master
with:
repo-token: ${{ inputs.github-token }}
- name: Install solc
shell: bash
run: |
pip3 install solc-select==0.2.1
solc-select install 0.8.16
solc-select use 0.8.16
- name: Install Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ inputs.rust-toolchain }}
components: ${{ inputs.rust-components }}
- name: Get nightly version to use
id: nightly
shell: bash
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Install WASM toolchain
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ steps.nightly.outputs.version }}
targets: wasm32-unknown-unknown

View File

@@ -1,44 +0,0 @@
name: monero-wallet-rpc
description: Spawns a Monero Wallet-RPC.
inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.1.2
runs:
using: "composite"
steps:
- name: Monero Wallet RPC Cache
id: cache-monero-wallet-rpc
uses: actions/cache@v3
with:
path: monero-wallet-rpc
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Monero Wallet RPC
if: steps.cache-monero-wallet-rpc.outputs.cache-hit != 'true'
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
# to the contained folder not following the same naming scheme and
# requiring further expansion not worth doing right now
shell: bash
run: |
RUNNER_OS=${{ runner.os }}
RUNNER_ARCH=${{ runner.arch }}
RUNNER_OS=${RUNNER_OS,,}
RUNNER_ARCH=${RUNNER_ARCH,,}
RUNNER_OS=linux
RUNNER_ARCH=x64
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
wget https://downloads.getmonero.org/cli/$FILE
tar -xvf $FILE
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monero-wallet-rpc monero-wallet-rpc
- name: Monero Wallet RPC
shell: bash
run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach

View File

@@ -1,44 +0,0 @@
name: monero-regtest
description: Spawns a regtest Monero daemon
inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.1.2
runs:
using: "composite"
steps:
- name: Monero Daemon Cache
id: cache-monerod
uses: actions/cache@v3
with:
path: monerod
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Monero Daemon
if: steps.cache-monerod.outputs.cache-hit != 'true'
# Calculates OS/ARCH to demonstrate it, yet then locks to linux-x64 due
# to the contained folder not following the same naming scheme and
# requiring further expansion not worth doing right now
shell: bash
run: |
RUNNER_OS=${{ runner.os }}
RUNNER_ARCH=${{ runner.arch }}
RUNNER_OS=${RUNNER_OS,,}
RUNNER_ARCH=${RUNNER_ARCH,,}
RUNNER_OS=linux
RUNNER_ARCH=x64
FILE=monero-$RUNNER_OS-$RUNNER_ARCH-${{ inputs.version }}.tar.bz2
wget https://downloads.getmonero.org/cli/$FILE
tar -xvf $FILE
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
- name: Monero Regtest Daemon
shell: bash
run: ./monerod --regtest --offline --fixed-difficulty=1 --detach

View File

@@ -1,34 +0,0 @@
name: test-dependencies
description: Installs test dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
require: true
default:
monero-version:
description: "Monero version to download and run as a regtest node"
required: false
default: v0.18.0.0
runs:
using: "composite"
steps:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@v1
with:
version: nightly
- name: Run a Monero Regtest Node
uses: ./.github/actions/monero
with:
version: ${{ inputs.monero-version }}
- name: Run a Monero Wallet-RPC
uses: ./.github/actions/monero-wallet-rpc

View File

@@ -1 +0,0 @@
nightly-2022-12-01

View File

@@ -1,27 +0,0 @@
name: Daily Deny Check
on:
schedule:
- cron: "0 0 * * *"
jobs:
deny:
name: Run cargo deny
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Advisory Cache
uses: actions/cache@v3
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo
uses: dtolnay/rust-toolchain@stable
- name: Install cargo deny
run: cargo install --locked cargo-deny
- name: Run cargo deny
run: cargo deny -L error --all-features check

View File

@@ -1,56 +0,0 @@
name: Monero Tests
on:
push:
branches:
- develop
paths:
- "coins/monero/**"
pull_request:
paths:
- "coins/monero/**"
jobs:
# Only run these once since they will be consistent regardless of any node
unit-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Unit Tests Without Features
run: cargo test --package monero-serai --lib
# Doesn't run unit tests with features as the tests workflow will
integration-tests:
runs-on: ubuntu-latest
# Test against all supported protocol versions
strategy:
matrix:
version: [v0.17.3.2, v0.18.1.2]
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features
# https://github.com/rust-lang/cargo/issues/8396
run: cargo test --package monero-serai --test '*'
- name: Run Integration Tests
# Don't run if the tests workflow also will
if: ${{ matrix.version != 'v0.18.1.2' }}
run: |
cargo test --package monero-serai --all-features --test '*'
cargo test --package serai-processor monero

View File

@@ -1,53 +0,0 @@
name: Monthly Nightly Update
on:
schedule:
- cron: "0 0 1 * *"
jobs:
update:
name: Update nightly
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: "recursive"
- name: Write nightly version
run: echo $(date +"nightly-%Y-%m"-01) > .github/nightly-version
- name: Create the commit
run: |
git config user.name "GitHub Actions"
git config user.email "<>"
git checkout -b $(date +"nightly-%Y-%m")
git add .github/nightly-version
git commit -m "Update nightly"
git push -u origin $(date +"nightly-%Y-%m")
- name: Pull Request
uses: actions/github-script@v6
with:
script: |
const { repo, owner } = context.repo;
const result = await github.rest.pulls.create({
title: (new Date()).toLocaleString(
false,
{ month: "long", year: "numeric" }
) + " - Rust Nightly Update",
owner,
repo,
head: "nightly-" + (new Date()).toISOString().split("-").splice(0, 2).join("-"),
base: "develop",
body: "PR auto-generated by a GitHub workflow."
});
github.rest.issues.addLabels({
owner,
repo,
issue_number: result.data.number,
labels: ["improvement"]
});

View File

@@ -1,80 +0,0 @@
name: Tests
on:
push:
branches:
- develop
pull_request:
workflow_dispatch:
jobs:
clippy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get nightly version to use
id: nightly
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
# Clippy requires nightly due to serai-runtime requiring it
rust-toolchain: ${{ steps.nightly.outputs.version }}
rust-components: clippy
- name: Run Clippy
run: cargo clippy --all-features --tests -- -D warnings -A dead_code
deny:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Advisory Cache
uses: actions/cache@v3
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
- name: Install cargo
uses: dtolnay/rust-toolchain@stable
- name: Install cargo deny
run: cargo install --locked cargo-deny
- name: Run cargo deny
run: cargo deny -L error --all-features check
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: cargo test --all-features
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Get nightly version to use
id: nightly
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Install rustfmt
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ steps.nightly.outputs.version }}
components: rustfmt
- name: Run rustfmt
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check

2
.gitignore vendored
View File

@@ -1,2 +1,2 @@
target
.vscode
Cargo.lock

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "coins/monero/c/monero"]
path = coins/monero/c/monero
url = https://github.com/monero-project/monero

View File

@@ -1,16 +0,0 @@
tab_spaces = 2
max_width = 100
# Let the developer decide based on the 100 char line limit
use_small_heuristics = "Max"
error_on_line_overflow = true
error_on_unformatted = true
imports_granularity = "Crate"
reorder_imports = false
reorder_modules = false
unstable_features = true
spaces_around_ranges = true
binop_separator = "Back"

View File

@@ -1,37 +0,0 @@
# Contributing
Contributions come in a variety of forms. Developing Serai, helping document it,
using its libraries in another project, using and testing it, and simply sharing
it are all valuable ways of contributing.
This document will specifically focus on contributions to this repository in the
form of code and documentation.
### Rules
- Stable native Rust, nightly wasm and tools.
- `cargo fmt` must be used.
- `cargo clippy` must pass, except for the ignored rules (`type_complexity` and
`dead_code`).
- The CI must pass.
- Only use uppercase variable names when relevant to cryptography.
- Use a two-space ident when possible.
- Put a space after comment markers.
- Don't use multiple newlines between sections of code.
- Have a newline before EOF.
### Guidelines
- Sort inputs as core, std, third party, and then Serai.
- Comment code reasonably.
- Include tests for new features.
- Sign commits.
### Submission
All submissions should be through GitHub. Contributions to a crate will be
licensed according to the crate's existing license, with the crate's copyright
holders (distinct from authors) having the right to re-license the crate via a
unanimous decision.

10878
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,57 +1,22 @@
[workspace]
members = [
"common/zalloc",
"crypto/transcript",
"crypto/ff-group-tests",
"crypto/dalek-ff-group",
"crypto/ed448",
"crypto/ciphersuite",
"crypto/tables",
"crypto/multiexp",
"crypto/schnorr",
"crypto/dleq",
"crypto/dkg",
"crypto/frost",
"coins/ethereum",
"coins/monero/generators",
"coins/monero",
"processor",
"substrate/serai/primitives",
"substrate/validator-sets/primitives",
"substrate/validator-sets/pallet",
"substrate/tendermint/machine",
"substrate/tendermint/primitives",
"substrate/tendermint/client",
"substrate/tendermint/pallet",
"substrate/runtime",
"substrate/node",
"substrate/consensus",
"substrate/node"
]
# Always compile Monero (and a variety of dependencies) with optimizations due
# to the unoptimized performance of Bulletproofs
[profile.dev.package]
subtle = { opt-level = 3 }
curve25519-dalek = { opt-level = 3 }
ff = { opt-level = 3 }
group = { opt-level = 3 }
crypto-bigint = { opt-level = 3 }
dalek-ff-group = { opt-level = 3 }
minimal-ed448 = { opt-level = 3 }
multiexp = { opt-level = 3 }
monero-serai = { opt-level = 3 }
[profile.release]
panic = "unwind"

View File

@@ -1,8 +0,0 @@
Serai crates are licensed under one of two licenses, either MIT or AGPL-3.0,
depending on the crate in question. Each crate declares their license in their
`Cargo.toml` and includes a `LICENSE` file detailing its status. Additionally,
a full copy of the AGPL-3.0 License is included in the root of this repository
as a reference text. This copy should be provided with any distribution of a
crate licensed under the AGPL-3.0, as per its terms.
The GitHub actions (`.github/actions`) are licensed under the MIT license.

View File

@@ -1,39 +1,22 @@
# Serai
Serai is a new DEX, built from the ground up, initially planning on listing
Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig
Bitcoin, Ethereum, Monero, DAI, and USDC, offering a liquidity pool trading
experience. Funds are stored in an economically secured threshold multisig
wallet.
[Getting Started](docs/Getting%20Started.md)
### Layout
- `docs`: Documentation on the Serai protocol.
- `docs` - Documentation on the Serai protocol.
- `common`: Crates containing utilities common to a variety of areas under
Serai, none neatly fitting under another category.
- `coins` - Various coin libraries intended for usage in Serai yet also by the
wider community. This means they will always support the functionality Serai
needs, yet won't disadvantage other use cases when possible.
- `crypto`: A series of composable cryptographic libraries built around the
- `crypto` - A series of composable cryptographic libraries built around the
`ff`/`group` APIs achieving a variety of tasks. These range from generic
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
needed for Bitcoin-Monero atomic swaps.
- `coins`: Various coin libraries intended for usage in Serai yet also by the
wider community. This means they will always support the functionality Serai
needs, yet won't disadvantage other use cases when possible.
- `processor`: A generic chain processor to process data for Serai and process
- `processor` - A generic chain processor to process data for Serai and process
events from Serai, executing transactions as expected and needed.
- `substrate`: Substrate crates used to instantiate the Serai network.
- `deploy`: Scripts to deploy a Serai node/test environment.
### Links
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
- [Matrix](https://matrix.to/#/#serai:matrix.org):
https://matrix.to/#/#serai:matrix.org

View File

@@ -1,3 +0,0 @@
# solidity build outputs
cache
artifacts

View File

@@ -1,37 +0,0 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
hex-literal = "0.3"
thiserror = "1"
rand_core = "0.6"
serde_json = "1.0"
serde = "1.0"
sha3 = "0.10"
group = "0.12"
k256 = { version = "0.11", features = ["arithmetic", "keccak256", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
eyre = "0.6"
ethers = { version = "1", features = ["abigen", "ethers-solc"] }
[build-dependencies]
ethers-solc = "1"
[dev-dependencies]
tokio = { version = "1", features = ["macros"] }

View File

@@ -1,15 +0,0 @@
AGPL-3.0-only license
Copyright (c) 2022-2023 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@@ -1,9 +0,0 @@
# Ethereum
This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.
### Dependencies
- solc
- [Foundry](https://github.com/foundry-rs/foundry)

View File

@@ -1,16 +0,0 @@
use ethers_solc::{Project, ProjectPathsConfig};
fn main() {
println!("cargo:rerun-if-changed=contracts");
println!("cargo:rerun-if-changed=artifacts");
// configure the project with all its paths, solc, cache etc.
let project = Project::builder()
.paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
.build()
.unwrap();
project.compile().unwrap();
// Tell Cargo that if a source file changes, to rerun this build script.
project.rerun_if_sources_changed();
}

View File

@@ -1,36 +0,0 @@
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
// secp256k1 group order
uint256 constant public Q =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
// parity := public key y-coord parity (27 or 28)
// px := public key x-coord
// message := 32-byte message
// s := schnorr signature
// e := schnorr signature challenge
function verify(
uint8 parity,
bytes32 px,
bytes32 message,
bytes32 s,
bytes32 e
) public view returns (bool) {
// ecrecover = (m, v, r, s);
bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));
require(sp != 0);
// the ecrecover precompile implementation checks that the `r` and `s`
// inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
// check if they're zero.
address R = ecrecover(sp, parity, px, ep);
require(R != address(0), "ecrecover failed");
return e == keccak256(
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
);
}
}

View File

@@ -1,52 +0,0 @@
use crate::crypto::ProcessedSignature;
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::{eyre, Result};
use std::fs::File;
use std::sync::Arc;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum EthereumError {
#[error("failed to verify Schnorr signature")]
VerificationError,
}
abigen!(
Schnorr,
"./artifacts/Schnorr.sol/Schnorr.json",
event_derives(serde::Deserialize, serde::Serialize),
);
pub async fn deploy_schnorr_verifier_contract(
client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
let path = "./artifacts/Schnorr.sol/Schnorr.json";
let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
let abi = artifact.abi.unwrap();
let bin = artifact.bytecode.unwrap().object;
let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
let contract = factory.deploy(())?.send().await?;
let contract = Schnorr::new(contract.address(), client);
Ok(contract)
}
pub async fn call_verify(
contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
params: &ProcessedSignature,
) -> Result<()> {
if contract
.verify(
params.parity + 27,
params.px.to_bytes().into(),
params.message,
params.s.to_bytes().into(),
params.e.to_bytes().into(),
)
.call()
.await?
{
Ok(())
} else {
Err(eyre!(EthereumError::VerificationError))
}
}

View File

@@ -1,105 +0,0 @@
use sha3::{Digest, Keccak256};
use group::Group;
use k256::{
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint, DecompressPoint},
AffinePoint, ProjectivePoint, Scalar, U256,
};
use frost::{algorithm::Hram, curve::Secp256k1};
pub fn keccak256(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).try_into().unwrap()
}
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar::from_uint_reduced(U256::from_be_slice(&keccak256(data)))
}
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
let encoded_point = point.to_encoded_point(false);
keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}
pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
if r.is_zero().into() || s.is_zero().into() {
return None;
}
#[allow(non_snake_case)]
let R = AffinePoint::decompress(&r.to_bytes(), v.into());
#[allow(non_snake_case)]
if let Some(R) = Option::<AffinePoint>::from(R) {
#[allow(non_snake_case)]
let R = ProjectivePoint::from(R);
let r = r.invert().unwrap();
let u1 = ProjectivePoint::GENERATOR * (-message * r);
let u2 = R * (s * r);
let key: ProjectivePoint = u1 + u2;
if !bool::from(key.is_identity()) {
return Some(address(&key));
}
}
None
}
#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
#[allow(non_snake_case)]
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
let a_encoded_point = A.to_encoded_point(true);
let mut a_encoded = a_encoded_point.as_ref().to_owned();
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
let mut data = address(R).to_vec();
data.append(&mut a_encoded);
data.append(&mut m.to_vec());
Scalar::from_uint_reduced(U256::from_be_slice(&keccak256(&data)))
}
}
pub struct ProcessedSignature {
pub s: Scalar,
pub px: Scalar,
pub parity: u8,
pub message: [u8; 32],
pub e: Scalar,
}
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
) -> (Scalar, Scalar) {
let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
let sr = processed_sig.s.mul(&processed_sig.px).negate();
let er = processed_sig.e.mul(&processed_sig.px).negate();
(sr, er)
}
#[allow(non_snake_case)]
pub fn process_signature_for_contract(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
) -> ProcessedSignature {
let encoded_pk = A.to_encoded_point(true);
let px = &encoded_pk.as_ref()[1 .. 33];
let px_scalar = Scalar::from_uint_reduced(U256::from_be_slice(px));
let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
ProcessedSignature {
s,
px: px_scalar,
parity: &encoded_pk.as_ref()[0] - 2,
#[allow(non_snake_case)]
message: m,
e,
}
}

View File

@@ -1,2 +0,0 @@
pub mod contract;
pub mod crypto;

View File

@@ -1,70 +0,0 @@
use std::{convert::TryFrom, sync::Arc, time::Duration};
use rand_core::OsRng;
use k256::{elliptic_curve::bigint::ArrayEncoding, U256};
use ethers::{
prelude::*,
utils::{keccak256, Anvil, AnvilInstance},
};
use frost::{
curve::Secp256k1,
algorithm::Schnorr as Algo,
tests::{key_gen, algorithm_machines, sign},
};
use ethereum_serai::{
crypto,
contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
};
async fn deploy_test_contract(
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
let anvil = Anvil::new().spawn();
let wallet: LocalWallet = anvil.keys()[0].clone().into();
let provider =
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());
(chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
}
#[tokio::test]
async fn test_deploy_contract() {
deploy_test_contract().await;
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (chain_id, _anvil, contract) = deploy_test_contract().await;
let chain_id = U256::from(chain_id);
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&1].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = Algo::<Secp256k1, crypto::EthereumHram>::new();
let sig = sign(
&mut OsRng,
algo.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algo, &keys),
full_message,
);
let mut processed_sig =
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
call_verify(&contract, &processed_sig).await.unwrap();
// test invalid signature fails
processed_sig.message[0] = 0;
assert!(call_verify(&contract, &processed_sig).await.is_err());
}

View File

@@ -1,86 +0,0 @@
use ethereum_serai::crypto::*;
use frost::curve::Secp256k1;
use k256::{
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256,
};
#[test]
fn test_ecrecover() {
use k256::ecdsa::{
recoverable::Signature,
signature::{Signer, Verifier},
SigningKey, VerifyingKey,
};
use rand_core::OsRng;
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
const MESSAGE: &[u8] = b"Hello, World!";
let sig: Signature = private.sign(MESSAGE);
public.verify(MESSAGE, &sig).unwrap();
assert_eq!(
ecrecover(hash_to_scalar(MESSAGE), sig.as_ref()[64], *sig.r(), *sig.s()).unwrap(),
address(&ProjectivePoint::from(public))
);
}
#[test]
fn test_signing() {
use frost::{
algorithm::Schnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let _group_key = keys[&1].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = Schnorr::<Secp256k1, EthereumHram>::new();
let _sig = sign(
&mut OsRng,
algo,
keys.clone(),
algorithm_machines(&mut OsRng, Schnorr::<Secp256k1, EthereumHram>::new(), &keys),
MESSAGE,
);
}
#[test]
fn test_ecrecover_hack() {
use frost::{
algorithm::Schnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&1].group_key();
let group_key_encoded = group_key.to_encoded_point(true);
let group_key_compressed = group_key_encoded.as_ref();
let group_key_x = Scalar::from_uint_reduced(U256::from_be_slice(&group_key_compressed[1 .. 33]));
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = Schnorr::<Secp256k1, EthereumHram>::new();
let sig = sign(
&mut OsRng,
algo.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algo, &keys),
full_message,
);
let (sr, er) =
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,2 +0,0 @@
mod contract;
mod crypto;

1
coins/monero/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
c/.build

View File

@@ -1,15 +1,13 @@
[package]
name = "monero-serai"
version = "0.1.2-alpha"
description = "A modern Monero transaction library"
version = "0.1.0"
description = "A modern Monero wallet library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[build-dependencies]
cc = "1.0"
[dependencies]
hex-literal = "0.3"
@@ -21,23 +19,19 @@ rand_chacha = { version = "0.3", optional = true }
rand = "0.8"
rand_distr = "0.4"
zeroize = { version = "1.5", features = ["zeroize_derive"] }
subtle = "2.4"
sha3 = "0.10"
tiny-keccak = { version = "2", features = ["keccak"] }
blake2 = { version = "0.10", optional = true }
curve25519-dalek = { version = "3", features = ["std"] }
group = { version = "0.12" }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
multiexp = { path = "../../crypto/multiexp", version = "0.2", features = ["batch"] }
dalek-ff-group = { path = "../../crypto/dalek-ff-group" }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.2", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.2", features = ["serialize"], optional = true }
monero-generators = { path = "generators", version = "0.1" }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", features = ["recommended"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["ed25519"], optional = true }
dleq = { package = "dleq-serai", path = "../../crypto/dleq", features = ["serialize"], optional = true }
hex = "0.4"
serde = { version = "1.0", features = ["derive"] }
@@ -45,19 +39,14 @@ serde_json = "1.0"
base58-monero = "1"
monero-epee-bin-serde = "1.0"
monero = "0.16"
digest_auth = "0.3"
reqwest = { version = "0.11", features = ["json"] }
[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.1" }
monero-generators = { path = "generators", version = "0.1" }
[features]
experimental = []
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]
[dev-dependencies]
sha2 = "0.10"
tokio = { version = "1", features = ["full"] }
monero-rpc = "0.3"
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.5", features = ["ed25519", "tests"] }
[features]
multisig = ["rand_chacha", "blake2", "transcript", "frost", "dleq"]

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Copyright (c) 2022 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -4,16 +4,4 @@ A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pitfalls developers may
face.
monero-serai contains safety features, such as first-class acknowledgement of
the burning bug, yet also a high level API around creating transactions.
monero-serai also offers a FROST-based multisig, which is orders of magnitude
more performant than Monero's.
monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet will not deprive functionality from other users, and may potentially leave
Serai's umbrella at some point.
Various legacy transaction formats are not currently implemented, yet
monero-serai is still increasing its support for various transaction types.
Threshold multisignature support is available via the `multisig` feature.

View File

@@ -1,66 +1,72 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
use dalek_ff_group::EdwardsPoint;
use monero_generators::bulletproofs_generators;
// Append Rust source for each generator to `generators_string`.
//
// Each point is emitted as a `dalek_ff_group::EdwardsPoint` constructor expression built from
// its canonical compressed encoding, for inclusion in a generated array literal (see the
// `generators` function which embeds this output in a `Generators` struct).
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
  for generator in points {
    // push_str appends the formatted chunk in one call, instead of the prior
    // char-by-char `extend(format!(..).chars())`
    generators_string.push_str(&format!(
      "
      dalek_ff_group::EdwardsPoint(
        curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
      ),
",
      generator.compress().to_bytes()
    ));
  }
}
// Build-time codegen: derive the Bulletproofs(+) generators for `prefix` and write them to
// `$OUT_DIR/<path>` as a `lazy_static! { pub static ref GENERATORS: Generators = ... }` item
// (presumably consumed via `include!` by the library crate — confirm at the include site).
fn generators(prefix: &'static str, path: &str) {
  let generators = bulletproofs_generators(prefix.as_bytes());
  // Render both generator vectors as Rust source for the array literals below
  #[allow(non_snake_case)]
  let mut G_str = "".to_string();
  serialize(&mut G_str, &generators.G);
  #[allow(non_snake_case)]
  let mut H_str = "".to_string();
  serialize(&mut H_str, &generators.H);

  // Write into OUT_DIR, removing any stale file first (best-effort; create truncates anyway)
  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        "
lazy_static! {{
  pub static ref GENERATORS: Generators = Generators {{
    G: [
      {G_str}
    ],
    H: [
      {H_str}
    ],
  }};
}}
",
      )
      .as_bytes(),
    )
    .unwrap();
}
use std::{env, path::Path, process::Command};
fn main() {
println!("cargo:rerun-if-changed=build.rs");
if !Command::new("git").args(&["submodule", "update", "--init", "--recursive"]).status().unwrap().success() {
panic!("git failed to init submodules");
}
generators("bulletproof", "generators.rs");
generators("bulletproof_plus", "generators_plus.rs");
if !Command ::new("mkdir").args(&["-p", ".build"])
.current_dir(&Path::new("c")).status().unwrap().success() {
panic!("failed to create a directory to track build progress");
}
let out_dir = &env::var("OUT_DIR").unwrap();
// Use a file to signal if Monero was already built, as that should never be rebuilt
// If the signaling file was deleted, run this script again to rebuild Monero though
println!("cargo:rerun-if-changed=c/.build/monero");
if !Path::new("c/.build/monero").exists() {
if !Command::new("make").arg(format!("-j{}", &env::var("THREADS").unwrap_or("2".to_string())))
.current_dir(&Path::new("c/monero")).status().unwrap().success() {
panic!("make failed to build Monero. Please check your dependencies");
}
if !Command::new("touch").arg("monero")
.current_dir(&Path::new("c/.build")).status().unwrap().success() {
panic!("failed to create a file to label Monero as built");
}
}
println!("cargo:rerun-if-changed=c/wrapper.cpp");
cc::Build::new()
.static_flag(true)
.warnings(false)
.extra_warnings(false)
.flag("-Wno-deprecated-declarations")
.include("c/monero/external/supercop/include")
.include("c/monero/contrib/epee/include")
.include("c/monero/src")
.include("c/monero/build/release/generated_include")
.define("AUTO_INITIALIZE_EASYLOGGINGPP", None)
.include("c/monero/external/easylogging++")
.file("c/monero/external/easylogging++/easylogging++.cc")
.file("c/monero/src/common/aligned.c")
.file("c/monero/src/common/perf_timer.cpp")
.include("c/monero/src/crypto")
.file("c/monero/src/crypto/crypto-ops-data.c")
.file("c/monero/src/crypto/crypto-ops.c")
.file("c/monero/src/crypto/keccak.c")
.file("c/monero/src/crypto/hash.c")
.include("c/monero/src/device")
.file("c/monero/src/device/device_default.cpp")
.include("c/monero/src/ringct")
.file("c/monero/src/ringct/rctCryptoOps.c")
.file("c/monero/src/ringct/rctTypes.cpp")
.file("c/monero/src/ringct/rctOps.cpp")
.file("c/monero/src/ringct/multiexp.cc")
.file("c/monero/src/ringct/bulletproofs.cc")
.file("c/monero/src/ringct/rctSigs.cpp")
.file("c/wrapper.cpp")
.compile("wrapper");
println!("cargo:rustc-link-search={}", out_dir);
println!("cargo:rustc-link-lib=wrapper");
println!("cargo:rustc-link-lib=stdc++");
}

1
coins/monero/c/monero Submodule

Submodule coins/monero/c/monero added at 424e4de16b

158
coins/monero/c/wrapper.cpp Normal file
View File

@@ -0,0 +1,158 @@
#include <mutex>
#include "device/device_default.hpp"
#include "ringct/bulletproofs.h"
#include "ringct/rctSigs.h"
// Guard alias and global RNG state shared by all wrapper entry points.
typedef std::lock_guard<std::mutex> lock;

std::mutex rng_mutex;

// 64 bytes of entropy: the first half is the caller-provided seed and stays constant,
// the second half is re-derived per output block by the hash chain below.
uint8_t rng_entropy[64];

extern "C" {
  // Seed the deterministic RNG from a 32-byte seed supplied by the Rust side.
  // Callers are expected to hold rng_mutex (see c_generate_bp / c_verify_bp).
  void rng(uint8_t* seed) {
    // Set the first half to the seed
    memcpy(rng_entropy, seed, 32);
    // Set the second half to the hash of a DST to ensure a lack of collisions
    crypto::cn_fast_hash("RNG_entropy_seed", 16, (char*) &rng_entropy[32]);
  }
}
extern "C" void monero_wide_reduce(uint8_t* value);
namespace crypto {
void generate_random_bytes_not_thread_safe(size_t n, void* value) {
size_t written = 0;
while (written != n) {
uint8_t hash[32];
crypto::cn_fast_hash(rng_entropy, 64, (char*) hash);
// Step the RNG by setting the latter half to the most recent result
// Does not leak the RNG, even if the values are leaked (which they are
// expected to be) due to the first half remaining constant and
// undisclosed
memcpy(&rng_entropy[32], hash, 32);
size_t next = n - written;
if (next > 32) {
next = 32;
}
memcpy(&((uint8_t*) value)[written], hash, next);
written += next;
}
}
void random32_unbiased(unsigned char *bytes) {
uint8_t value[64];
generate_random_bytes_not_thread_safe(64, value);
monero_wide_reduce(value);
memcpy(bytes, value, 32);
}
}
extern "C" {
  // Monero's hash-to-point: treat `point` as a 32-byte key, map it to a curve point via
  // rct::hash_to_p3, and write the compressed result back over `point` in-place.
  void c_hash_to_point(uint8_t* point) {
    rct::key key_point;
    ge_p3 e_p3;
    memcpy(key_point.bytes, point, 32);
    rct::hash_to_p3(e_p3, key_point);
    ge_p3_tobytes(point, &e_p3);
  }

  // Prove a Bulletproof over `len` (amount, mask) pairs, seeding the RNG so proving is
  // deterministic in (seed, inputs). Returns a calloc'd buffer holding the serialized proof;
  // the Rust caller derives the length from the serialization itself and must free() it.
  uint8_t* c_generate_bp(uint8_t* seed, uint8_t len, uint64_t* a, uint8_t* m) {
    lock guard(rng_mutex);
    rng(seed);

    // Unpack the flat (amounts, 32-byte masks) arrays into Monero's types
    rct::keyV masks;
    std::vector<uint64_t> amounts;
    masks.resize(len);
    amounts.resize(len);
    for (uint8_t i = 0; i < len; i++) {
      memcpy(masks[i].bytes, m + (i * 32), 32);
      amounts[i] = a[i];
    }
    rct::Bulletproof bp = rct::bulletproof_PROVE(amounts, masks);

    std::stringstream ss;
    binary_archive<true> ba(ss);
    ::serialization::serialize(ba, bp);
    uint8_t* res = (uint8_t*) calloc(ss.str().size(), 1);
    memcpy(res, ss.str().data(), ss.str().size());
    return res;
  }

  // Verify a serialized Bulletproof `s` against `c_len` 32-byte commitments `c`.
  // Returns false on deserialization failure, verification failure, or any exception.
  bool c_verify_bp(
    uint8_t* seed,
    uint s_len,
    uint8_t* s,
    uint8_t c_len,
    uint8_t* c
  ) {
    // BPs are batch verified which use RNG based weights to ensure individual
    // integrity
    // That's why this must also have control over RNG, to prevent interrupting
    // multisig signing while not using known seeds. Considering this doesn't
    // actually define a batch, and it's only verifying a single BP,
    // it'd probably be fine, but...
    lock guard(rng_mutex);
    rng(seed);

    rct::Bulletproof bp;
    std::stringstream ss;
    std::string str;
    str.assign((char*) s, (size_t) s_len);
    ss << str;
    binary_archive<false> ba(ss);
    ::serialization::serialize(ba, bp);
    if (!ss.good()) {
      return false;
    }

    // The serialized proof doesn't carry the commitments; attach them before verifying
    bp.V.resize(c_len);
    for (uint8_t i = 0; i < c_len; i++) {
      memcpy(bp.V[i].bytes, &c[i * 32], 32);
    }

    try { return rct::bulletproof_VERIFY(bp); } catch(...) { return false; }
  }

  // Verify a serialized CLSAG `s` over a ring of `k_len` (dest, mask) key pairs `k`
  // (interleaved, 32 bytes each), with key image `I`, pseudo-out commitment `p`, and
  // 32-byte message `m`. Returns false on any failure or exception.
  bool c_verify_clsag(
    uint s_len,
    uint8_t* s,
    uint8_t k_len,
    uint8_t* k,
    uint8_t* I,
    uint8_t* p,
    uint8_t* m
  ) {
    rct::clsag clsag;
    std::stringstream ss;
    std::string str;
    str.assign((char*) s, (size_t) s_len);
    ss << str;
    binary_archive<false> ba(ss);
    ::serialization::serialize(ba, clsag);
    if (!ss.good()) {
      return false;
    }

    // Ring members: even offsets are destinations, odd offsets are commitments (masks)
    rct::ctkeyV keys;
    keys.resize(k_len);
    for (uint8_t i = 0; i < k_len; i++) {
      memcpy(keys[i].dest.bytes, &k[(i * 2) * 32], 32);
      memcpy(keys[i].mask.bytes, &k[((i * 2) + 1) * 32], 32);
    }
    memcpy(clsag.I.bytes, I, 32);
    rct::key pseudo_out;
    memcpy(pseudo_out.bytes, p, 32);
    rct::key msg;
    memcpy(msg.bytes, m, 32);
    try {
      return verRctCLSAGSimple(msg, clsag, keys, pseudo_out);
    } catch(...) { return false; }
  }
}

View File

@@ -1,24 +0,0 @@
[package]
name = "monero-generators"
version = "0.1.1"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
lazy_static = "1"
subtle = "2.4"
sha3 = "0.10"
curve25519-dalek = { version = "3", features = ["std"] }
group = "0.12"
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.1.4" }

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,5 +0,0 @@
# Monero Generators
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.

View File

@@ -1,51 +0,0 @@
use subtle::ConditionallySelectable;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use group::ff::{Field, PrimeField};
use dalek_ff_group::field::FieldElement;
use crate::hash;
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
///
/// Maps 32 arbitrary bytes to a point in the prime-order subgroup (the cofactor is cleared
/// at the end).
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
  // 486662 is the Montgomery coefficient A of Curve25519
  #[allow(non_snake_case)]
  let A = FieldElement::from(486662u64);
  // v = 2 * hash(bytes)^2
  let v = FieldElement::from_square(hash(&bytes)).double();
  let w = v + FieldElement::one();
  let x = w.square() + (-A.square() * v);

  // This isn't the complete X, yet its initial value
  // We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
  // While inefficient, it solves API boundaries and reduces the amount of work done here
  #[allow(non_snake_case)]
  let X = {
    let u = w;
    let v = x;
    let v3 = v * v * v;
    let uv3 = u * v3;
    let v7 = v3 * v3 * v;
    let uv7 = u * v7;
    // Square-root candidate (u * v^3) * (u * v^7)^(-5/8)
    // NOTE(review): the exponent is the field element -5 * 8^-1 — confirm this matches the
    // (p - 5) / 8 exponent of the reference implementation
    uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
  };
  let x = X.square() * x;
  let y = w - x;
  // Select which root/branch was hit; `sign` later picks the multiplier and the encoded sign bit
  let non_zero_0 = !y.is_zero();
  let y_if_non_zero_0 = w + x;
  let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());

  let mut z = -A;
  z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
  #[allow(non_snake_case)]
  let Z = z + w;
  #[allow(non_snake_case)]
  let mut Y = z - w;

  // Y = (z - w) / (z + w), then encode the sign into the top bit as Ed25519 compression does
  Y *= Z.invert().unwrap();
  let mut bytes = Y.to_repr();
  bytes[31] |= sign.unwrap_u8() << 7;

  // NOTE(review): the unwrap assumes this map always yields a decompressible y, mirroring the
  // reference function's behavior
  CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}

View File

@@ -1,64 +0,0 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.
use lazy_static::lazy_static;
use sha3::{Digest, Keccak256};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY},
};
use group::Group;
use dalek_ff_group::EdwardsPoint;
mod varint;
use varint::write_varint;
mod hash_to_point;
pub use hash_to_point::hash_to_point;
fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
}
lazy_static! {
/// Monero alternate generator `H`, used for amounts in Pedersen commitments.
pub static ref H: DalekPoint =
CompressedEdwardsY(hash(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
.decompress()
.unwrap()
.mul_by_cofactor();
}
const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;
/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
pub G: [EdwardsPoint; MAX_MN],
pub H: [EdwardsPoint; MAX_MN],
}
/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
let mut res =
Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
for i in 0 .. MAX_MN {
let i = 2 * i;
let mut even = H.compress().to_bytes().to_vec();
even.extend(dst);
let mut odd = even.clone();
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
}
res
}

View File

@@ -1,16 +0,0 @@
use std::io::{self, Write};
// High bit marks that another byte follows
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

/// Write `varint` in Monero's little-endian base-128 varint encoding: seven bits per byte,
/// with the continuation bit set on every byte except the last. Zero encodes as a single
/// `0x00` byte.
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut remaining = *varint;
  loop {
    // Low seven bits of the current value; always fits in a u8
    let mut byte = u8::try_from(remaining & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    remaining >>= 7;
    // Any bits left mean more bytes follow
    if remaining != 0 {
      byte |= VARINT_CONTINUATION_MASK;
    }
    w.write_all(&[byte])?;
    if remaining == 0 {
      break Ok(());
    }
  }
}

View File

@@ -1,18 +1,19 @@
use std::io::{self, Read, Write};
use crate::{
serialize::*,
transaction::Transaction
};
use crate::{serialize::*, transaction::Transaction};
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct BlockHeader {
pub major_version: u64,
pub minor_version: u64,
pub timestamp: u64,
pub previous: [u8; 32],
pub nonce: u32,
pub nonce: u32
}
impl BlockHeader {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.major_version, w)?;
write_varint(&self.minor_version, w)?;
write_varint(&self.timestamp, w)?;
@@ -20,34 +21,30 @@ impl BlockHeader {
w.write_all(&self.nonce.to_le_bytes())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<BlockHeader> {
Ok(BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: read_bytes(r)?,
nonce: read_bytes(r).map(u32::from_le_bytes)?,
})
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<BlockHeader> {
Ok(
BlockHeader {
major_version: read_varint(r)?,
minor_version: read_varint(r)?,
timestamp: read_varint(r)?,
previous: { let mut previous = [0; 32]; r.read_exact(&mut previous)?; previous },
nonce: { let mut nonce = [0; 4]; r.read_exact(&mut nonce)?; u32::from_le_bytes(nonce) }
}
)
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Block {
pub header: BlockHeader,
pub miner_tx: Transaction,
pub txs: Vec<[u8; 32]>,
pub txs: Vec<[u8; 32]>
}
impl Block {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.header.write(w)?;
self.miner_tx.write(w)?;
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.header.serialize(w)?;
self.miner_tx.serialize(w)?;
write_varint(&self.txs.len().try_into().unwrap(), w)?;
for tx in &self.txs {
w.write_all(tx)?;
@@ -55,17 +52,15 @@ impl Block {
Ok(())
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
Ok(Block {
header: BlockHeader::read(r)?,
miner_tx: Transaction::read(r)?,
txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
})
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Block> {
Ok(
Block {
header: BlockHeader::deserialize(r)?,
miner_tx: Transaction::deserialize(r)?,
txs: (0 .. read_varint(r)?).map(
|_| { let mut tx = [0; 32]; r.read_exact(&mut tx).map(|_| tx) }
).collect::<Result<_, _>>()?
}
)
}
}

76
coins/monero/src/frost.rs Normal file
View File

@@ -0,0 +1,76 @@
use std::io::Read;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{Group, GroupEncoding};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
/// Errors arising during the multisig (FROST) signing flow.
#[derive(Clone, Error, Debug)]
pub enum MultisigError {
  /// Catch-all for internal invariant violations, with a description.
  #[error("internal error ({0})")]
  InternalError(String),
  /// The DLEq proof from the participant with this index failed to read or verify.
  #[error("invalid discrete log equality proof")]
  InvalidDLEqProof(u16),
  /// The key image shared by the participant with this index was invalid.
  #[error("invalid key image {0}")]
  InvalidKeyImage(u16)
}

// Domain-separated transcript used by both sides of the key-image DLEq proof.
fn transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"monero_key_image_dleq")
}
/// Prove knowledge of `x` such that `xG` and `xH` share the discrete logarithm `x`,
/// returning the serialized DLEq proof (used for key-image correctness, per the
/// transcript's DST).
#[allow(non_snake_case)]
pub(crate) fn write_dleq<R: RngCore + CryptoRng>(
  rng: &mut R,
  H: EdwardsPoint,
  x: Scalar
) -> Vec<u8> {
  let mut res = Vec::with_capacity(64);
  DLEqProof::prove(
    rng,
    // Doesn't take in a larger transcript object due to the usage of this
    // Every prover would immediately write their own DLEq proof, when they can only do so in
    // the proper order if they want to reach consensus
    // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to try to
    // merge later in some form, when it should instead just merge xH (as it does)
    &mut transcript(),
    &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
    dfg::Scalar(x)
  ).serialize(&mut res).unwrap();
  res
}
/// Read and verify participant `l`'s key-image DLEq proof, returning `xH` on success.
///
/// Reads the 32-byte `xH` point (rejecting non-canonical or torsioned encodings) followed by
/// the serialized proof, then verifies it against `(G, H)` / `(xG, xH)` under the shared
/// transcript. All failures map to `InvalidDLEqProof(l)`.
#[allow(non_snake_case)]
pub(crate) fn read_dleq<Re: Read>(
  serialized: &mut Re,
  H: EdwardsPoint,
  l: u16,
  xG: dfg::EdwardsPoint
) -> Result<dfg::EdwardsPoint, MultisigError> {
  let mut bytes = [0; 32];
  serialized.read_exact(&mut bytes).map_err(|_| MultisigError::InvalidDLEqProof(l))?;
  // dfg ensures the point is torsion free
  let xH = Option::<dfg::EdwardsPoint>::from(
    dfg::EdwardsPoint::from_bytes(&bytes)).ok_or(MultisigError::InvalidDLEqProof(l)
  )?;
  // Ensure this is a canonical point
  if xH.to_bytes() != bytes {
    Err(MultisigError::InvalidDLEqProof(l))?;
  }

  DLEqProof::<dfg::EdwardsPoint>::deserialize(
    serialized
  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?.verify(
    &mut transcript(),
    &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(H)],
    &[xG, xH]
  ).map_err(|_| MultisigError::InvalidDLEqProof(l))?;

  Ok(xH)
}

View File

@@ -1,136 +1,100 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
//! A modern Monero transaction library intended for usage in wallets. It prides
//! itself on accuracy, correctness, and removing common pit falls developers may
//! face.
//! monero-serai contains safety features, such as first-class acknowledgement of
//! the burning bug, yet also a high level API around creating transactions.
//! monero-serai also offers a FROST-based multisig, which is orders of magnitude
//! more performant than Monero's.
//! monero-serai was written for Serai, a decentralized exchange aiming to support
//! Monero. Despite this, monero-serai is intended to be a widely usable library,
//! accurate to Monero. monero-serai guarantees the functionality needed for Serai,
//! yet will not deprive functionality from other users, and may potentially leave
//! Serai's umbrella at some point.
//! Various legacy transaction formats are not currently implemented, yet
//! monero-serai is still increasing its support for various transaction types.
use std::slice;
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use subtle::ConstantTimeEq;
use sha3::{Digest, Keccak256};
use tiny_keccak::{Hasher, Keccak};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, EdwardsBasepointTable},
edwards::{EdwardsPoint, EdwardsBasepointTable, CompressedEdwardsY}
};
pub use monero_generators::H;
#[cfg(feature = "multisig")]
pub mod frost;
mod serialize;
/// RingCT structs and functionality.
pub mod ringct;
/// Transaction structs.
pub mod transaction;
/// Block structs.
pub mod block;
/// Monero daemon RPC interface.
pub mod rpc;
/// Wallet functionality, enabling scanning and sending transactions.
pub mod wallet;
#[cfg(test)]
mod tests;
/// Monero protocol version. v15 is omitted as v15 was simply v14 and v16 being active at the same
/// time, with regards to the transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
Unsupported(usize),
v14,
v16,
Custom { ring_len: usize, bp_plus: bool },
}
impl Protocol {
/// Amount of ring members under this protocol version.
pub fn ring_len(&self) -> usize {
match self {
Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
Protocol::v14 => 11,
Protocol::v16 => 16,
Protocol::Custom { ring_len, .. } => *ring_len,
}
}
/// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
/// This method will likely be reworked when versions not using Bulletproofs at all are added.
pub fn bp_plus(&self) -> bool {
match self {
Protocol::Unsupported(_) => panic!("Unsupported protocol version"),
Protocol::v14 => false,
Protocol::v16 => true,
Protocol::Custom { bp_plus, .. } => *bp_plus,
}
}
}
lazy_static! {
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&H);
static ref H: EdwardsPoint = CompressedEdwardsY(
hex::decode("8b655970153799af2aeadc9ff1add0ea6c7251d54154cfa92c173a0dd39c1f94").unwrap().try_into().unwrap()
).decompress().unwrap();
static ref H_TABLE: EdwardsBasepointTable = EdwardsBasepointTable::create(&*H);
}
// Function from libsodium our subsection of Monero relies on. Implementing it here means we don't
// need to link against libsodium
#[no_mangle]
unsafe extern "C" fn crypto_verify_32(a: *const u8, b: *const u8) -> isize {
isize::from(
slice::from_raw_parts(a, 32).ct_eq(slice::from_raw_parts(b, 32)).unwrap_u8()
) - 1
}
// Offer a wide reduction to C. Our seeded RNG prevented Monero from defining an unbiased scalar
// generation function, and in order to not use Monero code (which would require propagating its
// license), the function was rewritten. It was rewritten with wide reduction, instead of rejection
// sampling however, hence the need for this function
#[no_mangle]
unsafe extern "C" fn monero_wide_reduce(value: *mut u8) {
let res = Scalar::from_bytes_mod_order_wide(
std::slice::from_raw_parts(value, 64).try_into().unwrap()
);
for (i, b) in res.to_bytes().iter().enumerate() {
value.add(i).write(*b);
}
}
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct Commitment {
pub mask: Scalar,
pub amount: u64,
pub amount: u64
}
impl Commitment {
/// The zero commitment, defined as a mask of 1 (as to not be the identity) and a 0 amount.
pub fn zero() -> Commitment {
Commitment { mask: Scalar::one(), amount: 0 }
Commitment { mask: Scalar::one(), amount: 0}
}
pub fn new(mask: Scalar, amount: u64) -> Commitment {
Commitment { mask, amount }
}
/// Calculate a Pedersen commitment, as a point, from the transparent structure.
pub fn calculate(&self) -> EdwardsPoint {
(&self.mask * &ED25519_BASEPOINT_TABLE) + (&Scalar::from(self.amount) * &*H_TABLE)
}
}
/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
// Allows using a modern rand as dalek's is notoriously dated
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
let mut r = [0; 64];
rng.fill_bytes(&mut r);
Scalar::from_bytes_mod_order_wide(&r)
}
pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
pub fn hash(data: &[u8]) -> [u8; 32] {
let mut keccak = Keccak::v256();
keccak.update(data);
let mut res = [0; 32];
keccak.finalize(&mut res);
res
}
/// Hash the provided data to a scalar via keccak256(data) % l.
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
let scalar = Scalar::from_bytes_mod_order(hash(data));
// Monero will explicitly error in this case
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
scalar
Scalar::from_bytes_mod_order(hash(&data))
}

View File

@@ -0,0 +1,161 @@
#![allow(non_snake_case)]
use rand_core::{RngCore, CryptoRng};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use crate::{Commitment, wallet::TransactionError, serialize::*};
// Monero's Bulletproofs are limited to 16 outputs (amount commitments) per proof
pub(crate) const MAX_OUTPUTS: usize = 16;

/// An original-formulation Bulletproof, (de)serialized compatibly with Monero and
/// proven/verified via FFI into the bundled Monero C++ (see `new` / `verify`).
#[derive(Clone, PartialEq, Debug)]
pub struct Bulletproofs {
  pub A: EdwardsPoint,
  pub S: EdwardsPoint,
  pub T1: EdwardsPoint,
  pub T2: EdwardsPoint,
  pub taux: Scalar,
  pub mu: Scalar,
  // L and R must be of equal length; enforced on deserialization
  pub L: Vec<EdwardsPoint>,
  pub R: Vec<EdwardsPoint>,
  pub a: Scalar,
  pub b: Scalar,
  pub t: Scalar
}
impl Bulletproofs {
  // The weight, in bytes, this proof contributes for fee purposes, including Monero's
  // clawback for padded proofs.
  // NOTE(review): `outputs == 0` would underflow `outputs - 1`; callers presumably
  // guarantee at least one output — confirm
  pub(crate) fn fee_weight(outputs: usize) -> usize {
    // Inner-product rounds: 6 (for 64-bit ranges) plus log2 of the padded output count
    let proofs = 6 + usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
    let len = (9 + (2 * proofs)) * 32;

    let mut clawback = 0;
    let padded = 1 << (proofs - 6);
    if padded > 2 {
      const BP_BASE: usize = 368;
      clawback = ((BP_BASE * padded) - len) * 4 / 5;
    }

    len + clawback
  }

  /// Prove the provided commitments are within range, using the seeded Monero C++ prover.
  ///
  /// Errors with `TooManyOutputs` if over `MAX_OUTPUTS` commitments are provided.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, outputs: &[Commitment]) -> Result<Bulletproofs, TransactionError> {
    if outputs.len() > MAX_OUTPUTS {
      return Err(TransactionError::TooManyOutputs)?;
    }

    // Seed the C++ side's deterministic RNG, making the proof a function of (rng, outputs)
    let mut seed = [0; 32];
    rng.fill_bytes(&mut seed);

    let masks = outputs.iter().map(|commitment| commitment.mask.to_bytes()).collect::<Vec<_>>();
    let amounts = outputs.iter().map(|commitment| commitment.amount).collect::<Vec<_>>();

    let res;
    unsafe {
      #[link(name = "wrapper")]
      extern "C" {
        fn free(ptr: *const u8);
        fn c_generate_bp(seed: *const u8, len: u8, amounts: *const u64, masks: *const [u8; 32]) -> *const u8;
      }

      let ptr = c_generate_bp(
        seed.as_ptr(),
        u8::try_from(outputs.len()).unwrap(),
        amounts.as_ptr(),
        masks.as_ptr()
      );
      // Recover the proof's length from the serialization itself: 6 fixed elements, then
      // length-prefixed L/R vectors, then a, b, t. The byte at offset `len` is the L vector's
      // varint length; this assumes it fits in a single byte (true for <= 16 outputs)
      let mut len = 6 * 32;
      len += (2 * (1 + (usize::from(ptr.add(len).read()) * 32))) + (3 * 32);
      res = Bulletproofs::deserialize(
        // Wrap in a cursor to provide a mutable Reader
        &mut std::io::Cursor::new(std::slice::from_raw_parts(ptr, len))
      ).expect("Couldn't deserialize Bulletproofs from Monero");
      // The buffer was calloc'd by the C++ side; release it with the matching free
      free(ptr);
    };

    Ok(res)
  }

  /// Verify this proof against the provided commitments, via the bundled Monero C++ verifier.
  // NOTE(review): hard-codes 16 rather than referencing MAX_OUTPUTS
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    if commitments.len() > 16 {
      return false;
    }

    // Seed for the verifier's RNG-based weights (see c_verify_bp's comment)
    let mut seed = [0; 32];
    rng.fill_bytes(&mut seed);

    let mut serialized = Vec::with_capacity((9 + (2 * self.L.len())) * 32);
    self.serialize(&mut serialized).unwrap();

    // Commitments are passed premultiplied by 8^-1, matching Monero's stored form
    let commitments: Vec<[u8; 32]> = commitments.iter().map(
      |commitment| (commitment * Scalar::from(8u8).invert()).compress().to_bytes()
    ).collect();

    unsafe {
      #[link(name = "wrapper")]
      extern "C" {
        fn c_verify_bp(
          seed: *const u8,
          serialized_len: usize,
          serialized: *const u8,
          commitments_len: u8,
          commitments: *const [u8; 32]
        ) -> bool;
      }

      c_verify_bp(
        seed.as_ptr(),
        serialized.len(),
        serialized.as_ptr(),
        u8::try_from(commitments.len()).unwrap(),
        commitments.as_ptr()
      )
    }
  }

  // Shared serialization body; `specific_write_vec` decides whether L/R carry length prefixes
  fn serialize_core<
    W: std::io::Write,
    F: Fn(&[EdwardsPoint], &mut W) -> std::io::Result<()>
  >(&self, w: &mut W, specific_write_vec: F) -> std::io::Result<()> {
    write_point(&self.A, w)?;
    write_point(&self.S, w)?;
    write_point(&self.T1, w)?;
    write_point(&self.T2, w)?;
    write_scalar(&self.taux, w)?;
    write_scalar(&self.mu, w)?;
    specific_write_vec(&self.L, w)?;
    specific_write_vec(&self.R, w)?;
    write_scalar(&self.a, w)?;
    write_scalar(&self.b, w)?;
    write_scalar(&self.t, w)
  }

  /// Serialize as embedded in a signature: L/R written raw, without length prefixes.
  pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
    self.serialize_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  /// Serialize with length-prefixed L/R vectors (the format `deserialize` reads).
  pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
    self.serialize_core(w, |points, w| write_vec(write_point, points, w))
  }

  /// Deserialize a length-prefixed proof, requiring L and R be of matching length.
  pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Bulletproofs> {
    let bp = Bulletproofs {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      taux: read_scalar(r)?,
      mu: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
      a: read_scalar(r)?,
      b: read_scalar(r)?,
      t: read_scalar(r)?
    };
    if bp.L.len() != bp.R.len() {
      Err(std::io::Error::new(std::io::ErrorKind::Other, "mismatched L/R len"))?;
    }
    Ok(bp)
  }
}

View File

@@ -1,150 +0,0 @@
// Required to be for this entire file, which isn't an issue, as it wouldn't bind to the static
#![allow(non_upper_case_globals)]
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use subtle::{Choice, ConditionallySelectable};
use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;
use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp as multiexp_const;
pub(crate) use monero_generators::Generators;
use crate::{H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;
// Bring things into ff/group
lazy_static! {
  // 8^-1, used to store points premultiplied by the cofactor's inverse as Monero serializes them
  pub(crate) static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert().unwrap();
  // Monero's amount generator H, lifted into the dalek-ff-group wrapper type
  pub(crate) static ref H: EdwardsPoint = EdwardsPoint(*DALEK_H);
}

// hash_to_scalar returning the ff/group Scalar wrapper
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}

// Components common between variants
pub(crate) const MAX_M: usize = 16;
pub(crate) const LOG_N: usize = 6; // 1 << 6 == N
pub(crate) const N: usize = 64;

// Multiexponentiate, then scale by 8^-1 (prover-side points are serialized premultiplied)
pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  multiexp_const(pairs) * *INV_EIGHT
}

// a . G + b . H over the generator vectors, truncated to the scalar vectors' length
pub(crate) fn vector_exponent(
  generators: &Generators,
  a: &ScalarVector,
  b: &ScalarVector,
) -> EdwardsPoint {
  debug_assert_eq!(a.len(), b.len());
  (a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}

// Fiat-Shamir transcript step: hash the running cache alongside the new entries,
// updating the cache in place and returning the new challenge
pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
  let slice =
    &[cache.to_bytes().as_ref(), mash.iter().cloned().flatten().collect::<Vec<_>>().as_ref()]
      .concat();
  *cache = hash_to_scalar(slice);
  *cache
}
// Pad the output count to a power of two M (growth capped once M exceeds MAX_M), returning
// (log2(M * N), M, M * N) — the round count, padded output count, and total bit count.
pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
  let mut logM = 0;
  let mut M = 1;
  // Double M until it covers the outputs or passes MAX_M
  while (M <= MAX_M) && (M < outputs) {
    logM += 1;
    M = 1 << logM;
  }
  (logM + LOG_N, M, M * N)
}
pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
let (_, M, MN) = MN(commitments.len());
let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
let mut aL = ScalarVector::new(MN);
let mut aR = ScalarVector::new(MN);
for j in 0 .. M {
for i in (0 .. N).rev() {
let mut bit = Choice::from(0);
if j < sv.len() {
bit = Choice::from((sv[j][i / 8] >> (i % 8)) & 1);
}
aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::zero(), &Scalar::one(), bit);
aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::one(), &Scalar::zero(), bit);
}
}
(aL, aR)
}
pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
let V = commitments.into_iter().map(|c| EdwardsPoint(c) * *INV_EIGHT).collect::<Vec<_>>();
(hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}
// Sample a random mask and commit to the vectors aL/aR under it, returning the mask and
// the commitment (multiplied by the inverse of eight, per serialization form).
pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
  rng: &mut R,
  generators: &Generators,
  aL: &ScalarVector,
  aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
  let mask = Scalar::random(rng);
  let commitment = vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * mask);
  (mask, commitment * *INV_EIGHT)
}
// Build the multiexp statements for an inner-product round's L/R term: the scalars of a
// paired with G_i, the scalars of b paired with H_i, and the cross term cL on U.
pub(crate) fn LR_statements(
  a: &ScalarVector,
  G_i: &[EdwardsPoint],
  b: &ScalarVector,
  H_i: &[EdwardsPoint],
  cL: Scalar,
  U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
  let mut res = Vec::with_capacity(a.0.len() + b.0.len() + 1);
  for (scalar, point) in a.0.iter().zip(G_i.iter()) {
    res.push((*scalar, *point));
  }
  for (scalar, point) in b.0.iter().zip(H_i.iter()) {
    res.push((*scalar, *point));
  }
  res.push((cL, U));
  res
}
lazy_static! {
  // [2^0, 2^1, .. 2^(N-1)], used to recompose amounts from their bit decomposition
  pub(crate) static ref TWO_N: ScalarVector = ScalarVector::powers(Scalar::from(2u8), N);
}
// Compute, for every generator index, the product of round challenges (or their inverses)
// which fold that generator: result[i] = prod_j (w[j] if bit j of i else winv[j]).
pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
  let mut products = vec![Scalar::zero(); 1 << w.len()];
  products[0] = winv[0];
  products[1] = w[0];
  for j in 1 .. w.len() {
    // Walk the slots downwards in pairs, deriving each pair from its parent slot
    let mut slot = (1 << (j + 1)) - 1;
    while slot > 0 {
      let parent = products[slot / 2];
      products[slot] = parent * w[j];
      products[slot - 1] = parent * winv[j];
      // saturating_sub ends the walk cleanly once slot reaches 1
      slot = slot.saturating_sub(2);
    }
  }

  // Sanity check as if the above failed to populate, it'd be critical
  for product in &products {
    debug_assert!(!bool::from(product.is_zero()));
  }

  products
}

View File

@@ -1,175 +0,0 @@
#![allow(non_snake_case)]
use std::io::{self, Read, Write};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;
use crate::{Commitment, wallet::TransactionError, serialize::*};
pub(crate) mod scalar_vector;
pub(crate) mod core;
use self::core::LOG_N;
pub(crate) mod original;
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
pub(crate) mod plus;
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;
pub(crate) use self::original::OriginalStruct;
pub(crate) use self::plus::PlusStruct;
pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
/// Bulletproofs enum, supporting the original and plus formulations.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
  /// A proof under the original Bulletproofs construction.
  Original(OriginalStruct),
  /// A proof under the Bulletproofs+ construction.
  Plus(PlusStruct),
}
impl Bulletproofs {
  /// Fee-relevant weight of a proof for the given amount of outputs.
  ///
  /// Outputs are padded to a power of two, and only part (4/5) of the padding overhead past two
  /// outputs is charged — NOTE(review): this mirrors Monero's clawback; confirm against monerod.
  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
    // A BP+ proof has 6 fixed fields; an original BP has 9
    let fields = if plus { 6 } else { 9 };

    // ceil(log2(outputs)) — NOTE(review): underflows if outputs == 0; callers are assumed to
    // pass at least one output
    #[allow(non_snake_case)]
    let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
    let padded_outputs = 1 << LR_len;
    // The L/R vectors also cover the N bits of each amount
    LR_len += LOG_N;

    let len = (fields + (2 * LR_len)) * 32;
    len +
      if padded_outputs <= 2 {
        0
      } else {
        // Per-output base size, derived from a two-output proof
        let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
        let size = (fields + (2 * LR_len)) * 32;
        // 4/5 of the padding overhead is counted towards the weight
        ((base * padded_outputs) - size) * 4 / 5
      }
  }

  /// Prove the list of commitments are within [0 .. 2^64).
  ///
  /// Errors with TooManyOutputs if more than MAX_OUTPUTS commitments are provided.
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: &[Commitment],
    plus: bool,
  ) -> Result<Bulletproofs, TransactionError> {
    if outputs.len() > MAX_OUTPUTS {
      return Err(TransactionError::TooManyOutputs)?;
    }
    Ok(if !plus {
      Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
    } else {
      Bulletproofs::Plus(PlusStruct::prove(rng, outputs))
    })
  }

  /// Verify the given Bulletproofs against the given commitments.
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.verify(rng, commitments),
      Bulletproofs::Plus(bp) => bp.verify(rng, commitments),
    }
  }

  /// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
  /// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
  /// Returns true if the Bulletproofs are sane, regardless of their validity.
  #[must_use]
  pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
    id: ID,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
      Bulletproofs::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
    }
  }

  // Shared serialization, parameterized over how the L/R vectors are written (with or without a
  // length prefix, depending on the serialization context)
  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Bulletproofs::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.taux, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)?;
        write_scalar(&bp.a, w)?;
        write_scalar(&bp.b, w)?;
        write_scalar(&bp.t, w)
      }

      Bulletproofs::Plus(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.A1, w)?;
        write_point(&bp.B, w)?;
        write_scalar(&bp.r1, w)?;
        write_scalar(&bp.s1, w)?;
        write_scalar(&bp.d1, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)
      }
    }
  }

  /// Write the proof without length-prefixing the L/R vectors, as done when signing.
  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  /// Write the proof with length-prefixed L/R vectors.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }

  /// Serialize the proof to a byte vector, via `write`.
  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read Bulletproofs (the original variant; the encoding carries no variant marker).
  pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    Ok(Bulletproofs::Original(OriginalStruct {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      taux: read_scalar(r)?,
      mu: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
      a: read_scalar(r)?,
      b: read_scalar(r)?,
      t: read_scalar(r)?,
    }))
  }

  /// Read Bulletproofs+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
    Ok(Bulletproofs::Plus(PlusStruct {
      A: read_point(r)?,
      A1: read_point(r)?,
      B: read_point(r)?,
      r1: read_scalar(r)?,
      s1: read_scalar(r)?,
      d1: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
    }))
  }
}

View File

@@ -1,306 +0,0 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
use multiexp::BatchVerifier;
use crate::{Commitment, ringct::bulletproofs::core::*};
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
lazy_static! {
  // A vector of N ones, and its inner product with the powers of two (<1^N, 2^N>),
  // both reused across verifications
  static ref ONE_N: ScalarVector = ScalarVector(vec![Scalar::one(); N]);
  static ref IP12: Scalar = inner_product(&ONE_N, &TWO_N);
}
/// A proof under the original Bulletproofs construction.
// All group elements are stored multiplied by the inverse of eight; verification undoes
// this via mul_by_cofactor
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
  pub(crate) A: DalekPoint,
  pub(crate) S: DalekPoint,
  pub(crate) T1: DalekPoint,
  pub(crate) T2: DalekPoint,
  pub(crate) taux: DalekScalar,
  pub(crate) mu: DalekScalar,
  // One L/R pair per halving round of the inner-product argument
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
  pub(crate) a: DalekScalar,
  pub(crate) b: DalekScalar,
  pub(crate) t: DalekScalar,
}
impl OriginalStruct {
  /// Prove the given commitments are to amounts within [0 .. 2^64).
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> OriginalStruct {
    let (logMN, M, MN) = MN(commitments.len());

    // Bit-decompose the amounts into aL/aR
    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // Seed the transcript with the commitments
    let (mut cache, _) = hash_commitments(commitments_points.clone());

    // Random blinding vectors for aL/aR
    let (sL, sR) =
      ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();

    let (mut alpha, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);
    let (mut rho, S) = alpha_rho(&mut *rng, &GENERATORS, &sL, &sR);

    // Challenges y and z
    let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    // Coefficients of l(x) = l0 + l1 * x
    let l0 = &aL - z;
    let l1 = sL;

    // zero_twos[j*N + i] = z^(j+2) * 2^i
    let mut zero_twos = Vec::with_capacity(MN);
    let zpow = ScalarVector::powers(z, M + 2);
    for j in 0 .. M {
      for i in 0 .. N {
        zero_twos.push(zpow[j + 2] * TWO_N[i]);
      }
    }

    // Coefficients of r(x) = r0 + r1 * x
    let yMN = ScalarVector::powers(y, MN);
    let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
    let r1 = yMN * sR;

    // Commit to the non-constant coefficients of t(x) = <l(x), r(x)> and derive x
    let (T1, T2, x, mut taux) = {
      let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
      let t2 = inner_product(&l1, &r1);

      let mut tau1 = Scalar::random(&mut *rng);
      let mut tau2 = Scalar::random(&mut *rng);

      let T1 = prove_multiexp(&[(t1, *H), (tau1, EdwardsPoint::generator())]);
      let T2 = prove_multiexp(&[(t2, *H), (tau2, EdwardsPoint::generator())]);

      let x =
        hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);

      let taux = (tau2 * (x * x)) + (tau1 * x);

      // The blinding factors are no longer needed
      tau1.zeroize();
      tau2.zeroize();
      (T1, T2, x, taux)
    };

    let mu = (x * rho) + alpha;
    alpha.zeroize();
    rho.zeroize();

    // Fold the commitments' masks into taux
    for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      taux += zpow[i + 2] * gamma;
    }

    // Evaluate l(x), r(x), and t = <l, r>
    let l = &l0 + &(l1 * x);
    let r = &r0 + &(r1 * x);
    let t = inner_product(&l, &r);

    let x_ip =
      hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);

    // Inner-product argument, halving a/b (and the generators) each round
    let mut a = l;
    let mut b = r;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
    let mut H_proof = GENERATORS.H[.. a.len()].to_vec();
    // H is scaled by the inverse powers of y
    H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
    let U = *H * x_ip;

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = inner_product(&aL, &bR);
      let cR = inner_product(&aR, &bL);

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
      let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
      L.push(L_i);
      R.push(R_i);

      // Round challenge
      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      a = (aL * w) + (aR * winv);
      b = (bL * winv) + (bR * w);

      // The final round doesn't need the generators folded
      if a.len() != 1 {
        G_proof = hadamard_fold(G_L, G_R, winv, w);
        H_proof = hadamard_fold(H_L, H_R, w, winv);
      }
    }

    let res = OriginalStruct {
      A: *A,
      S: *S,
      T1: *T1,
      T2: *T2,
      taux: *taux,
      mu: *mu,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
      a: *a[0],
      b: *b[0],
      t: *t,
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  // Sanity check the proof's shape and, if sane, queue its verification statements into the
  // provided BatchVerifier. Returns whether or not the proof was sane.
  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges, mirroring the prover's transcript
    let (mut cache, commitments) = hash_commitments(commitments.iter().cloned());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);

    let z = hash_to_scalar(&y.to_bytes());
    cache = z;
    let x = hash_cache(
      &mut cache,
      &[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
    );

    let x_ip = hash_cache(
      &mut cache,
      &[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
    );

    // Per-round challenges of the inner-product argument, with their inverses
    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let T1 = normalize(&self.T1);
    let T2 = normalize(&self.T2);
    let A = normalize(&self.A);
    let S = normalize(&self.S);

    let commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    // First statement: the t commitment is consistent with T1/T2 and the amount commitments
    let mut proof = Vec::with_capacity(4 + commitments.len());

    let zpow = ScalarVector::powers(z, M + 3);
    let ip1y = ScalarVector::powers(y, M * N).sum();
    let mut k = -(zpow[2] * ip1y);
    for j in 1 ..= M {
      k -= zpow[j + 2] * *IP12;
    }

    let y1 = Scalar(self.t) - ((z * ip1y) + k);
    proof.push((-y1, *H));

    proof.push((-Scalar(self.taux), G));

    for (j, commitment) in commitments.iter().enumerate() {
      proof.push((zpow[j + 2], *commitment));
    }

    proof.push((x, T1));
    proof.push((x * x, T2));
    verifier.queue(&mut *rng, id, proof);

    // Second statement: the inner-product argument folds to a/b over the generators
    proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
    let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
    proof.push((z3, *H));
    proof.push((-Scalar(self.mu), G));

    proof.push((Scalar::one(), A));
    proof.push((x, S));

    {
      let ypow = ScalarVector::powers(y, MN);
      let yinv = y.invert().unwrap();
      let yinvpow = ScalarVector::powers(yinv, MN);

      // Products of the round challenges, one folding coefficient per generator
      let w_cache = challenge_products(&w, &winv);

      for i in 0 .. MN {
        let g = (Scalar(self.a) * w_cache[i]) + z;
        proof.push((-g, GENERATORS.G[i]));

        let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
        h -= ((zpow[(i / N) + 2] * TWO_N[i % N]) + (z * ypow[i])) * yinvpow[i];
        proof.push((-h, GENERATORS.H[i]));
      }
    }

    for i in 0 .. logMN {
      proof.push((w[i] * w[i], L[i]));
      proof.push((winv[i] * winv[i], R[i]));
    }
    verifier.queue(rng, id, proof);

    true
  }

  /// Verify this proof on its own, via an internal single-proof batch.
  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  /// Queue this proof's verification into the provided BatchVerifier, returning whether or not
  /// the proof was sane (not whether it was valid).
  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}

View File

@@ -1,306 +0,0 @@
use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};
use group::ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
use multiexp::BatchVerifier;
use crate::{
Commitment, hash,
ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
};
include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
lazy_static! {
  // Compressed hash-to-point of a fixed domain string, used to personalize the BP+ transcript
  static ref TRANSCRIPT: [u8; 32] =
    EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes();
}
// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash, prefixing the
// commitments' hash with the BP+ transcript constant
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
  let (cache, points) = hash_commitments(commitments);
  let mut transcript = TRANSCRIPT.to_vec();
  transcript.extend(cache.to_bytes());
  (hash_to_scalar(&transcript), points)
}
// d[j*N+i] = z**(2*(j+1)) * 2**i
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
  // zpow[j] = z^(2*(j+1))
  let zpow = ScalarVector::even_powers(z, 2 * M);
  let mut d = vec![Scalar::zero(); MN];
  for index in 0 .. (M * N) {
    d[index] = zpow[index / N] * TWO_N[index % N];
  }
  (zpow, ScalarVector(d))
}
/// A proof under the Bulletproofs+ construction.
// All group elements are stored multiplied by the inverse of eight; verification undoes
// this via mul_by_cofactor
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PlusStruct {
  pub(crate) A: DalekPoint,
  pub(crate) A1: DalekPoint,
  pub(crate) B: DalekPoint,
  pub(crate) r1: DalekScalar,
  pub(crate) s1: DalekScalar,
  pub(crate) d1: DalekScalar,
  // One L/R pair per halving round of the weighted inner-product argument
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
}
impl PlusStruct {
  /// Prove the given commitments are to amounts within [0 .. 2^64), per Bulletproofs+.
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> PlusStruct {
    let (logMN, M, MN) = MN(commitments.len());

    // Bit-decompose the amounts into aL/aR
    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // Seed the transcript with the commitments (prefixed by the BP+ transcript constant)
    let (mut cache, _) = hash_plus(commitments_points.clone());

    let (mut alpha1, A) = alpha_rho(&mut *rng, &GENERATORS, &aL, &aR);

    // Challenges y and z
    let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let (zpow, d) = d(z, M, MN);

    let aL1 = aL - z;

    let ypow = ScalarVector::powers(y, MN + 2);
    // Reversed powers of y, as d is weighted by descending powers
    let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
    y_for_d.0.reverse();
    let aR1 = (aR + z) + (y_for_d * d);

    // Fold the commitments' masks into the initial mask
    for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      alpha1 += zpow[j] * ypow[MN + 1] * gamma;
    }

    // Weighted inner-product argument, halving a/b (and the generators) each round
    let mut a = aL1;
    let mut b = aR1;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = GENERATORS.G[.. a.len()].to_vec();
    let mut H_proof = GENERATORS.H[.. a.len()].to_vec();

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = weighted_inner_product(&aL, &bR, y);
      let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);

      // Round-specific blinding factors for L/R
      let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, *H);
      L_i.push((dL, G));
      let L_i = prove_multiexp(&L_i);
      L.push(L_i);

      let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, *H);
      R_i.push((dR, G));
      let R_i = prove_multiexp(&R_i);
      R.push(R_i);

      // Round challenge
      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
      H_proof = hadamard_fold(H_L, H_R, w, winv);

      a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
      b = (bL * winv) + (bR * w);

      alpha1 += (dL * (w * w)) + (dR * (winv * winv));

      dL.zeroize();
      dR.zeroize();
    }

    // Final-round masks
    let mut r = Scalar::random(&mut *rng);
    let mut s = Scalar::random(&mut *rng);
    let mut d = Scalar::random(&mut *rng);
    let mut eta = Scalar::random(&mut *rng);

    let A1 = prove_multiexp(&[
      (r, G_proof[0]),
      (s, H_proof[0]),
      (d, G),
      ((r * y * b[0]) + (s * y * a[0]), *H),
    ]);
    let B = prove_multiexp(&[(r * y * s, *H), (eta, G)]);
    let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);

    let r1 = (a[0] * e) + r;
    r.zeroize();
    let s1 = (b[0] * e) + s;
    s.zeroize();
    let d1 = ((d * e) + eta) + (alpha1 * (e * e));
    d.zeroize();
    eta.zeroize();
    alpha1.zeroize();

    let res = PlusStruct {
      A: *A,
      A1: *A1,
      B: *B,
      r1: *r1,
      s1: *s1,
      d1: *d1,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  // Sanity check the proof's shape and, if sane, queue its verification statements into the
  // provided BatchVerifier. Returns whether or not the proof was sane.
  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges, mirroring the prover's transcript
    let (mut cache, commitments) = hash_plus(commitments.iter().cloned());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
    let yinv = y.invert().unwrap();
    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }
    let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let A = normalize(&self.A);
    let A1 = normalize(&self.A1);
    let B = normalize(&self.B);

    let mut commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));

    // y^MN by repeated squaring, as MN is a power of two
    let mut yMN = y;
    for _ in 0 .. logMN {
      yMN *= yMN;
    }
    let yMNy = yMN * y;

    let (zpow, d) = d(z, M, MN);
    // zpow holds the even powers of z, so zpow[0] = z^2
    let zsq = zpow[0];

    let esq = e * e;
    let minus_esq = -esq;
    let commitment_weight = minus_esq * yMNy;
    for (i, commitment) in commitments.drain(..).enumerate() {
      proof.push((commitment_weight * zpow[i], commitment));
    }

    // Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
    // to a single addition under vartime for the first BP verified in the batch, which is expected
    // to be much more significant
    proof.push((Scalar::one(), -B));
    proof.push((-e, A1));
    proof.push((minus_esq, A));
    proof.push((Scalar(self.d1), G));

    // sum(d) = sum(zpow) * (2^64 - 1)
    let d_sum = zpow.sum() * Scalar::from(u64::MAX);
    let y_sum = weighted_powers(y, MN).sum();
    proof.push((
      Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
      *H,
    ));

    // Products of the round challenges, one folding coefficient per generator
    let w_cache = challenge_products(&w, &winv);

    let mut e_r1_y = e * Scalar(self.r1);
    let e_s1 = e * Scalar(self.s1);
    let esq_z = esq * z;
    let minus_esq_z = -esq_z;
    let mut minus_esq_y = minus_esq * yMN;

    for i in 0 .. MN {
      proof.push((e_r1_y * w_cache[i] + esq_z, GENERATORS.G[i]));
      proof.push((
        (e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
        GENERATORS.H[i],
      ));

      // Running inverse powers of y, instead of a per-index power computation
      e_r1_y *= yinv;
      minus_esq_y *= yinv;
    }

    for i in 0 .. logMN {
      proof.push((minus_esq * w[i] * w[i], L[i]));
      proof.push((minus_esq * winv[i] * winv[i], R[i]));
    }
    verifier.queue(rng, id, proof);

    true
  }

  /// Verify this proof on its own, via an internal single-proof batch.
  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  /// Queue this proof's verification into the provided BatchVerifier, returning whether or not
  /// the proof was sane (not whether it was valid).
  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}

View File

@@ -1,136 +0,0 @@
use core::ops::{Add, Sub, Mul, Index};
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp;
/// A vector of scalars, supporting elementwise math, zeroized on drop.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
// Implement an elementwise operator for ScalarVector, against a single Scalar or another
// (equal-length) ScalarVector, for both owned values and references
macro_rules! math_op {
  ($Op: ident, $op: ident, $f: expr) => {
    impl $Op<Scalar> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }
    impl $Op<Scalar> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }
    impl $Op<ScalarVector> for ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }
    impl $Op<&ScalarVector> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: &ScalarVector) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }
  };
}
// Elementwise addition, subtraction, and multiplication
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);
impl ScalarVector {
  /// A zero-initialized vector of the given length.
  pub(crate) fn new(len: usize) -> ScalarVector {
    ScalarVector(vec![Scalar::zero(); len])
  }

  /// [x^0, x^1, .. x^(len-1)].
  pub(crate) fn powers(x: Scalar, len: usize) -> ScalarVector {
    debug_assert!(len != 0);

    let mut powers = Vec::with_capacity(len);
    powers.push(Scalar::one());
    while powers.len() < len {
      powers.push(*powers.last().unwrap() * x);
    }
    ScalarVector(powers)
  }

  /// [x^2, x^4, .. x^pow], where pow is a power of two.
  pub(crate) fn even_powers(x: Scalar, pow: usize) -> ScalarVector {
    debug_assert!(pow != 0);
    // Verify pow is a power of two
    debug_assert_eq!(((pow - 1) & pow), 0);

    let xsq = x * x;
    let mut res = Vec::with_capacity(pow / 2);
    let mut current = xsq;
    res.push(current);
    for _ in 1 .. (pow / 2) {
      current = current * xsq;
      res.push(current);
    }
    ScalarVector(res)
  }

  /// Sum of all elements, consuming the vector.
  // drain is used (instead of into_iter) as ZeroizeOnDrop prevents moving the Vec out of self
  pub(crate) fn sum(mut self) -> Scalar {
    let mut total = Scalar::zero();
    for value in self.0.drain(..) {
      total += value;
    }
    total
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  /// Split into two halves of equal length.
  pub(crate) fn split(self) -> (ScalarVector, ScalarVector) {
    let mid = self.0.len() / 2;
    let (left, right) = self.0.split_at(mid);
    (ScalarVector(left.to_vec()), ScalarVector(right.to_vec()))
  }
}
impl Index<usize> for ScalarVector {
  type Output = Scalar;
  // Panics on an out-of-bounds index, as Vec indexing does
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}
// <a, b>: the inner product of two equal-length scalar vectors
pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
  (a * b).sum()
}
// [x^1, x^2, .. x^len]: the powers of x, starting from x itself rather than one.
pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
  let mut all = ScalarVector::powers(x, len + 1);
  // Drop the leading x^0 term
  ScalarVector(all.0.split_off(1))
}
// <a, b>_y = sum(a[i] * b[i] * y^(i+1)): the y-weighted inner product used by Bulletproofs+
pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
  // y ** 0 is not used as a power
  (a * b * weighted_powers(y, a.len())).sum()
}
impl Mul<&[EdwardsPoint]> for &ScalarVector {
  type Output = EdwardsPoint;
  // Multiscalar multiplication: sum(self[i] * b[i]), via multiexp
  fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
    debug_assert_eq!(self.len(), b.len());
    multiexp(&self.0.iter().cloned().zip(b.iter().cloned()).collect::<Vec<_>>())
  }
}
// Fold two generator halves into one: res[i] = (a * l[i]) + (b * r[i]).
//
// Fixed: the capacity was previously l.len() / 2 despite l.len() elements being pushed,
// guaranteeing a reallocation mid-loop. Also asserts the halves are of equal length, per this
// file's debug_assert convention.
pub(crate) fn hadamard_fold(
  l: &[EdwardsPoint],
  r: &[EdwardsPoint],
  a: Scalar,
  b: Scalar,
) -> Vec<EdwardsPoint> {
  debug_assert_eq!(l.len(), r.len());
  let mut res = Vec::with_capacity(l.len());
  for i in 0 .. l.len() {
    res.push(multiexp(&[(a, l[i]), (b, r[i])]));
  }
  res
}

View File

@@ -1,73 +1,65 @@
#![allow(non_snake_case)]
use core::ops::Deref;
use std::io::{self, Read, Write};
use lazy_static::lazy_static;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, Choice, CtOption};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
traits::VartimePrecomputedMultiscalarMul,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation}
};
use crate::{
Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys, ringct::hash_to_point,
serialize::*,
Commitment, random_scalar, hash_to_scalar,
transaction::RING_LEN,
wallet::decoys::Decoys,
ringct::hash_to_point,
serialize::*
};
#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
#[cfg(feature = "multisig")]
pub(crate) use multisig::add_key_image_share;
pub use multisig::{ClsagDetails, ClsagMultisig};
lazy_static! {
static ref INV_EIGHT: Scalar = Scalar::from(8u8).invert();
}
/// Errors returned when CLSAG signing fails.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Error, Debug)]
pub enum ClsagError {
#[error("internal error ({0})")]
InternalError(&'static str),
#[error("invalid ring")]
InvalidRing,
InternalError(String),
#[error("invalid ring member (member {0}, ring size {1})")]
InvalidRingMember(u8, u8),
#[error("invalid commitment")]
InvalidCommitment,
#[error("invalid key image")]
InvalidImage,
#[error("invalid D")]
InvalidD,
#[error("invalid s")]
InvalidS,
#[error("invalid c1")]
InvalidC1,
InvalidC1
}
/// Input being signed for.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Debug)]
pub struct ClsagInput {
// The actual commitment for the true spend
pub(crate) commitment: Commitment,
pub commitment: Commitment,
// True spend index, offsets, and ring
pub(crate) decoys: Decoys,
pub decoys: Decoys
}
impl ClsagInput {
pub fn new(commitment: Commitment, decoys: Decoys) -> Result<ClsagInput, ClsagError> {
pub fn new(
commitment: Commitment,
decoys: Decoys
) -> Result<ClsagInput, ClsagError> {
let n = decoys.len();
if n > u8::MAX.into() {
Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
Err(ClsagError::InternalError("max ring size in this library is u8 max".to_string()))?;
}
let n = u8::try_from(n).unwrap();
if decoys.i >= n {
@@ -83,10 +75,10 @@ impl ClsagInput {
}
}
#[allow(clippy::large_enum_variant)]
enum Mode {
Sign(usize, EdwardsPoint, EdwardsPoint),
Verify(Scalar),
#[cfg(feature = "experimental")]
Verify(Scalar)
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
@@ -98,7 +90,7 @@ fn core(
msg: &[u8; 32],
D: &EdwardsPoint,
s: &[Scalar],
A_c1: Mode,
A_c1: Mode
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
let n = ring.len();
@@ -107,17 +99,13 @@ fn core(
// Generate the transcript
// Instead of generating multiple, a single transcript is created and then edited as needed
const PREFIX: &[u8] = b"CLSAG_";
#[rustfmt::skip]
const AGG_0: &[u8] = b"agg_0";
#[rustfmt::skip]
const ROUND: &[u8] = b"round";
const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();
let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
to_hash.extend(PREFIX);
let mut to_hash = vec![];
to_hash.reserve_exact(((2 * n) + 5) * 32);
const PREFIX: &[u8] = "CLSAG_".as_bytes();
const AGG_0: &[u8] = "CLSAG_agg_0".as_bytes();
const ROUND: &[u8] = "round".as_bytes();
to_hash.extend(AGG_0);
to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);
to_hash.extend([0; 32 - AGG_0.len()]);
let mut P = Vec::with_capacity(n);
for member in ring {
@@ -137,7 +125,7 @@ fn core(
// mu_P with agg_0
let mu_P = hash_to_scalar(&to_hash);
// mu_C with agg_1
to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
to_hash[AGG_0.len() - 1] = b'1';
let mu_C = hash_to_scalar(&to_hash);
// Truncate it for the round transcript, altering the DST as needed
@@ -161,8 +149,9 @@ fn core(
to_hash.extend(A.compress().to_bytes());
to_hash.extend(AH.compress().to_bytes());
c = hash_to_scalar(&to_hash);
}
},
#[cfg(feature = "experimental")]
Mode::Verify(c1) => {
start = 0;
end = n;
@@ -171,12 +160,11 @@ fn core(
}
// Perform the core loop
let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
let mut c1 = None;
for i in (start .. end).map(|i| i % n) {
// This will only execute once and shouldn't need to be constant time. Making it constant time
// removes the risk of branch prediction creating timing differences depending on ring index
// however
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
if i == 0 {
c1 = Some(c);
}
let c_p = mu_P * c;
let c_c = mu_C * c;
@@ -196,12 +184,11 @@ fn core(
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}
/// CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Clsag {
pub D: EdwardsPoint,
pub s: Vec<Scalar>,
pub c1: Scalar,
pub c1: Scalar
}
impl Clsag {
@@ -214,7 +201,7 @@ impl Clsag {
mask: Scalar,
msg: &[u8; 32],
A: EdwardsPoint,
AH: EdwardsPoint,
AH: EdwardsPoint
) -> (Clsag, EdwardsPoint, Scalar, Scalar) {
let r: usize = input.decoys.i.into();
@@ -227,21 +214,27 @@ impl Clsag {
for _ in 0 .. input.decoys.ring.len() {
s.push(random_scalar(rng));
}
let ((D, p, c), c1) =
core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
let ((D, p, c), c1) = core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));
(Clsag { D, s, c1 }, pseudo_out, p, c * z)
(
Clsag { D, s, c1 },
pseudo_out,
p,
c * z
)
}
/// Generate CLSAG signatures for the given inputs.
/// inputs is of the form (private key, key image, input).
/// sum_outputs is for the sum of the outputs' commitment masks.
// Single signer CLSAG
pub fn sign<R: RngCore + CryptoRng>(
rng: &mut R,
mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
inputs: &[(Scalar, EdwardsPoint, ClsagInput)],
sum_outputs: Scalar,
msg: [u8; 32],
msg: [u8; 32]
) -> Vec<(Clsag, EdwardsPoint)> {
let nonce = random_scalar(rng);
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let mut res = Vec::with_capacity(inputs.len());
let mut sum_pseudo_outs = Scalar::zero();
for i in 0 .. inputs.len() {
@@ -252,25 +245,18 @@ impl Clsag {
sum_pseudo_outs += mask;
}
let mut nonce = Zeroizing::new(random_scalar(rng));
let mut rand_source = [0; 64];
rng.fill_bytes(&mut rand_source);
let (mut clsag, pseudo_out, p, c) = Clsag::sign_core(
rng,
&inputs[i].1,
&inputs[i].2,
mask,
&msg,
nonce.deref() * &ED25519_BASEPOINT_TABLE,
nonce.deref() *
hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
&nonce * &ED25519_BASEPOINT_TABLE,
nonce * hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0])
);
clsag.s[usize::from(inputs[i].2.decoys.i)] =
(-((p * inputs[i].0.deref()) + c)) + nonce.deref();
inputs[i].0.zeroize();
nonce.zeroize();
debug_assert!(clsag
.verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
.is_ok());
clsag.s[usize::from(inputs[i].2.decoys.i)] = nonce - ((p * inputs[i].0) + c);
res.push((clsag, pseudo_out));
}
@@ -278,49 +264,97 @@ impl Clsag {
res
}
/// Verify the CLSAG signature against the given Transaction data.
pub fn verify(
// Not extensively tested nor guaranteed to have expected parity with Monero
#[cfg(feature = "experimental")]
pub fn rust_verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32],
msg: &[u8; 32]
) -> Result<(), ClsagError> {
// Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
// here
if ring.is_empty() {
Err(ClsagError::InvalidRing)?;
}
if ring.len() != self.s.len() {
Err(ClsagError::InvalidS)?;
}
if I.is_identity() {
Err(ClsagError::InvalidImage)?;
}
let D = self.D.mul_by_cofactor();
if D.is_identity() {
Err(ClsagError::InvalidD)?;
}
let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
let (_, c1) = core(
ring,
I,
pseudo_out,
msg,
&self.D.mul_by_cofactor(),
&self.s,
Mode::Verify(self.c1)
);
if c1 != self.c1 {
Err(ClsagError::InvalidC1)?;
}
Ok(())
}
pub(crate) fn fee_weight(ring_len: usize) -> usize {
(ring_len * 32) + 32 + 32
pub(crate) fn fee_weight() -> usize {
(RING_LEN * 32) + 32 + 32
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_raw_vec(write_scalar, &self.s, w)?;
w.write_all(&self.c1.to_bytes())?;
write_point(&self.D, w)
}
pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
pub fn deserialize<R: std::io::Read>(decoys: usize, r: &mut R) -> std::io::Result<Clsag> {
Ok(
Clsag {
s: read_raw_vec(read_scalar, decoys, r)?,
c1: read_scalar(r)?,
D: read_point(r)?
}
)
}
pub fn verify(
&self,
ring: &[[EdwardsPoint; 2]],
I: &EdwardsPoint,
pseudo_out: &EdwardsPoint,
msg: &[u8; 32]
) -> Result<(), ClsagError> {
// Serialize it to pass the struct to Monero without extensive FFI
let mut serialized = Vec::with_capacity(1 + ((self.s.len() + 2) * 32));
write_varint(&self.s.len().try_into().unwrap(), &mut serialized).unwrap();
self.serialize(&mut serialized).unwrap();
let I_bytes = I.compress().to_bytes();
let mut ring_bytes = vec![];
for member in ring {
ring_bytes.extend(&member[0].compress().to_bytes());
ring_bytes.extend(&member[1].compress().to_bytes());
}
let pseudo_out_bytes = pseudo_out.compress().to_bytes();
unsafe {
// Uses Monero's C verification function to ensure compatibility with Monero
#[link(name = "wrapper")]
extern "C" {
pub(crate) fn c_verify_clsag(
serialized_len: usize,
serialized: *const u8,
ring_size: u8,
ring: *const u8,
I: *const u8,
pseudo_out: *const u8,
msg: *const u8
) -> bool;
}
if c_verify_clsag(
serialized.len(), serialized.as_ptr(),
u8::try_from(ring.len()).map_err(|_| ClsagError::InternalError("too large ring".to_string()))?,
ring_bytes.as_ptr(),
I_bytes.as_ptr(), pseudo_out_bytes.as_ptr(), msg.as_ptr()
) {
Ok(())
} else {
Err(ClsagError::InvalidC1)
}
}
}
}

View File

@@ -1,57 +1,44 @@
use core::{ops::Deref, fmt::Debug};
use std::{
io::{self, Read, Write},
sync::{Arc, RwLock},
};
use core::fmt::Debug;
use std::{io::Read, sync::{Arc, RwLock}};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_chacha::ChaCha12Rng;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
traits::{Identity, IsIdentity},
scalar::Scalar,
edwards::EdwardsPoint,
edwards::EdwardsPoint
};
use group::{ff::Field, Group, GroupEncoding};
use group::Group;
use transcript::{Transcript, RecommendedTranscript};
use frost::{curve::Ed25519, FrostError, FrostView, algorithm::Algorithm};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
use frost::{
dkg::lagrange,
curve::Ed25519,
FrostError, ThresholdKeys, ThresholdView,
algorithm::{WriteAddendum, Algorithm},
};
use crate::ringct::{
hash_to_point,
clsag::{ClsagInput, Clsag},
use crate::{
frost::{MultisigError, write_dleq, read_dleq},
ringct::{hash_to_point, clsag::{ClsagInput, Clsag}}
};
fn dleq_transcript() -> RecommendedTranscript {
RecommendedTranscript::new(b"monero_key_image_dleq")
}
impl ClsagInput {
fn transcript<T: Transcript>(&self, transcript: &mut T) {
// Doesn't domain separate as this is considered part of the larger CLSAG proof
// Ring index
transcript.append_message(b"real_spend", [self.decoys.i]);
transcript.append_message(b"ring_index", &[self.decoys.i]);
// Ring
for (i, pair) in self.decoys.ring.iter().enumerate() {
let mut ring = vec![];
for pair in &self.decoys.ring {
// Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
// They're just a unreliable reference to this data which will be included in the message
// if in use
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
transcript.append_message(b"key", pair[0].compress().to_bytes());
transcript.append_message(b"commitment", pair[1].compress().to_bytes())
ring.extend(&pair[0].compress().to_bytes());
ring.extend(&pair[1].compress().to_bytes());
}
transcript.append_message(b"ring", &ring);
// Doesn't include the commitment's parts as the above ring + index includes the commitment
// The only potential malleability would be if the G/H relationship is known breaking the
@@ -59,11 +46,10 @@ impl ClsagInput {
}
}
/// CLSAG input and the mask to use for it.
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, Debug)]
pub struct ClsagDetails {
input: ClsagInput,
mask: Scalar,
mask: Scalar
}
impl ClsagDetails {
@@ -72,64 +58,54 @@ impl ClsagDetails {
}
}
/// Addendum produced during the FROST signing process with relevant data.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
pub(crate) key_image: dfg::EdwardsPoint,
dleq: DLEqProof<dfg::EdwardsPoint>,
}
impl WriteAddendum for ClsagAddendum {
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
self.dleq.write(writer)
}
}
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
struct Interim {
p: Scalar,
c: Scalar,
clsag: Clsag,
pseudo_out: EdwardsPoint,
pseudo_out: EdwardsPoint
}
/// FROST algorithm for producing a CLSAG signature.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct ClsagMultisig {
transcript: RecommendedTranscript,
pub(crate) H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
// an extra round
H: EdwardsPoint,
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires a round
image: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
msg: Option<[u8; 32]>,
interim: Option<Interim>,
interim: Option<Interim>
}
impl ClsagMultisig {
pub fn new(
transcript: RecommendedTranscript,
output_key: EdwardsPoint,
details: Arc<RwLock<Option<ClsagDetails>>>,
) -> ClsagMultisig {
ClsagMultisig {
transcript,
details: Arc<RwLock<Option<ClsagDetails>>>
) -> Result<ClsagMultisig, MultisigError> {
Ok(
ClsagMultisig {
transcript,
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
H: hash_to_point(output_key),
image: EdwardsPoint::identity(),
details,
details,
msg: None,
interim: None,
}
msg: None,
interim: None
}
)
}
pub const fn serialized_len() -> usize {
32 + (2 * 32)
}
fn input(&self) -> ClsagInput {
@@ -141,23 +117,8 @@ impl ClsagMultisig {
}
}
pub(crate) fn add_key_image_share(
image: &mut EdwardsPoint,
generator: EdwardsPoint,
offset: Scalar,
included: &[u16],
participant: u16,
share: EdwardsPoint,
) {
if image.is_identity() {
*image = generator * offset;
}
*image += share * lagrange::<dfg::Scalar>(participant, included).0;
}
impl Algorithm<Ed25519> for ClsagMultisig {
type Transcript = RecommendedTranscript;
type Addendum = ClsagAddendum;
type Signature = (Clsag, EdwardsPoint);
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
@@ -167,70 +128,35 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn preprocess_addendum<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
keys: &ThresholdKeys<Ed25519>,
) -> ClsagAddendum {
ClsagAddendum {
key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
dleq: DLEqProof::prove(
rng,
// Doesn't take in a larger transcript object due to the usage of this
// Every prover would immediately write their own DLEq proof, when they can only do so in
// the proper order if they want to reach consensus
// It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
// try to merge later in some form, when it should instead just merge xH (as it does)
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
keys.secret_share(),
),
}
view: &FrostView<Ed25519>
) -> Vec<u8> {
let mut serialized = Vec::with_capacity(Self::serialized_len());
serialized.extend((view.secret_share().0 * self.H).compress().to_bytes());
serialized.extend(write_dleq(rng, self.H, view.secret_share().0));
serialized
}
fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
let mut bytes = [0; 32];
reader.read_exact(&mut bytes)?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
}
Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
}
fn process_addendum(
fn process_addendum<Re: Read>(
&mut self,
view: &ThresholdView<Ed25519>,
view: &FrostView<Ed25519>,
l: u16,
addendum: ClsagAddendum,
serialized: &mut Re
) -> Result<(), FrostError> {
if self.image.is_identity() {
if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG");
self.input().transcript(&mut self.transcript);
self.transcript.append_message(b"mask", self.mask().to_bytes());
self.transcript.append_message(b"mask", &self.mask().to_bytes());
}
self.transcript.append_message(b"participant", l.to_be_bytes());
addendum
.dleq
.verify(
&mut dleq_transcript(),
&[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
&[view.original_verification_share(l), addendum.key_image],
)
.map_err(|_| FrostError::InvalidPreprocess(l))?;
self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
add_key_image_share(
&mut self.image,
self.transcript.append_message(b"participant", &l.to_be_bytes());
let image = read_dleq(
serialized,
self.H,
view.offset().0,
view.included(),
l,
addendum.key_image.0,
);
view.verification_share(l)
).map_err(|_| FrostError::InvalidCommitment(l))?.0;
self.transcript.append_message(b"key_image_share", image.compress().to_bytes().as_ref());
self.image += image;
Ok(())
}
@@ -241,17 +167,17 @@ impl Algorithm<Ed25519> for ClsagMultisig {
fn sign_share(
&mut self,
view: &ThresholdView<Ed25519>,
view: &FrostView<Ed25519>,
nonce_sums: &[Vec<dfg::EdwardsPoint>],
nonces: Vec<Zeroizing<dfg::Scalar>>,
msg: &[u8],
nonces: &[dfg::Scalar],
msg: &[u8]
) -> dfg::Scalar {
// Use the transcript to get a seeded random number generator
// The transcript contains private data, preventing passive adversaries from recreating this
// process even if they have access to commitments (specifically, the ring index being signed
// for, along with the mask which should not only require knowing the shared keys yet also the
// input commitment masks)
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));
self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));
@@ -261,13 +187,15 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self.image,
&self.input(),
self.mask(),
self.msg.as_ref().unwrap(),
&self.msg.as_ref().unwrap(),
nonce_sums[0][0].0,
nonce_sums[0][1].0,
nonce_sums[0][1].0
);
self.interim = Some(Interim { p, c, clsag, pseudo_out });
(-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
let share = dfg::Scalar(nonces[0].0 - (p * view.secret_share().0));
share
}
#[must_use]
@@ -275,36 +203,32 @@ impl Algorithm<Ed25519> for ClsagMultisig {
&self,
_: dfg::EdwardsPoint,
_: &[Vec<dfg::EdwardsPoint>],
sum: dfg::Scalar,
sum: dfg::Scalar
) -> Option<Self::Signature> {
let interim = self.interim.as_ref().unwrap();
let mut clsag = interim.clsag.clone();
clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
if clsag
.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
self.msg.as_ref().unwrap(),
)
.is_ok()
{
if clsag.verify(
&self.input().decoys.ring,
&self.image,
&interim.pseudo_out,
&self.msg.as_ref().unwrap()
).is_ok() {
return Some((clsag, interim.pseudo_out));
}
None
return None;
}
#[must_use]
fn verify_share(
&self,
verification_share: dfg::EdwardsPoint,
nonces: &[Vec<dfg::EdwardsPoint>],
share: dfg::Scalar,
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
) -> bool {
let interim = self.interim.as_ref().unwrap();
Ok(vec![
(share, dfg::EdwardsPoint::generator()),
(dfg::Scalar(interim.p), verification_share),
(-dfg::Scalar::one(), nonces[0][0]),
])
return (&share.0 * &ED25519_BASEPOINT_TABLE) == (
nonces[0][0].0 - (interim.p * verification_share.0)
);
}
}

View File

@@ -1,8 +1,67 @@
use curve25519_dalek::edwards::EdwardsPoint;
use subtle::ConditionallySelectable;
pub use monero_generators::{hash_to_point as raw_hash_to_point};
use curve25519_dalek::edwards::{CompressedEdwardsY, EdwardsPoint};
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
raw_hash_to_point(key.compress().to_bytes())
use group::ff::{Field, PrimeField};
use dalek_ff_group::field::FieldElement;
use crate::hash;
pub fn hash_to_point(point: EdwardsPoint) -> EdwardsPoint {
let mut bytes = point.compress().to_bytes();
unsafe {
#[link(name = "wrapper")]
extern "C" {
fn c_hash_to_point(point: *const u8);
}
c_hash_to_point(bytes.as_mut_ptr());
}
CompressedEdwardsY::from_slice(&bytes).decompress().unwrap()
}
// This works without issue. It's also 140 times slower (@ 3.5ms), and despite checking it passes
// for all branches, there still could be *some* discrepancy somewhere. There's no reason to use it
// unless we're trying to purge that section of the C static library, which we aren't right now
#[allow(dead_code)]
pub(crate) fn rust_hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
#[allow(non_snake_case)]
let A = FieldElement::from(486662u64);
let v = FieldElement::from_square(hash(&key.compress().to_bytes())).double();
let w = v + FieldElement::one();
let x = w.square() + (-A.square() * v);
// This isn't the complete X, yet its initial value
// We don't calculate the full X, and instead solely calculate Y, letting dalek reconstruct X
// While inefficient, it solves API boundaries and reduces the amount of work done here
#[allow(non_snake_case)]
let X = {
let u = w;
let v = x;
let v3 = v * v * v;
let uv3 = u * v3;
let v7 = v3 * v3 * v;
let uv7 = u * v7;
uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
};
let x = X.square() * x;
let y = w - x;
let non_zero_0 = !y.is_zero();
let y_if_non_zero_0 = w + x;
let sign = non_zero_0 & (!y_if_non_zero_0.is_zero());
let mut z = -A;
z *= FieldElement::conditional_select(&v, &FieldElement::from(1u8), sign);
#[allow(non_snake_case)]
let Z = z + w;
#[allow(non_snake_case)]
let mut Y = z - w;
Y = Y * Z.invert().unwrap();
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}

View File

@@ -1,34 +1,25 @@
use core::ops::Deref;
use std::io::{self, Read, Write};
use zeroize::Zeroizing;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
pub(crate) mod hash_to_point;
pub use hash_to_point::{raw_hash_to_point, hash_to_point};
pub use hash_to_point::hash_to_point;
/// CLSAG struct, along with signing and verifying functionality.
pub mod clsag;
/// Bulletproofs(+) structs, along with proving and verifying functionality.
pub mod bulletproofs;
use crate::{
Protocol,
serialize::*,
ringct::{clsag::Clsag, bulletproofs::Bulletproofs},
ringct::{clsag::Clsag, bulletproofs::Bulletproofs}
};
/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
pub fn generate_key_image(secret: Scalar) -> EdwardsPoint {
secret * hash_to_point(&secret * &ED25519_BASEPOINT_TABLE)
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct RctBase {
pub fee: u64,
pub ecdh_info: Vec<[u8; 8]>,
pub commitments: Vec<EdwardsPoint>,
pub commitments: Vec<EdwardsPoint>
}
impl RctBase {
@@ -36,130 +27,119 @@ impl RctBase {
1 + 8 + (outputs * (8 + 32))
}
pub fn write<W: Write>(&self, w: &mut W, rct_type: u8) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W, rct_type: u8) -> std::io::Result<()> {
w.write_all(&[rct_type])?;
match rct_type {
0 => Ok(()),
5 | 6 => {
5 => {
write_varint(&self.fee, w)?;
for ecdh in &self.ecdh_info {
w.write_all(ecdh)?;
}
write_raw_vec(write_point, &self.commitments, w)
}
_ => panic!("Serializing unknown RctType's Base"),
},
_ => panic!("Serializing unknown RctType's Base")
}
}
pub fn read<R: Read>(outputs: usize, r: &mut R) -> io::Result<(RctBase, u8)> {
let rct_type = read_byte(r)?;
pub fn deserialize<R: std::io::Read>(outputs: usize, r: &mut R) -> std::io::Result<(RctBase, u8)> {
let mut rct_type = [0];
r.read_exact(&mut rct_type)?;
Ok((
if rct_type == 0 {
if rct_type[0] == 0 {
RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] }
} else {
RctBase {
fee: read_varint(r)?,
ecdh_info: (0 .. outputs).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?,
ecdh_info: (0 .. outputs).map(
|_| { let mut ecdh = [0; 8]; r.read_exact(&mut ecdh).map(|_| ecdh) }
).collect::<Result<_, _>>()?,
commitments: read_raw_vec(read_point, outputs, r)?
}
},
rct_type,
rct_type[0]
))
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub enum RctPrunable {
Null,
Clsag { bulletproofs: Vec<Bulletproofs>, clsags: Vec<Clsag>, pseudo_outs: Vec<EdwardsPoint> },
Clsag {
bulletproofs: Vec<Bulletproofs>,
clsags: Vec<Clsag>,
pseudo_outs: Vec<EdwardsPoint>
}
}
impl RctPrunable {
/// RCT Type byte for a given RctPrunable struct.
pub fn rct_type(&self) -> u8 {
match self {
RctPrunable::Null => 0,
RctPrunable::Clsag { bulletproofs, .. } => {
if matches!(bulletproofs[0], Bulletproofs::Original { .. }) {
5
} else {
6
}
}
RctPrunable::Clsag { .. } => 5
}
}
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
(inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
1 + Bulletproofs::fee_weight(outputs) + (inputs * (Clsag::fee_weight() + 32))
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => Ok(()),
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs, .. } => {
write_vec(Bulletproofs::write, bulletproofs, w)?;
write_raw_vec(Clsag::write, clsags, w)?;
write_raw_vec(write_point, pseudo_outs, w)
RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
write_vec(Bulletproofs::serialize, &bulletproofs, w)?;
write_raw_vec(Clsag::serialize, &clsags, w)?;
write_raw_vec(write_point, &pseudo_outs, w)
}
}
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
pub fn deserialize<R: std::io::Read>(
rct_type: u8,
decoys: &[usize],
r: &mut R
) -> std::io::Result<RctPrunable> {
Ok(
match rct_type {
0 => RctPrunable::Null,
5 => RctPrunable::Clsag {
// TODO: Can the amount of outputs be calculated from the BPs for any validly formed TX?
bulletproofs: read_vec(Bulletproofs::deserialize, r)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::deserialize(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?
}
)
}
pub fn read<R: Read>(rct_type: u8, decoys: &[usize], r: &mut R) -> io::Result<RctPrunable> {
Ok(match rct_type {
0 => RctPrunable::Null,
5 | 6 => RctPrunable::Clsag {
bulletproofs: read_vec(
if rct_type == 5 { Bulletproofs::read } else { Bulletproofs::read_plus },
r,
)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
_ => Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown RCT type"))?,
})
}
pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn signature_serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
RctPrunable::Clsag { bulletproofs, .. } => {
bulletproofs.iter().try_for_each(|bp| bp.signature_write(w))
}
RctPrunable::Clsag { bulletproofs, .. } => bulletproofs.iter().map(|bp| bp.signature_serialize(w)).collect(),
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct RctSignatures {
pub base: RctBase,
pub prunable: RctPrunable,
pub prunable: RctPrunable
}
impl RctSignatures {
pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
pub(crate) fn fee_weight(inputs: usize, outputs: usize) -> usize {
RctBase::fee_weight(outputs) + RctPrunable::fee_weight(inputs, outputs)
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.base.write(w, self.prunable.rct_type())?;
self.prunable.write(w)
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.base.serialize(w, self.prunable.rct_type())?;
self.prunable.serialize(w)
}
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = vec![];
self.write(&mut serialized).unwrap();
serialized
}
pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<RctSignatures> {
let base = RctBase::read(outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::read(base.1, &decoys, r)? })
pub fn deserialize<R: std::io::Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> std::io::Result<RctSignatures> {
let base = RctBase::deserialize(outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::deserialize(base.1, &decoys, r)? })
}
}

View File

@@ -5,47 +5,25 @@ use thiserror::Error;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};
use serde_json::json;
use digest_auth::AuthContext;
use reqwest::{Client, RequestBuilder};
use reqwest;
use crate::{
Protocol,
transaction::{Input, Timelock, Transaction},
block::Block,
wallet::Fee,
};
use crate::{transaction::{Input, Timelock, Transaction}, block::Block, wallet::Fee};
#[derive(Deserialize, Debug)]
pub struct EmptyResponse {}
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
result: T,
result: T
}
#[derive(Deserialize, Debug)]
struct TransactionResponse {
tx_hash: String,
block_height: Option<usize>,
as_hex: String,
pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>,
}
#[derive(Clone, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Error, Debug)]
pub enum RpcError {
#[error("internal error ({0})")]
InternalError(&'static str),
InternalError(String),
#[error("connection error")]
ConnectionError,
#[error("invalid node")]
InvalidNode,
#[error("transactions not found")]
TransactionsNotFound(Vec<[u8; 32]>),
#[error("invalid point ({0})")]
@@ -53,78 +31,33 @@ pub enum RpcError {
#[error("pruned transaction")]
PrunedTransaction,
#[error("invalid transaction ({0:?})")]
InvalidTransaction([u8; 32]),
InvalidTransaction([u8; 32])
}
fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
hex::decode(value).map_err(|_| RpcError::InvalidNode)
}
fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
hex::decode(value).map_err(|_| RpcError::InternalError("Monero returned invalid hex".to_string()))
}
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
)
.decompress()
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?
).decompress().ok_or(RpcError::InvalidPoint(point.to_string()))
}
#[derive(Clone, Debug)]
pub struct Rpc {
client: Client,
userpass: Option<(String, String)>,
url: String,
}
pub struct Rpc(String);
impl Rpc {
/// Create a new RPC connection.
/// A daemon requiring authentication can be used via including the username and password in the
/// URL.
pub fn new(mut url: String) -> Result<Rpc, RpcError> {
// Parse out the username and password
let userpass = if url.contains('@') {
let url_clone = url.clone();
let split_url = url_clone.split('@').collect::<Vec<_>>();
if split_url.len() != 2 {
Err(RpcError::InvalidNode)?;
}
let mut userpass = split_url[0];
url = split_url[1].to_string();
// If there was additionally a protocol string, restore that to the daemon URL
if userpass.contains("://") {
let split_userpass = userpass.split("://").collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
url = split_userpass[0].to_string() + "://" + &url;
userpass = split_userpass[1];
}
let split_userpass = userpass.split(':').collect::<Vec<_>>();
if split_userpass.len() != 2 {
Err(RpcError::InvalidNode)?;
}
Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
} else {
None
};
Ok(Rpc { client: Client::new(), userpass, url })
pub fn new(daemon: String) -> Rpc {
Rpc(daemon)
}
/// Perform a RPC call to the specified method with the provided parameters.
/// This is NOT a JSON-RPC call, which use a method of "json_rpc" and are available via
/// `json_rpc_call`.
pub async fn rpc_call<Params: Serialize + Debug, Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Option<Params>,
) -> Result<Response, RpcError> {
let mut builder = self.client.post(self.url.clone() + "/" + method);
pub async fn rpc_call<
Params: Serialize + Debug,
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Option<Params>) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let mut builder = client.post(&(self.0.clone() + "/" + method));
if let Some(params) = params.as_ref() {
builder = builder.json(params);
}
@@ -132,212 +65,153 @@ impl Rpc {
self.call_tail(method, builder).await
}
/// Perform a JSON-RPC call to the specified method with the provided parameters
pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Option<Value>,
) -> Result<Response, RpcError> {
let mut req = json!({ "method": method });
if let Some(params) = params {
req.as_object_mut().unwrap().insert("params".into(), params);
}
Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
}
/// Perform a binary call to the specified method with the provided parameters.
pub async fn bin_call<Response: DeserializeOwned + Debug>(
&self,
method: &str,
params: Vec<u8>,
) -> Result<Response, RpcError> {
let builder = self.client.post(self.url.clone() + "/" + method).body(params.clone());
pub async fn bin_call<
Response: DeserializeOwned + Debug
>(&self, method: &str, params: Vec<u8>) -> Result<Response, RpcError> {
let client = reqwest::Client::new();
let builder = client.post(&(self.0.clone() + "/" + method)).body(params);
self.call_tail(method, builder.header("Content-Type", "application/octet-stream")).await
}
async fn call_tail<Response: DeserializeOwned + Debug>(
&self,
method: &str,
mut builder: RequestBuilder,
) -> Result<Response, RpcError> {
if let Some((user, pass)) = &self.userpass {
let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
// Only provide authentication if this daemon actually expects it
if let Some(header) = req.headers().get("www-authenticate") {
builder = builder.header(
"Authorization",
digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
.map_err(|_| RpcError::InvalidNode)?
.respond(&AuthContext::new_post::<_, _, _, &[u8]>(
user,
pass,
"/".to_string() + method,
None,
))
.map_err(|_| RpcError::InvalidNode)?
.to_header_string(),
);
}
}
let res = builder.send().await.map_err(|_| RpcError::ConnectionError)?;
Ok(if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response"))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response"))?
})
}
/// Get the active blockchain protocol version.
pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
#[derive(Deserialize, Debug)]
struct ProtocolResponse {
major_version: usize,
}
#[derive(Deserialize, Debug)]
struct LastHeaderResponse {
block_header: ProtocolResponse,
}
async fn call_tail<
Response: DeserializeOwned + Debug
>(&self, method: &str, builder: reqwest::RequestBuilder) -> Result<Response, RpcError> {
let res = builder
.send()
.await
.map_err(|_| RpcError::ConnectionError)?;
Ok(
match self
.json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
.await?
.block_header
.major_version
{
13 | 14 => Protocol::v14,
15 | 16 => Protocol::v16,
version => Protocol::Unsupported(version),
},
if !method.ends_with(".bin") {
serde_json::from_str(&res.text().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse JSON response".to_string()))?
} else {
monero_epee_bin_serde::from_bytes(&res.bytes().await.map_err(|_| RpcError::ConnectionError)?)
.map_err(|_| RpcError::InternalError("Failed to parse binary response".to_string()))?
}
)
}
pub async fn get_height(&self) -> Result<usize, RpcError> {
#[derive(Deserialize, Debug)]
struct HeightResponse {
height: usize,
height: usize
}
Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
}
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
if hashes.is_empty() {
return Ok(vec![]);
async fn get_transactions_core(
&self,
hashes: &[[u8; 32]]
) -> Result<(Vec<Result<Transaction, RpcError>>, Vec<[u8; 32]>), RpcError> {
if hashes.len() == 0 {
return Ok((vec![], vec![]));
}
let txs: TransactionsResponse = self
.rpc_call(
"get_transactions",
Some(json!({
"txs_hashes": hashes.iter().map(hex::encode).collect::<Vec<_>>()
})),
)
.await?;
if !txs.missed_tx.is_empty() {
Err(RpcError::TransactionsNotFound(
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
))?;
#[derive(Deserialize, Debug)]
struct TransactionResponse {
tx_hash: String,
as_hex: String,
pruned_as_hex: String
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
#[serde(default)]
missed_tx: Vec<String>,
txs: Vec<TransactionResponse>
}
txs
.txs
.iter()
.map(|res| {
let tx = Transaction::read::<&[u8]>(
&mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
.as_ref(),
)
.map_err(|_| match hash_hex(&res.tx_hash) {
Ok(hash) => RpcError::InvalidTransaction(hash),
Err(err) => err,
})?;
let txs: TransactionsResponse = self.rpc_call("get_transactions", Some(json!({
"txs_hashes": hashes.iter().map(|hash| hex::encode(&hash)).collect::<Vec<_>>()
}))).await?;
Ok((
txs.txs.iter().map(|res| {
let tx = Transaction::deserialize(
&mut std::io::Cursor::new(
rpc_hex(if res.as_hex.len() != 0 { &res.as_hex } else { &res.pruned_as_hex }).unwrap()
)
).map_err(|_| RpcError::InvalidTransaction(hex::decode(&res.tx_hash).unwrap().try_into().unwrap()))?;
// https://github.com/monero-project/monero/issues/8311
if res.as_hex.is_empty() {
if res.as_hex.len() == 0 {
match tx.prefix.inputs.get(0) {
Some(Input::Gen { .. }) => (),
_ => Err(RpcError::PrunedTransaction)?,
_ => Err(RpcError::PrunedTransaction)?
}
}
Ok(tx)
})
.collect()
}).collect(),
txs.missed_tx.iter().map(|hash| hex::decode(&hash).unwrap().try_into().unwrap()).collect()
))
}
pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
}
pub async fn get_transaction_block_number(&self, tx: &[u8]) -> Result<Option<usize>, RpcError> {
let txs: TransactionsResponse =
self.rpc_call("get_transactions", Some(json!({ "txs_hashes": [hex::encode(tx)] }))).await?;
if !txs.missed_tx.is_empty() {
Err(RpcError::TransactionsNotFound(
txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
))?;
pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
let (txs, missed) = self.get_transactions_core(hashes).await?;
if missed.len() != 0 {
Err(RpcError::TransactionsNotFound(missed))?;
}
Ok(txs.txs[0].block_height)
// This will clone several KB and is accordingly inefficient
// TODO: Optimize
txs.iter().cloned().collect::<Result<_, _>>()
}
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
#[derive(Deserialize, Debug)]
struct BlockHeaderResponse {
hash: String,
}
#[derive(Deserialize, Debug)]
struct BlockHeaderByHeightResponse {
block_header: BlockHeaderResponse,
}
let header: BlockHeaderByHeightResponse =
self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
pub async fn get_transactions_possible(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
let (txs, _) = self.get_transactions_core(hashes).await?;
Ok(txs.iter().cloned().filter_map(|tx| tx.ok()).collect())
}
pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
pub async fn get_block(&self, height: usize) -> Result<Block, RpcError> {
#[derive(Deserialize, Debug)]
struct BlockResponse {
blob: String,
blob: String
}
let res: BlockResponse =
self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
let block: JsonRpcResponse<BlockResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_block",
"params": {
"height": height
}
}))).await?;
Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)
Ok(
Block::deserialize(
&mut std::io::Cursor::new(rpc_hex(&block.result.blob)?)
).expect("Monero returned a block we couldn't deserialize")
)
}
pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
self.get_block(self.get_block_hash(number).await?).await
}
pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(hash).await?;
async fn get_block_transactions_core(
&self,
height: usize,
possible: bool
) -> Result<Vec<Transaction>, RpcError> {
let block = self.get_block(height).await?;
let mut res = vec![block.miner_tx];
res.extend(self.get_transactions(&block.txs).await?);
res.extend(
if possible {
self.get_transactions_possible(&block.txs).await?
} else {
self.get_transactions(&block.txs).await?
}
);
Ok(res)
}
pub async fn get_block_transactions_by_number(
&self,
number: usize,
) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions(self.get_block_hash(number).await?).await
pub async fn get_block_transactions(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, false).await
}
pub async fn get_block_transactions_possible(&self, height: usize) -> Result<Vec<Transaction>, RpcError> {
self.get_block_transactions_core(height, true).await
}
/// Get the output indexes of the specified transaction.
pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
#[derive(Serialize, Debug)]
struct Request {
txid: [u8; 32],
txid: [u8; 32]
}
#[allow(dead_code)]
@@ -347,125 +221,104 @@ impl Rpc {
status: String,
untrusted: bool,
credits: usize,
top_hash: String,
top_hash: String
}
let indexes: OIndexes = self
.bin_call(
"get_o_indexes.bin",
monero_epee_bin_serde::to_bytes(&Request { txid: hash }).unwrap(),
)
.await?;
let indexes: OIndexes = self.bin_call("get_o_indexes.bin", monero_epee_bin_serde::to_bytes(
&Request {
txid: hash
}).unwrap()
).await?;
Ok(indexes.o_indexes)
}
/// Get the output distribution, from the specified height to the specified height (both
/// inclusive).
pub async fn get_output_distribution(
&self,
from: usize,
to: usize,
) -> Result<Vec<u64>, RpcError> {
// from and to are inclusive
pub async fn get_output_distribution(&self, from: usize, to: usize) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distribution {
distribution: Vec<u64>,
pub struct Distribution {
distribution: Vec<u64>
}
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>,
distributions: Vec<Distribution>
}
let mut distributions: Distributions = self
.json_rpc_call(
"get_output_distribution",
Some(json!({
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to,
})),
)
.await?;
let mut distributions: JsonRpcResponse<Distributions> = self.rpc_call("json_rpc", Some(json!({
"method": "get_output_distribution",
"params": {
"binary": false,
"amounts": [0],
"cumulative": true,
"from_height": from,
"to_height": to
}
}))).await?;
Ok(distributions.distributions.swap_remove(0).distribution)
Ok(distributions.result.distributions.swap_remove(0).distribution)
}
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if they're
/// unlocked.
pub async fn get_unlocked_outputs(
pub async fn get_outputs(
&self,
indexes: &[u64],
height: usize,
height: usize
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
struct Out {
pub struct Out {
key: String,
mask: String,
txid: String,
txid: String
}
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>,
outs: Vec<Out>
}
let outs: Outs = self
.rpc_call(
"get_outs",
Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
})),
)
.await?;
let outs: Outs = self.rpc_call("get_outs", Some(json!({
"get_txid": true,
"outputs": indexes.iter().map(|o| json!({
"amount": 0,
"index": o
})).collect::<Vec<_>>()
}))).await?;
let txs = self
.get_transactions(
&outs
.outs
.iter()
.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
.collect::<Result<Vec<_>, _>>()?,
)
.await?;
// TODO: https://github.com/serai-dex/serai/issues/104
outs
.outs
.iter()
.enumerate()
.map(|(i, out)| {
Ok(Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
let txs = self.get_transactions(
&outs.outs.iter().map(|out|
rpc_hex(&out.txid).expect("Monero returned an invalidly encoded hash")
.try_into().expect("Monero returned an invalid sized hash")
).collect::<Vec<_>>()
).await?;
// TODO: Support time based lock times. These shouldn't be needed, and it may be painful to
// get the median time for the given height, yet we do need to in order to be complete
outs.outs.iter().enumerate().map(
|(i, out)| Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?]).filter(|_| {
match txs[i].prefix.timelock {
Timelock::Block(t_height) => t_height <= height,
_ => false,
Timelock::Block(t_height) => (t_height <= height),
_ => false
}
}))
})
.collect()
})
)
).collect()
}
/// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
/// MUST be sanity checked.
// TODO: Take a sanity check argument
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct FeeResponse {
fee: u64,
quantization_mask: u64,
quantization_mask: u64
}
let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
let res: JsonRpcResponse<FeeResponse> = self.rpc_call("json_rpc", Some(json!({
"method": "get_fee_estimate"
}))).await?;
Ok(Fee { per_weight: res.result.fee, mask: res.result.quantization_mask })
}
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
@@ -482,14 +335,14 @@ impl Rpc {
overspend: bool,
too_big: bool,
too_few_outputs: bool,
reason: String,
reason: String
}
let mut buf = Vec::with_capacity(2048);
tx.write(&mut buf).unwrap();
let res: SendRawResponse = self
.rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(&buf) })))
.await?;
tx.serialize(&mut buf).unwrap();
let res: SendRawResponse = self.rpc_call("send_raw_transaction", Some(json!({
"tx_as_hex": hex::encode(&buf)
}))).await?;
if res.status != "OK" {
Err(RpcError::InvalidTransaction(tx.hash()))?;
@@ -497,21 +350,4 @@ impl Rpc {
Ok(())
}
pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
self
.rpc_call::<_, EmptyResponse>(
"json_rpc",
Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": block_count
},
})),
)
.await?;
Ok(())
}
}

View File

@@ -1,21 +1,14 @@
use std::io::{self, Read, Write};
use std::io;
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use curve25519_dalek::{scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn varint_len(varint: usize) -> usize {
pub fn varint_len(varint: usize) -> usize {
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}
pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
pub fn write_varint<W: io::Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
@@ -23,119 +16,89 @@ pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()>
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
write_byte(&b, w)?;
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}
pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
pub fn write_scalar<W: io::Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
w.write_all(&scalar.to_bytes())
}
pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
pub fn write_point<W: io::Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
w.write_all(&point.compress().to_bytes())
}
pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
pub fn write_raw_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
pub fn write_vec<
T,
W: io::Write,
F: Fn(&T, &mut W) -> io::Result<()>
>(f: F, values: &[T], w: &mut W) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_raw_vec(f, values, w)
write_raw_vec(f, &values, w)
}
pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
let mut res = [0; N];
pub fn read_byte<R: io::Read>(r: &mut R) -> io::Result<u8> {
let mut res = [0; 1];
r.read_exact(&mut res)?;
Ok(res)
Ok(res[0])
}
pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
Ok(read_bytes::<_, 1>(r)?[0])
}
pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}
pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
read_bytes(r).map(u32::from_le_bytes)
}
pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
pub fn read_varint<R: io::Read>(r: &mut R) -> io::Result<u64> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
if (bits != 0) && (b == 0) {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
}
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
}
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
// TODO: Error if bits exceed u64
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
Ok(res)
}
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
// for now. There's also further edge cases as noted by
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
// reduction applied
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(read_bytes(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
pub fn read_32<R: io::Read>(r: &mut R) -> io::Result<[u8; 32]> {
let mut res = [0; 32];
r.read_exact(&mut res)?;
Ok(res)
}
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
let bytes = read_bytes(r)?;
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
// TODO: Potentially update to Monero's parsing rules on scalars/points, which should be any arbitrary 32-bytes
// We may be able to consider such transactions as malformed and accordingly be opinionated in ignoring them
pub fn read_scalar<R: io::Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(
read_32(r)?
).ok_or(io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
.filter(|point| point.is_torsion_free())
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
pub fn read_point<R: io::Read>(r: &mut R) -> io::Result<EdwardsPoint> {
CompressedEdwardsY(
read_32(r)?
).decompress().filter(|point| point.is_torsion_free()).ok_or(io::Error::new(io::ErrorKind::Other, "invalid point"))
}
pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = vec![];
pub fn read_raw_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, len: usize, r: &mut R) -> io::Result<Vec<T>> {
let mut res = Vec::with_capacity(
len.try_into().map_err(|_| io::Error::new(io::ErrorKind::Other, "length exceeds usize"))?
);
for _ in 0 .. len {
res.push(f(r)?);
}
Ok(res)
}
pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
r: &mut R,
) -> io::Result<Vec<T>> {
pub fn read_vec<R: io::Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}

View File

@@ -1,176 +1,45 @@
use hex_literal::hex;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
use crate::{
random_scalar,
wallet::address::{Network, AddressType, AddressMeta, MoneroAddress},
};
use crate::wallet::address::{Network, AddressType, Address};
const SPEND: [u8; 32] = hex!("f8631661f6ab4e6fda310c797330d86e23a682f20d5bc8cc27b18051191f16d7");
const VIEW: [u8; 32] = hex!("4a1535063ad1fee2dabbf909d4fd9a873e29541b401f0944754e17c9a41820ce");
const STANDARD: &str =
"4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const STANDARD: &'static str = "4B33mFPMq6mKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KQH4pNey";
const PAYMENT_ID: [u8; 8] = hex!("b8963a57855cf73f");
const INTEGRATED: &str =
"4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6Mn\
pXSn88oBX35";
const INTEGRATED: &'static str = "4Ljin4CrSNHKi7Eiyd5XuyKRVMGVZz1Rqb9ZTyGApXW5d1aT7UBDZ89ewmnWFkzJ5wPd2SFbn313vCT8a4E2Qf4KbaTH6MnpXSn88oBX35";
const SUB_SPEND: [u8; 32] =
hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_SPEND: [u8; 32] = hex!("fe358188b528335ad1cfdc24a22a23988d742c882b6f19a602892eaab3c1b62b");
const SUB_VIEW: [u8; 32] = hex!("9bc2b464de90d058468522098d5610c5019c45fd1711a9517db1eea7794f5470");
const SUBADDRESS: &str =
"8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
const FEATURED_JSON: &str = include_str!("vectors/featured_addresses.json");
const SUBADDRESS: &'static str = "8C5zHM5ud8nGC4hC2ULiBLSWx9infi8JUUmWEat4fcTf8J4H38iWYVdFmPCA9UmfLTZxD43RsyKnGEdZkoGij6csDeUnbEB";
#[test]
fn standard_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, STANDARD).unwrap();
let addr = Address::from_str(STANDARD, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Standard);
assert!(!addr.meta.kind.subaddress());
assert_eq!(addr.meta.kind.payment_id(), None);
assert!(!addr.meta.kind.guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), STANDARD);
}
#[test]
fn integrated_address() {
let addr = MoneroAddress::from_str(Network::Mainnet, INTEGRATED).unwrap();
let addr = Address::from_str(INTEGRATED, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Integrated(PAYMENT_ID));
assert!(!addr.meta.kind.subaddress());
assert_eq!(addr.meta.kind.payment_id(), Some(PAYMENT_ID));
assert!(!addr.meta.kind.guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SPEND);
assert_eq!(addr.view.compress().to_bytes(), VIEW);
assert_eq!(addr.to_string(), INTEGRATED);
}
#[test]
fn subaddress() {
let addr = MoneroAddress::from_str(Network::Mainnet, SUBADDRESS).unwrap();
let addr = Address::from_str(SUBADDRESS, Network::Mainnet).unwrap();
assert_eq!(addr.meta.network, Network::Mainnet);
assert_eq!(addr.meta.kind, AddressType::Subaddress);
assert!(addr.meta.kind.subaddress());
assert_eq!(addr.meta.kind.payment_id(), None);
assert!(!addr.meta.kind.guaranteed());
assert_eq!(addr.meta.guaranteed, false);
assert_eq!(addr.spend.compress().to_bytes(), SUB_SPEND);
assert_eq!(addr.view.compress().to_bytes(), SUB_VIEW);
assert_eq!(addr.to_string(), SUBADDRESS);
}
#[test]
fn featured() {
for (network, first) in
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
{
for _ in 0 .. 100 {
let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
for features in 0 .. (1 << 3) {
const SUBADDRESS_FEATURE_BIT: u8 = 1;
const INTEGRATED_FEATURE_BIT: u8 = 1 << 1;
const GUARANTEED_FEATURE_BIT: u8 = 1 << 2;
let subaddress = (features & SUBADDRESS_FEATURE_BIT) == SUBADDRESS_FEATURE_BIT;
let mut payment_id = [0; 8];
OsRng.fill_bytes(&mut payment_id);
let payment_id = Some(payment_id)
.filter(|_| (features & INTEGRATED_FEATURE_BIT) == INTEGRATED_FEATURE_BIT);
let guaranteed = (features & GUARANTEED_FEATURE_BIT) == GUARANTEED_FEATURE_BIT;
let kind = AddressType::Featured { subaddress, payment_id, guaranteed };
let meta = AddressMeta::new(network, kind);
let addr = MoneroAddress::new(meta, spend, view);
assert_eq!(addr.to_string().chars().next().unwrap(), first);
assert_eq!(MoneroAddress::from_str(network, &addr.to_string()).unwrap(), addr);
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.subaddress(), subaddress);
assert_eq!(addr.payment_id(), payment_id);
assert_eq!(addr.guaranteed(), guaranteed);
}
}
}
}
#[test]
fn featured_vectors() {
#[derive(serde::Deserialize)]
struct Vector {
address: String,
network: String,
spend: String,
view: String,
subaddress: bool,
integrated: bool,
payment_id: Option<[u8; 8]>,
guaranteed: bool,
}
let vectors = serde_json::from_str::<Vec<Vector>>(FEATURED_JSON).unwrap();
for vector in vectors {
let first = vector.address.chars().next().unwrap();
let network = match vector.network.as_str() {
"Mainnet" => {
assert_eq!(first, 'C');
Network::Mainnet
}
"Testnet" => {
assert_eq!(first, 'K');
Network::Testnet
}
"Stagenet" => {
assert_eq!(first, 'F');
Network::Stagenet
}
_ => panic!("Unknown network"),
};
let spend =
CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
let view =
CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
assert_eq!(addr.spend, spend);
assert_eq!(addr.view, view);
assert_eq!(addr.subaddress(), vector.subaddress);
assert_eq!(vector.integrated, vector.payment_id.is_some());
assert_eq!(addr.payment_id(), vector.payment_id);
assert_eq!(addr.guaranteed(), vector.guaranteed);
assert_eq!(
MoneroAddress::new(
AddressMeta::new(
network,
AddressType::Featured {
subaddress: vector.subaddress,
payment_id: vector.payment_id,
guaranteed: vector.guaranteed
}
),
spend,
view
)
.to_string(),
vector.address
);
}
}

View File

@@ -1,92 +0,0 @@
use hex_literal::hex;
use rand::rngs::OsRng;
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;
use crate::{
Commitment, random_scalar,
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
};
#[test]
fn bulletproofs_vector() {
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
let point = |point| CompressedEdwardsY(point).decompress().unwrap();
// Generated from Monero
assert!(Bulletproofs::Original(OriginalStruct {
A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
L: vec![
point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
],
R: vec![
point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
],
a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
})
.verify(
&mut OsRng,
&[
// For some reason, these vectors are * INV_EIGHT
point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
.mul_by_cofactor(),
point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
.mul_by_cofactor(),
]
));
}
macro_rules! bulletproofs_tests {
($name: ident, $max: ident, $plus: literal) => {
#[test]
fn $name() {
// Create Bulletproofs for all possible output quantities
let mut verifier = BatchVerifier::new(16);
for i in 1 .. 17 {
let commitments = (1 ..= i)
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
.collect::<Vec<_>>();
let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
assert!(bp.verify(&mut OsRng, &commitments));
assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
}
assert!(verifier.verify_vartime());
}
#[test]
fn $max() {
// Check Bulletproofs errors if we try to prove for too many outputs
let mut commitments = vec![];
for _ in 0 .. 17 {
commitments.push(Commitment::new(Scalar::zero(), 0));
}
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
}
};
}
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);

View File

@@ -1,9 +1,7 @@
use core::ops::Deref;
#[cfg(feature = "multisig")]
use std::sync::{Arc, RwLock};
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use rand::{RngCore, rngs::OsRng};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
@@ -13,15 +11,13 @@ use transcript::{Transcript, RecommendedTranscript};
use frost::curve::Ed25519;
use crate::{
Commitment, random_scalar,
Commitment,
random_scalar,
wallet::Decoys,
ringct::{
generate_key_image,
clsag::{ClsagInput, Clsag},
},
ringct::{generate_key_image, clsag::{ClsagInput, Clsag}}
};
#[cfg(feature = "multisig")]
use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};
use crate::{frost::MultisigError, ringct::clsag::{ClsagDetails, ClsagMultisig}};
#[cfg(feature = "multisig")]
use frost::tests::{key_gen, algorithm_machines, sign};
@@ -37,49 +33,48 @@ fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];
let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
let mut secrets = [Scalar::zero(), Scalar::zero()];
let mut ring = vec![];
for i in 0 .. RING_LEN {
let dest = Zeroizing::new(random_scalar(&mut OsRng));
let dest = random_scalar(&mut OsRng);
let mask = random_scalar(&mut OsRng);
let amount;
if i == real {
secrets = (dest.clone(), mask);
if i == u64::from(real) {
secrets = [dest, mask];
amount = AMOUNT;
} else {
amount = OsRng.next_u64();
}
ring
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
ring.push([&dest * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
}
let image = generate_key_image(&secrets.0);
let image = generate_key_image(secrets[0]);
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
vec![(
secrets.0,
&vec![(
secrets[0],
image,
ClsagInput::new(
Commitment::new(secrets.1, AMOUNT),
Commitment::new(secrets[1], AMOUNT),
Decoys {
i: u8::try_from(real).unwrap(),
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone(),
},
)
.unwrap(),
ring: ring.clone()
}
).unwrap()
)],
random_scalar(&mut OsRng),
msg,
)
.swap_remove(0);
msg
).swap_remove(0);
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
#[cfg(feature = "experimental")]
clsag.rust_verify(&ring, &image, &pseudo_out, &msg).unwrap();
}
}
#[cfg(feature = "multisig")]
#[test]
fn clsag_multisig() {
fn clsag_multisig() -> Result<(), MultisigError> {
let keys = key_gen::<_, Ed25519>(&mut OsRng);
let randomness = random_scalar(&mut OsRng);
@@ -101,28 +96,31 @@ fn clsag_multisig() {
}
let mask_sum = random_scalar(&mut OsRng);
let algorithm = ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&1].group_key().0,
Arc::new(RwLock::new(Some(ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone(),
},
)
.unwrap(),
mask_sum,
)))),
);
sign(
&mut OsRng,
algorithm.clone(),
keys.clone(),
algorithm_machines(&mut OsRng, algorithm, &keys),
&[1; 32],
algorithm_machines(
&mut OsRng,
ClsagMultisig::new(
RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
keys[&1].group_key().0,
Arc::new(RwLock::new(Some(
ClsagDetails::new(
ClsagInput::new(
Commitment::new(randomness, AMOUNT),
Decoys {
i: RING_INDEX,
offsets: (1 ..= RING_LEN).into_iter().collect(),
ring: ring.clone()
}
).unwrap(),
mask_sum
)
)))
).unwrap(),
&keys
),
&[1; 32]
);
Ok(())
}

View File

@@ -0,0 +1,13 @@
use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use crate::{random_scalar, ringct::hash_to_point::{hash_to_point as c_hash_to_point, rust_hash_to_point}};
#[test]
fn hash_to_point() {
for _ in 0 .. 50 {
let point = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
assert_eq!(rust_hash_to_point(point), c_hash_to_point(point));
}
}

View File

@@ -1,3 +1,3 @@
mod hash_to_point;
mod clsag;
mod bulletproofs;
mod address;

View File

@@ -1,230 +0,0 @@
[
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3pYyUDn",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v3wfMHCy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJTo4p5ayvj36PStM5AX",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [46, 48, 134, 34, 245, 148, 243, 195],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJWv5WqMCNE2hRs9rJfy",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [153, 176, 98, 204, 151, 27, 197, 168],
"guaranteed": false
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4DwqwH1",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5Jye2v4Pyz8bD",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJcwt7hykou237MqZZDA",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": false,
"integrated": true,
"payment_id": [88, 37, 149, 111, 171, 108, 120, 181],
"guaranteed": true
},
{
"address": "CjWdTpuDaZ69nTGxzm9YarR82YDYFECi1WaaREZTMy5yDsjaRX5bC3cbC3JpcrBPd7YYpjoWKuBMidgGaKBK5JyeeJfTrFAp69u2MYbf5YeN",
"network": "Mainnet",
"spend": "258dfe7eef9be934839f3b8e0d40e79035fe85879c0a9eb0d7372ae2deb0004c",
"view": "f91382373045f3cc69233254ab0406bc9e008707569ff9db4718654812d839df",
"subaddress": true,
"integrated": true,
"payment_id": [125, 69, 155, 152, 140, 160, 157, 186],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712U9w7ScYA",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UA2gCrT1",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc1DbPKwJu81cxJjqBkS",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [92, 225, 118, 220, 39, 3, 72, 51],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71Vc2o1rPMaXN31Fe5J6dn",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [20, 120, 47, 89, 72, 165, 233, 115],
"guaranteed": false
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAQHCRZ4",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x712UAUzqaii",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcAsfQc3gJQ2gHLd5DiQ",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": false,
"integrated": true,
"payment_id": [193, 149, 123, 214, 180, 205, 195, 91],
"guaranteed": true
},
{
"address": "Kgx5uCVsMSEVm7seL8tjyRGmmVXjWfEowKpKjgaXUGVyMViBYMh13VQ4mfqpB7zEVVcJx3E8FFgAuQ8cq6mg5x71VcDBAD5jbZQ3AMHFyvQB",
"network": "Testnet",
"spend": "bba3a8a5bb47f7abf2e2dffeaf43385e4b308fd63a9ff6707e355f3b0a6c247a",
"view": "881713a4fa9777168a54bbdcb75290d319fb92fdf1026a8a4b125a8e341de8ab",
"subaddress": true,
"integrated": true,
"payment_id": [205, 170, 65, 0, 51, 175, 251, 184],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPJnBtTP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPUrwMvP",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY5ECEhP5Nr1aCRPXdxk",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [173, 149, 78, 64, 215, 211, 66, 170],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AY882kTUS1D2LttnPvTR",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [254, 159, 186, 162, 1, 8, 156, 108],
"guaranteed": false
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPpBBo8F",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV61VPuUJX3b",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": false,
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYCZPxVAoDu21DryMoto",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": false,
"integrated": true,
"payment_id": [3, 115, 230, 129, 172, 108, 116, 235],
"guaranteed": true
},
{
"address": "FSDinqdKK54PbjF73GgW3nUpf7bF8QbyxFCUurENmUyeEfSxSLL2hxwANBLzq1A8gTSAzzEn65hKjetA8o5BvjV6AYFYCqKQAWL18KkpBQ8R",
"network": "Stagenet",
"spend": "4cd503040f5e43871bf37d8ca7177da655bda410859af754e24e7b44437f3151",
"view": "af60d42b6c6e4437fd93eb32657a14967efa393630d7aee27b5973c8e1c5ad39",
"subaddress": true,
"integrated": true,
"payment_id": [94, 122, 63, 167, 209, 225, 14, 180],
"guaranteed": true
}
]

View File

@@ -1,39 +1,36 @@
use core::cmp::Ordering;
use std::io::{self, Read, Write};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use crate::{hash, serialize::*, ringct::{RctPrunable, RctSignatures}};
use crate::{
Protocol, hash,
serialize::*,
ringct::{RctBase, RctPrunable, RctSignatures},
};
pub const RING_LEN: usize = 11;
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub enum Input {
Gen(u64),
ToKey { amount: u64, key_offsets: Vec<u64>, key_image: EdwardsPoint },
ToKey {
amount: u64,
key_offsets: Vec<u64>,
key_image: EdwardsPoint
}
}
impl Input {
// Worst-case predictive len
pub(crate) fn fee_weight(ring_len: usize) -> usize {
pub(crate) fn fee_weight() -> usize {
// Uses 1 byte for the VarInt amount due to amount being 0
// Uses 1 byte for the VarInt encoding of the length of the ring as well
1 + 1 + 1 + (8 * ring_len) + 32
1 + 1 + 1 + (8 * RING_LEN) + 32
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
match self {
Input::Gen(height) => {
w.write_all(&[255])?;
write_varint(height, w)
}
},
Input::ToKey { amount, key_offsets, key_image } => {
w.write_all(&[2])?;
@@ -44,27 +41,29 @@ impl Input {
}
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Input> {
Ok(match read_byte(r)? {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_torsion_free_point(r)?,
},
_ => {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Input> {
let mut variant = [0];
r.read_exact(&mut variant)?;
Ok(
match variant[0] {
255 => Input::Gen(read_varint(r)?),
2 => Input::ToKey {
amount: read_varint(r)?,
key_offsets: read_vec(read_varint, r)?,
key_image: read_point(r)?
},
_ => Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
})
)
}
}
// Doesn't bother moving to an enum for the unused Script classes
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Output {
pub amount: u64,
pub key: CompressedEdwardsY,
pub view_tag: Option<u8>,
pub key: EdwardsPoint,
pub tag: Option<u8>
}
impl Output {
@@ -72,40 +71,39 @@ impl Output {
1 + 1 + 32 + 1
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.amount, w)?;
w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
w.write_all(&self.key.to_bytes())?;
if let Some(view_tag) = self.view_tag {
w.write_all(&[view_tag])?;
w.write_all(&[2 + (if self.tag.is_some() { 1 } else { 0 })])?;
write_point(&self.key, w)?;
if let Some(tag) = self.tag {
w.write_all(&[tag])?;
}
Ok(())
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Output> {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Output> {
let amount = read_varint(r)?;
let view_tag = match read_byte(r)? {
2 => false,
3 => true,
_ => Err(io::Error::new(
io::ErrorKind::Other,
"Tried to deserialize unknown/unused output type",
))?,
};
let mut tag = [0];
r.read_exact(&mut tag)?;
if (tag[0] != 2) && (tag[0] != 3) {
Err(std::io::Error::new(std::io::ErrorKind::Other, "Tried to deserialize unknown/unused output type"))?;
}
Ok(Output {
amount,
key: CompressedEdwardsY(read_bytes(r)?),
view_tag: if view_tag { Some(read_byte(r)?) } else { None },
})
Ok(
Output {
amount,
key: read_point(r)?,
tag: if tag[0] == 3 { r.read_exact(&mut tag)?; Some(tag[0]) } else { None }
}
)
}
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Timelock {
None,
Block(usize),
Time(u64),
Time(u64)
}
impl Timelock {
@@ -119,14 +117,18 @@ impl Timelock {
}
}
fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub(crate) fn fee_weight() -> usize {
8
}
fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(
&match self {
Timelock::None => 0,
Timelock::Block(block) => (*block).try_into().unwrap(),
Timelock::Time(time) => *time,
Timelock::Time(time) => *time
},
w,
w
)
}
}
@@ -137,173 +139,135 @@ impl PartialOrd for Timelock {
(Timelock::None, _) => Some(Ordering::Less),
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
_ => None,
_ => None
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct TransactionPrefix {
pub version: u64,
pub timelock: Timelock,
pub inputs: Vec<Input>,
pub outputs: Vec<Output>,
pub extra: Vec<u8>,
pub extra: Vec<u8>
}
impl TransactionPrefix {
pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
// Assumes Timelock::None since this library won't let you create a TX with a timelock
1 + 1 +
varint_len(inputs) +
(inputs * Input::fee_weight(ring_len)) +
1 +
(outputs * Output::fee_weight()) +
varint_len(extra) +
extra
varint_len(inputs) + (inputs * Input::fee_weight()) +
// Only 16 outputs are possible under transactions by this lib
1 + (outputs * Output::fee_weight()) +
varint_len(extra) + extra
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
write_varint(&self.version, w)?;
self.timelock.write(w)?;
write_vec(Input::write, &self.inputs, w)?;
write_vec(Output::write, &self.outputs, w)?;
self.timelock.serialize(w)?;
write_vec(Input::serialize, &self.inputs, w)?;
write_vec(Output::serialize, &self.outputs, w)?;
write_varint(&self.extra.len().try_into().unwrap(), w)?;
w.write_all(&self.extra)
}
pub fn read<R: Read>(r: &mut R) -> io::Result<TransactionPrefix> {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<TransactionPrefix> {
let mut prefix = TransactionPrefix {
version: read_varint(r)?,
timelock: Timelock::from_raw(read_varint(r)?),
inputs: read_vec(Input::read, r)?,
outputs: read_vec(Output::read, r)?,
extra: vec![],
inputs: read_vec(Input::deserialize, r)?,
outputs: read_vec(Output::deserialize, r)?,
extra: vec![]
};
prefix.extra = read_vec(read_byte, r)?;
let len = read_varint(r)?;
prefix.extra.resize(len.try_into().unwrap(), 0);
r.read_exact(&mut prefix.extra)?;
Ok(prefix)
}
}
/// Monero transaction. For version 1, rct_signatures still contains an accurate fee value.
#[derive(Clone, PartialEq, Eq, Debug)]
#[derive(Clone, PartialEq, Debug)]
pub struct Transaction {
pub prefix: TransactionPrefix,
pub signatures: Vec<(Scalar, Scalar)>,
pub rct_signatures: RctSignatures,
pub rct_signatures: RctSignatures
}
impl Transaction {
pub(crate) fn fee_weight(
protocol: Protocol,
inputs: usize,
outputs: usize,
extra: usize,
) -> usize {
TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
RctSignatures::fee_weight(protocol, inputs, outputs)
pub(crate) fn fee_weight(inputs: usize, outputs: usize, extra: usize) -> usize {
TransactionPrefix::fee_weight(inputs, outputs, extra) + RctSignatures::fee_weight(inputs, outputs)
}
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.prefix.write(w)?;
if self.prefix.version == 1 {
for sig in &self.signatures {
write_scalar(&sig.0, w)?;
write_scalar(&sig.1, w)?;
}
Ok(())
} else if self.prefix.version == 2 {
self.rct_signatures.write(w)
} else {
panic!("Serializing a transaction with an unknown version");
}
pub fn serialize<W: std::io::Write>(&self, w: &mut W) -> std::io::Result<()> {
self.prefix.serialize(w)?;
self.rct_signatures.serialize(w)
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Transaction> {
let prefix = TransactionPrefix::read(r)?;
let mut signatures = vec![];
let mut rct_signatures = RctSignatures {
base: RctBase { fee: 0, ecdh_info: vec![], commitments: vec![] },
prunable: RctPrunable::Null,
};
if prefix.version == 1 {
for _ in 0 .. prefix.inputs.len() {
signatures.push((read_scalar(r)?, read_scalar(r)?));
}
rct_signatures.base.fee = prefix
.inputs
.iter()
.map(|input| match input {
Input::Gen(..) => 0,
Input::ToKey { amount, .. } => *amount,
})
.sum::<u64>()
.saturating_sub(prefix.outputs.iter().map(|output| output.amount).sum());
} else if prefix.version == 2 {
rct_signatures = RctSignatures::read(
prefix
.inputs
.iter()
.map(|input| match input {
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<Transaction> {
let prefix = TransactionPrefix::deserialize(r)?;
Ok(
Transaction {
rct_signatures: RctSignatures::deserialize(
prefix.inputs.iter().map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len(),
})
.collect(),
prefix.outputs.len(),
r,
)?;
} else {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
}
Ok(Transaction { prefix, signatures, rct_signatures })
Input::ToKey { key_offsets, .. } => key_offsets.len()
}).collect(),
prefix.outputs.len(),
r
)?,
prefix
}
)
}
pub fn hash(&self) -> [u8; 32] {
let mut buf = Vec::with_capacity(2048);
let mut serialized = Vec::with_capacity(2048);
if self.prefix.version == 1 {
self.write(&mut buf).unwrap();
hash(&buf)
self.serialize(&mut serialized).unwrap();
hash(&serialized)
} else {
let mut hashes = Vec::with_capacity(96);
let mut sig_hash = Vec::with_capacity(96);
self.prefix.write(&mut buf).unwrap();
hashes.extend(hash(&buf));
buf.clear();
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
hashes.extend(hash(&buf));
buf.clear();
self.rct_signatures.base.serialize(
&mut serialized,
self.rct_signatures.prunable.rct_type()
).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
match self.rct_signatures.prunable {
RctPrunable::Null => buf.resize(32, 0),
RctPrunable::Null => serialized.resize(32, 0),
_ => {
self.rct_signatures.prunable.write(&mut buf).unwrap();
buf = hash(&buf).to_vec();
self.rct_signatures.prunable.serialize(&mut serialized).unwrap();
serialized = hash(&serialized).to_vec();
}
}
hashes.extend(&buf);
sig_hash.extend(&serialized);
hash(&hashes)
hash(&sig_hash)
}
}
/// Calculate the hash of this transaction as needed for signing it.
pub fn signature_hash(&self) -> [u8; 32] {
let mut buf = Vec::with_capacity(2048);
let mut serialized = Vec::with_capacity(2048);
let mut sig_hash = Vec::with_capacity(96);
self.prefix.write(&mut buf).unwrap();
sig_hash.extend(hash(&buf));
buf.clear();
self.prefix.serialize(&mut serialized).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.base.write(&mut buf, self.rct_signatures.prunable.rct_type()).unwrap();
sig_hash.extend(hash(&buf));
buf.clear();
self.rct_signatures.base.serialize(&mut serialized, self.rct_signatures.prunable.rct_type()).unwrap();
sig_hash.extend(hash(&serialized));
serialized.clear();
self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
sig_hash.extend(hash(&buf));
self.rct_signatures.prunable.signature_serialize(&mut serialized).unwrap();
sig_hash.extend(&hash(&serialized));
hash(&sig_hash)
}

View File

@@ -1,120 +1,45 @@
use core::{marker::PhantomData, fmt::Debug};
use std::string::ToString;
use thiserror::Error;
use zeroize::Zeroize;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::{EdwardsPoint, CompressedEdwardsY}};
use base58_monero::base58::{encode_check, decode_check};
/// The network this address is for.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
use crate::wallet::ViewPair;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Network {
Mainnet,
Testnet,
Stagenet,
Stagenet
}
/// The address type, supporting the officially documented addresses, along with
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum AddressType {
Standard,
Integrated([u8; 8]),
Subaddress,
Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct SubaddressIndex {
pub(crate) account: u32,
pub(crate) address: u32,
}
impl SubaddressIndex {
pub const fn new(account: u32, address: u32) -> Option<SubaddressIndex> {
if (account == 0) && (address == 0) {
return None;
}
Some(SubaddressIndex { account, address })
}
pub fn account(&self) -> u32 {
self.account
}
pub fn address(&self) -> u32 {
self.address
}
}
/// Address specification. Used internally to create addresses.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressSpec {
Standard,
Integrated([u8; 8]),
Subaddress(SubaddressIndex),
Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
Subaddress
}
impl AddressType {
pub fn subaddress(&self) -> bool {
matches!(self, AddressType::Subaddress) ||
matches!(self, AddressType::Featured { subaddress: true, .. })
}
pub fn payment_id(&self) -> Option<[u8; 8]> {
if let AddressType::Integrated(id) = self {
Some(*id)
} else if let AddressType::Featured { payment_id, .. } = self {
*payment_id
} else {
None
}
}
pub fn guaranteed(&self) -> bool {
matches!(self, AddressType::Featured { guaranteed: true, .. })
}
}
/// A type which returns the byte for a given address.
pub trait AddressBytes: Clone + Copy + PartialEq + Eq + Debug {
fn network_bytes(network: Network) -> (u8, u8, u8, u8);
}
/// Address bytes for Monero.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MoneroAddressBytes;
impl AddressBytes for MoneroAddressBytes {
fn network_bytes(network: Network) -> (u8, u8, u8, u8) {
fn network_bytes(network: Network) -> (u8, u8, u8) {
match network {
Network::Mainnet => (18, 19, 42, 70),
Network::Testnet => (53, 54, 63, 111),
Network::Stagenet => (24, 25, 36, 86),
Network::Mainnet => (18, 19, 42),
Network::Testnet => (53, 54, 63),
Network::Stagenet => (24, 25, 36)
}
}
}
/// Address metadata.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct AddressMeta<B: AddressBytes> {
_bytes: PhantomData<B>,
pub struct AddressMeta {
pub network: Network,
pub kind: AddressType,
pub guaranteed: bool
}
impl<B: AddressBytes> Zeroize for AddressMeta<B> {
fn zeroize(&mut self) {
self.network.zeroize();
self.kind.zeroize();
}
}
/// Error when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Error, Debug)]
pub enum AddressError {
#[error("invalid address byte")]
InvalidByte,
@@ -122,192 +47,106 @@ pub enum AddressError {
InvalidEncoding,
#[error("invalid length")]
InvalidLength,
#[error("invalid key")]
InvalidKey,
#[error("unknown features")]
UnknownFeatures,
#[error("different network than expected")]
DifferentNetwork,
#[error("invalid key")]
InvalidKey
}
impl<B: AddressBytes> AddressMeta<B> {
#[allow(clippy::wrong_self_convention)]
impl AddressMeta {
fn to_byte(&self) -> u8 {
let bytes = B::network_bytes(self.network);
match self.kind {
let bytes = AddressType::network_bytes(self.network);
let byte = match self.kind {
AddressType::Standard => bytes.0,
AddressType::Integrated(_) => bytes.1,
AddressType::Subaddress => bytes.2,
AddressType::Featured { .. } => bytes.3,
}
AddressType::Subaddress => bytes.2
};
byte | (if self.guaranteed { 1 << 7 } else { 0 })
}
/// Create an address's metadata.
pub fn new(network: Network, kind: AddressType) -> Self {
AddressMeta { _bytes: PhantomData, network, kind }
}
// Returns an incomplete type in the case of Integrated addresses
fn from_byte(byte: u8) -> Result<AddressMeta, AddressError> {
let actual = byte & 0b01111111;
let guaranteed = (byte >> 7) == 1;
// Returns an incomplete instantiation in the case of Integrated/Featured addresses
fn from_byte(byte: u8) -> Result<Self, AddressError> {
let mut meta = None;
for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
let (standard, integrated, subaddress, featured) = B::network_bytes(network);
if let Some(kind) = match byte {
_ if byte == standard => Some(AddressType::Standard),
_ if byte == integrated => Some(AddressType::Integrated([0; 8])),
_ if byte == subaddress => Some(AddressType::Subaddress),
_ if byte == featured => {
Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
}
_ => None,
let (standard, integrated, subaddress) = AddressType::network_bytes(network);
if let Some(kind) = match actual {
_ if actual == standard => Some(AddressType::Standard),
_ if actual == integrated => Some(AddressType::Integrated([0; 8])),
_ if actual == subaddress => Some(AddressType::Subaddress),
_ => None
} {
meta = Some(AddressMeta::new(network, kind));
meta = Some(AddressMeta { network, kind, guaranteed });
break;
}
}
meta.ok_or(AddressError::InvalidByte)
}
pub fn subaddress(&self) -> bool {
self.kind.subaddress()
}
pub fn payment_id(&self) -> Option<[u8; 8]> {
self.kind.payment_id()
}
pub fn guaranteed(&self) -> bool {
self.kind.guaranteed()
}
}
/// A Monero address, composed of metadata and a spend/view key.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Address<B: AddressBytes> {
pub meta: AddressMeta<B>,
pub struct Address {
pub meta: AddressMeta,
pub spend: EdwardsPoint,
pub view: EdwardsPoint,
pub view: EdwardsPoint
}
impl<B: AddressBytes> Zeroize for Address<B> {
fn zeroize(&mut self) {
self.meta.zeroize();
self.spend.zeroize();
self.view.zeroize();
impl ViewPair {
pub fn address(&self, network: Network, kind: AddressType, guaranteed: bool) -> Address {
Address {
meta: AddressMeta {
network,
kind,
guaranteed
},
spend: self.spend,
view: &self.view * &ED25519_BASEPOINT_TABLE
}
}
}
impl<B: AddressBytes> ToString for Address<B> {
impl ToString for Address {
fn to_string(&self) -> String {
let mut data = vec![self.meta.to_byte()];
data.extend(self.spend.compress().to_bytes());
data.extend(self.view.compress().to_bytes());
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
// Technically should be a VarInt, yet we don't have enough features it's needed
data.push(
u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),
);
}
if let Some(id) = self.meta.kind.payment_id() {
if let AddressType::Integrated(id) = self.meta.kind {
data.extend(id);
}
encode_check(&data).unwrap()
}
}
impl<B: AddressBytes> Address<B> {
pub fn new(meta: AddressMeta<B>, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
Address { meta, spend, view }
}
pub fn from_str_raw(s: &str) -> Result<Self, AddressError> {
impl Address {
pub fn from_str(s: &str, network: Network) -> Result<Self, AddressError> {
let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
if raw.len() < (1 + 32 + 32) {
if raw.len() == 1 {
Err(AddressError::InvalidLength)?;
}
let mut meta = AddressMeta::from_byte(raw[0])?;
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let mut read = 65;
if matches!(meta.kind, AddressType::Featured { .. }) {
if raw[read] >= (2 << 3) {
Err(AddressError::UnknownFeatures)?;
}
let subaddress = (raw[read] & 1) == 1;
let integrated = ((raw[read] >> 1) & 1) == 1;
let guaranteed = ((raw[read] >> 2) & 1) == 1;
meta.kind = AddressType::Featured {
subaddress,
payment_id: Some([0; 8]).filter(|_| integrated),
guaranteed,
};
read += 1;
if meta.network != network {
Err(AddressError::DifferentNetwork)?;
}
// Update read early so we can verify the length
if meta.kind.payment_id().is_some() {
read += 8;
}
if raw.len() != read {
let len = match meta.kind {
AddressType::Standard | AddressType::Subaddress => 65,
AddressType::Integrated(_) => 73
};
if raw.len() != len {
Err(AddressError::InvalidLength)?;
}
if let AddressType::Integrated(ref mut id) = meta.kind {
id.copy_from_slice(&raw[(read - 8) .. read]);
}
if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
id.copy_from_slice(&raw[(read - 8) .. read]);
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap()).decompress().ok_or(AddressError::InvalidKey)?;
if let AddressType::Integrated(ref mut payment_id) = meta.kind {
payment_id.copy_from_slice(&raw[65 .. 73]);
}
Ok(Address { meta, spend, view })
}
pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
Self::from_str_raw(s).and_then(|addr| {
if addr.meta.network == network {
Ok(addr)
} else {
Err(AddressError::DifferentNetwork)?
}
})
}
pub fn network(&self) -> Network {
self.meta.network
}
pub fn subaddress(&self) -> bool {
self.meta.subaddress()
}
pub fn payment_id(&self) -> Option<[u8; 8]> {
self.meta.payment_id()
}
pub fn guaranteed(&self) -> bool {
self.meta.guaranteed()
}
}
/// Instantiation of the Address type with Monero's network bytes.
pub type MoneroAddress = Address<MoneroAddressBytes>;
// Allow re-interpreting of an arbitrary address as a monero address so it can be used with the
// rest of this library. Doesn't use From as it was conflicting with From<T> for T.
impl MoneroAddress {
pub fn from<B: AddressBytes>(address: Address<B>) -> MoneroAddress {
MoneroAddress::new(
AddressMeta::new(address.meta.network, address.meta.kind),
address.spend,
address.view,
)
}
}

View File

@@ -5,14 +5,9 @@ use lazy_static::lazy_static;
use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::edwards::EdwardsPoint;
use crate::{
wallet::SpendableOutput,
rpc::{RpcError, Rpc},
};
use crate::{transaction::RING_LEN, wallet::SpendableOutput, rpc::{RpcError, Rpc}};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
@@ -21,25 +16,24 @@ const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;
const DECOYS: usize = RING_LEN - 1;
lazy_static! {
static ref GAMMA: Gamma<f64> = Gamma::new(19.28, 1.0 / 1.61).unwrap();
static ref DISTRIBUTION: Mutex<Vec<u64>> = Mutex::new(Vec::with_capacity(3000000));
}
#[allow(clippy::too_many_arguments)]
async fn select_n<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc,
height: usize,
high: u64,
per_second: f64,
real: &[u64],
used: &mut HashSet<u64>,
count: usize,
count: usize
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
let mut iters = 0;
let mut confirmed = Vec::with_capacity(count);
// Retries on failure. Retries are obvious as decoys, yet should be minimal
while confirmed.len() != count {
let remaining = count - confirmed.len();
let mut candidates = Vec::with_capacity(remaining);
@@ -47,7 +41,7 @@ async fn select_n<R: RngCore + CryptoRng>(
iters += 1;
// This is cheap and on fresh chains, thousands of rounds may be needed
if iters == 10000 {
Err(RpcError::InternalError("not enough decoy candidates"))?;
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
// Use a gamma distribution
@@ -76,28 +70,9 @@ async fn select_n<R: RngCore + CryptoRng>(
}
}
// If this is the first time we're requesting these outputs, include the real one as well
// Prevents the node we're connected to from having a list of known decoys and then seeing a
// TX which uses all of them, with one additional output (the true spend)
let mut real_indexes = HashSet::with_capacity(real.len());
if confirmed.is_empty() {
for real in real {
candidates.push(*real);
}
// Sort candidates so the real spends aren't the ones at the end
candidates.sort();
for real in real {
real_indexes.insert(candidates.binary_search(real).unwrap());
}
}
for (i, output) in rpc.get_unlocked_outputs(&candidates, height).await?.iter_mut().enumerate() {
// Don't include the real spend as a decoy, despite requesting it
if real_indexes.contains(&i) {
continue;
}
if let Some(output) = output.take() {
let outputs = rpc.get_outputs(&candidates, height).await?;
for i in 0 .. outputs.len() {
if let Some(output) = outputs[i] {
confirmed.push((candidates[i], output));
}
}
@@ -115,12 +90,11 @@ fn offset(ring: &[u64]) -> Vec<u64> {
res
}
/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Debug)]
pub struct Decoys {
pub i: u8,
pub offsets: Vec<u64>,
pub ring: Vec<[EdwardsPoint; 2]>,
pub ring: Vec<[EdwardsPoint; 2]>
}
impl Decoys {
@@ -128,22 +102,19 @@ impl Decoys {
self.offsets.len()
}
/// Select decoys using the same distribution as Monero.
pub async fn select<R: RngCore + CryptoRng>(
pub(crate) async fn select<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc,
ring_len: usize,
height: usize,
inputs: &[SpendableOutput],
inputs: &[SpendableOutput]
) -> Result<Vec<Decoys>, RpcError> {
let decoy_count = ring_len - 1;
// Convert the inputs in question to the raw output data
let mut real = Vec::with_capacity(inputs.len());
let mut outputs = Vec::with_capacity(inputs.len());
for input in inputs {
real.push(input.global_index);
outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
outputs.push((
rpc.get_o_indexes(input.tx).await?[usize::from(input.o)],
[input.key, input.commitment.calculate()]
));
}
let distribution_len = {
@@ -177,22 +148,27 @@ impl Decoys {
}
// TODO: Simply create a TX with less than the target amount
if (high - MATURITY) < u64::try_from(inputs.len() * ring_len).unwrap() {
Err(RpcError::InternalError("not enough decoy candidates"))?;
if (high - MATURITY) < u64::try_from(inputs.len() * RING_LEN).unwrap() {
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
let mut decoys =
select_n(rng, rpc, height, high, per_second, &real, &mut used, inputs.len() * decoy_count)
.await?;
real.zeroize();
// We should almost never naturally generate an insane transaction, hence why this doesn't bother
// with an overage
let mut decoys = select_n(
rng,
rpc,
height,
high,
per_second,
&mut used,
inputs.len() * DECOYS
).await?;
let mut res = Vec::with_capacity(inputs.len());
for o in outputs {
// Grab the decoys for this specific output
let mut ring = decoys.drain((decoys.len() - decoy_count) ..).collect::<Vec<_>>();
let mut ring = decoys.drain((decoys.len() - DECOYS) ..).collect::<Vec<_>>();
ring.push(o);
ring.sort_by(|a, b| a.0.cmp(&b.0));
@@ -202,43 +178,40 @@ impl Decoys {
// 500 outputs since while itself not being a sufficiently mature blockchain
// Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
// fine for us to not have perfectly matching rules, especially since this code will infinite
// loop if it can't determine sanity, which is possible with sufficient inputs on
// sufficiently small chains
// loop if it can't determine sanity, which is possible with sufficient inputs on sufficiently
// small chains
if high > 500 {
// Make sure the TX passes the sanity check that the median output is within the last 40%
let target_median = high * 3 / 5;
while ring[ring_len / 2].0 < target_median {
while ring[RING_LEN / 2].0 < target_median {
// If it's not, update the bottom half with new values to ensure the median only moves up
for removed in ring.drain(0 .. (ring_len / 2)).collect::<Vec<_>>() {
for removed in ring.drain(0 .. (RING_LEN / 2)).collect::<Vec<_>>() {
// If we removed the real spend, add it back
if removed.0 == o.0 {
ring.push(o);
} else {
// We could not remove this, saving CPU time and removing low values as
// possibilities, yet it'd increase the amount of decoys required to create this
// transaction and some removed outputs may be the best option (as we drop the first
// half, not just the bottom n)
// We could not remove this, saving CPU time and removing low values as possibilities, yet
// it'd increase the amount of decoys required to create this transaction and some removed
// outputs may be the best option (as we drop the first half, not just the bottom n)
used.remove(&removed.0);
}
}
// Select new outputs until we have a full sized ring again
ring.extend(
select_n(rng, rpc, height, high, per_second, &[], &mut used, ring_len - ring.len())
.await?,
select_n(rng, rpc, height, high, per_second, &mut used, RING_LEN - ring.len()).await?
);
ring.sort_by(|a, b| a.0.cmp(&b.0));
}
// The other sanity check rule is about duplicates, yet we already enforce unique ring
// members
// The other sanity check rule is about duplicates, yet we already enforce unique ring members
}
res.push(Decoys {
// Binary searches for the real spend since we don't know where it sorted to
i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
ring: ring.iter().map(|output| output.1).collect(),
ring: ring.iter().map(|output| output.1).collect()
});
}

View File

@@ -1,197 +0,0 @@
use core::ops::BitXor;
use std::io::{self, Read, Write};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use crate::serialize::{
varint_len, read_byte, read_bytes, read_varint, read_point, read_vec, write_byte, write_varint,
write_point, write_vec,
};
pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;
/// A transaction payment ID, in either its plaintext or encrypted representation.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum PaymentId {
// 32-byte plaintext payment ID (serialized with tag byte 0).
Unencrypted([u8; 32]),
// 8-byte payment ID which is XOR-masked (serialized with tag byte 1).
Encrypted([u8; 8]),
}
impl BitXor<[u8; 8]> for PaymentId {
  type Output = PaymentId;

  /// XOR an 8-byte mask into an encrypted payment ID.
  ///
  /// Unencrypted payment IDs are returned unchanged; the mask only applies to the encrypted
  /// variant.
  fn bitxor(self, bytes: [u8; 8]) -> PaymentId {
    if let PaymentId::Encrypted(id) = self {
      let masked = u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes);
      PaymentId::Encrypted(masked.to_le_bytes())
    } else {
      self
    }
  }
}
impl PaymentId {
  /// Serialize this payment ID, prefixed with a byte tagging the variant (0 = unencrypted,
  /// 1 = encrypted).
  pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      PaymentId::Unencrypted(id) => {
        w.write_all(&[0])?;
        w.write_all(id)
      }
      PaymentId::Encrypted(id) => {
        w.write_all(&[1])?;
        w.write_all(id)
      }
    }
  }

  /// Deserialize a payment ID from its tagged serialization.
  fn read<R: Read>(r: &mut R) -> io::Result<PaymentId> {
    match read_byte(r)? {
      0 => read_bytes(r).map(PaymentId::Unencrypted),
      1 => read_bytes(r).map(PaymentId::Encrypted),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown payment ID type")),
    }
  }
}
// Doesn't bother with padding nor MinerGate
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum ExtraField {
// Tag 1: the transaction public key.
PublicKey(EdwardsPoint),
// Tag 2: arbitrary nonce data; by convention the first nonce carries the payment ID.
Nonce(Vec<u8>),
// Tag 3: merge-mining depth and Merkle root.
MergeMining(usize, [u8; 32]),
// Tag 4: additional transaction public keys.
PublicKeys(Vec<EdwardsPoint>),
}
impl ExtraField {
  /// Serialize this field, prefixed with its tag byte.
  fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      ExtraField::PublicKey(key) => {
        w.write_all(&[1])?;
        w.write_all(&key.compress().to_bytes())?;
      }
      ExtraField::Nonce(data) => {
        w.write_all(&[2])?;
        // Length-prefixed byte vector
        write_vec(write_byte, data, w)?;
      }
      ExtraField::MergeMining(height, merkle) => {
        w.write_all(&[3])?;
        w.write_all(merkle)?;
      }
      ExtraField::PublicKeys(keys) => {
        w.write_all(&[4])?;
        write_vec(write_point, keys, w)?;
      }
    }
    Ok(())
  }

  /// Read a single extra field, dispatching on its tag byte.
  fn read<R: Read>(r: &mut R) -> io::Result<ExtraField> {
    Ok(match read_byte(r)? {
      1 => ExtraField::PublicKey(read_point(r)?),
      2 => ExtraField::Nonce({
        let nonce = read_vec(read_byte, r)?;
        // Nonces are capped at 255 bytes (MAX_TX_EXTRA_NONCE_SIZE); reject anything longer
        if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
          Err(io::Error::new(io::ErrorKind::Other, "too long nonce"))?;
        }
        nonce
      }),
      3 => ExtraField::MergeMining(
        usize::try_from(read_varint(r)?)
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "varint for height exceeds usize"))?,
        read_bytes(r)?,
      ),
      4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown extra field"))?,
    })
  }
}
/// The transaction extra field, parsed as an ordered list of its subfields.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct Extra(Vec<ExtraField>);
impl Extra {
  /// Collect every transaction public key present, the primary key first, then any additional
  /// keys.
  pub(crate) fn keys(&self) -> Vec<EdwardsPoint> {
    let mut keys = Vec::with_capacity(2);
    for field in &self.0 {
      // Match by reference so unrelated fields (notably Nonce's Vec<u8>) aren't needlessly
      // cloned just to be inspected
      match field {
        ExtraField::PublicKey(key) => keys.push(*key),
        ExtraField::PublicKeys(additional) => keys.extend(additional),
        _ => (),
      }
    }
    keys
  }

  /// The payment ID, if the first nonce parses as one.
  pub(crate) fn payment_id(&self) -> Option<PaymentId> {
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
      }
    }
    None
  }

  /// All arbitrary data blobs, excluding the first nonce (reserved for the payment ID).
  pub(crate) fn data(&self) -> Vec<Vec<u8>> {
    let mut first = true;
    let mut res = vec![];
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        // Skip the first Nonce, which should be the payment ID
        if first {
          first = false;
          continue;
        }
        res.push(data.clone());
      }
    }
    res
  }

  /// Create an Extra from the transaction public keys. The first key becomes the primary
  /// PublicKey field; any remainder become a PublicKeys field.
  pub(crate) fn new(mut keys: Vec<EdwardsPoint>) -> Extra {
    let mut res = Extra(Vec::with_capacity(3));
    if !keys.is_empty() {
      res.push(ExtraField::PublicKey(keys[0]));
    }
    if keys.len() > 1 {
      res.push(ExtraField::PublicKeys(keys.drain(1 ..).collect()));
    }
    res
  }

  /// Append a field to this extra.
  pub(crate) fn push(&mut self, field: ExtraField) {
    self.0.push(field);
  }

  /// Upper bound on the serialized length of an Extra, for fee estimation.
  #[rustfmt::skip]
  pub(crate) fn fee_weight(outputs: usize, data: &[Vec<u8>]) -> usize {
    // PublicKey, key
    (1 + 32) +
    // PublicKeys, length, additional keys
    (1 + 1 + (outputs.saturating_sub(1) * 32)) +
    // PaymentId (Nonce), length, encrypted, ID
    (1 + 1 + 1 + 8) +
    // Nonce, length, data (if existent)
    data.iter().map(|v| 1 + varint_len(v.len()) + v.len()).sum::<usize>()
  }

  /// Serialize every field in order.
  pub(crate) fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for field in &self.0 {
      field.write(w)?;
    }
    Ok(())
  }

  /// Read fields until one fails to parse; trailing unparsable data is ignored.
  pub(crate) fn read<R: Read>(r: &mut R) -> io::Result<Extra> {
    let mut res = Extra(vec![]);
    // while-let replaces the original pre-test-then-unwrap loop
    while let Ok(field) = ExtraField::read(r) {
      res.0.push(field);
    }
    Ok(res)
  }
}

View File

@@ -1,31 +1,21 @@
use core::ops::Deref;
use std::collections::{HashSet, HashMap};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
use crate::{
hash, hash_to_scalar,
serialize::write_varint,
transaction::Input
};
use crate::{hash, hash_to_scalar, serialize::write_varint, transaction::Input};
mod extra;
pub(crate) use extra::{PaymentId, ExtraField, Extra};
/// Address encoding and decoding functionality.
pub mod address;
use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};
mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};
pub use scan::SpendableOutput;
pub(crate) mod decoys;
pub(crate) use decoys::Decoys;
mod send;
pub use send::{Fee, TransactionError, SignableTransaction, SignableTransactionBuilder};
pub use send::{Fee, TransactionError, SignableTransaction};
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;
@@ -40,50 +30,30 @@ pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
match input {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
write_varint(height, &mut u).unwrap();
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
Input::Gen(height) => { write_varint(&(*height).try_into().unwrap(), &mut u).unwrap(); },
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes())
}
}
hash(&u)
}
// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
// Scalar as an option
// Hs(8Ra || o) with https://github.com/monero-project/research-lab/issues/103 as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(
uniqueness: Option<[u8; 32]>,
s: &Scalar,
P: &EdwardsPoint,
o: usize,
) -> (u8, Scalar, [u8; 8]) {
// 8Ra
let mut output_derivation = (s * P).mul_by_cofactor().compress().to_bytes().to_vec();
let mut payment_id_xor = [0; 8];
payment_id_xor
.copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);
pub(crate) fn shared_key(uniqueness: Option<[u8; 32]>, s: Scalar, P: &EdwardsPoint, o: usize) -> Scalar {
// uniqueness
let mut shared = uniqueness.map_or(vec![], |uniqueness| uniqueness.to_vec());
// || 8Ra
shared.extend((s * P).mul_by_cofactor().compress().to_bytes().to_vec());
// || o
write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();
let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];
// uniqueness ||
let shared_key = if let Some(uniqueness) = uniqueness {
[uniqueness.as_ref(), &output_derivation].concat().to_vec()
} else {
output_derivation
};
(view_tag, hash_to_scalar(&shared_key), payment_id_xor)
write_varint(&o.try_into().unwrap(), &mut shared).unwrap();
// Hs()
hash_to_scalar(&shared)
}
pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
let mut amount_mask = b"amount".to_vec();
amount_mask.extend(key.to_bytes());
(amount ^ u64::from_le_bytes(hash(&amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
(amount ^ u64::from_le_bytes(hash(&amount_mask)[0 .. 8].try_into().unwrap())).to_le_bytes()
}
fn amount_decryption(amount: [u8; 8], key: Scalar) -> u64 {
@@ -96,133 +66,8 @@ pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
hash_to_scalar(&mask)
}
/// The private view key and public spend key, enabling scanning transactions.
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, Copy)]
pub struct ViewPair {
spend: EdwardsPoint,
view: Zeroizing<Scalar>,
}
impl ViewPair {
  /// Create a ViewPair from the public spend key and the private view key.
  pub fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> ViewPair {
    ViewPair { spend, view }
  }

  /// The public spend key.
  pub fn spend(&self) -> EdwardsPoint {
    self.spend
  }

  /// The public view key (the private view scalar times the basepoint).
  pub fn view(&self) -> EdwardsPoint {
    self.view.deref() * &ED25519_BASEPOINT_TABLE
  }

  // Derive the subaddress scalar as Hs("SubAddr\0" || view key || account || address),
  // keeping the intermediate buffers zeroized
  fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
    hash_to_scalar(&Zeroizing::new(
      [
        b"SubAddr\0".as_ref(),
        Zeroizing::new(self.view.to_bytes()).as_ref(),
        &index.account().to_le_bytes(),
        &index.address().to_le_bytes(),
      ]
      .concat(),
    ))
  }

  // Compute the (spend, view) public keys for the given subaddress index
  fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
    let scalar = self.subaddress_derivation(index);
    // Subaddress spend key: base spend key offset by the derivation scalar
    let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
    // Subaddress view key: private view scalar times the subaddress spend key
    let view = self.view.deref() * spend;
    (spend, view)
  }

  /// Returns an address with the provided specification.
  pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
    let mut spend = self.spend;
    let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;

    // construct the address meta, swapping in subaddress keys where the spec requires them
    let meta = match spec {
      AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
      AddressSpec::Integrated(payment_id) => {
        AddressMeta::new(network, AddressType::Integrated(payment_id))
      }
      AddressSpec::Subaddress(index) => {
        (spend, view) = self.subaddress_keys(index);
        AddressMeta::new(network, AddressType::Subaddress)
      }
      AddressSpec::Featured { subaddress, payment_id, guaranteed } => {
        if let Some(index) = subaddress {
          (spend, view) = self.subaddress_keys(index);
        }
        AddressMeta::new(
          network,
          AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed },
        )
      }
    };

    MoneroAddress::new(meta, spend, view)
  }
}
/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once they've
/// been explicitly generated. If the burning bug is attempted, any secondary outputs will be
/// ignored.
#[derive(Clone)]
pub struct Scanner {
pair: ViewPair,
// Map from registered subaddress spend keys to their indexes.
// Also contains the spend key as None
pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
// Set of already-seen output keys used for burning-bug protection; None disables the
// protection in favor of the guaranteed-address derivation (see from_view).
pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}
impl Zeroize for Scanner {
  // Best-effort zeroization of the view pair and all cached keys
  fn zeroize(&mut self) {
    self.pair.zeroize();

    // These may not be effective, unfortunately
    // (drain hands out copies, so the map/set's own backing memory isn't scrubbed)
    for (mut key, mut value) in self.subaddresses.drain() {
      key.zeroize();
      value.zeroize();
    }
    if let Some(ref mut burning_bug) = self.burning_bug.take() {
      for mut output in burning_bug.drain() {
        output.zeroize();
      }
    }
  }
}
// Zeroize on drop so key material doesn't linger in freed memory
impl Drop for Scanner {
  fn drop(&mut self) {
    self.zeroize();
  }
}
impl ZeroizeOnDrop for Scanner {}
impl Scanner {
/// Create a Scanner from a ViewPair.
/// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
/// When an output is successfully scanned, the output key MUST be saved to disk.
/// When a new scanner is created, ALL saved output keys must be passed in to be secure.
/// If None is passed, a modified shared key derivation is used which is immune to the burning
/// bug (specifically the Guaranteed feature from Featured Addresses).
// TODO: Should this take in a DB access handle to ensure output keys are saved?
pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
let mut subaddresses = HashMap::new();
subaddresses.insert(pair.spend.compress(), None);
Scanner { pair, subaddresses, burning_bug }
}
/// Register a subaddress.
// There used to be an address function here, yet it wasn't safe. It could generate addresses
// incompatible with the Scanner. While we could return None for that, then we have the issue
// of runtime failures to generate an address.
// Removing that API was the simplest option.
pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
let (spend, _) = self.pair.subaddress_keys(subaddress);
self.subaddresses.insert(spend.compress(), Some(subaddress));
}
pub spend: EdwardsPoint,
pub view: Scalar
}

View File

@@ -1,341 +1,122 @@
use std::io::{self, Read, Write};
use std::convert::TryFrom;
use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::deserialize, blockdata::transaction::ExtraField};
use crate::{
Commitment,
serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
transaction::{Input, Timelock, Transaction},
block::Block,
rpc::{Rpc, RpcError},
wallet::{
PaymentId, Extra, address::SubaddressIndex, Scanner, uniqueness, shared_key, amount_decryption,
commitment_mask,
},
serialize::{write_varint, read_32, read_scalar, read_point},
transaction::{Timelock, Transaction},
wallet::{ViewPair, uniqueness, shared_key, amount_decryption, commitment_mask}
};
/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
#[derive(Clone, PartialEq, Debug)]
pub struct SpendableOutput {
pub tx: [u8; 32],
pub o: u8,
}
impl AbsoluteId {
  /// Write the transaction hash followed by the output index.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.tx)?;
    w.write_all(&[self.o])
  }

  /// Serialize to a fresh byte vector (32-byte hash + 1-byte index).
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = Vec::with_capacity(32 + 1);
    self.write(&mut buf).unwrap();
    buf
  }

  /// Read an AbsoluteId previously written with `write`.
  pub fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
    let tx = read_bytes(r)?;
    let o = read_byte(r)?;
    Ok(AbsoluteId { tx, o })
  }
}
/// The data contained with an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
pub key: EdwardsPoint,
/// Absolute difference between the spend key and the key in this output
pub key_offset: Scalar,
pub commitment: Commitment,
pub commitment: Commitment
}
impl OutputData {
  /// Write the key, key offset, commitment mask, and commitment amount in sequence.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.key.compress().to_bytes())?;
    w.write_all(&self.key_offset.to_bytes())?;
    w.write_all(&self.commitment.mask.to_bytes())?;
    w.write_all(&self.commitment.amount.to_le_bytes())
  }

  /// Serialize to a fresh byte vector (three 32-byte fields + 8-byte amount).
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = Vec::with_capacity(32 + 32 + 32 + 8);
    self.write(&mut buf).unwrap();
    buf
  }

  /// Read an OutputData previously written with `write`.
  pub fn read<R: Read>(r: &mut R) -> io::Result<OutputData> {
    let key = read_point(r)?;
    let key_offset = read_scalar(r)?;
    let mask = read_scalar(r)?;
    let amount = read_u64(r)?;
    Ok(OutputData { key, key_offset, commitment: Commitment::new(mask, amount) })
  }
}
/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
/// The subaddress this output was sent to.
// None when received on the primary address (the spend key maps to None in the Scanner's
// subaddress table).
pub subaddress: Option<SubaddressIndex>,
/// The payment ID included with this output.
/// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't included.
// Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should
// have this making it simplest for it to be as-is.
pub payment_id: [u8; 8],
/// Arbitrary data encoded in TX extra.
pub arbitrary_data: Vec<Vec<u8>>,
}
impl Metadata {
  /// Write this metadata: a presence byte + subaddress index (if any), the payment ID, then a
  /// u32-counted list of single-byte-length-prefixed data blobs.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    if let Some(subaddress) = self.subaddress {
      w.write_all(&[1])?;
      w.write_all(&subaddress.account().to_le_bytes())?;
      w.write_all(&subaddress.address().to_le_bytes())?;
    } else {
      w.write_all(&[0])?;
    }
    w.write_all(&self.payment_id)?;

    w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
    for part in &self.arbitrary_data {
      // Single-byte length prefix caps each blob at 255 bytes; longer data panics here
      w.write_all(&[u8::try_from(part.len()).unwrap()])?;
      w.write_all(part)?;
    }
    Ok(())
  }

  /// Serialize to a fresh byte vector.
  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(1 + 8 + 1);
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read a Metadata previously written with `write`.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Metadata> {
    let subaddress = if read_byte(r)? == 1 {
      Some(
        SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
      )
    } else {
      None
    };

    Ok(Metadata {
      subaddress,
      payment_id: read_bytes(r)?,
      arbitrary_data: {
        let mut data = vec![];
        for _ in 0 .. read_u32(r)? {
          let len = read_byte(r)?;
          data.push(read_raw_vec(read_byte, usize::from(len), r)?);
        }
        data
      },
    })
  }
}
/// A received output, defined as its absolute ID, data, and metadata.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ReceivedOutput {
pub absolute: AbsoluteId,
pub data: OutputData,
pub metadata: Metadata,
}
impl ReceivedOutput {
  /// The one-time key of this output.
  pub fn key(&self) -> EdwardsPoint {
    self.data.key
  }

  /// The absolute offset between the spend key and this output's key.
  pub fn key_offset(&self) -> Scalar {
    self.data.key_offset
  }

  /// The amount commitment of this output.
  pub fn commitment(&self) -> Commitment {
    self.data.commitment.clone()
  }

  /// The arbitrary data carried in the transaction's extra.
  pub fn arbitrary_data(&self) -> &[Vec<u8>] {
    &self.metadata.arbitrary_data
  }

  /// Write the absolute ID, output data, and metadata in sequence.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.absolute.write(w)?;
    self.data.write(w)?;
    self.metadata.write(w)
  }

  /// Serialize to a fresh byte vector.
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    self.write(&mut buf).unwrap();
    buf
  }

  /// Read a ReceivedOutput previously written with `write`.
  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
    let absolute = AbsoluteId::read(r)?;
    let data = OutputData::read(r)?;
    let metadata = Metadata::read(r)?;
    Ok(ReceivedOutput { absolute, data, metadata })
  }
}
/// A spendable output, defined as a received output and its index on the Monero blockchain.
/// This index is dependent on the Monero blockchain and will only be known once the output is
/// included within a block. This may change if there's a reorganization.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
// The received output this wraps.
pub output: ReceivedOutput,
// This output's global index on-chain; refresh after a reorg (see refresh_global_index).
pub global_index: u64,
}
impl SpendableOutput {
  /// Update the spendable output's global index. This is intended to be called if a
  /// re-organization occurred.
  // NOTE(review): indexes into the RPC response; presumably panics if the node returns fewer
  // output indexes than this output's position — confirm against get_o_indexes' contract
  pub async fn refresh_global_index(&mut self, rpc: &Rpc) -> Result<(), RpcError> {
    self.global_index =
      rpc.get_o_indexes(self.output.absolute.tx).await?[usize::from(self.output.absolute.o)];
    Ok(())
  }

  /// Build a SpendableOutput from a ReceivedOutput by looking up its global index via the RPC.
  pub async fn from(rpc: &Rpc, output: ReceivedOutput) -> Result<SpendableOutput, RpcError> {
    let mut output = SpendableOutput { output, global_index: 0 };
    output.refresh_global_index(rpc).await?;
    Ok(output)
  }

  /// The one-time key of the underlying output.
  pub fn key(&self) -> EdwardsPoint {
    self.output.key()
  }

  /// The key offset of the underlying output.
  pub fn key_offset(&self) -> Scalar {
    self.output.key_offset()
  }

  /// The amount commitment of the underlying output.
  pub fn commitment(&self) -> Commitment {
    self.output.commitment()
  }

  /// Write the received output followed by the global index.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.output.write(w)?;
    w.write_all(&self.global_index.to_le_bytes())
  }

  /// Serialize to a fresh byte vector.
  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read a SpendableOutput previously written with `write`.
  pub fn read<R: Read>(r: &mut R) -> io::Result<SpendableOutput> {
    Ok(SpendableOutput { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
  }
}
/// A collection of timelocked outputs, either received or spendable.
#[derive(Zeroize)]
pub struct Timelocked<O: Clone + Zeroize>(Timelock, Vec<O>);
impl<O: Clone + Zeroize> Drop for Timelocked<O> {
fn drop(&mut self) {
self.zeroize();
}
}
impl<O: Clone + Zeroize> ZeroizeOnDrop for Timelocked<O> {}
impl<O: Clone + Zeroize> Timelocked<O> {
pub struct Timelocked(Timelock, Vec<SpendableOutput>);
impl Timelocked {
pub fn timelock(&self) -> Timelock {
self.0
}
/// Return the outputs if they're not timelocked, or an empty vector if they are.
pub fn not_locked(&self) -> Vec<O> {
pub fn not_locked(&self) -> Vec<SpendableOutput> {
if self.0 == Timelock::None {
return self.1.clone();
}
vec![]
}
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
/// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked
pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<SpendableOutput>> {
// If the Timelocks are comparable, return the outputs if they're now unlocked
self.0.partial_cmp(&timelock).filter(|_| self.0 <= timelock).map(|_| self.1.clone())
}
pub fn ignore_timelock(&self) -> Vec<O> {
pub fn ignore_timelock(&self) -> Vec<SpendableOutput> {
self.1.clone()
}
}
impl Scanner {
/// Scan a transaction to discover the received outputs.
pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
let keys;
let extra = if let Ok(extra) = extra {
keys = extra.keys();
extra
impl SpendableOutput {
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 1 + 32 + 32 + 40);
res.extend(&self.tx);
res.push(self.o);
res.extend(self.key.compress().to_bytes());
res.extend(self.key_offset.to_bytes());
res.extend(self.commitment.mask.to_bytes());
res.extend(self.commitment.amount.to_le_bytes());
res
}
pub fn deserialize<R: std::io::Read>(r: &mut R) -> std::io::Result<SpendableOutput> {
Ok(
SpendableOutput {
tx: read_32(r)?,
o: { let mut o = [0; 1]; r.read_exact(&mut o)?; o[0] },
key: read_point(r)?,
key_offset: read_scalar(r)?,
commitment: Commitment::new(
read_scalar(r)?,
{ let mut amount = [0; 8]; r.read_exact(&mut amount)?; u64::from_le_bytes(amount) }
)
}
)
}
}
impl Transaction {
pub fn scan(
&self,
view: ViewPair,
guaranteed: bool
) -> Timelocked {
let mut extra = vec![];
write_varint(&u64::try_from(self.prefix.extra.len()).unwrap(), &mut extra).unwrap();
extra.extend(&self.prefix.extra);
let extra = deserialize::<ExtraField>(&extra);
let pubkeys: Vec<EdwardsPoint>;
if let Ok(extra) = extra {
let mut m_pubkeys = vec![];
if let Some(key) = extra.tx_pubkey() {
m_pubkeys.push(key);
}
if let Some(keys) = extra.tx_additional_pubkeys() {
m_pubkeys.extend(&keys);
}
pubkeys = m_pubkeys.iter().map(|key| key.point.decompress()).filter_map(|key| key).collect();
} else {
return Timelocked(tx.prefix.timelock, vec![]);
return Timelocked(self.prefix.timelock, vec![]);
};
let payment_id = extra.payment_id();
let mut res = vec![];
for (o, output) in tx.prefix.outputs.iter().enumerate() {
// https://github.com/serai-dex/serai/issues/106
if let Some(burning_bug) = self.burning_bug.as_ref() {
if burning_bug.contains(&output.key) {
continue;
}
}
let output_key = output.key.decompress();
if output_key.is_none() {
continue;
}
let output_key = output_key.unwrap();
for key in &keys {
let (view_tag, shared_key, payment_id_xor) = shared_key(
if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
&self.pair.view,
key,
o,
for (o, output) in self.prefix.outputs.iter().enumerate() {
// TODO: This may be replaceable by pubkeys[o]
for pubkey in &pubkeys {
let key_offset = shared_key(
Some(uniqueness(&self.prefix.inputs)).filter(|_| guaranteed),
view.view,
pubkey,
o
);
let payment_id =
if let Some(PaymentId::Encrypted(id)) = payment_id.map(|id| id ^ payment_id_xor) {
id
} else {
payment_id_xor
};
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != view_tag {
continue;
}
}
// P - shared == spend
let subaddress = self
.subaddresses
.get(&(output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
if subaddress.is_none() {
if (output.key - (&key_offset * &ED25519_BASEPOINT_TABLE)) != view.spend {
continue;
}
let subaddress = *subaddress.unwrap();
// If it has torsion, it'll substract the non-torsioned shared key to a torsioned key
// We will not have a torsioned key in our HashMap of keys, so we wouldn't identify it as
// ours
// If we did though, it'd enable bypassing the included burning bug protection
debug_assert!(output_key.is_torsion_free());
let mut key_offset = shared_key;
if let Some(subaddress) = subaddress {
key_offset += self.pair.subaddress_derivation(subaddress);
}
// Since we've found an output to us, get its amount
let mut commitment = Commitment::zero();
@@ -344,34 +125,30 @@ impl Scanner {
commitment.amount = output.amount;
// Regular transaction
} else {
let amount = match tx.rct_signatures.base.ecdh_info.get(o) {
Some(amount) => amount_decryption(*amount, shared_key),
let amount = match self.rct_signatures.base.ecdh_info.get(o) {
Some(amount) => amount_decryption(*amount, key_offset),
// This should never happen, yet it may be possible with miner transactions?
// Using get just decreases the possibility of a panic and lets us move on in that case
None => break,
None => break
};
// Rebuild the commitment to verify it
commitment = Commitment::new(commitment_mask(shared_key), amount);
commitment = Commitment::new(commitment_mask(key_offset), amount);
// If this is a malicious commitment, move to the next output
// Any other R value will calculate to a different spend key and are therefore ignorable
if Some(&commitment.calculate()) != tx.rct_signatures.base.commitments.get(o) {
if Some(&commitment.calculate()) != self.rct_signatures.base.commitments.get(o) {
break;
}
}
if commitment.amount != 0 {
res.push(ReceivedOutput {
absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata { subaddress, payment_id, arbitrary_data: extra.data() },
res.push(SpendableOutput {
tx: self.hash(),
o: o.try_into().unwrap(),
key: output.key,
key_offset,
commitment
});
if let Some(burning_bug) = self.burning_bug.as_mut() {
burning_bug.insert(output.key);
}
}
// Break to prevent public keys from being included multiple times, triggering multiple
// inclusions of the same output
@@ -379,59 +156,6 @@ impl Scanner {
}
}
Timelocked(tx.prefix.timelock, res)
}
/// Scan a block to obtain its spendable outputs. Its the presence in a block giving these
/// transactions their global index, and this must be batched as asking for the index of specific
/// transactions is a dead giveaway for which transactions you successfully scanned. This
/// function obtains the output indexes for the miner transaction, incrementing from there
/// instead.
pub async fn scan(
&mut self,
rpc: &Rpc,
block: &Block,
) -> Result<Vec<Timelocked<SpendableOutput>>, RpcError> {
let mut index = rpc.get_o_indexes(block.miner_tx.hash()).await?[0];
let mut txs = vec![block.miner_tx.clone()];
txs.extend(rpc.get_transactions(&block.txs).await?);
let map = |mut timelock: Timelocked<ReceivedOutput>, index| {
if timelock.1.is_empty() {
None
} else {
Some(Timelocked(
timelock.0,
timelock
.1
.drain(..)
.map(|output| SpendableOutput {
global_index: index + u64::from(output.absolute.o),
output,
})
.collect(),
))
}
};
let mut res = vec![];
for tx in txs.drain(..) {
if let Some(timelock) = map(self.scan_transaction(&tx), index) {
res.push(timelock);
}
index += u64::try_from(
tx.prefix
.outputs
.iter()
// Filter to miner TX outputs/0-amount outputs since we're tacking the 0-amount index
// This will fail to scan blocks containing pre-RingCT miner TXs
.filter(|output| {
matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..))) || (output.amount == 0)
})
.count(),
)
.unwrap()
}
Ok(res)
Timelocked(self.prefix.timelock, res)
}
}

View File

@@ -1,125 +0,0 @@
use std::sync::{Arc, RwLock};
use zeroize::{Zeroize, ZeroizeOnDrop};
use crate::{
Protocol,
wallet::{
address::MoneroAddress, Fee, SpendableOutput, SignableTransaction, TransactionError,
extra::MAX_TX_EXTRA_NONCE_SIZE,
},
};
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SignableTransactionBuilderInternal {
protocol: Protocol,
fee: Fee,
inputs: Vec<SpendableOutput>,
payments: Vec<(MoneroAddress, u64)>,
change_address: Option<MoneroAddress>,
data: Vec<Vec<u8>>,
}
impl SignableTransactionBuilderInternal {
// Takes in the change address so users don't miss that they have to manually set one
// If they don't, all leftover funds will become part of the fee
fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
Self { protocol, fee, inputs: vec![], payments: vec![], change_address, data: vec![] }
}
fn add_input(&mut self, input: SpendableOutput) {
self.inputs.push(input);
}
fn add_inputs(&mut self, inputs: &[SpendableOutput]) {
self.inputs.extend(inputs.iter().cloned());
}
fn add_payment(&mut self, dest: MoneroAddress, amount: u64) {
self.payments.push((dest, amount));
}
fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) {
self.payments.extend(payments);
}
fn add_data(&mut self, data: Vec<u8>) {
self.data.push(data);
}
}
/// A Transaction Builder for Monero transactions.
/// All methods provided will modify self while also returning a shallow copy, enabling efficient
/// chaining with a clean API.
/// In order to fork the builder at some point, clone will still return a deep copy.
#[derive(Debug)]
pub struct SignableTransactionBuilder(Arc<RwLock<SignableTransactionBuilderInternal>>);
impl Clone for SignableTransactionBuilder {
fn clone(&self) -> Self {
Self(Arc::new(RwLock::new((*self.0.read().unwrap()).clone())))
}
}
impl PartialEq for SignableTransactionBuilder {
fn eq(&self, other: &Self) -> bool {
*self.0.read().unwrap() == *other.0.read().unwrap()
}
}
impl Eq for SignableTransactionBuilder {}
impl Zeroize for SignableTransactionBuilder {
fn zeroize(&mut self) {
self.0.write().unwrap().zeroize()
}
}
impl SignableTransactionBuilder {
fn shallow_copy(&self) -> Self {
Self(self.0.clone())
}
pub fn new(protocol: Protocol, fee: Fee, change_address: Option<MoneroAddress>) -> Self {
Self(Arc::new(RwLock::new(SignableTransactionBuilderInternal::new(
protocol,
fee,
change_address,
))))
}
pub fn add_input(&mut self, input: SpendableOutput) -> Self {
self.0.write().unwrap().add_input(input);
self.shallow_copy()
}
pub fn add_inputs(&mut self, inputs: &[SpendableOutput]) -> Self {
self.0.write().unwrap().add_inputs(inputs);
self.shallow_copy()
}
pub fn add_payment(&mut self, dest: MoneroAddress, amount: u64) -> Self {
self.0.write().unwrap().add_payment(dest, amount);
self.shallow_copy()
}
pub fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) -> Self {
self.0.write().unwrap().add_payments(payments);
self.shallow_copy()
}
pub fn add_data(&mut self, data: Vec<u8>) -> Result<Self, TransactionError> {
if data.len() > MAX_TX_EXTRA_NONCE_SIZE {
Err(TransactionError::TooMuchData)?;
}
self.0.write().unwrap().add_data(data);
Ok(self.shallow_copy())
}
pub fn build(self) -> Result<SignableTransaction, TransactionError> {
let read = self.0.read().unwrap();
SignableTransaction::new(
read.protocol,
read.inputs.clone(),
read.payments.clone(),
read.change_address,
read.data.clone(),
read.fee,
)
}
}

View File

@@ -1,35 +1,37 @@
use core::ops::Deref;
use thiserror::Error;
use rand_core::{RngCore, CryptoRng};
use rand::seq::SliceRandom;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
scalar::Scalar,
edwards::EdwardsPoint
};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero::{consensus::Encodable, PublicKey, blockdata::transaction::SubField};
#[cfg(feature = "multisig")]
use frost::FrostError;
use crate::{
Protocol, Commitment, random_scalar,
Commitment,
random_scalar,
ringct::{
generate_key_image,
clsag::{ClsagError, ClsagInput, Clsag},
bulletproofs::{MAX_OUTPUTS, Bulletproofs},
RctBase, RctPrunable, RctSignatures,
RctBase, RctPrunable, RctSignatures
},
transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
rpc::{Rpc, RpcError},
wallet::{
address::MoneroAddress, SpendableOutput, Decoys, PaymentId, ExtraField, Extra, key_image_sort,
uniqueness, shared_key, commitment_mask, amount_encryption, extra::MAX_TX_EXTRA_NONCE_SIZE,
},
address::{AddressType, Address}, SpendableOutput, Decoys,
key_image_sort, uniqueness, shared_key, commitment_mask, amount_encryption
}
};
mod builder;
pub use builder::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
use crate::frost::MultisigError;
#[cfg(feature = "multisig")]
mod multisig;
@@ -37,52 +39,47 @@ mod multisig;
pub use multisig::TransactionMachine;
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Debug)]
struct SendOutput {
R: EdwardsPoint,
view_tag: u8,
dest: EdwardsPoint,
commitment: Commitment,
amount: [u8; 8],
amount: [u8; 8]
}
impl SendOutput {
fn new<R: RngCore + CryptoRng>(
rng: &mut R,
unique: [u8; 32],
output: (usize, (MoneroAddress, u64)),
) -> (SendOutput, Option<[u8; 8]>) {
let o = output.0;
let output = output.1;
output: (Address, u64),
o: usize
) -> SendOutput {
let r = random_scalar(rng);
let (view_tag, shared_key, payment_id_xor) =
shared_key(Some(unique).filter(|_| output.0.meta.kind.guaranteed()), &r, &output.0.view, o);
let shared_key = shared_key(
Some(unique).filter(|_| output.0.meta.guaranteed),
r,
&output.0.view,
o
);
(
SendOutput {
R: if !output.0.meta.kind.subaddress() {
&r * &ED25519_BASEPOINT_TABLE
} else {
r * output.0.spend
},
view_tag,
dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + output.0.spend),
commitment: Commitment::new(commitment_mask(shared_key), output.1),
amount: amount_encryption(output.1, shared_key),
let spend = output.0.spend;
SendOutput {
R: match output.0.meta.kind {
AddressType::Standard => &r * &ED25519_BASEPOINT_TABLE,
AddressType::Integrated(_) => unimplemented!("SendOutput::new doesn't support Integrated addresses"),
AddressType::Subaddress => &r * spend
},
output
.0
.payment_id()
.map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()),
)
dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + spend),
commitment: Commitment::new(commitment_mask(shared_key), output.1),
amount: amount_encryption(output.1, shared_key)
}
}
}
#[derive(Clone, PartialEq, Eq, Debug, Error)]
#[derive(Clone, Error, Debug)]
pub enum TransactionError {
#[error("multiple addresses with payment IDs")]
MultiplePaymentIds,
#[error("invalid address")]
InvalidAddress,
#[error("no inputs")]
NoInputs,
#[error("no outputs")]
@@ -91,8 +88,6 @@ pub enum TransactionError {
NoChange,
#[error("too many outputs")]
TooManyOutputs,
#[error("too much data")]
TooMuchData,
#[error("not enough funds (in {0}, out {1})")]
NotEnoughFunds(u64, u64),
#[error("wrong spend private key")]
@@ -106,63 +101,62 @@ pub enum TransactionError {
#[cfg(feature = "multisig")]
#[error("frost error {0}")]
FrostError(FrostError),
#[cfg(feature = "multisig")]
#[error("multisig error {0}")]
MultisigError(MultisigError)
}
async fn prepare_inputs<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &Rpc,
ring_len: usize,
inputs: &[SpendableOutput],
spend: &Zeroizing<Scalar>,
tx: &mut Transaction,
) -> Result<Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>, TransactionError> {
spend: &Scalar,
tx: &mut Transaction
) -> Result<Vec<(Scalar, EdwardsPoint, ClsagInput)>, TransactionError> {
let mut signable = Vec::with_capacity(inputs.len());
// Select decoys
let decoys = Decoys::select(
rng,
rpc,
ring_len,
rpc.get_height().await.map_err(TransactionError::RpcError)? - 10,
inputs,
)
.await
.map_err(TransactionError::RpcError)?;
rpc.get_height().await.map_err(|e| TransactionError::RpcError(e))? - 10,
inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
for (i, input) in inputs.iter().enumerate() {
let input_spend = Zeroizing::new(input.key_offset() + spend.deref());
let image = generate_key_image(&input_spend);
signable.push((
input_spend,
image,
ClsagInput::new(input.commitment().clone(), decoys[i].clone())
.map_err(TransactionError::ClsagError)?,
spend + input.key_offset,
generate_key_image(spend + input.key_offset),
ClsagInput::new(
input.commitment,
decoys[i].clone()
).map_err(|e| TransactionError::ClsagError(e))?
));
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: decoys[i].offsets.clone(),
key_image: signable[i].1,
key_image: signable[i].1
});
}
signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
tx.prefix.inputs.sort_by(|x, y| {
if let (Input::ToKey { key_image: x, .. }, Input::ToKey { key_image: y, .. }) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
}
tx.prefix.inputs.sort_by(|x, y| if let (
Input::ToKey { key_image: x, ..},
Input::ToKey { key_image: y, ..}
) = (x, y) {
x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
} else {
panic!("Input wasn't ToKey")
});
Ok(signable)
}
/// Fee struct, defined as a per-unit cost and a mask for rounding purposes.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Fee {
pub per_weight: u64,
pub mask: u64,
pub mask: u64
}
impl Fee {
@@ -171,61 +165,44 @@ impl Fee {
}
}
/// A signable transaction, either in a single-signer or multisig context.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Debug)]
pub struct SignableTransaction {
protocol: Protocol,
inputs: Vec<SpendableOutput>,
payments: Vec<(MoneroAddress, u64)>,
data: Vec<Vec<u8>>,
fee: u64,
payments: Vec<(Address, u64)>,
outputs: Vec<SendOutput>,
fee: u64
}
impl SignableTransaction {
/// Create a signable transaction. If the change address is specified, leftover funds will be
/// sent to it. If the change address isn't specified, up to 16 outputs may be specified, using
/// any leftover funds as a bonus to the fee. The optional data field will be embedded in TX
/// extra.
pub fn new(
protocol: Protocol,
inputs: Vec<SpendableOutput>,
mut payments: Vec<(MoneroAddress, u64)>,
change_address: Option<MoneroAddress>,
data: Vec<Vec<u8>>,
fee_rate: Fee,
mut payments: Vec<(Address, u64)>,
change_address: Option<Address>,
fee_rate: Fee
) -> Result<SignableTransaction, TransactionError> {
// Make sure there's only one payment ID
{
let mut payment_ids = 0;
let mut count = |addr: MoneroAddress| {
if addr.payment_id().is_some() {
payment_ids += 1
}
};
for payment in &payments {
count(payment.0);
}
if let Some(change) = change_address {
count(change);
}
if payment_ids > 1 {
Err(TransactionError::MultiplePaymentIds)?;
// Make sure all addresses are valid
let test = |addr: Address| {
match addr.meta.kind {
AddressType::Standard => Ok(()),
AddressType::Integrated(..) => Err(TransactionError::InvalidAddress),
AddressType::Subaddress => Ok(())
}
};
for payment in &payments {
test(payment.0)?;
}
if let Some(change) = change_address {
test(change)?;
}
if inputs.is_empty() {
if inputs.len() == 0 {
Err(TransactionError::NoInputs)?;
}
if payments.is_empty() {
if payments.len() == 0 {
Err(TransactionError::NoOutputs)?;
}
for part in &data {
if part.len() > MAX_TX_EXTRA_NONCE_SIZE {
Err(TransactionError::TooMuchData)?;
}
}
// TODO TX MAX SIZE
// If we don't have two outputs, as required by Monero, add a second
@@ -233,17 +210,17 @@ impl SignableTransaction {
if change && change_address.is_none() {
Err(TransactionError::NoChange)?;
}
let outputs = payments.len() + usize::from(change);
let mut outputs = payments.len() + (if change { 1 } else { 0 });
// Calculate the extra length
let extra = Extra::fee_weight(outputs, data.as_ref());
// Calculate the extra length.
// Type, length, value, with 1 field for the first key and 1 field for the rest
let extra = (outputs * (2 + 32)) - (outputs.saturating_sub(2) * 2);
// Calculate the fee.
let mut fee =
fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs, extra));
let mut fee = fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs, extra));
// Make sure we have enough funds
let in_amount = inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
let in_amount = inputs.iter().map(|input| input.commitment.amount).sum::<u64>();
let mut out_amount = payments.iter().map(|payment| payment.1).sum::<u64>() + fee;
if in_amount < out_amount {
Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
@@ -252,154 +229,139 @@ impl SignableTransaction {
// If we have yet to add a change output, do so if it's economically viable
if (!change) && change_address.is_some() && (in_amount != out_amount) {
// Check even with the new fee, there's remaining funds
let change_fee =
fee_rate.calculate(Transaction::fee_weight(protocol, inputs.len(), outputs + 1, extra)) -
fee;
let change_fee = fee_rate.calculate(Transaction::fee_weight(inputs.len(), outputs + 1, extra)) - fee;
if (out_amount + change_fee) < in_amount {
change = true;
outputs += 1;
out_amount += change_fee;
fee += change_fee;
}
}
if outputs > MAX_OUTPUTS {
Err(TransactionError::TooManyOutputs)?;
}
if change {
payments.push((change_address.unwrap(), in_amount - out_amount));
}
if payments.len() > MAX_OUTPUTS {
Err(TransactionError::TooManyOutputs)?;
}
Ok(SignableTransaction { protocol, inputs, payments, data, fee })
Ok(
SignableTransaction {
inputs,
payments,
outputs: vec![],
fee
}
)
}
fn prepare_transaction<R: RngCore + CryptoRng>(
fn prepare_outputs<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
uniqueness: [u8; 32],
) -> (Transaction, Scalar) {
uniqueness: [u8; 32]
) -> (Vec<Commitment>, Scalar) {
// Shuffle the payments
self.payments.shuffle(rng);
// Actually create the outputs
let mut outputs = Vec::with_capacity(self.payments.len());
let mut id = None;
for payment in self.payments.drain(..).enumerate() {
let (output, payment_id) = SendOutput::new(rng, uniqueness, payment);
outputs.push(output);
id = id.or(payment_id);
self.outputs = Vec::with_capacity(self.payments.len() + 1);
for (o, output) in self.payments.iter().enumerate() {
self.outputs.push(SendOutput::new(rng, uniqueness, *output, o));
}
// Include a random payment ID if we don't actually have one
// It prevents transactions from leaking if they're sending to integrated addresses or not
let id = if let Some(id) = id {
id
} else {
let mut id = [0; 8];
rng.fill_bytes(&mut id);
id
};
let commitments = outputs.iter().map(|output| output.commitment.clone()).collect::<Vec<_>>();
let commitments = self.outputs.iter().map(|output| output.commitment).collect::<Vec<_>>();
let sum = commitments.iter().map(|commitment| commitment.mask).sum();
// Safe due to the constructor checking MAX_OUTPUTS
let bp = Bulletproofs::prove(rng, &commitments, self.protocol.bp_plus()).unwrap();
// Create the TX extra
let extra = {
let mut extra = Extra::new(outputs.iter().map(|output| output.R).collect());
let mut id_vec = Vec::with_capacity(1 + 8);
PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
extra.push(ExtraField::Nonce(id_vec));
// Include data if present
for part in self.data.drain(..) {
extra.push(ExtraField::Nonce(part));
}
let mut serialized = Vec::with_capacity(Extra::fee_weight(outputs.len(), self.data.as_ref()));
extra.write(&mut serialized).unwrap();
serialized
};
let mut tx_outputs = Vec::with_capacity(outputs.len());
let mut ecdh_info = Vec::with_capacity(outputs.len());
for output in &outputs {
tx_outputs.push(Output {
amount: 0,
key: output.dest.compress(),
view_tag: Some(output.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
});
ecdh_info.push(output.amount);
}
(
Transaction {
prefix: TransactionPrefix {
version: 2,
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra,
},
signatures: vec![],
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect(),
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![],
},
},
},
sum,
)
(commitments, sum)
}
fn prepare_transaction(
&self,
commitments: &[Commitment],
bp: Bulletproofs
) -> Transaction {
// Create the TX extra
// TODO: Review this for canonicity with Monero
let mut extra = vec![];
SubField::TxPublicKey(
PublicKey { point: self.outputs[0].R.compress() }
).consensus_encode(&mut extra).unwrap();
SubField::AdditionalPublickKey(
self.outputs[1 ..].iter().map(|output| PublicKey { point: output.R.compress() }).collect()
).consensus_encode(&mut extra).unwrap();
let mut tx_outputs = Vec::with_capacity(self.outputs.len());
let mut ecdh_info = Vec::with_capacity(self.outputs.len());
for o in 0 .. self.outputs.len() {
tx_outputs.push(Output {
amount: 0,
key: self.outputs[o].dest,
tag: None
});
ecdh_info.push(self.outputs[o].amount);
}
Transaction {
prefix: TransactionPrefix {
version: 2,
timelock: Timelock::None,
inputs: vec![],
outputs: tx_outputs,
extra
},
rct_signatures: RctSignatures {
base: RctBase {
fee: self.fee,
ecdh_info,
commitments: commitments.iter().map(|commitment| commitment.calculate()).collect()
},
prunable: RctPrunable::Clsag {
bulletproofs: vec![bp],
clsags: vec![],
pseudo_outs: vec![]
}
}
}
}
/// Sign this transaction.
pub async fn sign<R: RngCore + CryptoRng>(
mut self,
&mut self,
rng: &mut R,
rpc: &Rpc,
spend: &Zeroizing<Scalar>,
spend: &Scalar
) -> Result<Transaction, TransactionError> {
let mut images = Vec::with_capacity(self.inputs.len());
for input in &self.inputs {
let mut offset = Zeroizing::new(spend.deref() + input.key_offset());
if (offset.deref() * &ED25519_BASEPOINT_TABLE) != input.key() {
let offset = spend + input.key_offset;
if (&offset * &ED25519_BASEPOINT_TABLE) != input.key {
Err(TransactionError::WrongPrivateKey)?;
}
images.push(generate_key_image(&offset));
offset.zeroize();
images.push(generate_key_image(offset));
}
images.sort_by(key_image_sort);
let (mut tx, mask_sum) = self.prepare_transaction(
let (commitments, mask_sum) = self.prepare_outputs(
rng,
uniqueness(
&images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
);
let signable =
prepare_inputs(rng, rpc, self.protocol.ring_len(), &self.inputs, spend, &mut tx).await?;
let mut tx = self.prepare_transaction(&commitments, Bulletproofs::new(rng, &commitments)?);
let clsag_pairs = Clsag::sign(rng, signable, mask_sum, tx.signature_hash());
let signable = prepare_inputs(rng, rpc, &self.inputs, spend, &mut tx).await?;
let clsag_pairs = Clsag::sign(rng, &signable, mask_sum, tx.signature_hash());
match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1).collect::<Vec<_>>());
pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1.clone()).collect::<Vec<_>>());
}
}
Ok(tx)

View File

@@ -1,79 +1,66 @@
use std::{
io::{self, Read},
sync::{Arc, RwLock},
collections::HashMap,
};
use std::{io::{Read, Cursor}, sync::{Arc, RwLock}, collections::HashMap};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use rand_chacha::ChaCha12Rng;
use group::ff::Field;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
use dalek_ff_group as dfg;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::{EdwardsPoint, CompressedEdwardsY}};
use transcript::{Transcript, RecommendedTranscript};
use frost::{
curve::Ed25519,
FrostError, ThresholdKeys,
FrostError, FrostKeys,
sign::{
Writable, Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine,
SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
},
PreprocessMachine, SignMachine, SignatureMachine,
AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine
}
};
use crate::{
random_scalar,
ringct::{
clsag::{ClsagInput, ClsagDetails, ClsagAddendum, ClsagMultisig, add_key_image_share},
RctPrunable,
},
random_scalar, ringct::{clsag::{ClsagInput, ClsagDetails, ClsagMultisig}, bulletproofs::Bulletproofs, RctPrunable},
transaction::{Input, Transaction},
rpc::Rpc,
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness},
wallet::{TransactionError, SignableTransaction, Decoys, key_image_sort, uniqueness}
};
/// FROST signing machine to produce a signed transaction.
pub struct TransactionMachine {
signable: SignableTransaction,
i: u16,
included: Vec<u16>,
transcript: RecommendedTranscript,
decoys: Vec<Decoys>,
// Hashed key and scalar offset
key_images: Vec<(EdwardsPoint, Scalar)>,
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>
}
pub struct TransactionSignMachine {
signable: SignableTransaction,
i: u16,
included: Vec<u16>,
transcript: RecommendedTranscript,
decoys: Vec<Decoys>,
key_images: Vec<(EdwardsPoint, Scalar)>,
inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,
our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
our_preprocess: Vec<u8>
}
pub struct TransactionSignatureMachine {
tx: Transaction,
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>
}
impl SignableTransaction {
/// Create a FROST signing machine out of this signable transaction.
/// The height is the Monero blockchain height to synchronize around.
pub async fn multisig(
self,
rpc: &Rpc,
keys: ThresholdKeys<Ed25519>,
keys: FrostKeys<Ed25519>,
mut transcript: RecommendedTranscript,
height: usize,
mut included: Vec<u16>
) -> Result<TransactionMachine, TransactionError> {
let mut inputs = vec![];
for _ in 0 .. self.inputs.len() {
@@ -92,40 +79,46 @@ impl SignableTransaction {
// Include the height we're using for our data
// The data itself will be included, making this unnecessary, yet a lot of this is technically
// unnecessary. Anything which further increases security at almost no cost should be followed
transcript.append_message(b"height", u64::try_from(height).unwrap().to_le_bytes());
// Also include the spend_key as below only the key offset is included, so this transcripts the
// sum product
// Useful as transcripting the sum product effectively transcripts the key image, further
// guaranteeing the one time properties noted below
transcript.append_message(b"spend_key", keys.group_key().0.compress().to_bytes());
transcript.append_message(b"height", &u64::try_from(height).unwrap().to_le_bytes());
// Also include the spend_key as below only the key offset is included, so this confirms the sum product
// Useful as confirming the sum product confirms the key image, further guaranteeing the one time
// properties noted below
transcript.append_message(b"spend_key", &keys.group_key().0.compress().to_bytes());
for input in &self.inputs {
// These outputs can only be spent once. Therefore, it forces all RNGs derived from this
// transcript (such as the one used to create one time keys) to be unique
transcript.append_message(b"input_hash", input.output.absolute.tx);
transcript.append_message(b"input_output_index", [input.output.absolute.o]);
transcript.append_message(b"input_hash", &input.tx);
transcript.append_message(b"input_output_index", &[input.o]);
// Not including this, with a doxxed list of payments, would allow brute forcing the inputs
// to determine RNG seeds and therefore the true spends
transcript.append_message(b"input_shared_key", input.key_offset().to_bytes());
transcript.append_message(b"input_shared_key", &input.key_offset.to_bytes());
}
for payment in &self.payments {
transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
transcript.append_message(b"payment_address", &payment.0.to_string().as_bytes());
transcript.append_message(b"payment_amount", &payment.1.to_le_bytes());
}
let mut key_images = vec![];
// Sort included before cloning it around
included.sort_unstable();
for (i, input) in self.inputs.iter().enumerate() {
// Check this the right set of keys
let offset = keys.offset(dfg::Scalar(input.key_offset()));
if offset.group_key().0 != input.key() {
let offset = keys.offset(dalek_ff_group::Scalar(input.key_offset));
if offset.group_key().0 != input.key {
Err(TransactionError::WrongPrivateKey)?;
}
let clsag = ClsagMultisig::new(transcript.clone(), input.key(), inputs[i].clone());
key_images.push((
clsag.H,
keys.current_offset().unwrap_or_else(dfg::Scalar::zero).0 + self.inputs[i].key_offset(),
));
clsags.push(AlgorithmMachine::new(clsag, offset).map_err(TransactionError::FrostError)?);
clsags.push(
AlgorithmMachine::new(
ClsagMultisig::new(
transcript.clone(),
input.key,
inputs[i].clone()
).map_err(|e| TransactionError::MultisigError(e))?,
Arc::new(offset),
&included
).map_err(|e| TransactionError::FrostError(e))?
);
}
// Select decoys
@@ -136,50 +129,47 @@ impl SignableTransaction {
let decoys = Decoys::select(
// Using a seeded RNG with a specific height, committed to above, should make these decoys
// committed to. They'll also be committed to later via the TX message as a whole
&mut ChaCha20Rng::from_seed(transcript.rng_seed(b"decoys")),
&mut ChaCha12Rng::from_seed(transcript.rng_seed(b"decoys")),
rpc,
self.protocol.ring_len(),
height,
&self.inputs,
&self.inputs
).await.map_err(|e| TransactionError::RpcError(e))?;
Ok(
TransactionMachine {
signable: self,
i: keys.params().i(),
included,
transcript,
decoys,
inputs,
clsags
}
)
.await
.map_err(TransactionError::RpcError)?;
Ok(TransactionMachine {
signable: self,
i: keys.params().i(),
transcript,
decoys,
key_images,
inputs,
clsags,
})
}
}
impl PreprocessMachine for TransactionMachine {
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type Signature = Transaction;
type SignMachine = TransactionSignMachine;
fn preprocess<R: RngCore + CryptoRng>(
mut self,
rng: &mut R,
) -> (TransactionSignMachine, Self::Preprocess) {
rng: &mut R
) -> (TransactionSignMachine, Vec<u8>) {
// Iterate over each CLSAG calling preprocess
let mut preprocesses = Vec::with_capacity(self.clsags.len());
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
preprocesses.push(preprocess);
clsag
})
.collect();
let our_preprocess = preprocesses.clone();
let mut serialized = Vec::with_capacity(
// D_{G, H}, E_{G, H}, DLEqs, key image addendum
self.clsags.len() * ((2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len())
);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, preprocess) = clsag.preprocess(rng);
serialized.extend(&preprocess);
clsag
}).collect();
let our_preprocess = serialized.clone();
// We could add further entropy here, and previous versions of this library did so
// As of right now, the multisig's key, the inputs being spent, and the FROST data itself
@@ -194,108 +184,70 @@ impl PreprocessMachine for TransactionMachine {
TransactionSignMachine {
signable: self.signable,
i: self.i,
included: self.included,
transcript: self.transcript,
decoys: self.decoys,
key_images: self.key_images,
inputs: self.inputs,
clsags,
our_preprocess,
},
preprocesses,
serialized
)
}
}
impl SignMachine<Transaction> for TransactionSignMachine {
type Params = ();
type Keys = ThresholdKeys<Ed25519>;
type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
type SignatureShare = Vec<SignatureShare<Ed25519>>;
type SignatureMachine = TransactionSignatureMachine;
fn cache(self) -> CachedPreprocess {
unimplemented!(
"Monero transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn from_cache(_: (), _: ThresholdKeys<Ed25519>, _: CachedPreprocess) -> Result<Self, FrostError> {
unimplemented!(
"Monero transactions don't support caching their preprocesses due to {}",
"being already bound to a specific transaction"
);
}
fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
self.clsags.iter().map(|clsag| clsag.read_preprocess(reader)).collect()
}
fn sign(
fn sign<Re: Read>(
mut self,
mut commitments: HashMap<u16, Self::Preprocess>,
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
Err(FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own",
))?;
mut commitments: HashMap<u16, Re>,
msg: &[u8]
) -> Result<(TransactionSignatureMachine, Vec<u8>), FrostError> {
if msg.len() != 0 {
Err(
FrostError::InternalError(
"message was passed to the TransactionMachine when it generates its own"
)
)?;
}
// Find out who's included
// This may not be a valid set of signers yet the algorithm machine will error if it's not
commitments.remove(&self.i); // Remove, if it was included for some reason
let mut included = commitments.keys().into_iter().cloned().collect::<Vec<_>>();
included.push(self.i);
included.sort_unstable();
// FROST commitments and their DLEqs, and the image and its DLEq
const CLSAG_LEN: usize = (2 * (32 + 32)) + (2 * (32 + 32)) + ClsagMultisig::serialized_len();
// Convert the unified commitments to a Vec of the individual commitments
let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
let mut commitments = (0 .. self.clsags.len())
.map(|c| {
included
.iter()
.map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own
// transcripts cloned from this TX's initial premise's transcript. For our TX
// transcript to have the CLSAG data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", (*l).to_be_bytes());
let mut commitments = (0 .. self.clsags.len()).map(|c| {
let mut buf = [0; CLSAG_LEN];
(&self.included).iter().map(|l| {
// Add all commitments to the transcript for their entropy
// While each CLSAG will do this as they need to for security, they have their own transcripts
// cloned from this TX's initial premise's transcript. For our TX transcript to have the CLSAG
// data for entropy, it'll have to be added ourselves here
self.transcript.append_message(b"participant", &(*l).to_be_bytes());
if *l == self.i {
buf.copy_from_slice(self.our_preprocess.drain(.. CLSAG_LEN).as_slice());
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?
.read_exact(&mut buf).map_err(|_| FrostError::InvalidCommitment(*l))?;
}
self.transcript.append_message(b"preprocess", &buf);
let preprocess = if *l == self.i {
self.our_preprocess[c].clone()
} else {
commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
};
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well provides
// the easiest API overall, as this is where the TX is (which needs the key images in its
// message), along with where the outputs are determined (where our outputs may need
// these in order to guarantee uniqueness)
images[c] += CompressedEdwardsY(
buf[(CLSAG_LEN - 96) .. (CLSAG_LEN - 64)].try_into().map_err(|_| FrostError::InvalidCommitment(*l))?
).decompress().ok_or(FrostError::InvalidCommitment(*l))?;
{
let mut buf = vec![];
preprocess.write(&mut buf).unwrap();
self.transcript.append_message(b"preprocess", buf);
}
// While here, calculate the key image
// Clsag will parse/calculate/validate this as needed, yet doing so here as well
// provides the easiest API overall, as this is where the TX is (which needs the key
// images in its message), along with where the outputs are determined (where our
// outputs may need these in order to guarantee uniqueness)
add_key_image_share(
&mut images[c],
self.key_images[c].0,
self.key_images[c].1,
&included,
*l,
preprocess.addendum.key_image.0,
);
Ok((*l, preprocess))
})
.collect::<Result<HashMap<_, _>, _>>()
})
.collect::<Result<Vec<_>, _>>()?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()
}).collect::<Result<Vec<_>, _>>()?;
// Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
// above
@@ -304,59 +256,75 @@ impl SignMachine<Transaction> for TransactionSignMachine {
}
// Create the actual transaction
let (mut tx, output_masks) = {
let output_masks;
let mut tx = {
let mut sorted_images = images.clone();
sorted_images.sort_by(key_image_sort);
self.signable.prepare_transaction(
&mut ChaCha20Rng::from_seed(self.transcript.rng_seed(b"transaction_keys_bulletproofs")),
let commitments;
(commitments, output_masks) = self.signable.prepare_outputs(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"tx_keys")),
uniqueness(
&sorted_images
.iter()
.map(|image| Input::ToKey { amount: 0, key_offsets: vec![], key_image: *image })
.collect::<Vec<_>>(),
),
&images.iter().map(|image| Input::ToKey {
amount: 0,
key_offsets: vec![],
key_image: *image
}).collect::<Vec<_>>()
)
);
self.signable.prepare_transaction(
&commitments,
Bulletproofs::new(
&mut ChaCha12Rng::from_seed(self.transcript.rng_seed(b"bulletproofs")),
&commitments
).unwrap()
)
};
// Sort the inputs, as expected
let mut sorted = Vec::with_capacity(self.clsags.len());
while !self.clsags.is_empty() {
while self.clsags.len() != 0 {
sorted.push((
images.swap_remove(0),
self.signable.inputs.swap_remove(0),
self.decoys.swap_remove(0),
self.inputs.swap_remove(0),
self.clsags.swap_remove(0),
commitments.swap_remove(0),
commitments.swap_remove(0)
));
}
sorted.sort_by(|x, y| key_image_sort(&x.0, &y.0));
let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks"));
let mut rng = ChaCha12Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks"));
let mut sum_pseudo_outs = Scalar::zero();
while !sorted.is_empty() {
while sorted.len() != 0 {
let value = sorted.remove(0);
let mut mask = random_scalar(&mut rng);
if sorted.is_empty() {
if sorted.len() == 0 {
mask = output_masks - sum_pseudo_outs;
} else {
sum_pseudo_outs += mask;
}
tx.prefix.inputs.push(Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0,
});
tx.prefix.inputs.push(
Input::ToKey {
amount: 0,
key_offsets: value.2.offsets.clone(),
key_image: value.0
}
);
*value.3.write().unwrap() = Some(ClsagDetails::new(
ClsagInput::new(value.1.commitment().clone(), value.2).map_err(|_| {
panic!("Signing an input which isn't present in the ring we created for it")
})?,
mask,
));
*value.3.write().unwrap() = Some(
ClsagDetails::new(
ClsagInput::new(
value.1.commitment,
value.2
).map_err(|_| panic!("Signing an input which isn't present in the ring we created for it"))?,
mask
)
);
self.clsags.push(value.4);
commitments.push(value.5);
@@ -365,39 +333,30 @@ impl SignMachine<Transaction> for TransactionSignMachine {
let msg = tx.signature_hash();
// Iterate over each CLSAG calling sign
let mut shares = Vec::with_capacity(self.clsags.len());
let clsags = self
.clsags
.drain(..)
.map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
shares.push(share);
Ok(clsag)
})
.collect::<Result<_, _>>()?;
let mut serialized = Vec::with_capacity(self.clsags.len() * 32);
let clsags = self.clsags.drain(..).map(|clsag| {
let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
serialized.extend(&share);
Ok(clsag)
}).collect::<Result<_, _>>()?;
Ok((TransactionSignatureMachine { tx, clsags }, shares))
Ok((TransactionSignatureMachine { tx, clsags }, serialized))
}
}
impl SignatureMachine<Transaction> for TransactionSignatureMachine {
type SignatureShare = Vec<SignatureShare<Ed25519>>;
fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
}
fn complete(
mut self,
shares: HashMap<u16, Self::SignatureShare>,
) -> Result<Transaction, FrostError> {
fn complete<Re: Read>(self, mut shares: HashMap<u16, Re>) -> Result<Transaction, FrostError> {
let mut tx = self.tx;
match tx.rct_signatures.prunable {
RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
for (c, clsag) in self.clsags.drain(..).enumerate() {
for clsag in self.clsags {
let (clsag, pseudo_out) = clsag.complete(
shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
shares.iter_mut().map(|(l, shares)| {
let mut buf = [0; 32];
shares.read_exact(&mut buf).map_err(|_| FrostError::InvalidShare(*l))?;
Ok((*l, Cursor::new(buf)))
}).collect::<Result<HashMap<_, _>, _>>()?
)?;
clsags.push(clsag);
pseudo_outs.push(pseudo_out);

View File

@@ -1,72 +0,0 @@
use monero_serai::{wallet::TransactionError, transaction::Transaction};
mod runner;
test!(
  add_single_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      // 254 zero bytes, just under the 255-byte data limit.
      // Fixed from `vec![b'\0', 254]`, which built a two-element vec [0, 254] — a typo for the
      // repeat form `vec![b'\0'; 254]` the test's name implies.
      let arbitrary_data = vec![b'\0'; 254];
      // Make sure we can add the data to the TX
      let result = builder.add_data(arbitrary_data.clone());
      assert!(result.is_ok());
      builder.add_payment(addr, 5);
      (builder.build().unwrap(), (arbitrary_data,))
    },
    |_, tx: Transaction, mut scanner: Scanner, data: (Vec<u8>,)| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      // The scanned output should carry back exactly the data we embedded
      assert_eq!(output.arbitrary_data()[0], data.0);
    },
  ),
);
test!(
  add_multiple_data_less_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      // 254 zero bytes, just under the 255-byte data limit.
      // Fixed from `vec![b'\0', 254]` (a two-element vec [0, 254]) to the intended repeat form.
      let data = vec![b'\0'; 254];
      // Add the same data to the TX multiple times
      for _ in 0 .. 5 {
        let result = builder.add_data(data.clone());
        assert!(result.is_ok());
      }
      builder.add_payment(addr, 5);
      (builder.build().unwrap(), data)
    },
    |_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      // All five copies should be recovered, in order
      assert_eq!(output.arbitrary_data(), vec![data; 5]);
    },
  ),
);
// Verifies the 255-byte arbitrary-data limit: 256 bytes is rejected, exactly 255 is accepted
// and round-trips through a scanned output.
test!(
  add_single_data_more_than_255,
  (
    |_, mut builder: Builder, addr| async move {
      // Make a data that is bigger than 255 bytes
      let mut data = vec![b'a'; 256];
      // Make sure we get an error if we try to add it to the TX
      assert_eq!(builder.add_data(data.clone()), Err(TransactionError::TooMuchData));
      // Reduce data size and retry. The data will now be 255 bytes long, exactly
      data.pop();
      assert!(builder.add_data(data.clone()).is_ok());
      builder.add_payment(addr, 5);
      (builder.build().unwrap(), data)
    },
    |_, tx: Transaction, mut scanner: Scanner, data: Vec<u8>| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      // The single 255-byte entry should be recovered intact
      assert_eq!(output.arbitrary_data(), vec![data]);
    },
  ),
);

42
coins/monero/tests/rpc.rs Normal file
View File

@@ -0,0 +1,42 @@
use rand::rngs::OsRng;
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use serde_json::json;
use monero::{
network::Network,
util::{key::PublicKey, address::Address}
};
use monero_serai::{random_scalar, rpc::{EmptyResponse, RpcError, Rpc}};
/// Obtain a handle to the local test daemon, mining an initial set of blocks on first use.
pub async fn rpc() -> Rpc {
  let rpc = Rpc::new("http://127.0.0.1:18081".to_string());

  // A height above 1 means this setup already ran; hand the connection back untouched
  if rpc.get_height().await.unwrap() != 1 {
    return rpc;
  }

  // Build a throwaway standard address from two freshly generated keys
  let random_key =
    || PublicKey { point: (&random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE).compress() };
  let addr = Address::standard(Network::Mainnet, random_key(), random_key()).to_string();

  // Mine blocks to that address so decoy selection has decoys available and doesn't fail
  mine_block(&rpc, &addr).await.unwrap();
  rpc
}
/// Ask the daemon to mine blocks whose coinbase pays `address`.
///
/// Despite the singular name, each call generates 10 blocks (see `amount_of_blocks`).
pub async fn mine_block(rpc: &Rpc, address: &str) -> Result<EmptyResponse, RpcError> {
  let request = json!({
    "method": "generateblocks",
    "params": {
      "wallet_address": address,
      "amount_of_blocks": 10
    },
  });
  rpc.rpc_call("json_rpc", Some(request)).await
}

View File

@@ -1,280 +0,0 @@
use core::ops::Deref;
use std::collections::HashSet;
use lazy_static::lazy_static;
use zeroize::Zeroizing;
use rand_core::OsRng;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};
use tokio::sync::Mutex;
use monero_serai::{
Protocol, random_scalar,
wallet::{
ViewPair, Scanner,
address::{Network, AddressType, AddressSpec, AddressMeta, MoneroAddress},
SpendableOutput,
},
rpc::Rpc,
};
/// Generate a fresh spend key, its ViewPair, and a standard mainnet address for it.
pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
  let spend_key = random_scalar(&mut OsRng);
  let spend_point = &spend_key * &ED25519_BASEPOINT_TABLE;
  let view_key = Zeroizing::new(random_scalar(&mut OsRng));

  let pair = ViewPair::new(spend_point, view_key.clone());
  let address = MoneroAddress {
    meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
    spend: spend_point,
    view: view_key.deref() * &ED25519_BASEPOINT_TABLE,
  };
  (spend_key, pair, address)
}
// TODO: Support transactions already on-chain
// TODO: Don't have a side effect of mining blocks more blocks than needed under race conditions
// TODO: mine as much as needed instead of default 10 blocks
/// Mine blocks until the specified TX is included on-chain, then mine past its lock window.
pub async fn mine_until_unlocked(rpc: &Rpc, addr: &str, tx_hash: [u8; 32]) {
  // Mine one block at a time until the TX shows up in the latest block
  let mut height = rpc.get_height().await.unwrap();
  loop {
    let block = rpc.get_block_by_number(height - 1).await.unwrap();
    if block.txs.iter().any(|included| *included == tx_hash) {
      break;
    }
    rpc.generate_blocks(addr, 1).await.unwrap();
    height += 1;
  }

  // Mine 9 further blocks so the TX's outputs unlock
  rpc.generate_blocks(addr, 9).await.unwrap();
}
/// Mines 60 blocks and returns the (now matured) miner TX output of the first one.
pub async fn get_miner_tx_output(rpc: &Rpc, view: &ViewPair) -> SpendableOutput {
  let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));

  // Mine 60 blocks so the miner TX at `start` unlocks
  let start = rpc.get_height().await.unwrap();
  let mining_addr = view.address(Network::Mainnet, AddressSpec::Standard).to_string();
  rpc.generate_blocks(&mining_addr, 60).await.unwrap();

  // Scan the first freshly mined block and take its miner output
  let block = rpc.get_block_by_number(start).await.unwrap();
  let mut outputs = scanner.scan(rpc, &block).await.unwrap().swap_remove(0).ignore_timelock();
  outputs.swap_remove(0)
}
/// Connect to the local test daemon, performing one-time chain setup on first use.
pub async fn rpc() -> Rpc {
  let rpc = Rpc::new("http://127.0.0.1:18081".to_string()).unwrap();

  // Any height besides 1 means setup already happened; return the connection as-is
  if rpc.get_height().await.unwrap() != 1 {
    return rpc;
  }

  // Throwaway standard address to receive the coinbase of the setup blocks
  let throwaway = MoneroAddress {
    meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
    spend: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
    view: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
  };

  // Mine 40 blocks to ensure decoy availability
  rpc.generate_blocks(&throwaway.to_string(), 40).await.unwrap();

  // Sanity check the daemon speaks a protocol version these tests support
  assert!(!matches!(rpc.get_protocol().await.unwrap(), Protocol::Unsupported(_)));
  rpc
}
// Tests share a single regtest daemon and must not interleave their mining/publishing;
// this async lock serializes them (see the async_sequential! macro below).
lazy_static! {
  pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}
// Wraps each declared `async fn` into a #[tokio::test] which holds the global SEQUENTIAL lock
// for its whole duration, so tests sharing the daemon never run concurrently.
#[macro_export]
macro_rules! async_sequential {
  ($(async fn $name: ident() $body: block)*) => {
    $(
      #[tokio::test]
      async fn $name() {
        let guard = runner::SEQUENTIAL.lock().await;
        // LocalSet allows spawn_local, so test bodies need not be Send
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
            // Release the lock before propagating the panic so later tests aren't blocked
            drop(guard);
            Err(err).unwrap()
          }
        }).await;
      }
    )*
  }
}
// Declares an end-to-end wallet test. Each `(tx_builder, checks)` pair builds a
// SignableTransaction, signs it (single-signer, and multisig when that feature is enabled),
// publishes and mines it, then runs `checks` against the scanned result. The value returned by
// one pair's checks is carried (boxed as `dyn Any`) into the next pair's builder closure.
#[macro_export]
macro_rules! test {
  (
    $name: ident,
    (
      $first_tx: expr,
      $first_checks: expr,
    ),
    $((
      $tx: expr,
      $checks: expr,
    )$(,)?),*
  ) => {
    async_sequential! {
      async fn $name() {
        use core::{ops::Deref, any::Any};
        use std::collections::HashSet;
        #[cfg(feature = "multisig")]
        use std::collections::HashMap;
        use zeroize::Zeroizing;
        use rand_core::OsRng;
        use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
        #[cfg(feature = "multisig")]
        use transcript::{Transcript, RecommendedTranscript};
        #[cfg(feature = "multisig")]
        use frost::{
          curve::Ed25519,
          tests::{THRESHOLD, key_gen},
        };
        use monero_serai::{
          random_scalar,
          wallet::{
            address::{Network, AddressSpec}, ViewPair, Scanner, SignableTransaction,
            SignableTransactionBuilder,
          },
        };
        use runner::{random_address, rpc, mine_until_unlocked, get_miner_tx_output};
        type Builder = SignableTransactionBuilder;
        // Run each function as both a single signer and as a multisig
        #[allow(clippy::redundant_closure_call)]
        for multisig in [false, true] {
          // Only run the multisig variant if multisig is enabled
          if multisig {
            #[cfg(not(feature = "multisig"))]
            continue;
          }
          let spend = Zeroizing::new(random_scalar(&mut OsRng));
          #[cfg(feature = "multisig")]
          let keys = key_gen::<_, Ed25519>(&mut OsRng);
          // Single-signer uses the random spend key; multisig uses the FROST group key
          let spend_pub = if !multisig {
            spend.deref() * &ED25519_BASEPOINT_TABLE
          } else {
            #[cfg(not(feature = "multisig"))]
            panic!("Multisig branch called without the multisig feature");
            #[cfg(feature = "multisig")]
            keys[&1].group_key().0
          };
          let rpc = rpc().await;
          let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng)));
          let addr = view.address(Network::Mainnet, AddressSpec::Standard);
          // Fund the wallet under test with an unlocked miner output
          let miner_tx = get_miner_tx_output(&rpc, &view).await;
          let builder = SignableTransactionBuilder::new(
            rpc.get_protocol().await.unwrap(),
            rpc.get_fee().await.unwrap(),
            Some(random_address().2),
          );
          // Signing closure shared by every step; dispatches on the multisig flag
          let sign = |tx: SignableTransaction| {
            let rpc = rpc.clone();
            let spend = spend.clone();
            #[cfg(feature = "multisig")]
            let keys = keys.clone();
            async move {
              if !multisig {
                tx.sign(&mut OsRng, &rpc, &spend).await.unwrap()
              } else {
                #[cfg(not(feature = "multisig"))]
                panic!("Multisig branch called without the multisig feature");
                #[cfg(feature = "multisig")]
                {
                  // Build a signing machine per participant, then run the FROST protocol
                  let mut machines = HashMap::new();
                  for i in 1 ..= THRESHOLD {
                    machines.insert(
                      i,
                      tx
                        .clone()
                        .multisig(
                          &rpc,
                          keys[&i].clone(),
                          RecommendedTranscript::new(b"Monero Serai Test Transaction"),
                          rpc.get_height().await.unwrap() - 10,
                        )
                        .await
                        .unwrap(),
                    );
                  }
                  frost::tests::sign_without_caching(&mut OsRng, machines, &[])
                }
              }
            }
          };
          // TODO: Generate a distinct wallet for each transaction to prevent overlap
          let next_addr = addr;
          // First step: build from the miner output, sign, publish, mine to unlock, then check
          let temp = Box::new({
            let mut builder = builder.clone();
            builder.add_input(miner_tx);
            let (tx, state) = ($first_tx)(rpc.clone(), builder, next_addr).await;
            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            let scanner =
              Scanner::from_view(view.clone(), Some(HashSet::new()));
            ($first_checks)(rpc.clone(), tx, scanner, state).await
          });
          #[allow(unused_variables, unused_mut, unused_assignments)]
          let mut carried_state: Box<dyn Any> = temp;
          // Subsequent steps: each receives the prior step's checks result, downcast to its type
          $(
            let (tx, state) = ($tx)(
              rpc.clone(),
              builder.clone(),
              next_addr,
              *carried_state.downcast().unwrap()
            ).await;
            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            #[allow(unused_assignments)]
            {
              let scanner =
                Scanner::from_view(view.clone(), Some(HashSet::new()));
              carried_state =
                Box::new(($checks)(rpc.clone(), tx, scanner, state).await);
            }
          )*
        }
      }
    }
  }
}

View File

@@ -1,300 +0,0 @@
use rand::RngCore;
use monero_serai::{transaction::Transaction, wallet::address::SubaddressIndex};
mod runner;
// A payment to a standard address should be found by a scanner built from the same ViewPair.
test!(
  scan_standard_address,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      builder.add_payment(view.address(Network::Mainnet, AddressSpec::Standard), 5);
      (builder.build().unwrap(), scanner)
    },
    |_, tx: Transaction, _, mut state: Scanner| async move {
      // The scanner carried from the builder step should find the unlocked 5-piconero output
      let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);
test!(
scan_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 1).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
scanner.register_subaddress(subaddress);
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Subaddress(subaddress)), 5);
(builder.build().unwrap(), (scanner, subaddress))
},
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.subaddress, Some(state.1));
},
),
);
test!(
scan_integrated_address,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Integrated(payment_id)), 5);
(builder.build().unwrap(), (scanner, payment_id))
},
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
test!(
scan_featured_standard,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: false },
),
5,
);
(builder.build().unwrap(), scanner)
},
|_, tx: Transaction, _, mut state: Scanner| async move {
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
},
),
);
test!(
scan_featured_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 2).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
scanner.register_subaddress(subaddress);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: Some(subaddress),
payment_id: None,
guaranteed: false,
},
),
5,
);
(builder.build().unwrap(), (scanner, subaddress))
},
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.subaddress, Some(state.1));
},
),
);
test!(
scan_featured_integrated,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: None,
payment_id: Some(payment_id),
guaranteed: false,
},
),
5,
);
(builder.build().unwrap(), (scanner, payment_id))
},
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
test!(
scan_featured_integrated_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(0, 3).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
scanner.register_subaddress(subaddress);
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: Some(subaddress),
payment_id: Some(payment_id),
guaranteed: false,
},
),
5,
);
(builder.build().unwrap(), (scanner, payment_id, subaddress))
},
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, state.1);
assert_eq!(output.metadata.subaddress, Some(state.2));
},
),
);
test!(
scan_guaranteed_standard,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::from_view(view.clone(), None);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true },
),
5,
);
(builder.build().unwrap(), scanner)
},
|_, tx: Transaction, _, mut state: Scanner| async move {
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
},
),
);
test!(
scan_guaranteed_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(1, 0).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::from_view(view.clone(), None);
scanner.register_subaddress(subaddress);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: Some(subaddress),
payment_id: None,
guaranteed: true,
},
),
5,
);
(builder.build().unwrap(), (scanner, subaddress))
},
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.subaddress, Some(state.1));
},
),
);
test!(
scan_guaranteed_integrated,
(
|_, mut builder: Builder, _| async move {
let view = runner::random_address().1;
let scanner = Scanner::from_view(view.clone(), None);
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: None,
payment_id: Some(payment_id),
guaranteed: true,
},
),
5,
);
(builder.build().unwrap(), (scanner, payment_id))
},
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
test!(
scan_guaranteed_integrated_subaddress,
(
|_, mut builder: Builder, _| async move {
let subaddress = SubaddressIndex::new(1, 1).unwrap();
let view = runner::random_address().1;
let mut scanner = Scanner::from_view(view.clone(), None);
scanner.register_subaddress(subaddress);
let mut payment_id = [0u8; 8];
OsRng.fill_bytes(&mut payment_id);
builder.add_payment(
view.address(
Network::Mainnet,
AddressSpec::Featured {
subaddress: Some(subaddress),
payment_id: Some(payment_id),
guaranteed: true,
},
),
5,
);
(builder.build().unwrap(), (scanner, payment_id, subaddress))
},
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, state.1);
assert_eq!(output.metadata.subaddress, Some(state.2));
},
),
);

View File

@@ -1,51 +1,176 @@
use monero_serai::{
wallet::{ReceivedOutput, SpendableOutput},
transaction::Transaction,
};
use std::sync::Mutex;
#[cfg(feature = "multisig")]
use std::collections::HashMap;
mod runner;
use lazy_static::lazy_static;
test!(
spend_miner_output,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 5);
(builder.build().unwrap(), ())
},
|_, tx: Transaction, mut scanner: Scanner, _| async move {
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
},
),
);
use rand::rngs::OsRng;
test!(
spend_multiple_outputs,
(
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 1000000000000);
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|_, tx: Transaction, mut scanner: Scanner, _| async move {
let mut outputs = scanner.scan_transaction(&tx).not_locked();
outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
assert_eq!(outputs[0].commitment().amount, 1000000000000);
assert_eq!(outputs[1].commitment().amount, 2000000000000);
outputs
},
),
(
|rpc, mut builder: Builder, addr, mut outputs: Vec<ReceivedOutput>| async move {
for output in outputs.drain(..) {
builder.add_input(SpendableOutput::from(&rpc, output).await.unwrap());
#[cfg(feature = "multisig")]
use blake2::{digest::Update, Digest, Blake2b512};
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
#[cfg(feature = "multisig")]
use dalek_ff_group::Scalar;
#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::{curve::Ed25519, tests::{THRESHOLD, key_gen, sign}};
use monero_serai::{random_scalar, wallet::{ViewPair, address::{Network, AddressType}, SignableTransaction}};
mod rpc;
use crate::rpc::{rpc, mine_block};
lazy_static! {
static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
}
macro_rules! async_sequential {
($(async fn $name: ident() $body: block)*) => {
$(
#[tokio::test]
async fn $name() {
let guard = SEQUENTIAL.lock().unwrap();
let local = tokio::task::LocalSet::new();
local.run_until(async move {
if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
drop(guard);
Err(err).unwrap()
}
}).await;
}
builder.add_payment(addr, 6);
(builder.build().unwrap(), ())
},
|_, tx: Transaction, mut scanner: Scanner, _| async move {
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 6);
},
),
);
)*
};
}
// Core send test. `test` selects the scenario: 0 spends a miner output then a normal output
// (two iterations, exercising zero and non-zero change); 1 spends multiple inputs at once.
// `multisig` selects single-signer vs FROST multisig signing.
async fn send_core(test: usize, multisig: bool) {
  let rpc = rpc().await;
  // Generate an address
  let spend = random_scalar(&mut OsRng);
  #[allow(unused_mut)]
  let mut view = random_scalar(&mut OsRng);
  #[allow(unused_mut)]
  let mut spend_pub = &spend * &ED25519_BASEPOINT_TABLE;
  #[cfg(feature = "multisig")]
  let keys = key_gen::<_, Ed25519>(&mut OsRng);
  if multisig {
    #[cfg(not(feature = "multisig"))]
    panic!("Running a multisig test without the multisig feature");
    #[cfg(feature = "multisig")]
    {
      // Deterministic view key so every participant scans identically; group key as spend
      view = Scalar::from_hash(Blake2b512::new().chain("Monero Serai Transaction Test")).0;
      spend_pub = keys[&1].group_key().0;
    }
  }
  let view_pair = ViewPair { view, spend: spend_pub };
  let addr = view_pair.address(Network::Mainnet, AddressType::Standard, false);
  let fee = rpc.get_fee().await.unwrap();
  // Fund the wallet; mine_block generates 10 blocks per call
  let start = rpc.get_height().await.unwrap();
  for _ in 0 .. 7 {
    mine_block(&rpc, &addr.to_string()).await.unwrap();
  }
  let mut tx = None;
  // Allow tests to test variable transactions
  for i in 0 .. [2, 1][test] {
    let mut outputs = vec![];
    let mut amount = 0;
    // Test spending both a miner output and a normal output
    if test == 0 {
      if i == 0 {
        // First iteration spends the miner TX; the second spends the TX we just published
        tx = Some(rpc.get_block_transactions(start).await.unwrap().swap_remove(0));
      }
      // Grab the largest output available
      let output = {
        let mut outputs = tx.as_ref().unwrap().scan(view_pair, false).ignore_timelock();
        outputs.sort_by(|x, y| x.commitment.amount.cmp(&y.commitment.amount).reverse());
        outputs.swap_remove(0)
      };
      // Test creating a zero change output and a non-zero change output
      amount = output.commitment.amount - u64::try_from(i).unwrap();
      outputs.push(output);
    // Test spending multiple inputs
    } else if test == 1 {
      if i != 0 {
        continue;
      }
      // We actually need 80 decoys for this transaction, so mine until then
      // 80 + 60 (miner TX maturity) + 10 (lock blocks)
      // It is possible for this to be lower, by noting maturity is sufficient regardless of lock
      // blocks, yet that's not currently implemented
      // TODO, if we care
      while rpc.get_height().await.unwrap() < 160 {
        mine_block(&rpc, &addr.to_string()).await.unwrap();
      }
      // Collect one miner output per block as the multiple inputs to spend
      for i in (start + 1) .. (start + 9) {
        let tx = rpc.get_block_transactions(i).await.unwrap().swap_remove(0);
        let output = tx.scan(view_pair, false).ignore_timelock().swap_remove(0);
        amount += output.commitment.amount;
        outputs.push(output);
      }
    }
    // Pay ourselves everything minus a flat fee allowance, with change back to the same address
    let mut signable = SignableTransaction::new(
      outputs, vec![(addr, amount - 10000000000)], Some(addr), fee
    ).unwrap();
    if !multisig {
      tx = Some(signable.sign(&mut OsRng, &rpc, &spend).await.unwrap());
    } else {
      #[cfg(feature = "multisig")]
      {
        // One signing machine per participant, then run the FROST signing protocol
        let mut machines = HashMap::new();
        for i in 1 ..= THRESHOLD {
          machines.insert(
            i,
            signable.clone().multisig(
              &rpc,
              (*keys[&i]).clone(),
              RecommendedTranscript::new(b"Monero Serai Test Transaction"),
              rpc.get_height().await.unwrap() - 10,
              (1 ..= THRESHOLD).collect::<Vec<_>>()
            ).await.unwrap()
          );
        }
        tx = Some(sign(&mut OsRng, machines, &vec![]));
      }
    }
    rpc.publish_transaction(tx.as_ref().unwrap()).await.unwrap();
    // Mine the published TX in so the next iteration can spend its output
    mine_block(&rpc, &addr.to_string()).await.unwrap();
  }
}
// Single-signer variants of the send scenarios
async_sequential! {
  async fn send_single_input() {
    send_core(0, false).await;
  }
  async fn send_multiple_inputs() {
    send_core(1, false).await;
  }
}
// Multisig variants, only built when the multisig feature is enabled
#[cfg(feature = "multisig")]
async_sequential! {
  async fn multisig_send_single_input() {
    send_core(0, true).await;
  }
  async fn multisig_send_multiple_inputs() {
    send_core(1, true).await;
  }
}

View File

@@ -1,91 +0,0 @@
use std::{
collections::{HashSet, HashMap},
str::FromStr,
};
use rand_core::{RngCore, OsRng};
use monero_rpc::{
monero::{Amount, Address},
TransferOptions,
};
use monero_serai::{
wallet::address::{Network, AddressSpec, SubaddressIndex},
wallet::Scanner,
};
mod runner;
// Interoperability test: have the reference monero-wallet-rpc send 1 XMR to an address of the
// given spec, then confirm our scanner finds the output with the expected metadata.
async fn test_from_wallet_rpc_to_self(spec: AddressSpec) {
  let wallet_rpc =
    monero_rpc::RpcClientBuilder::new().build("http://127.0.0.1:6061").unwrap().wallet();
  let daemon_rpc = runner::rpc().await;
  // Initialize the wallet RPC: reuse its wallet if present, else create and fund one
  let address_resp = wallet_rpc.get_address(0, None).await;
  let wallet_rpc_addr = if address_resp.is_ok() {
    address_resp.unwrap().address
  } else {
    wallet_rpc.create_wallet("test_wallet".to_string(), None, "English".to_string()).await.unwrap();
    let addr = wallet_rpc.get_address(0, None).await.unwrap().address;
    // Mine to the wallet so it has mature funds to send from
    daemon_rpc.generate_blocks(&addr.to_string(), 70).await.unwrap();
    addr
  };
  // Make an address of the requested spec for the wallet RPC to pay
  let (_, view_pair, _) = runner::random_address();
  let addr = Address::from_str(&view_pair.address(Network::Mainnet, spec).to_string()[..]).unwrap();
  // Refresh the wallet RPC and make the transfer
  wallet_rpc.refresh(None).await.unwrap();
  let tx = wallet_rpc
    .transfer(
      HashMap::from([(addr, Amount::ONE_XMR)]),
      monero_rpc::TransferPriority::Default,
      TransferOptions::default(),
    )
    .await
    .unwrap();
  let tx_hash: [u8; 32] = tx.tx_hash.0.try_into().unwrap();
  // Mine until the transfer is on-chain and unlocked
  runner::mine_until_unlocked(&daemon_rpc, &wallet_rpc_addr.to_string(), tx_hash).await;
  // Create the scanner, registering the subaddress if that's what we're testing
  let mut scanner = Scanner::from_view(view_pair, Some(HashSet::new()));
  if let AddressSpec::Subaddress(index) = spec {
    scanner.register_subaddress(index);
  }
  // Retrieve the TX and confirm the scanned output's metadata matches the address spec
  let tx = daemon_rpc.get_transaction(tx_hash).await.unwrap();
  let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
  match spec {
    AddressSpec::Subaddress(index) => assert_eq!(output.metadata.subaddress, Some(index)),
    AddressSpec::Integrated(payment_id) => {
      assert_eq!(output.metadata.payment_id, payment_id);
      assert_eq!(output.metadata.subaddress, None);
    }
    _ => assert_eq!(output.metadata.subaddress, None),
  }
  // 1 XMR = 10^12 piconero
  assert_eq!(output.commitment().amount, 1000000000000);
}
// Run the wallet-RPC interop test against each supported address spec
async_sequential!(
  async fn test_receipt_of_wallet_rpc_tx_standard() {
    test_from_wallet_rpc_to_self(AddressSpec::Standard).await;
  }
  async fn test_receipt_of_wallet_rpc_tx_subaddress() {
    test_from_wallet_rpc_to_self(AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap()))
      .await;
  }
  async fn test_receipt_of_wallet_rpc_tx_integrated() {
    // Random payment ID for the integrated address
    let mut payment_id = [0u8; 8];
    OsRng.fill_bytes(&mut payment_id);
    test_from_wallet_rpc_to_self(AddressSpec::Integrated(payment_id)).await;
  }
);

View File

@@ -1,20 +0,0 @@
[package]
name = "zalloc"
version = "0.1.0"
description = "An allocator wrapper which zeroizes memory on dealloc"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/common/zalloc"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
zeroize = "1.5"
[features]
# Commented for now as it requires nightly and we don't use nightly
# allocator = []

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,46 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(feature = "allocator", feature(allocator_api))]
//! Implementation of a Zeroizing Allocator, enabling zeroizing memory on deallocation.
//! This can either be used with Box (requires nightly and the "allocator" feature) to provide the
//! functionality of zeroize on types which don't implement zeroize, or used as a wrapper around
//! the global allocator to ensure *all* memory is zeroized.
use core::{
slice,
alloc::{Layout, GlobalAlloc},
};
use zeroize::Zeroize;
/// An allocator wrapper which zeroizes its memory on dealloc.
pub struct ZeroizingAlloc<T>(pub T);
#[cfg(feature = "allocator")]
use core::{
ptr::NonNull,
alloc::{AllocError, Allocator},
};
#[cfg(feature = "allocator")]
unsafe impl<T: Allocator> Allocator for ZeroizingAlloc<T> {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.0.allocate(layout)
}
unsafe fn deallocate(&self, mut ptr: NonNull<u8>, layout: Layout) {
slice::from_raw_parts_mut(ptr.as_mut(), layout.size()).zeroize();
self.0.deallocate(ptr, layout);
}
}
unsafe impl<T: GlobalAlloc> GlobalAlloc for ZeroizingAlloc<T> {
unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
self.0.alloc(layout)
}
unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
slice::from_raw_parts_mut(ptr, layout.size()).zeroize();
self.0.dealloc(ptr, layout);
}
}

View File

@@ -1,54 +0,0 @@
[package]
name = "ciphersuite"
version = "0.1.1"
description = "Ciphersuites built around ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["ciphersuite", "ff", "group"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
rand_core = "0.6"
zeroize = { version = "1.5", features = ["zeroize_derive"] }
subtle = "2"
digest = "0.10"
sha2 = { version = "0.10", optional = true }
sha3 = { version = "0.10", optional = true }
ff = { version = "0.12", features = ["bits"] }
group = "0.12"
dalek-ff-group = { path = "../dalek-ff-group", version = "^0.1.2", optional = true }
elliptic-curve = { version = "0.12", features = ["hash2curve"], optional = true }
p256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
k256 = { version = "0.11", features = ["arithmetic", "bits", "hash2curve"], optional = true }
minimal-ed448 = { path = "../ed448", version = "^0.1.2", optional = true }
[dev-dependencies]
hex = "0.4"
ff-group-tests = { version = "0.12", path = "../ff-group-tests" }
[features]
std = []
dalek = ["sha2", "dalek-ff-group"]
ed25519 = ["dalek"]
ristretto = ["dalek"]
kp256 = ["sha2", "elliptic-curve"]
p256 = ["kp256", "dep:p256"]
secp256k1 = ["kp256", "k256"]
ed448 = ["sha3", "minimal-ed448"]
default = ["std"]

View File

@@ -1,35 +0,0 @@
# Ciphersuite
Ciphersuites for elliptic curves premised on ff/group.
### Secp256k1/P-256
Secp256k1 and P-256 are offered via [k256](https://crates.io/crates/k256) and
[p256](https://crates.io/crates/p256), two libraries maintained by
[RustCrypto](https://github.com/RustCrypto).
Their `hash_to_F` is the
[IETF's hash to curve](https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html),
yet applied to their scalar field.
### Ed25519/Ristretto
Ed25519/Ristretto are offered via
[dalek-ff-group](https://crates.io/crates/dalek-ff-group), an ff/group wrapper
around [curve25519-dalek](https://crates.io/crates/curve25519-dalek).
Their `hash_to_F` is the wide reduction of SHA2-512, as used in
[RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). This is also compliant with
the draft
[RFC-RISTRETTO](https://www.ietf.org/archive/id/draft-irtf-cfrg-ristretto255-decaf448-05.html).
The domain-separation tag is naively prefixed to the message.
### Ed448
Ed448 is offered via [minimal-ed448](https://crates.io/crates/minimal-ed448), an
explicitly not recommended, unaudited Ed448 implementation, limited to its
prime-order subgroup.
Its `hash_to_F` is the wide reduction of SHAKE256, with a 114-byte output, as
used in [RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). The
domain-separation tag is naively prefixed to the message.

View File

@@ -1,86 +0,0 @@
use zeroize::Zeroize;
use sha2::{Digest, Sha512};
use group::Group;
use dalek_ff_group::Scalar;
use crate::Ciphersuite;
macro_rules! dalek_curve {
(
$feature: literal,
$Ciphersuite: ident,
$Point: ident,
$ID: literal
) => {
use dalek_ff_group::$Point;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Ciphersuite;
impl Ciphersuite for $Ciphersuite {
type F = Scalar;
type G = $Point;
type H = Sha512;
const ID: &'static [u8] = $ID;
fn generator() -> Self::G {
$Point::generator()
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::from_hash(Sha512::new_with_prefix(&[dst, data].concat()))
}
}
};
}
#[cfg(any(test, feature = "ristretto"))]
dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto");
#[cfg(any(test, feature = "ristretto"))]
#[test]
fn test_ristretto() {
ff_group_tests::group::test_prime_group_bits::<RistrettoPoint>();
assert_eq!(
Ristretto::hash_to_F(
b"FROST-RISTRETTO255-SHA512-v11nonce",
&hex::decode(
"\
81800157bb554f299fe0b6bd658e4c4591d74168b5177bf55e8dceed59dc80c7\
5c3430d391552f6e60ecdc093ff9f6f4488756aa6cebdbad75a768010b8f830e"
)
.unwrap()
)
.to_bytes()
.as_ref(),
&hex::decode("40f58e8df202b21c94f826e76e4647efdb0ea3ca7ae7e3689bc0cbe2e2f6660c").unwrap()
);
}
#[cfg(feature = "ed25519")]
dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519");
#[cfg(feature = "ed25519")]
#[test]
fn test_ed25519() {
ff_group_tests::group::test_prime_group_bits::<EdwardsPoint>();
// Ideally, a test vector from RFC-8032 (not FROST) would be here
// Unfortunately, the IETF draft doesn't provide any vectors for the derived challenges
assert_eq!(
Ed25519::hash_to_F(
b"FROST-ED25519-SHA512-v11nonce",
&hex::decode(
"\
9d06a6381c7a4493929761a73692776772b274236fb5cfcc7d1b48ac3a9c249f\
929dcc590407aae7d388761cddb0c0db6f5627aea8e217f4a033f2ec83d93509"
)
.unwrap()
)
.to_bytes()
.as_ref(),
&hex::decode("70652da3e8d7533a0e4b9e9104f01b48c396b5b553717784ed8d05c6a36b9609").unwrap()
);
}

View File

@@ -1,102 +0,0 @@
use zeroize::Zeroize;
use digest::{
typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput,
ExtendableOutput, XofReader, HashMarker, Digest,
};
use sha3::Shake256;
use group::Group;
use minimal_ed448::{scalar::Scalar, point::Point};
use crate::Ciphersuite;
// Re-define Shake256 as a traditional Digest to meet API expectations
#[derive(Clone, Default)]
pub struct Shake256_114(Shake256);
impl BlockSizeUser for Shake256_114 {
type BlockSize = <Shake256 as BlockSizeUser>::BlockSize;
fn block_size() -> usize {
Shake256::block_size()
}
}
impl OutputSizeUser for Shake256_114 {
type OutputSize = U114;
fn output_size() -> usize {
114
}
}
impl Update for Shake256_114 {
fn update(&mut self, data: &[u8]) {
self.0.update(data);
}
fn chain(mut self, data: impl AsRef<[u8]>) -> Self {
Update::update(&mut self, data.as_ref());
self
}
}
impl FixedOutput for Shake256_114 {
fn finalize_fixed(self) -> Output<Self> {
let mut res = Default::default();
FixedOutput::finalize_into(self, &mut res);
res
}
fn finalize_into(self, out: &mut Output<Self>) {
let mut reader = self.0.finalize_xof();
reader.read(out);
}
}
impl HashMarker for Shake256_114 {}
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Ed448;
impl Ciphersuite for Ed448 {
type F = Scalar;
type G = Point;
type H = Shake256_114;
const ID: &'static [u8] = b"ed448";
fn generator() -> Self::G {
Point::generator()
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
Scalar::wide_reduce(Self::H::digest([dst, data].concat()).as_ref().try_into().unwrap())
}
}
#[test]
fn test_ed448() {
use ff::PrimeField;
// TODO: Enable once ed448 passes these tests
//ff_group_tests::group::test_prime_group_bits::<Point>();
// Ideally, a test vector from RFC-8032 (not FROST) would be here
// Unfortunately, the IETF draft doesn't provide any vectors for the derived challenges
assert_eq!(
Ed448::hash_to_F(
b"FROST-ED448-SHAKE256-v11nonce",
&hex::decode(
"\
89bf16040081ff2990336b200613787937ebe1f024b8cdff90eb6f1c741d91c1\
4a2b2f5858a932ad3d3b18bd16e76ced3070d72fd79ae4402df201f5\
25e754716a1bc1b87a502297f2a99d89ea054e0018eb55d39562fd01\
00"
)
.unwrap()
)
.to_repr()
.iter()
.cloned()
.collect::<Vec<_>>(),
hex::decode(
"\
67a6f023e77361707c6e894c625e809e80f33fdb310810053ae29e28\
e7011f3193b9020e73c183a98cc3a519160ed759376dd92c94831622\
00"
)
.unwrap()
);
}

View File

@@ -1,118 +0,0 @@
use zeroize::Zeroize;
use sha2::{Digest, Sha256};
use group::ff::{Field, PrimeField};
use elliptic_curve::{
generic_array::GenericArray,
bigint::{Encoding, U384},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};
use crate::Ciphersuite;
macro_rules! kp_curve {
(
$feature: literal,
$lib: ident,
$Ciphersuite: ident,
$ID: literal
) => {
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Ciphersuite;
impl Ciphersuite for $Ciphersuite {
type F = $lib::Scalar;
type G = $lib::ProjectivePoint;
type H = Sha256;
const ID: &'static [u8] = $ID;
fn generator() -> Self::G {
$lib::ProjectivePoint::GENERATOR
}
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F {
let mut dst = dst;
let oversize = Sha256::digest([b"H2C-OVERSIZE-DST-".as_ref(), dst].concat());
if dst.len() > 255 {
dst = oversize.as_ref();
}
// While one of these two libraries does support directly hashing to the Scalar field, the
// other doesn't. While that's probably an oversight, this is a universally working method
let mut modulus = [0; 48];
modulus[16 ..].copy_from_slice(&(Self::F::zero() - Self::F::one()).to_bytes());
let modulus = U384::from_be_slice(&modulus).wrapping_add(&U384::ONE);
let mut unreduced = U384::from_be_bytes({
let mut bytes = [0; 48];
ExpandMsgXmd::<Sha256>::expand_message(&[msg], dst, 48).unwrap().fill_bytes(&mut bytes);
bytes
})
.reduce(&modulus)
.unwrap()
.to_be_bytes();
let mut array = *GenericArray::from_slice(&unreduced[16 ..]);
let res = $lib::Scalar::from_repr(array).unwrap();
unreduced.zeroize();
array.zeroize();
res
}
}
};
}
#[cfg(feature = "secp256k1")]
kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1");
#[cfg(feature = "secp256k1")]
#[test]
fn test_secp256k1() {
ff_group_tests::group::test_prime_group_bits::<k256::ProjectivePoint>();
// Ideally, a test vector from hash to field (not FROST) would be here
// Unfortunately, the IETF draft only provides vectors for field elements, not scalars
assert_eq!(
Secp256k1::hash_to_F(
b"FROST-secp256k1-SHA256-v11nonce",
&hex::decode(
"\
80cbea5e405d169999d8c4b30b755fedb26ab07ec8198cda4873ed8ce5e16773\
08f89ffe80ac94dcb920c26f3f46140bfc7f95b493f8310f5fc1ea2b01f4254c"
)
.unwrap()
)
.to_repr()
.iter()
.cloned()
.collect::<Vec<_>>(),
hex::decode("acc83278035223c1ba464e2d11bfacfc872b2b23e1041cf5f6130da21e4d8068").unwrap()
);
}
#[cfg(feature = "p256")]
kp_curve!("p256", p256, P256, b"P-256");
#[cfg(feature = "p256")]
#[test]
fn test_p256() {
ff_group_tests::group::test_prime_group_bits::<p256::ProjectivePoint>();
assert_eq!(
P256::hash_to_F(
b"FROST-P256-SHA256-v11nonce",
&hex::decode(
"\
f4e8cf80aec3f888d997900ac7e3e349944b5a6b47649fc32186d2f1238103c6\
0c9c1a0fe806c184add50bbdcac913dda73e482daf95dcb9f35dbb0d8a9f7731"
)
.unwrap()
)
.to_repr()
.iter()
.cloned()
.collect::<Vec<_>>(),
hex::decode("f871dfcf6bcd199342651adc361b92c941cb6a0d8c8c1a3b91d79e2c1bf3722d").unwrap()
);
}

View File

@@ -1,113 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::Debug;
#[cfg(feature = "std")]
use std::io::{self, Read};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use subtle::ConstantTimeEq;
use digest::{core_api::BlockSizeUser, Digest};
use group::{
ff::{Field, PrimeField, PrimeFieldBits},
Group, GroupOps,
prime::PrimeGroup,
};
#[cfg(feature = "std")]
use group::GroupEncoding;
#[cfg(feature = "dalek")]
mod dalek;
#[cfg(feature = "ristretto")]
pub use dalek::Ristretto;
#[cfg(feature = "ed25519")]
pub use dalek::Ed25519;
#[cfg(feature = "kp256")]
mod kp256;
#[cfg(feature = "secp256k1")]
pub use kp256::Secp256k1;
#[cfg(feature = "p256")]
pub use kp256::P256;
#[cfg(feature = "ed448")]
mod ed448;
#[cfg(feature = "ed448")]
pub use ed448::*;
/// Unified trait defining a ciphersuite around an elliptic curve.
pub trait Ciphersuite: Clone + Copy + PartialEq + Eq + Debug + Zeroize {
/// Scalar field element type.
// This is available via G::Scalar yet `C::G::Scalar` is ambiguous, forcing horrific accesses
type F: PrimeField + PrimeFieldBits + Zeroize;
/// Group element type.
type G: Group<Scalar = Self::F> + GroupOps + PrimeGroup + Zeroize + ConstantTimeEq;
/// Hash algorithm used with this curve.
// Requires BlockSizeUser so it can be used within Hkdf which requies that.
type H: Clone + BlockSizeUser + Digest;
/// ID for this curve.
const ID: &'static [u8];
/// Generator for the group.
// While group does provide this in its API, privacy coins may want to use a custom basepoint
fn generator() -> Self::G;
/// Hash the provided domain-separation tag and message to a scalar. Ciphersuites MAY naively
/// prefix the tag to the message, enabling transpotion between the two. Accordingly, this
/// function should NOT be used in any scheme where one tag is a valid substring of another
/// UNLESS the specific Ciphersuite is verified to handle the DST securely.
///
/// Verifying specific ciphersuites have secure tag handling is not recommended, due to it
/// breaking the intended modularity of ciphersuites. Instead, component-specific tags with
/// further purpose tags are recommended ("Schnorr-nonce", "Schnorr-chal").
#[allow(non_snake_case)]
fn hash_to_F(dst: &[u8], msg: &[u8]) -> Self::F;
/// Generate a random non-zero scalar.
#[allow(non_snake_case)]
fn random_nonzero_F<R: RngCore + CryptoRng>(rng: &mut R) -> Self::F {
let mut res;
while {
res = Self::F::random(&mut *rng);
res.ct_eq(&Self::F::zero()).into()
} {}
res
}
/// Read a canonical scalar from something implementing std::io::Read.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
fn read_F<R: Read>(reader: &mut R) -> io::Result<Self::F> {
let mut encoding = <Self::F as PrimeField>::Repr::default();
reader.read_exact(encoding.as_mut())?;
// ff mandates this is canonical
let res = Option::<Self::F>::from(Self::F::from_repr(encoding))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "non-canonical scalar"));
for b in encoding.as_mut() {
b.zeroize();
}
res
}
/// Read a canonical point from something implementing std::io::Read.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {
let mut encoding = <Self::G as GroupEncoding>::Repr::default();
reader.read_exact(encoding.as_mut())?;
let point = Option::<Self::G>::from(Self::G::from_bytes(&encoding))
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))?;
if point.to_bytes().as_ref() != encoding.as_ref() {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical point"))?;
}
Ok(point)
}
}

View File

@@ -1,22 +1,17 @@
[package]
name = "dalek-ff-group"
version = "0.1.5"
version = "0.1.2"
description = "ff/group bindings around curve25519-dalek"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group"
repository = "https://github.com/serai-dex/serai"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["curve25519", "ed25519", "ristretto", "dalek", "group"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
rand_core = "0.6"
digest = "0.10"
zeroize = { version = "1.5", features = ["zeroize_derive"] }
subtle = "2.4"
ff = "0.12"
@@ -24,6 +19,3 @@ group = "0.12"
crypto-bigint = "0.4"
curve25519-dalek = "3.2"
[dev-dependencies]
ff-group-tests = { path = "../ff-group-tests" }

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2023 Luke Parker
Copyright (c) 2022 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -2,3 +2,5 @@
ff/group bindings around curve25519-dalek with a from_hash/random function based
around modern dependencies.
Some functions currently remain unimplemented.

View File

@@ -2,103 +2,87 @@ use core::ops::{Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign};
use rand_core::RngCore;
use subtle::{
Choice, CtOption, ConstantTimeEq, ConstantTimeLess, ConditionallyNegatable,
ConditionallySelectable,
};
use crypto_bigint::{Integer, Encoding, U256, U512};
use subtle::{Choice, CtOption, ConstantTimeEq, ConditionallySelectable};
use crypto_bigint::{Encoding, U256, U512};
use ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
use crate::{constant_time, math, from_uint};
use crate::{choice, constant_time, math_op, math, from_wrapper, from_uint};
const MODULUS: U256 =
U256::from_be_hex("7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed");
const WIDE_MODULUS: U512 = U512::from_be_hex(concat!(
"0000000000000000000000000000000000000000000000000000000000000000",
const FIELD_MODULUS: U256 = U256::from_be_hex(
"7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed"
));
);
#[derive(Clone, Copy, PartialEq, Eq, Default, Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct FieldElement(U256);
pub const MOD_3_8: FieldElement =
FieldElement(MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8)));
pub const MOD_5_8: FieldElement = FieldElement(MOD_3_8.0.saturating_sub(&U256::ONE));
pub const EDWARDS_D: FieldElement = FieldElement(U256::from_be_hex(
"52036cee2b6ffe738cc740797779e89800700a4d4141d8ab75eb4dca135978a3",
));
pub const SQRT_M1: FieldElement = FieldElement(U256::from_be_hex(
"2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0",
));
fn reduce(x: U512) -> U256 {
U256::from_le_slice(&x.reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32])
}
pub const SQRT_M1: FieldElement = FieldElement(
U256::from_be_hex("2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0")
);
constant_time!(FieldElement, U256);
math!(
FieldElement,
FieldElement,
|x, y| U256::add_mod(&x, &y, &MODULUS),
|x, y| U256::sub_mod(&x, &y, &MODULUS),
|x, y| U256::add_mod(&x, &y, &FIELD_MODULUS),
|x, y| U256::sub_mod(&x, &y, &FIELD_MODULUS),
|x, y| {
#[allow(non_snake_case)]
let WIDE_MODULUS: U512 = U512::from((U256::ZERO, FIELD_MODULUS));
debug_assert_eq!(FIELD_MODULUS.to_le_bytes()[..], WIDE_MODULUS.to_le_bytes()[.. 32]);
let wide = U256::mul_wide(&x, &y);
reduce(U512::from((wide.1, wide.0)))
U256::from_le_slice(
&U512::from((wide.1, wide.0)).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32]
)
}
);
from_uint!(FieldElement, U256);
impl Neg for FieldElement {
type Output = Self;
fn neg(self) -> Self::Output {
Self(self.0.neg_mod(&MODULUS))
}
}
impl<'a> Neg for &'a FieldElement {
type Output = FieldElement;
fn neg(self) -> Self::Output {
(*self).neg()
}
fn neg(self) -> Self::Output { Self(self.0.neg_mod(&FIELD_MODULUS)) }
}
impl Field for FieldElement {
fn random(mut rng: impl RngCore) -> Self {
let mut bytes = [0; 64];
rng.fill_bytes(&mut bytes);
FieldElement(reduce(U512::from_le_bytes(bytes)))
#[allow(non_snake_case)]
let WIDE_MODULUS: U512 = U512::from((U256::ZERO, FIELD_MODULUS));
debug_assert_eq!(FIELD_MODULUS.to_le_bytes()[..], WIDE_MODULUS.to_le_bytes()[.. 32]);
FieldElement(
U256::from_le_slice(
&U512::from_be_bytes(bytes).reduce(&WIDE_MODULUS).unwrap().to_le_bytes()[.. 32]
)
)
}
fn zero() -> Self {
Self(U256::ZERO)
}
fn one() -> Self {
Self(U256::ONE)
}
fn square(&self) -> Self {
FieldElement(reduce(self.0.square()))
}
fn double(&self) -> Self {
FieldElement((self.0 << 1).reduce(&MODULUS).unwrap())
}
fn zero() -> Self { Self(U256::ZERO) }
fn one() -> Self { Self(U256::ONE) }
fn square(&self) -> Self { *self * self }
fn double(&self) -> Self { *self + self }
fn invert(&self) -> CtOption<Self> {
const NEG_2: FieldElement = FieldElement(MODULUS.saturating_sub(&U256::from_u8(2)));
CtOption::new(self.pow(NEG_2), !self.is_zero())
CtOption::new(self.pow(-FieldElement(U256::from(2u64))), !self.is_zero())
}
fn sqrt(&self) -> CtOption<Self> {
let tv1 = self.pow(MOD_3_8);
let tv2 = tv1 * SQRT_M1;
let candidate = Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self));
CtOption::new(candidate, candidate.square().ct_eq(self))
let c1 = SQRT_M1;
let c2 = FIELD_MODULUS.saturating_add(&U256::from(3u8)).checked_div(&U256::from(8u8)).unwrap();
let tv1 = self.pow(FieldElement(c2));
let tv2 = tv1 * c1;
let res = Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self));
debug_assert_eq!(res * res, *self);
CtOption::new(Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self)), 1.into())
}
fn is_zero(&self) -> Choice { self.0.ct_eq(&U256::ZERO) }
fn cube(&self) -> Self { *self * self * self }
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self { unimplemented!() }
}
impl PrimeField for FieldElement {
@@ -107,23 +91,17 @@ impl PrimeField for FieldElement {
const CAPACITY: u32 = 254;
fn from_repr(bytes: [u8; 32]) -> CtOption<Self> {
let res = Self(U256::from_le_bytes(bytes));
CtOption::new(res, res.0.ct_lt(&MODULUS))
}
fn to_repr(&self) -> [u8; 32] {
self.0.to_le_bytes()
CtOption::new(res, res.0.add_mod(&U256::ZERO, &FIELD_MODULUS).ct_eq(&res.0))
}
fn to_repr(&self) -> [u8; 32] { self.0.to_le_bytes() }
const S: u32 = 2;
fn is_odd(&self) -> Choice {
self.0.is_odd()
}
fn multiplicative_generator() -> Self {
2u64.into()
}
fn is_odd(&self) -> Choice { unimplemented!() }
fn multiplicative_generator() -> Self { 2u64.into() }
fn root_of_unity() -> Self {
FieldElement(U256::from_be_hex(
"2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0",
))
FieldElement(
U256::from_be_hex("2b8324804fc1df0b2b4d00993dfbd7a72f431806ad2fe478c4ee1b274a0ea0b0")
)
}
}
@@ -135,7 +113,7 @@ impl PrimeFieldBits for FieldElement {
}
fn char_le_bits() -> FieldBits<Self::ReprBits> {
MODULUS.to_le_bytes().into()
FIELD_MODULUS.to_le_bytes().into()
}
}
@@ -146,54 +124,19 @@ impl FieldElement {
}
pub fn pow(&self, other: FieldElement) -> FieldElement {
let mut table = [FieldElement::one(); 16];
table[1] = *self;
for i in 2 .. 16 {
table[i] = table[i - 1] * self;
}
let mut res = FieldElement::one();
let mut bits = 0;
for (i, bit) in other.to_le_bits().iter().rev().enumerate() {
bits <<= 1;
let bit = u8::from(*bit);
bits |= bit;
if ((i + 1) % 4) == 0 {
if i != 3 {
for _ in 0 .. 4 {
res *= res;
}
}
res *= table[usize::from(bits)];
bits = 0;
}
let mut res = FieldElement(U256::ONE);
let mut m = *self;
for bit in other.to_le_bits() {
res *= FieldElement::conditional_select(&FieldElement(U256::ONE), &m, choice(bit));
m *= m;
}
res
}
pub fn sqrt_ratio_i(u: FieldElement, v: FieldElement) -> (Choice, FieldElement) {
let i = SQRT_M1;
let v3 = v.square() * v;
let v7 = v3.square() * v;
let mut r = (u * v3) * (u * v7).pow(MOD_5_8);
let check = v * r.square();
let correct_sign = check.ct_eq(&u);
let flipped_sign = check.ct_eq(&(-u));
let flipped_sign_i = check.ct_eq(&((-u) * i));
r.conditional_assign(&(r * i), flipped_sign | flipped_sign_i);
let r_is_negative = r.is_odd();
r.conditional_negate(r_is_negative);
(correct_sign | flipped_sign, r)
}
}
#[test]
fn test_field() {
ff_group_tests::prime_field::test_prime_field_bits::<FieldElement>();
fn test_mul() {
assert_eq!(FieldElement(FIELD_MODULUS) * FieldElement::one(), FieldElement::zero());
assert_eq!(FieldElement(FIELD_MODULUS) * FieldElement::one().double(), FieldElement::zero());
assert_eq!(SQRT_M1.square(), -FieldElement::one());
}

View File

@@ -1,21 +1,18 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![no_std]
use core::{
ops::{Deref, Add, AddAssign, Sub, SubAssign, Neg, Mul, MulAssign},
borrow::Borrow,
iter::{Iterator, Sum},
iter::{Iterator, Sum}
};
use zeroize::Zeroize;
use subtle::{ConstantTimeEq, ConditionallySelectable};
use rand_core::RngCore;
use digest::{consts::U64, Digest, HashMarker};
use digest::{consts::U64, Digest};
use subtle::{Choice, CtOption};
use crypto_bigint::{Encoding, U256};
pub use curve25519_dalek as dalek;
use dalek::{
@@ -23,13 +20,15 @@ use dalek::{
traits::Identity,
scalar::Scalar as DScalar,
edwards::{
EdwardsPoint as DEdwardsPoint, EdwardsBasepointTable as DEdwardsBasepointTable,
CompressedEdwardsY as DCompressedEdwards,
EdwardsPoint as DEdwardsPoint,
EdwardsBasepointTable as DEdwardsBasepointTable,
CompressedEdwardsY as DCompressedEdwards
},
ristretto::{
RistrettoPoint as DRistrettoPoint, RistrettoBasepointTable as DRistrettoBasepointTable,
CompressedRistretto as DCompressedRistretto,
},
RistrettoPoint as DRistrettoPoint,
RistrettoBasepointTable as DRistrettoBasepointTable,
CompressedRistretto as DCompressedRistretto
}
};
use ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
@@ -39,7 +38,9 @@ pub mod field;
// Convert a boolean to a Choice in a *presumably* constant time manner
fn choice(value: bool) -> Choice {
Choice::from(u8::from(value))
let bit = value as u8;
debug_assert_eq!(bit | 1, 1);
Choice::from(bit)
}
macro_rules! deref_borrow {
@@ -63,7 +64,7 @@ macro_rules! deref_borrow {
&self.0
}
}
};
}
}
#[doc(hidden)]
@@ -71,9 +72,7 @@ macro_rules! deref_borrow {
macro_rules! constant_time {
($Value: ident, $Inner: ident) => {
impl ConstantTimeEq for $Value {
fn ct_eq(&self, other: &Self) -> Choice {
self.0.ct_eq(&other.0)
}
fn ct_eq(&self, other: &Self) -> Choice { self.0.ct_eq(&other.0) }
}
impl ConditionallySelectable for $Value {
@@ -81,7 +80,7 @@ macro_rules! constant_time {
$Value($Inner::conditional_select(&a.0, &b.0, choice))
}
}
};
}
}
#[doc(hidden)]
@@ -118,32 +117,28 @@ macro_rules! math_op {
self.0 = $function(self.0, other.0);
}
}
};
}
}
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! math {
($Value: ident, $Factor: ident, $add: expr, $sub: expr, $mul: expr) => {
math_op!($Value, $Value, Add, add, AddAssign, add_assign, $add);
math_op!($Value, $Value, Sub, sub, SubAssign, sub_assign, $sub);
math_op!($Value, $Factor, Mul, mul, MulAssign, mul_assign, $mul);
};
}
}
#[doc(hidden)]
#[macro_export(local_inner_macros)]
macro_rules! math_neg {
($Value: ident, $Factor: ident, $add: expr, $sub: expr, $mul: expr) => {
math!($Value, $Factor, $add, $sub, $mul);
impl Neg for $Value {
type Output = Self;
fn neg(self) -> Self::Output {
Self(-self.0)
}
fn neg(self) -> Self::Output { Self(-self.0) }
}
};
}
}
#[doc(hidden)]
@@ -151,75 +146,41 @@ macro_rules! math_neg {
macro_rules! from_wrapper {
($wrapper: ident, $inner: ident, $uint: ident) => {
impl From<$uint> for $wrapper {
fn from(a: $uint) -> $wrapper {
Self($inner::from(a))
}
fn from(a: $uint) -> $wrapper { Self($inner::from(a)) }
}
};
}
}
#[doc(hidden)]
#[macro_export(local_inner_macros)]
#[macro_export]
macro_rules! from_uint {
($wrapper: ident, $inner: ident) => {
from_wrapper!($wrapper, $inner, u8);
from_wrapper!($wrapper, $inner, u16);
from_wrapper!($wrapper, $inner, u32);
from_wrapper!($wrapper, $inner, u64);
};
}
}
/// Wrapper around the dalek Scalar type.
#[derive(Clone, Copy, PartialEq, Eq, Default, Debug, Zeroize)]
/// Wrapper around the dalek Scalar type
#[derive(Clone, Copy, PartialEq, Eq, Debug, Default)]
pub struct Scalar(pub DScalar);
deref_borrow!(Scalar, DScalar);
constant_time!(Scalar, DScalar);
math_neg!(Scalar, Scalar, DScalar::add, DScalar::sub, DScalar::mul);
from_uint!(Scalar, DScalar);
const MODULUS: U256 =
U256::from_be_hex("1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3ed");
impl Scalar {
pub fn pow(&self, other: Scalar) -> Scalar {
let mut table = [Scalar::one(); 16];
table[1] = *self;
for i in 2 .. 16 {
table[i] = table[i - 1] * self;
}
let mut res = Scalar::one();
let mut bits = 0;
for (i, bit) in other.to_le_bits().iter().rev().enumerate() {
bits <<= 1;
let bit = u8::from(*bit);
bits |= bit;
if ((i + 1) % 4) == 0 {
if i != 3 {
for _ in 0 .. 4 {
res *= res;
}
}
res *= table[usize::from(bits)];
bits = 0;
}
}
res
}
/// Perform wide reduction on a 64-byte array to create a Scalar without bias.
/// Perform wide reduction on a 64-byte array to create a Scalar without bias
pub fn from_bytes_mod_order_wide(bytes: &[u8; 64]) -> Scalar {
Self(DScalar::from_bytes_mod_order_wide(bytes))
}
/// Derive a Scalar without bias from a digest via wide reduction.
pub fn from_hash<D: Digest<OutputSize = U64> + HashMarker>(hash: D) -> Scalar {
/// Derive a Scalar without bias from a digest via wide reduction
pub fn from_hash<D: Digest<OutputSize = U64>>(hash: D) -> Scalar {
let mut output = [0u8; 64];
output.copy_from_slice(&hash.finalize());
let res = Scalar(DScalar::from_bytes_mod_order_wide(&output));
output.zeroize();
res
Scalar(DScalar::from_bytes_mod_order_wide(&output))
}
}
@@ -230,33 +191,17 @@ impl Field for Scalar {
Self(DScalar::from_bytes_mod_order_wide(&r))
}
fn zero() -> Self {
Self(DScalar::zero())
}
fn one() -> Self {
Self(DScalar::one())
}
fn square(&self) -> Self {
*self * self
}
fn double(&self) -> Self {
*self + self
}
fn zero() -> Self { Self(DScalar::zero()) }
fn one() -> Self { Self(DScalar::one()) }
fn square(&self) -> Self { *self * self }
fn double(&self) -> Self { *self + self }
fn invert(&self) -> CtOption<Self> {
CtOption::new(Self(self.0.invert()), !self.is_zero())
}
fn sqrt(&self) -> CtOption<Self> {
let mod_3_8 = MODULUS.saturating_add(&U256::from_u8(3)).wrapping_div(&U256::from_u8(8));
let mod_3_8 = Scalar::from_repr(mod_3_8.to_le_bytes()).unwrap();
let sqrt_m1 = MODULUS.saturating_sub(&U256::from_u8(1)).wrapping_div(&U256::from_u8(4));
let sqrt_m1 = Scalar::one().double().pow(Scalar::from_repr(sqrt_m1.to_le_bytes()).unwrap());
let tv1 = self.pow(mod_3_8);
let tv2 = tv1 * sqrt_m1;
let candidate = Self::conditional_select(&tv2, &tv1, tv1.square().ct_eq(self));
CtOption::new(candidate, candidate.square().ct_eq(self))
}
fn sqrt(&self) -> CtOption<Self> { unimplemented!() }
fn is_zero(&self) -> Choice { self.0.ct_eq(&DScalar::zero()) }
fn cube(&self) -> Self { *self * self * self }
fn pow_vartime<S: AsRef<[u64]>>(&self, _exp: S) -> Self { unimplemented!() }
}
impl PrimeField for Scalar {
@@ -265,27 +210,15 @@ impl PrimeField for Scalar {
const CAPACITY: u32 = 252;
fn from_repr(bytes: [u8; 32]) -> CtOption<Self> {
let scalar = DScalar::from_canonical_bytes(bytes);
// TODO: This unwrap_or isn't constant time, yet we don't exactly have an alternative...
CtOption::new(Scalar(scalar.unwrap_or_else(DScalar::zero)), choice(scalar.is_some()))
}
fn to_repr(&self) -> [u8; 32] {
self.0.to_bytes()
// TODO: This unwrap_or isn't constant time, yet do we have an alternative?
CtOption::new(Scalar(scalar.unwrap_or(DScalar::zero())), choice(scalar.is_some()))
}
fn to_repr(&self) -> [u8; 32] { self.0.to_bytes() }
const S: u32 = 2;
fn is_odd(&self) -> Choice {
choice(self.to_le_bits()[0])
}
fn multiplicative_generator() -> Self {
2u64.into()
}
fn root_of_unity() -> Self {
const ROOT: [u8; 32] = [
212, 7, 190, 235, 223, 117, 135, 190, 254, 131, 206, 66, 83, 86, 240, 14, 122, 194, 193, 171,
96, 109, 61, 125, 231, 129, 121, 224, 16, 115, 74, 9,
];
Scalar::from_repr(ROOT).unwrap()
}
fn is_odd(&self) -> Choice { unimplemented!() }
fn multiplicative_generator() -> Self { 2u64.into() }
fn root_of_unity() -> Self { unimplemented!() }
}
impl PrimeFieldBits for Scalar {
@@ -303,12 +236,6 @@ impl PrimeFieldBits for Scalar {
}
}
impl Sum<Scalar> for Scalar {
fn sum<I: Iterator<Item = Scalar>>(iter: I) -> Scalar {
Self(DScalar::sum(iter))
}
}
macro_rules! dalek_group {
(
$Point: ident,
@@ -323,8 +250,8 @@ macro_rules! dalek_group {
$BASEPOINT_POINT: ident,
$BASEPOINT_TABLE: ident
) => {
/// Wrapper around the dalek Point type. For Ed25519, this is restricted to the prime subgroup.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
/// Wrapper around the dalek Point type. For Ed25519, this is restricted to the prime subgroup
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct $Point(pub $DPoint);
deref_borrow!($Point, $DPoint);
constant_time!($Point, $DPoint);
@@ -333,40 +260,21 @@ macro_rules! dalek_group {
pub const $BASEPOINT_POINT: $Point = $Point(constants::$BASEPOINT_POINT);
impl Sum<$Point> for $Point {
fn sum<I: Iterator<Item = $Point>>(iter: I) -> $Point {
Self($DPoint::sum(iter))
}
fn sum<I: Iterator<Item = $Point>>(iter: I) -> $Point { Self($DPoint::sum(iter)) }
}
impl<'a> Sum<&'a $Point> for $Point {
fn sum<I: Iterator<Item = &'a $Point>>(iter: I) -> $Point {
Self($DPoint::sum(iter))
}
fn sum<I: Iterator<Item = &'a $Point>>(iter: I) -> $Point { Self($DPoint::sum(iter)) }
}
impl Group for $Point {
type Scalar = Scalar;
fn random(mut rng: impl RngCore) -> Self {
loop {
let mut bytes = field::FieldElement::random(&mut rng).to_repr();
bytes[31] |= u8::try_from(rng.next_u32() % 2).unwrap() << 7;
let opt = Self::from_bytes(&bytes);
if opt.is_some().into() {
return opt.unwrap();
}
}
}
fn identity() -> Self {
Self($DPoint::identity())
}
fn generator() -> Self {
$BASEPOINT_POINT
}
fn is_identity(&self) -> Choice {
self.0.ct_eq(&$DPoint::identity())
}
fn double(&self) -> Self {
*self + self
}
// Ideally, this would be cryptographically secure, yet that's not a bound on the trait
// k256 also does this
fn random(rng: impl RngCore) -> Self { &$BASEPOINT_TABLE * Scalar::random(rng) }
fn identity() -> Self { Self($DPoint::identity()) }
fn generator() -> Self { $BASEPOINT_POINT }
fn is_identity(&self) -> Choice { self.0.ct_eq(&$DPoint::identity()) }
fn double(&self) -> Self { *self + self }
}
impl GroupEncoding for $Point {
@@ -391,16 +299,14 @@ macro_rules! dalek_group {
impl PrimeGroup for $Point {}
/// Wrapper around the dalek Table type, offering efficient multiplication against the
/// basepoint.
/// basepoint
pub struct $Table(pub $DTable);
deref_borrow!($Table, $DTable);
pub const $BASEPOINT_TABLE: $Table = $Table(constants::$BASEPOINT_TABLE);
impl Mul<Scalar> for &$Table {
type Output = $Point;
fn mul(self, b: Scalar) -> $Point {
$Point(&b.0 * &self.0)
}
fn mul(self, b: Scalar) -> $Point { $Point(&b.0 * &self.0) }
}
};
}
@@ -409,36 +315,26 @@ dalek_group!(
EdwardsPoint,
DEdwardsPoint,
|point: DEdwardsPoint| point.is_torsion_free(),
EdwardsBasepointTable,
DEdwardsBasepointTable,
DCompressedEdwards,
ED25519_BASEPOINT_POINT,
ED25519_BASEPOINT_TABLE
);
impl EdwardsPoint {
pub fn mul_by_cofactor(&self) -> EdwardsPoint {
EdwardsPoint(self.0.mul_by_cofactor())
}
}
dalek_group!(
RistrettoPoint,
DRistrettoPoint,
|_| true,
RistrettoBasepointTable,
DRistrettoBasepointTable,
DCompressedRistretto,
RISTRETTO_BASEPOINT_POINT,
RISTRETTO_BASEPOINT_TABLE
);
#[test]
fn test_ed25519_group() {
ff_group_tests::group::test_prime_group_bits::<EdwardsPoint>();
}
#[test]
fn test_ristretto_group() {
ff_group_tests::group::test_prime_group_bits::<RistrettoPoint>();
}

View File

@@ -1,41 +0,0 @@
[package]
name = "dkg"
version = "0.2.0"
description = "Distributed key generation over ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[dependencies]
thiserror = "1"
rand_core = "0.6"
zeroize = { version = "1.5", features = ["zeroize_derive"] }
subtle = "2"
hex = "0.4"
transcript = { package = "flexible-transcript", path = "../transcript", version = "0.2", features = ["recommended"] }
chacha20 = { version = "0.9", features = ["zeroize"] }
group = "0.12"
multiexp = { path = "../multiexp", version = "0.2", features = ["batch"] }
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "0.2" }
dleq = { path = "../dleq", version = "0.2", features = ["serialize"] }
tokio = { version = "1", features = ["full"] }
[dev-dependencies]
ciphersuite = { path = "../ciphersuite", version = "0.1", features = ["std", "ristretto"] }
[features]
tests = []

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,12 +0,0 @@
# Distributed Key Generation
A collection of implementations of various distributed key generation protocols.
All included protocols resolve into the provided `Threshold` types, intended to
enable their modularity.
Additional utilities around them, such as promotion from one generator to
another, are also provided.
Currently included is the two-round protocol from the
[FROST paper](https://eprint.iacr.org/2020/852).

View File

@@ -1,73 +0,0 @@
use std::collections::HashMap;
use rand_core::OsRng;
use ciphersuite::{Ciphersuite, Ristretto};
use tokio::sync::mpsc::{self, UnboundedSender, UnboundedReceiver};
use dkg::{
ThresholdParams,
encryption::{EncryptionKeyMessage, EncryptedMessage},
frost::{Commitments, SecretShare, KeyGenMachine},
};
/// One side of an interactive 2-of-2 FROST DKG, communicating over the provided channels.
///
/// `i` is this participant's index (1 or 2). Messages travel as raw byte vectors, modelling the
/// authenticated channel the DKG assumes exists between the two parties.
async fn dkg<C: Ciphersuite>(
  i: u16,
  send: UnboundedSender<Vec<u8>>,
  mut recv: UnboundedReceiver<Vec<u8>>,
) {
  // Calculate the other participant's i
  // With only participants 1 and 2, XOR with 0b11 maps 1 <-> 2
  let other_i = i ^ 0b11;

  // A 2-of-2 multisig
  let params = ThresholdParams::new(2, 2, i).unwrap();

  // Create a key gen machine
  let machine = KeyGenMachine::new(params, "DKG Example".to_string());
  // Generate coefficients
  let (machine, commitments) = machine.generate_coefficients(&mut OsRng);
  // Send everyone our commitments
  send.send(commitments.serialize()).unwrap();

  // Receive everyone else's commitments
  let other_commitments = EncryptionKeyMessage::<C, Commitments<C>>::read::<&[u8]>(
    &mut recv.recv().await.unwrap().as_ref(),
    params,
  )
  .unwrap();
  let mut all_commitments = HashMap::new();
  all_commitments.insert(other_i, other_commitments);

  // Generate secret shares
  let (machine, shares) = machine.generate_secret_shares(&mut OsRng, all_commitments).unwrap();
  // Send everyone else their secret shares
  send.send(shares[&other_i].serialize()).unwrap();

  // Receive our shares from everyone else
  let share = EncryptedMessage::<C, SecretShare<C::F>>::read::<&[u8]>(
    &mut recv.recv().await.unwrap().as_ref(),
    params,
  )
  .unwrap();
  let mut all_shares = HashMap::new();
  all_shares.insert(other_i, share);

  // Calculate our share
  let (machine, _key) = machine.calculate_share(&mut OsRng, all_shares).unwrap();

  // Assume the process succeeded, though this should only be done after everyone votes on the key
  let _keys = machine.complete();
}
/// Runs a 2-of-2 Ristretto DKG between two local tasks connected by unbounded channels.
#[tokio::main]
async fn main() {
  // Create a pair of channels
  // Each party sends on their own channel and receives on the other's
  let (alice_send, alice_recv) = mpsc::unbounded_channel();
  let (bob_send, bob_recv) = mpsc::unbounded_channel();

  // Create Alice's half of the protocol
  let alice = dkg::<Ristretto>(1, alice_send, bob_recv);
  // Create Bob's half of the protocol
  let bob = dkg::<Ristretto>(2, bob_send, alice_recv);

  // Drive both futures to completion concurrently
  tokio::join!(alice, bob);
}

View File

@@ -1,442 +0,0 @@
use core::fmt::Debug;
use std::{
ops::Deref,
io::{self, Read, Write},
collections::HashMap,
};
use thiserror::Error;
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use chacha20::{
cipher::{crypto_common::KeyIvInit, StreamCipher},
Key as Cc20Key, Nonce as Cc20Iv, ChaCha20,
};
use transcript::{Transcript, RecommendedTranscript};
#[cfg(test)]
use group::ff::Field;
use group::GroupEncoding;
use ciphersuite::Ciphersuite;
use multiexp::BatchVerifier;
use schnorr::SchnorrSignature;
use dleq::DLEqProof;
use crate::ThresholdParams;
/// Trait for types which can be read from and written to byte streams.
///
/// `read` takes the threshold parameters so length-dependent collections (e.g. commitment
/// vectors sized by `t`) know how many elements to expect.
pub trait ReadWrite: Sized {
  /// Read an instance of this type from a reader, using `params` to bound lengths.
  fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self>;
  /// Write this instance to a writer, in the same layout `read` expects.
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()>;
  /// Serialize this instance to an in-memory buffer.
  fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    // Writing to a Vec cannot fail, making this unwrap safe
    self.write(&mut buf).unwrap();
    buf
  }
}

/// Trait alias for DKG messages, automatically implemented for all qualifying types.
pub trait Message: Clone + PartialEq + Eq + Debug + Zeroize + ReadWrite {}
impl<M: Clone + PartialEq + Eq + Debug + Zeroize + ReadWrite> Message for M {}
/// Wraps a message with a key to use for encryption in the future.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct EncryptionKeyMessage<C: Ciphersuite, M: Message> {
  // The wrapped message
  msg: M,
  // The sender's public encryption key
  enc_key: C::G,
}

// Doesn't impl ReadWrite so that doesn't need to be imported
impl<C: Ciphersuite, M: Message> EncryptionKeyMessage<C, M> {
  /// Read an EncryptionKeyMessage (inner message followed by encryption key) from a reader.
  pub fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
    Ok(Self { msg: M::read(reader, params)?, enc_key: C::read_G(reader)? })
  }

  /// Write this EncryptionKeyMessage to a writer.
  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    self.msg.write(writer)?;
    writer.write_all(self.enc_key.to_bytes().as_ref())
  }

  /// Serialize this EncryptionKeyMessage to an in-memory buffer.
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    // Writing to a Vec cannot fail
    self.write(&mut buf).unwrap();
    buf
  }

  // Used by tests
  pub(crate) fn enc_key(&self) -> C::G {
    self.enc_key
  }
}
/// Trait alias for byte buffers which can be encrypted, automatically implemented.
pub trait Encryptable: Clone + AsRef<[u8]> + AsMut<[u8]> + Zeroize + ReadWrite {}
impl<E: Clone + AsRef<[u8]> + AsMut<[u8]> + Zeroize + ReadWrite> Encryptable for E {}

/// An encrypted message, with a per-message encryption key enabling revealing specific messages
/// without side effects.
#[derive(Clone, Zeroize)]
pub struct EncryptedMessage<C: Ciphersuite, E: Encryptable> {
  // The ephemeral public key this message was encrypted under
  key: C::G,
  // Also include a proof-of-possession for the key.
  // If this proof-of-possession wasn't here, Eve could observe Alice encrypt to Bob with key X,
  // then send Bob a message also claiming to use X.
  // While Eve's message would fail to meaningfully decrypt, Bob would then use this to create a
  // blame argument against Eve. When they do, they'd reveal bX, revealing Alice's message to Bob.
  // This is a massive side effect which could break some protocols, in the worst case.
  // While Eve can still reuse their own keys, causing Bob to leak all messages by revealing for
  // any single one, that's effectively Eve revealing themselves, and not considered relevant.
  pop: SchnorrSignature<C>,
  // The ciphertext
  msg: Zeroizing<E>,
}
// Diffie-Hellman: derive the shared secret by scaling the counterparty's public point by our
// private scalar, keeping the result inside Zeroizing
fn ecdh<C: Ciphersuite>(private: &Zeroizing<C::F>, public: C::G) -> Zeroizing<C::G> {
  let shared_point = public * private.deref();
  Zeroizing::new(shared_point)
}
// Derive a ChaCha20 cipher from an ECDH shared secret, domain-separated by `dst` via the
// transcript. Intermediate key material is zeroized as soon as it's consumed.
fn cipher<C: Ciphersuite>(dst: &'static [u8], ecdh: &Zeroizing<C::G>) -> ChaCha20 {
  // Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly
  // TODO: https://github.com/serai-dex/serai/issues/151
  let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0.2");
  transcript.domain_separate(dst);

  // Bind the shared point into the transcript, zeroizing our copy of its encoding after use
  let mut ecdh = ecdh.to_bytes();
  transcript.append_message(b"shared_key", ecdh.as_ref());
  ecdh.as_mut().zeroize();

  let zeroize = |buf: &mut [u8]| buf.zeroize();

  // Derive the 32-byte ChaCha20 key from the transcript
  let mut key = Cc20Key::default();
  let mut challenge = transcript.challenge(b"key");
  key.copy_from_slice(&challenge[.. 32]);
  zeroize(challenge.as_mut());

  // The RecommendedTranscript isn't vulnerable to length extension attacks, yet if it was,
  // it'd make sense to clone it (and fork it) just to hedge against that

  // Derive the 12-byte IV from a second transcript challenge
  let mut iv = Cc20Iv::default();
  let mut challenge = transcript.challenge(b"iv");
  iv.copy_from_slice(&challenge[.. 12]);
  zeroize(challenge.as_mut());

  // Same commentary as the transcript regarding ZAlloc
  // TODO: https://github.com/serai-dex/serai/issues/151
  let res = ChaCha20::new(&key, &iv);
  zeroize(key.as_mut());
  zeroize(iv.as_mut());
  res
}
// Encrypt `msg` in place to `to`'s public key under a fresh ephemeral key, returning the
// ciphertext alongside the ephemeral public key and a Schnorr proof-of-possession for it.
fn encrypt<R: RngCore + CryptoRng, C: Ciphersuite, E: Encryptable>(
  rng: &mut R,
  dst: &'static [u8],
  from: u16,
  to: C::G,
  mut msg: Zeroizing<E>,
) -> EncryptedMessage<C, E> {
  /*
  The following code could be used to replace the requirement on an RNG here.
  It's just currently not an issue to require taking in an RNG here.

  let last = self.last_enc_key.to_bytes();
  self.last_enc_key = C::hash_to_F(b"encryption_base", last.as_ref());
  let key = C::hash_to_F(b"encryption_key", last.as_ref());
  last.as_mut().zeroize();
  */

  // Generate a fresh per-message key and encrypt the buffer in place with the derived cipher
  let key = Zeroizing::new(C::random_nonzero_F(rng));
  cipher::<C>(dst, &ecdh::<C>(&key, to)).apply_keystream(msg.as_mut().as_mut());

  let pub_key = C::generator() * key.deref();
  let nonce = Zeroizing::new(C::random_nonzero_F(rng));
  let pub_nonce = C::generator() * nonce.deref();
  EncryptedMessage {
    key: pub_key,
    // Sign the (nonce, key, sender, ciphertext) challenge to prove possession of the key
    pop: SchnorrSignature::sign(
      &key,
      nonce,
      pop_challenge::<C>(pub_nonce, pub_key, from, msg.deref().as_ref()),
    ),
    msg,
  }
}
impl<C: Ciphersuite, E: Encryptable> EncryptedMessage<C, E> {
  /// Read an EncryptedMessage (ephemeral key, proof-of-possession, ciphertext) from a reader.
  pub fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
    Ok(Self {
      key: C::read_G(reader)?,
      pop: SchnorrSignature::<C>::read(reader)?,
      msg: Zeroizing::new(E::read(reader, params)?),
    })
  }

  /// Write this EncryptedMessage to a writer, in the same layout `read` expects.
  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key.to_bytes().as_ref())?;
    self.pop.write(writer)?;
    self.msg.write(writer)
  }

  /// Serialize this EncryptedMessage to an in-memory buffer.
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    // Writing to a Vec cannot fail
    self.write(&mut buf).unwrap();
    buf
  }

  // Test helper: corrupt the proof-of-possession so signature verification fails.
  #[cfg(test)]
  pub(crate) fn invalidate_pop(&mut self) {
    self.pop.s += C::F::one();
  }

  #[cfg(test)]
  pub(crate) fn invalidate_msg<R: RngCore + CryptoRng>(&mut self, rng: &mut R, from: u16) {
    // Invalidate the message by specifying a new key/Schnorr PoP
    // This will cause all initial checks to pass, yet a decrypt to gibberish
    let key = Zeroizing::new(C::random_nonzero_F(rng));
    let pub_key = C::generator() * key.deref();
    let nonce = Zeroizing::new(C::random_nonzero_F(rng));
    let pub_nonce = C::generator() * nonce.deref();
    self.key = pub_key;
    self.pop = SchnorrSignature::sign(
      &key,
      nonce,
      pop_challenge::<C>(pub_nonce, pub_key, from, self.msg.deref().as_ref()),
    );
  }

  // Assumes the encrypted message is a secret share.
  // Test helper: overwrite the plaintext with an all-0xff repr (an invalid field encoding) and
  // re-encrypt, invalidating the share's serialization.
  #[cfg(test)]
  pub(crate) fn invalidate_share_serialization<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    dst: &'static [u8],
    from: u16,
    to: C::G,
  ) {
    use group::ff::PrimeField;

    let mut repr = <C::F as PrimeField>::Repr::default();
    for b in repr.as_mut().iter_mut() {
      *b = 255;
    }
    // Tries to guarantee the above assumption.
    assert_eq!(repr.as_ref().len(), self.msg.as_ref().len());
    // Checks that this isn't over a field where this is somehow valid
    assert!(!bool::from(C::F::from_repr(repr).is_some()));

    self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref());
    *self = encrypt(rng, dst, from, to, self.msg.clone());
  }

  // Assumes the encrypted message is a secret share.
  // Test helper: replace the share's value with 1 (a validly encoded, yet wrong, share) and
  // re-encrypt.
  #[cfg(test)]
  pub(crate) fn invalidate_share_value<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    dst: &'static [u8],
    from: u16,
    to: C::G,
  ) {
    use group::ff::PrimeField;

    // Assumes the share isn't randomly 1
    let repr = C::F::one().to_repr();
    self.msg.as_mut().as_mut().copy_from_slice(repr.as_ref());
    *self = encrypt(rng, dst, from, to, self.msg.clone());
  }
}
/// A proof that the provided point is the legitimately derived shared key for some message.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct EncryptionKeyProof<C: Ciphersuite> {
  // The ECDH shared key claimed to decrypt the message
  key: Zeroizing<C::G>,
  // DLEq proving the key was derived from the registered encryption key
  dleq: DLEqProof<C::G>,
}

impl<C: Ciphersuite> EncryptionKeyProof<C> {
  /// Read an EncryptionKeyProof from a reader.
  pub fn read<R: Read>(reader: &mut R) -> io::Result<Self> {
    Ok(Self { key: Zeroizing::new(C::read_G(reader)?), dleq: DLEqProof::read(reader)? })
  }

  /// Write this EncryptionKeyProof to a writer.
  pub fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key.to_bytes().as_ref())?;
    self.dleq.write(writer)
  }

  /// Serialize this EncryptionKeyProof to an in-memory buffer.
  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    // Writing to a Vec cannot fail
    self.write(&mut buf).unwrap();
    buf
  }

  // Test helper: offset the claimed key so it no longer matches the DLEq.
  #[cfg(test)]
  pub(crate) fn invalidate_key(&mut self) {
    *self.key += C::generator();
  }

  // Test helper: corrupt the DLEq proof while keeping it deserializable.
  #[cfg(test)]
  pub(crate) fn invalidate_dleq(&mut self) {
    let mut buf = vec![];
    self.dleq.write(&mut buf).unwrap();
    // Adds one to c since this is serialized c, s
    // Adding one to c will leave a validly serialized c
    // Adding one to s may leave an invalidly serialized s
    buf[0] = buf[0].wrapping_add(1);
    self.dleq = DLEqProof::read::<&[u8]>(&mut buf.as_ref()).unwrap();
  }
}
// Challenge for the Schnorr proof-of-possession of a per-message encryption key, binding the
// nonce, the key, the sender's index, and the ciphertext.
//
// This doesn't need to take the msg. It just doesn't hurt as an extra layer.
// This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no
// root of trust other than their existence in the assumed-to-exist external authenticated channel.
fn pop_challenge<C: Ciphersuite>(nonce: C::G, key: C::G, sender: u16, msg: &[u8]) -> C::F {
  let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2");
  transcript.append_message(b"nonce", nonce.to_bytes());
  transcript.append_message(b"key", key.to_bytes());
  // This is sufficient to prevent the attack this is meant to stop
  transcript.append_message(b"sender", sender.to_le_bytes());
  // This, as written above, doesn't hurt
  transcript.append_message(b"message", msg);
  // While this is a PoK and a PoP, it's called a PoP here since the important part is its owner
  // Elsewhere, where we use the term PoK, the important part is that it isn't some inverse, with
  // an unknown to anyone discrete log, breaking the system
  C::hash_to_F(b"DKG-encryption-proof_of_possession", &transcript.challenge(b"schnorr"))
}
// Transcript for the DLEq proving an encryption key proof's key was correctly derived.
// Constructed fresh by both the prover (decrypt) and the verifier (decrypt_with_proof) so both
// sides transcript identically.
fn encryption_key_transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2")
}
/// Reasons a blame-driven decryption may fail.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
pub(crate) enum DecryptionError {
  /// The message's Schnorr proof-of-possession didn't verify.
  #[error("accused provided an invalid signature")]
  InvalidSignature,
  /// The decryption key proof was absent or didn't verify.
  #[error("accuser provided an invalid decryption key")]
  InvalidProof,
}
// A simple box for managing encryption.
#[derive(Clone)]
pub(crate) struct Encryption<C: Ciphersuite> {
  // Domain separation tag for all ciphers derived by this instance
  dst: &'static [u8],
  // Our participant index, sent as `from` with every encryption
  i: u16,
  // Our long-term encryption key and its public counterpart
  enc_key: Zeroizing<C::F>,
  enc_pub_key: C::G,
  // The other participants' registered encryption keys, by participant index
  enc_keys: HashMap<u16, C::G>,
}

impl<C: Ciphersuite> Zeroize for Encryption<C> {
  fn zeroize(&mut self) {
    self.enc_key.zeroize();
    self.enc_pub_key.zeroize();
    // Drain so the map is emptied, not merely zeroized in place
    for (_, mut value) in self.enc_keys.drain() {
      value.zeroize();
    }
  }
}
impl<C: Ciphersuite> Encryption<C> {
  /// Create a new encryption box for participant `i`, generating a fresh encryption key.
  pub(crate) fn new<R: RngCore + CryptoRng>(dst: &'static [u8], i: u16, rng: &mut R) -> Self {
    let enc_key = Zeroizing::new(C::random_nonzero_F(rng));
    Self {
      dst,
      i,
      enc_pub_key: C::generator() * enc_key.deref(),
      enc_key,
      enc_keys: HashMap::new(),
    }
  }

  /// Attach our public encryption key to a message so the recipients can register it.
  pub(crate) fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
    EncryptionKeyMessage { msg, enc_key: self.enc_pub_key }
  }

  /// Register a participant's encryption key, returning the wrapped inner message.
  ///
  /// Panics if a key was already registered for this participant.
  pub(crate) fn register<M: Message>(
    &mut self,
    participant: u16,
    msg: EncryptionKeyMessage<C, M>,
  ) -> M {
    if self.enc_keys.contains_key(&participant) {
      panic!("Re-registering encryption key for a participant");
    }
    self.enc_keys.insert(participant, msg.enc_key);
    msg.msg
  }

  /// Encrypt a message to the specified participant's registered key.
  ///
  /// Panics if no key was registered for this participant.
  pub(crate) fn encrypt<R: RngCore + CryptoRng, E: Encryptable>(
    &self,
    rng: &mut R,
    participant: u16,
    msg: Zeroizing<E>,
  ) -> EncryptedMessage<C, E> {
    encrypt(rng, self.dst, self.i, self.enc_keys[&participant], msg)
  }

  /// Decrypt a message sent to us, queueing its proof-of-possession into `batch` for later batch
  /// verification and returning the plaintext with a proof of the decryption key used.
  pub(crate) fn decrypt<R: RngCore + CryptoRng, I: Copy + Zeroize, E: Encryptable>(
    &self,
    rng: &mut R,
    batch: &mut BatchVerifier<I, C::G>,
    // Uses a distinct batch ID so if this batch verifier is reused, we know it's the PoP aspect
    // which failed, and therefore to use None for the blame
    batch_id: I,
    from: u16,
    mut msg: EncryptedMessage<C, E>,
  ) -> (Zeroizing<E>, EncryptionKeyProof<C>) {
    msg.pop.batch_verify(
      rng,
      batch,
      batch_id,
      msg.key,
      pop_challenge::<C>(msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
    );

    // Derive the shared key and decrypt the buffer in place
    let key = ecdh::<C>(&self.enc_key, msg.key);
    cipher::<C>(self.dst, &key).apply_keystream(msg.msg.as_mut().as_mut());
    (
      msg.msg,
      // Prove the shared key was derived with our encryption key, enabling third parties to
      // decrypt this message during blame
      EncryptionKeyProof {
        key,
        dleq: DLEqProof::prove(
          rng,
          &mut encryption_key_transcript(),
          &[C::generator(), msg.key],
          &self.enc_key,
        ),
      },
    )
  }

  // Given a message, and the intended decryptor, and a proof for its key, decrypt the message.
  // Errs if the signature, or the proof for the decryption key, was invalid.
  pub(crate) fn decrypt_with_proof<E: Encryptable>(
    &self,
    from: u16,
    decryptor: u16,
    mut msg: EncryptedMessage<C, E>,
    // There's no encryption key proof if the accusation is of an invalid signature
    proof: Option<EncryptionKeyProof<C>>,
  ) -> Result<Zeroizing<E>, DecryptionError> {
    // First check the message's own proof-of-possession
    if !msg
      .pop
      .verify(msg.key, pop_challenge::<C>(msg.pop.R, msg.key, from, msg.msg.deref().as_ref()))
    {
      Err(DecryptionError::InvalidSignature)?;
    }

    if let Some(proof) = proof {
      // Verify this is the decryption key for this message
      proof
        .dleq
        .verify(
          &mut encryption_key_transcript(),
          &[C::generator(), msg.key],
          &[self.enc_keys[&decryptor], *proof.key],
        )
        .map_err(|_| DecryptionError::InvalidProof)?;

      cipher::<C>(self.dst, &proof.key).apply_keystream(msg.msg.as_mut().as_mut());
      Ok(msg.msg)
    } else {
      Err(DecryptionError::InvalidProof)
    }
  }
}

View File

@@ -1,555 +0,0 @@
use core::{
marker::PhantomData,
ops::Deref,
fmt::{Debug, Formatter},
};
use std::{
io::{self, Read, Write},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use transcript::{Transcript, RecommendedTranscript};
use group::{
ff::{Field, PrimeField},
Group, GroupEncoding,
};
use ciphersuite::Ciphersuite;
use multiexp::{multiexp_vartime, BatchVerifier};
use schnorr::SchnorrSignature;
use crate::{
DkgError, ThresholdParams, ThresholdCore, validate_map,
encryption::{
ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, EncryptionKeyProof,
DecryptionError,
},
};
// Error type for the FROST DKG, carrying an EncryptionKeyProof as the blame payload
type FrostError<C> = DkgError<EncryptionKeyProof<C>>;

// Challenge for the Schnorr proof of knowledge, binding the context string, the participant's
// index, the nonce, and the serialized commitments
#[allow(non_snake_case)]
fn challenge<C: Ciphersuite>(context: &str, l: u16, R: &[u8], Am: &[u8]) -> C::F {
  let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2");
  transcript.domain_separate(b"schnorr_proof_of_knowledge");
  transcript.append_message(b"context", context.as_bytes());
  transcript.append_message(b"participant", l.to_le_bytes());
  transcript.append_message(b"nonce", R);
  transcript.append_message(b"commitments", Am);
  C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr"))
}
/// The commitments message, intended to be broadcast to all other parties.
/// Every participant should only provide one set of commitments to all parties.
/// If any participant sends multiple sets of commitments, they are faulty and should be presumed
/// malicious.
/// As this library does not handle networking, it is also unable to detect if any participant is
/// so faulty. That responsibility lies with the caller.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Commitments<C: Ciphersuite> {
  // Commitments to the coefficients of our polynomial, t in total
  commitments: Vec<C::G>,
  // The serialized commitments, cached so signing/verifying doesn't re-serialize them
  cached_msg: Vec<u8>,
  // Schnorr proof of knowledge for the first coefficient
  sig: SchnorrSignature<C>,
}

impl<C: Ciphersuite> ReadWrite for Commitments<C> {
  fn read<R: Read>(reader: &mut R, params: ThresholdParams) -> io::Result<Self> {
    let mut commitments = Vec::with_capacity(params.t().into());
    let mut cached_msg = vec![];

    #[allow(non_snake_case)]
    let mut read_G = || -> io::Result<C::G> {
      // Read the raw encoding, parse it, and cache the raw bytes as read
      let mut buf = <C::G as GroupEncoding>::Repr::default();
      reader.read_exact(buf.as_mut())?;
      let point = C::read_G(&mut buf.as_ref())?;
      cached_msg.extend(buf.as_ref());
      Ok(point)
    };

    // One commitment per coefficient
    for _ in 0 .. params.t() {
      commitments.push(read_G()?);
    }

    Ok(Commitments { commitments, cached_msg, sig: SchnorrSignature::read(reader)? })
  }

  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&self.cached_msg)?;
    self.sig.write(writer)
  }
}
/// State machine to begin the key generation protocol.
pub struct KeyGenMachine<C: Ciphersuite> {
  params: ThresholdParams,
  context: String,
  _curve: PhantomData<C>,
}

impl<C: Ciphersuite> KeyGenMachine<C> {
  /// Creates a new machine to generate a key for the specified curve in the specified multisig.
  // The context string should be unique among multisigs.
  pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine<C> {
    KeyGenMachine { params, context, _curve: PhantomData }
  }

  /// Start generating a key according to the FROST DKG spec.
  /// Returns a commitments message to be sent to all parties over an authenticated channel. If any
  /// party submits multiple sets of commitments, they MUST be treated as malicious.
  pub fn generate_coefficients<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
  ) -> (SecretShareMachine<C>, EncryptionKeyMessage<C, Commitments<C>>) {
    let t = usize::from(self.params.t);
    let mut coefficients = Vec::with_capacity(t);
    let mut commitments = Vec::with_capacity(t);
    let mut cached_msg = vec![];

    for i in 0 .. t {
      // Step 1: Generate t random values to form a polynomial with
      coefficients.push(Zeroizing::new(C::random_nonzero_F(&mut *rng)));
      // Step 3: Generate public commitments
      commitments.push(C::generator() * coefficients[i].deref());
      // Cache the serialized commitments for the PoK challenge and the broadcast message
      cached_msg.extend(commitments[i].to_bytes().as_ref());
    }

    // Step 2: Provide a proof of knowledge
    let r = Zeroizing::new(C::random_nonzero_F(rng));
    let nonce = C::generator() * r.deref();
    let sig = SchnorrSignature::<C>::sign(
      &coefficients[0],
      // This could be deterministic as the PoK is a singleton never opened up to cooperative
      // discussion
      // There's no reason to spend the time and effort to make this deterministic besides a
      // general obsession with canonicity and determinism though
      r,
      challenge::<C>(&self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg),
    );

    // Additionally create an encryption mechanism to protect the secret shares
    let encryption = Encryption::new(b"FROST", self.params.i, rng);

    // Step 4: Broadcast
    let msg =
      encryption.registration(Commitments { commitments: commitments.clone(), cached_msg, sig });
    (
      SecretShareMachine {
        params: self.params,
        context: self.context,
        coefficients,
        our_commitments: commitments,
        encryption,
      },
      msg,
    )
  }
}
// Evaluate the polynomial defined by `coefficients` at the participant index `l`, using Horner's
// method, keeping the accumulator inside Zeroizing throughout
fn polynomial<F: PrimeField + Zeroize>(coefficients: &[Zeroizing<F>], l: u16) -> Zeroizing<F> {
  let l = F::from(u64::from(l));
  let mut share = Zeroizing::new(F::zero());
  // Highest-degree coefficient first: scale the accumulator by the evaluation point, then add
  // the coefficient (the initial multiply is a no-op on the zero accumulator)
  for coefficient in coefficients.iter().rev() {
    *share *= l;
    *share += coefficient.deref();
  }
  share
}
/// The secret share message, to be sent to the party it's intended for over an authenticated
/// channel.
/// If any participant sends multiple secret shares to another participant, they are faulty.
// This should presumably be written as SecretShare(Zeroizing<F::Repr>).
// It's unfortunately not possible as F::Repr doesn't have Zeroize as a bound.
// The encryption system also explicitly uses Zeroizing<M> so it can ensure anything being
// encrypted is within Zeroizing. Accordingly, internally having Zeroizing would be redundant.
#[derive(Clone, PartialEq, Eq)]
pub struct SecretShare<F: PrimeField>(F::Repr);
impl<F: PrimeField> AsRef<[u8]> for SecretShare<F> {
  fn as_ref(&self) -> &[u8] {
    self.0.as_ref()
  }
}
impl<F: PrimeField> AsMut<[u8]> for SecretShare<F> {
  fn as_mut(&mut self) -> &mut [u8] {
    self.0.as_mut()
  }
}
impl<F: PrimeField> Debug for SecretShare<F> {
  // Never print the share itself, merely that this is a share
  fn fmt(&self, fmt: &mut Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt.debug_struct("SecretShare").finish_non_exhaustive()
  }
}
impl<F: PrimeField> Zeroize for SecretShare<F> {
  fn zeroize(&mut self) {
    self.0.as_mut().zeroize()
  }
}
// Still manually implement ZeroizeOnDrop to ensure these don't stick around.
// We could replace Zeroizing<M> with a bound M: ZeroizeOnDrop.
// Doing so would potentially fail to highlight the expected behavior with these and remove a layer
// of depth.
impl<F: PrimeField> Drop for SecretShare<F> {
  fn drop(&mut self) {
    self.zeroize();
  }
}
impl<F: PrimeField> ZeroizeOnDrop for SecretShare<F> {}

impl<F: PrimeField> ReadWrite for SecretShare<F> {
  // The threshold params are unused; a share is always exactly one field element repr
  fn read<R: Read>(reader: &mut R, _: ThresholdParams) -> io::Result<Self> {
    let mut repr = F::Repr::default();
    reader.read_exact(repr.as_mut())?;
    Ok(SecretShare(repr))
  }

  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.0.as_ref())
  }
}
/// Advancement of the key generation state machine.
#[derive(Zeroize)]
pub struct SecretShareMachine<C: Ciphersuite> {
  params: ThresholdParams,
  context: String,
  // The coefficients of our polynomial
  coefficients: Vec<Zeroizing<C::F>>,
  // Our own commitments, merged back in after verifying everyone else's
  our_commitments: Vec<C::G>,
  encryption: Encryption<C>,
}

impl<C: Ciphersuite> SecretShareMachine<C> {
  /// Verify the data from the previous round (canonicity, PoKs, message authenticity)
  #[allow(clippy::type_complexity)]
  fn verify_r1<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    mut commitments: HashMap<u16, EncryptionKeyMessage<C, Commitments<C>>>,
  ) -> Result<HashMap<u16, Vec<C::G>>, FrostError<C>> {
    // Confirm we have a message from every other participant, and no invalid participants
    validate_map(&commitments, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;

    let mut batch = BatchVerifier::<u16, C::G>::new(commitments.len());
    let mut commitments = commitments
      .drain()
      .map(|(l, msg)| {
        // Register their encryption key, unwrapping the inner commitments message
        let mut msg = self.encryption.register(l, msg);

        // Step 5: Validate each proof of knowledge
        // This is solely the prep step for the latter batch verification
        msg.sig.batch_verify(
          rng,
          &mut batch,
          l,
          msg.commitments[0],
          challenge::<C>(&self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg),
        );

        (l, msg.commitments.drain(..).collect::<Vec<_>>())
      })
      .collect::<HashMap<_, _>>();
    batch.verify_with_vartime_blame().map_err(FrostError::InvalidProofOfKnowledge)?;

    // Include our own commitments so later steps have the full set
    commitments.insert(self.params.i, self.our_commitments.drain(..).collect());
    Ok(commitments)
  }

  /// Continue generating a key.
  /// Takes in everyone else's commitments. Returns a HashMap of encrypted secret shares to be sent
  /// over authenticated channels to their relevant counterparties.
  /// If any participant sends multiple secret shares to another participant, they are faulty.
  #[allow(clippy::type_complexity)]
  pub fn generate_secret_shares<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
    commitments: HashMap<u16, EncryptionKeyMessage<C, Commitments<C>>>,
  ) -> Result<(KeyMachine<C>, HashMap<u16, EncryptedMessage<C, SecretShare<C::F>>>), FrostError<C>>
  {
    let commitments = self.verify_r1(&mut *rng, commitments)?;

    // Step 1: Generate secret shares for all other parties
    let mut res = HashMap::new();
    for l in 1 ..= self.params.n() {
      // Don't insert our own shares to the byte buffer which is meant to be sent around
      // An app developer could accidentally send it. Best to keep this black boxed
      if l == self.params.i() {
        continue;
      }

      // Evaluate our polynomial at l, serialize the result, and zeroize the original scalar
      let mut share = polynomial(&self.coefficients, l);
      let share_bytes = Zeroizing::new(SecretShare::<C::F>(share.to_repr()));
      share.zeroize();

      res.insert(l, self.encryption.encrypt(rng, l, share_bytes));
    }

    // Calculate our own share
    let share = polynomial(&self.coefficients, self.params.i());
    // The coefficients are no longer needed past this point
    self.coefficients.zeroize();
    Ok((
      KeyMachine { params: self.params, secret: share, commitments, encryption: self.encryption },
      res,
    ))
  }
}
/// Advancement of the secret share state machine protocol.
/// This machine will 'complete' the protocol, by a local perspective, and can be the last
/// interactive component. In order to be secure, the parties must confirm having successfully
/// completed the protocol (an effort out of scope to this library), yet this is modelled by one
/// more state transition.
pub struct KeyMachine<C: Ciphersuite> {
  params: ThresholdParams,
  // Our accumulated secret share, summed as other participants' shares are processed
  secret: Zeroizing<C::F>,
  // Every participant's polynomial commitments, keyed by participant index
  commitments: HashMap<u16, Vec<C::G>>,
  encryption: Encryption<C>,
}
impl<C: Ciphersuite> Zeroize for KeyMachine<C> {
  // Wipe all secret state held by this machine
  fn zeroize(&mut self) {
    self.params.zeroize();
    self.secret.zeroize();
    // HashMap itself doesn't implement Zeroize; zeroize each value in place.
    // values_mut() is the idiomatic form (clippy::for_kv_map) of iterating while ignoring keys
    for commitments in self.commitments.values_mut() {
      commitments.zeroize();
    }
    self.encryption.zeroize();
  }
}
// Calculate the exponent for a given participant and apply it to a series of commitments
// Initially used with the actual commitments to verify the secret share, later used with
// stripes to generate the verification shares
fn exponential<C: Ciphersuite>(i: u16, values: &[C::G]) -> Vec<(C::F, C::G)> {
  let i = C::F::from(i.into());
  let mut res = Vec::with_capacity(values.len());
  // Pair each value with the next power of i (i^0, i^1, ...). A plain loop with an
  // accumulator replaces the prior fold whose result was discarded (it existed solely for
  // its side effects, which clippy flags as a misuse of fold)
  let mut exp = C::F::one();
  for value in values {
    res.push((exp, *value));
    exp = exp * i;
  }
  res
}
// Build the multiexp statements which sum to the identity iff `share` is a valid share for
// `target` under the sender's `commitments`
fn share_verification_statements<C: Ciphersuite>(
  target: u16,
  commitments: &[C::G],
  mut share: Zeroizing<C::F>,
) -> Vec<(C::F, C::G)> {
  // This can be insecurely linearized from n * t to just n using the below sums for a given
  // stripe. Doing so uses naive addition which is subject to malleability. The only way to
  // ensure that malleability isn't present is to use this n * t algorithm, which runs
  // per sender and not as an aggregate of all senders, which also enables blame
  let mut values = exponential::<C>(target, commitments);

  // Perform the share multiplication outside of the multiexp to minimize stack copying
  // While the multiexp BatchVerifier does zeroize its flattened multiexp, and itself, it still
  // converts whatever we give to an iterator and then builds a Vec internally, welcoming copies
  let neg_share_pub = C::generator() * -*share;
  // The secret scalar is no longer needed once its (negated) public point exists
  share.zeroize();
  values.push((C::F::one(), neg_share_pub));

  values
}
// Identifies which statement in the batch verifier failed, so blame can be attributed to either
// the decryption of a participant's message or the participant's share itself
#[derive(Clone, Copy, Hash, Debug, Zeroize)]
enum BatchId {
  // Statement queued while decrypting participant l's message
  Decryption(u16),
  // Statement verifying participant l's secret share against their commitments
  Share(u16),
}
impl<C: Ciphersuite> KeyMachine<C> {
  /// Calculate our share given the shares sent to us.
  /// Returns a BlameMachine usable to determine if faults in the protocol occurred.
  /// Will error on, and return a blame proof for, the first-observed case of faulty behavior.
  pub fn calculate_share<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
    mut shares: HashMap<u16, EncryptedMessage<C, SecretShare<C::F>>>,
  ) -> Result<(BlameMachine<C>, C::G), FrostError<C>> {
    // Require exactly one share from every participant other than ourselves
    validate_map(&shares, &(1 ..= self.params.n()).collect::<Vec<_>>(), self.params.i())?;

    let mut batch = BatchVerifier::new(shares.len());
    // Decryption proofs, retained so an invalid share can be blamed with its proof
    let mut blames = HashMap::new();
    for (l, share_bytes) in shares.drain() {
      let (mut share_bytes, blame) =
        self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes);
      // Reject non-canonical scalar encodings immediately, blaming the sender
      let share =
        Zeroizing::new(Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| {
          FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) }
        })?);
      share_bytes.zeroize();
      // Accumulate this share into our own secret share
      *self.secret += share.deref();

      blames.insert(l, blame);
      // Queue verification of the share against the sender's committed polynomial
      batch.queue(
        rng,
        BatchId::Share(l),
        share_verification_statements::<C>(self.params.i(), &self.commitments[&l], share),
      );
    }
    // On batch failure, map the failing statement's ID back to the faulty participant. Only an
    // invalid share carries a blame proof; a failed decryption statement does not
    batch.verify_with_vartime_blame().map_err(|id| {
      let (l, blame) = match id {
        BatchId::Decryption(l) => (l, None),
        BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())),
      };
      FrostError::InvalidShare { participant: l, blame }
    })?;

    // Stripe commitments per t and sum them in advance. Calculating verification shares relies on
    // these sums so preprocessing them is a massive speedup
    // If these weren't just sums, yet the tables used in multiexp, this would be further optimized
    // As of right now, each multiexp will regenerate them
    let mut stripes = Vec::with_capacity(usize::from(self.params.t()));
    for t in 0 .. usize::from(self.params.t()) {
      stripes.push(self.commitments.values().map(|commitments| commitments[t]).sum());
    }

    // Calculate each user's verification share
    let mut verification_shares = HashMap::new();
    for i in 1 ..= self.params.n() {
      verification_shares.insert(
        i,
        if i == self.params.i() {
          // We hold our own secret, so derive our verification share directly
          C::generator() * self.secret.deref()
        } else {
          multiexp_vartime(&exponential::<C>(i, &stripes))
        },
      );
    }

    // stripes[0] is the sum of all constant-term commitments, used here as the group key
    let KeyMachine { commitments, encryption, params, secret } = self;
    Ok((
      BlameMachine {
        commitments,
        encryption,
        result: ThresholdCore {
          params,
          secret_share: secret,
          group_key: stripes[0],
          verification_shares,
        },
      },
      stripes[0],
    ))
  }
}
/// A machine able to adjudicate blame for a disputed secret share, while retaining the
/// generated keys in case the protocol completes without fault.
pub struct BlameMachine<C: Ciphersuite> {
  // Every participant's polynomial commitments, needed to re-verify a disputed share
  commitments: HashMap<u16, Vec<C::G>>,
  encryption: Encryption<C>,
  // The generated keys, returned by complete()
  result: ThresholdCore<C>,
}
impl<C: Ciphersuite> Zeroize for BlameMachine<C> {
  // Wipe all secret state held by this machine
  fn zeroize(&mut self) {
    // HashMap itself doesn't implement Zeroize; zeroize each value in place.
    // values_mut() is the idiomatic form (clippy::for_kv_map) of iterating while ignoring keys
    for commitments in self.commitments.values_mut() {
      commitments.zeroize();
    }
    self.encryption.zeroize();
    self.result.zeroize();
  }
}
impl<C: Ciphersuite> BlameMachine<C> {
  /// Mark the protocol as having been successfully completed, returning the generated keys.
  /// This should only be called after having confirmed, with all participants, successful
  /// completion.
  ///
  /// Confirming success is not as simple as everyone reporting completion: everyone must also
  /// receive everyone else's report of completion, which is a consensus problem this library
  /// neither solves nor provides tooling for. This function solely exists to make callers
  /// acknowledge they are finishing the protocol without processing any blame.
  pub fn complete(self) -> ThresholdCore<C> {
    self.result
  }

  // Decide which of sender/recipient is at fault for the disputed encrypted share
  fn blame_internal(
    &self,
    sender: u16,
    recipient: u16,
    msg: EncryptedMessage<C, SecretShare<C::F>>,
    proof: Option<EncryptionKeyProof<C>>,
  ) -> u16 {
    let decryption = self.encryption.decrypt_with_proof(sender, recipient, msg, proof);
    let share_bytes = match decryption {
      // A message without a valid signature was not properly formed: the sender is at fault
      Err(DecryptionError::InvalidSignature) => return sender,
      // An ECDH key incorrect for this message means the accuser's proof failed: blame them
      Err(DecryptionError::InvalidProof) => return recipient,
      Ok(share_bytes) => share_bytes,
    };

    // A non-canonical scalar encoding is the sender's fault
    let share = match Option::<C::F>::from(C::F::from_repr(share_bytes.0)) {
      None => return sender,
      Some(share) => share,
    };

    // Re-verify the share against the sender's committed polynomial
    let statements = share_verification_statements::<C>(
      recipient,
      &self.commitments[&sender],
      Zeroizing::new(share),
    );
    if bool::from(multiexp_vartime(&statements).is_identity()) {
      // The share was canonical and valid, so the accusation itself was baseless
      recipient
    } else {
      // An invalid share is the sender's fault
      sender
    }
  }

  /// Given an accusation of fault, determine the faulty party (either the sender, who sent an
  /// invalid secret share, or the receiver, who claimed a valid secret share was invalid). No
  /// matter which, prevent completion of the machine, forcing an abort of the protocol.
  ///
  /// The message should be a copy of the encrypted secret share from the accused sender to the
  /// accusing recipient, authenticated as actually having come from the sender in question.
  ///
  /// To enable detecting multiple faults, an `AdditionalBlameMachine` is returned, usable to
  /// determine further blame. Such machines will process the same blame statement repeatedly,
  /// always identifying blame; it is the caller's job to deduplicate statements so a single
  /// incident is not blamed multiple times.
  pub fn blame(
    self,
    sender: u16,
    recipient: u16,
    msg: EncryptedMessage<C, SecretShare<C::F>>,
    proof: Option<EncryptionKeyProof<C>>,
  ) -> (AdditionalBlameMachine<C>, u16) {
    let culprit = self.blame_internal(sender, recipient, msg, proof);
    (AdditionalBlameMachine(self), culprit)
  }
}
/// A machine for processing further blame after the first fault, produced by
/// `BlameMachine::blame`.
#[derive(Zeroize)]
pub struct AdditionalBlameMachine<C: Ciphersuite>(BlameMachine<C>);
impl<C: Ciphersuite> AdditionalBlameMachine<C> {
/// Given an accusation of fault, determine the faulty party (either the sender, who sent an
/// invalid secret share, or the receiver, who claimed a valid secret share was invalid).
///
/// The message should be a copy of the encrypted secret share from the accused sender to the
/// accusing recipient. This message must have been authenticated as actually having come from
/// the sender in question.
///
/// This will process the same blame statement multiple times, always identifying blame. It is
/// the caller's job to ensure they're unique in order to prevent multiple instances of blame
/// over a single incident.
pub fn blame(
self,
sender: u16,
recipient: u16,
msg: EncryptedMessage<C, SecretShare<C::F>>,
proof: Option<EncryptionKeyProof<C>>,
) -> u16 {
self.0.blame_internal(sender, recipient, msg, proof)
}
}

Some files were not shown because too many files have changed in this diff Show More