41 Commits

Author SHA1 Message Date
Luke Parker
f9453b1ef4 Attempt downgrading docker from .28 to .27 2025-08-09 14:32:14 -04:00
Luke Parker
ed9c1a2d84 Tweak how the substrate-client tests wait to connect to the Monero node 2025-08-09 14:15:53 -04:00
Luke Parker
a6ffeabafc Add dedicated error for when amounts aren't representable within a u64
Fixes the issue where _inputs_ could still overflow u64::MAX and cause a panic.
2025-08-09 14:06:27 -04:00
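
A minimal sketch of the kind of guard this commit describes, summing input amounts with checked arithmetic and surfacing a dedicated error; AmountError and sum_inputs are hypothetical names, not the crate's actual types:

// Hypothetical sketch: sum input amounts without panicking when the total
// exceeds u64::MAX, returning a dedicated error instead.
#[derive(Debug)]
enum AmountError {
  NotRepresentable,
}

fn sum_inputs(inputs: &[u64]) -> Result<u64, AmountError> {
  let mut total: u64 = 0;
  for amount in inputs {
    // checked_add returns None on overflow, which becomes the dedicated error
    total = total.checked_add(*amount).ok_or(AmountError::NotRepresentable)?;
  }
  Ok(total)
}

fn main() {
  assert!(sum_inputs(&[u64::MAX, 1]).is_err());
  assert_eq!(sum_inputs(&[2, 3]).unwrap(), 5);
}
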
Luke Parker
64e7fa112b Have docker install set host 2025-08-09 13:34:25 -04:00
Luke Parker
48a65285a1 Add uidmap dependency for rootless Docker 2025-08-09 12:49:56 -04:00
Luke Parker
f390426cae if: runner.os == 'Linux', with single quotes 2025-08-09 12:38:58 -04:00
Luke Parker
5dcc500b44 Attempt using rootless Docker in CI via the setup-docker-action
Restores using ubuntu-latest.

Basically, at some point in the last year the existing Docker e2e tests started
failing. I'm unclear if this is an issue with the OS, the docker packages, or
what. This just tries to find a solution.
2025-08-09 12:30:37 -04:00
Luke Parker
8df56dca83 Normalize FROM AS casing in Dockerfiles 2025-08-09 12:20:58 -04:00
Luke Parker
6f7ab6df13 Downgrade tests requiring Docker from Ubuntu latest to Ubuntu 22.04
Attempts to resolve containers immediately exiting for some specific test runs.
2025-08-09 03:37:44 -04:00
Luke Parker
358284dd46 deny --hide-inclusion-graph 2025-08-09 03:37:07 -04:00
Luke Parker
463940bfde Update which deb archive to use within the runtime Dockerfile 2025-08-09 02:13:33 -04:00
Luke Parker
8c7b000366 Update substrate to one with a properly defined panic handler as of modern Rust 2025-08-09 02:03:36 -04:00
Luke Parker
2516be38c4 Attempt to fix install of clang within runtime Dockerfile 2025-08-09 02:02:30 -04:00
Luke Parker
440ccf5594 Remove (presumably) unnecessary command to explicitly install python 2025-08-09 01:23:43 -04:00
Luke Parker
7fe2dd6076 Update Rust versions used in orchestration 2025-08-09 01:23:13 -04:00
Luke Parker
3aeab0f7c0 Missing --allow-remove-essential flag 2025-08-09 00:53:54 -04:00
Luke Parker
5ec7017db2 Attempt to force remove shim-signed to resolve 'unmet dependencies' issues with shim-signed 2025-08-09 00:52:11 -04:00
Luke Parker
c210fae2ef Expand python packages explicitly installed 2025-08-09 00:47:26 -04:00
Luke Parker
77128fe14c Install cargo deny with Rust 1.85 and pin its version 2025-08-09 00:46:25 -04:00
Luke Parker
1b99e47c27 Pin cargo-machete to 0.8.0 to prevent other unexpected CI failures 2025-08-09 00:43:41 -04:00
Luke Parker
482cce436d Explicitly install python3 after removing various unnecessary packages 2025-08-09 00:43:34 -04:00
Luke Parker
43f95bf470 Update dated version of actions/cache 2025-08-09 00:39:02 -04:00
Luke Parker
41ea55080a Add update, upgrade, fix-missing call to Ubuntu build dependencies
Attempts to fix a CI failure caused by some misconfiguration...
2025-08-09 00:36:57 -04:00
Luke Parker
d98738fd8f Install cargo machete with Rust 1.85
cargo machete now uses Rust's 2024 edition, and 1.85 was the first to ship it.
2025-08-09 00:35:27 -04:00
Luke Parker
9e81dcb524 Remove msbuild from packages to remove when the CI starts
Apparently, it's no longer installed by default.
2025-08-09 00:29:27 -04:00
Luke Parker
e9f2d5fdd8 Further harden decoy selection
It risked panicking if a non-monotonic distribution was returned. While the
provided RPC code won't return non-monotonic distributions, users are allowed
to define their own implementations and override the provided method. Said
implementations could omit this required check.
2025-08-09 00:26:10 -04:00
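
A sketch of the kind of check this implies, assuming the distribution is a cumulative (monotonically non-decreasing) list of output counts; the function name and error handling here are illustrative, not the crate's API:

// Illustrative check that a caller-provided cumulative distribution is
// monotonic, returning an error instead of risking a later panic.
fn validate_distribution(distribution: &[u64]) -> Result<(), &'static str> {
  // windows(2) yields adjacent pairs; a cumulative distribution must never decrease
  if distribution.windows(2).any(|pair| pair[1] < pair[0]) {
    return Err("non-monotonic distribution returned by the RPC implementation");
  }
  Ok(())
}

fn main() {
  assert!(validate_distribution(&[1, 5, 5, 9]).is_ok());
  assert!(validate_distribution(&[1, 5, 4]).is_err());
}
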
Luke Parker
7858387aed Clarify transcripting for Clsag::verify, Mlsag::verify, as with Clsag::sign 2025-08-09 00:02:16 -04:00
Luke Parker
f65adbec17 Tweak the Substrate runtime as required by the Rust version bump performed 2025-08-08 23:58:25 -04:00
Luke Parker
ffed3edc70 Fix #654 2025-08-08 23:57:38 -04:00
Luke Parker
fde062769d Fix #643 2025-08-08 23:57:19 -04:00
Luke Parker
902bef9821 Rename Bulletproof::calculate_bp_clawback to Bulletproof::calculate_clawback 2025-08-08 23:56:48 -04:00
Luke Parker
c671284005 Fix #630 2025-08-08 23:56:13 -04:00
Luke Parker
90ee7f749d Respond to 13.1.1.
Uses Zeroizing for username/password in monero-simple-request-rpc.
2025-08-08 23:54:20 -04:00
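
A minimal illustration of the pattern (wrapping credentials in zeroize::Zeroizing so their memory is wiped on drop); the struct is a stand-in, not the actual monero-simple-request-rpc type, and this assumes the zeroize crate as a dependency:

use zeroize::Zeroizing;

// Stand-in for an RPC client's credentials; Zeroizing<String> zeroes the
// backing buffer when the value is dropped.
struct RpcCredentials {
  username: Zeroizing<String>,
  password: Zeroizing<String>,
}

fn credentials(user: &str, pass: &str) -> RpcCredentials {
  RpcCredentials {
    username: Zeroizing::new(user.to_string()),
    password: Zeroizing::new(pass.to_string()),
  }
}

fn main() {
  let creds = credentials("user", "secret");
  assert_eq!(creds.username.as_str(), "user");
  // `creds` is dropped here; both strings are zeroed before the memory is freed
}
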
Luke Parker
65c1eac599 Replace potentially-failing unchecked arithmetic operations with ones which error
In response to 9.13.3.

Requires a bump to Rust 1.82 to take advantage of `Option::is_none_or`.
2025-08-08 22:49:30 -04:00
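
A generic sketch of that kind of rewrite (checked arithmetic surfacing an error, plus `Option::is_none_or`, stabilized in Rust 1.82); this is not the actual diff, and the function is hypothetical:

// Generic example: reject a write whose end position would overflow, and use
// is_none_or to treat "no limit" as acceptable.
fn end_offset(start: u64, len: u64, limit: Option<u64>) -> Result<u64, &'static str> {
  let end = start.checked_add(len).ok_or("offset overflows u64")?;
  // is_none_or(...) is true when limit is None, or when the closure holds
  if limit.is_none_or(|limit| end <= limit) {
    Ok(end)
  } else {
    Err("offset exceeds limit")
  }
}

fn main() {
  assert_eq!(end_offset(10, 5, None), Ok(15));
  assert!(end_offset(u64::MAX, 1, None).is_err());
  assert!(end_offset(10, 5, Some(12)).is_err());
}
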
Luke Parker
38bb7c1b87 Clarify messages in non-debug assertions 2025-08-08 21:35:55 -04:00
Luke Parker
c3472b4ceb Response to usage of unwrap in non-test code
This commit replaces all usage of `unwrap` with `expect` within
`networks/monero`, clarifying why the risked panic is unreachable. This commit
also replaces some uses of `unwrap` with solutions which are guaranteed not to
fail.

Notably, compilation on 128-bit systems is prevented, ensuring
`u64::try_from(usize::MAX)` will never panic at runtime.

Slight breaking changes are additionally included as necessary to massage out
some avoidable panics.
2025-08-08 21:33:28 -04:00
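
A sketch of both ideas: an explicit compile-time guard on pointer width so a usize-to-u64 conversion cannot fail at runtime, and `expect` with a message documenting why. This is not the crate's actual code:

// Refuse to compile on targets with pointers wider than 64 bits, so the
// usize -> u64 conversion below can never fail at runtime.
#[cfg(not(any(target_pointer_width = "16", target_pointer_width = "32", target_pointer_width = "64")))]
compile_error!("this sketch assumes usize fits within u64");

fn len_as_u64(slice: &[u8]) -> u64 {
  u64::try_from(slice.len())
    .expect("usize exceeded u64 despite compilation being limited to <= 64-bit targets")
}

fn main() {
  assert_eq!(len_as_u64(&[1, 2, 3]), 3);
}
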
Luke Parker
77c66ee545 Remove Clone from ClsagMultisigMask{Sender, Receiver}
Clone had ill-defined semantics here: a mask could be sent multiple times
(unintended), and multiple algorithms could receive the same mask from a single
sender.

Requires removing the Clone bound within modular-frost and expanding the test
helpers accordingly.

This was not raised in the audit, yet was identified upon independent review.
2025-08-08 14:58:43 -04:00
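
To illustrate the one-shot semantics being enforced (sending consumes the sender, so a mask cannot be sent twice), here is a simplified stand-in, not the actual ClsagMultisigMask{Sender, Receiver} types:

// Simplified stand-in: without Clone, the sender is consumed on use, so the
// mask can only ever be sent once, and only one receiver observes it.
struct MaskSender(std::sync::mpsc::Sender<[u8; 32]>);
struct MaskReceiver(std::sync::mpsc::Receiver<[u8; 32]>);

fn mask_channel() -> (MaskSender, MaskReceiver) {
  let (send, recv) = std::sync::mpsc::channel();
  (MaskSender(send), MaskReceiver(recv))
}

impl MaskSender {
  // Taking `self` by value makes a second send a compile-time error
  fn send(self, mask: [u8; 32]) {
    let _ = self.0.send(mask);
  }
}

fn main() {
  let (sender, receiver) = mask_channel();
  sender.send([0; 32]);
  // sender.send([1; 32]); // would not compile: `sender` was moved
  assert_eq!(receiver.0.recv().unwrap(), [0; 32]);
}
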
Luke Parker
f4ea96f120 Respond to 2 3
We now use `FrostError::InternalError` instead of a panic to represent the mask
not being set.
2025-08-08 14:58:43 -04:00
Luke Parker
efa9d72603 Respond to 2 2 2025-08-08 14:58:39 -04:00
Luke Parker
feaa35201d Respond to 1.1 A2 (also cited as 2 1)
`read_vec` was unbounded. It now accepts an optional bound. In some places, we
are able to define and provide a bound (Bulletproofs(+)' `L` and `R` vectors).
In others, we cannot (the amount of inputs within a transaction, which is not
subject to any rule in the current consensus other than the total transaction
size limit). Usage of `None` in those locations preserves the existing
behavior.
2025-07-23 09:14:41 -04:00
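
A sketch of the described API shape: a length-prefixed vector read that optionally enforces an upper bound. The fixed u32 prefix and helper name are simplifications, not the actual monero-io encoding or function:

use std::io::{self, Read};

// Simplified sketch: read a u32 length prefix, reject it if it exceeds an
// optional bound, then read that many bytes.
fn read_vec<R: Read>(reader: &mut R, bound: Option<usize>) -> io::Result<Vec<u8>> {
  let mut len_bytes = [0; 4];
  reader.read_exact(&mut len_bytes)?;
  let len = usize::try_from(u32::from_le_bytes(len_bytes))
    .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "length doesn't fit in usize"))?;
  // `None` preserves the prior unbounded behavior; `Some(bound)` rejects oversized lengths
  if bound.is_some_and(|bound| len > bound) {
    return Err(io::Error::new(io::ErrorKind::InvalidData, "vector exceeded its bound"));
  }
  let mut res = vec![0; len];
  reader.read_exact(&mut res)?;
  Ok(res)
}

fn main() -> io::Result<()> {
  let encoded = [3u8, 0, 0, 0, 1, 2, 3];
  assert_eq!(read_vec(&mut &encoded[..], Some(16))?, vec![1, 2, 3]);
  assert!(read_vec(&mut &encoded[..], Some(2)).is_err());
  Ok(())
}
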
Luke Parker
a6d2476b66 Respond to 1.1 A1 2025-07-23 08:42:20 -04:00
393 changed files with 22848 additions and 7399 deletions

View File

@@ -7,20 +7,12 @@ runs:
- name: Remove unused packages
shell: bash
run: |
# Ensure the repositories are synced
sudo apt update -y
# Actually perform the removals
sudo apt remove -y "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
sudo apt remove -y --allow-remove-essential -f shim-signed *python3*
# This removal command requires the prior removals due to unmet dependencies otherwise
sudo apt remove -y --allow-remove-essential -f shim-signed
# This command would fail, due to shim-signed having unmet dependencies, hence its removal
sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
# Reinstall python3 as a general dependency of a functional operating system
sudo apt install -y python3 --fix-missing
if: runner.os == 'Linux'
- name: Remove unused packages
@@ -38,23 +30,19 @@ runs:
shell: bash
run: |
if [ "$RUNNER_OS" == "Linux" ]; then
sudo apt install -y ca-certificates protobuf-compiler libclang-dev
sudo apt install -y ca-certificates protobuf-compiler
elif [ "$RUNNER_OS" == "Windows" ]; then
choco install protoc
elif [ "$RUNNER_OS" == "macOS" ]; then
brew install protobuf llvm
HOMEBREW_ROOT_PATH=/opt/homebrew # Apple Silicon
if [ $(uname -m) = "x86_64" ]; then HOMEBREW_ROOT_PATH=/usr/local; fi # Intel
ls $HOMEBREW_ROOT_PATH/opt/llvm/lib | grep "libclang.dylib" # Make sure this installed `libclang`
echo "DYLD_LIBRARY_PATH=$HOMEBREW_ROOT_PATH/opt/llvm/lib:$DYLD_LIBRARY_PATH" >> "$GITHUB_ENV"
brew install protobuf
fi
- name: Install solc
shell: bash
run: |
cargo +1.89 install svm-rs --version =0.5.18
svm install 0.8.26
svm use 0.8.26
cargo install svm-rs
svm install 0.8.25
svm use 0.8.25
- name: Remove preinstalled Docker
shell: bash
@@ -79,6 +67,7 @@ runs:
with:
rootless: true
set-host: true
version: type=image,tag=27.5.1
if: runner.os == 'Linux'
# - name: Cache Rust

View File

@@ -1 +1 @@
nightly-2025-11-01
nightly-2024-09-01

View File

@@ -32,15 +32,9 @@ jobs:
-p dalek-ff-group \
-p minimal-ed448 \
-p ciphersuite \
-p ciphersuite-kp256 \
-p multiexp \
-p schnorr-signatures \
-p dleq \
-p dkg \
-p dkg-recovery \
-p dkg-dealer \
-p dkg-promote \
-p dkg-musig \
-p dkg-pedpop \
-p modular-frost \
-p frost-schnorrkel

View File

@@ -18,7 +18,7 @@ jobs:
key: rust-advisory-db
- name: Install cargo deny
run: cargo +1.89 install cargo-deny --version =0.18.3
run: cargo +1.85 install cargo-deny --version =0.18.3
- name: Run cargo deny
run: cargo deny -L error --all-features check --hide-inclusion-graph

View File

@@ -11,7 +11,7 @@ jobs:
clippy:
strategy:
matrix:
os: [ubuntu-latest, macos-15-intel, macos-latest, windows-latest]
os: [ubuntu-latest, macos-13, macos-14, windows-latest]
runs-on: ${{ matrix.os }}
steps:
@@ -26,7 +26,7 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Install nightly rust
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-src -c clippy
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy
- name: Run Clippy
run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
@@ -52,7 +52,7 @@ jobs:
key: rust-advisory-db
- name: Install cargo deny
run: cargo +1.89 install cargo-deny --version =0.18.4
run: cargo +1.85 install cargo-deny --version =0.18.3
- name: Run cargo deny
run: cargo deny -L error --all-features check --hide-inclusion-graph
@@ -79,5 +79,5 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Verify all dependencies are in use
run: |
cargo +1.89 install cargo-machete --version =0.8.0
cargo +1.89 machete
cargo +1.85 install cargo-machete --version =0.8.0
cargo +1.85 machete

.github/workflows/monero-tests.yaml (vendored, new file, 72 lines)
View File

@@ -0,0 +1,72 @@
name: Monero Tests
on:
push:
branches:
- develop
paths:
- "networks/monero/**"
- "processor/**"
pull_request:
paths:
- "networks/monero/**"
- "processor/**"
workflow_dispatch:
jobs:
# Only run these once since they will be consistent regardless of any node
unit-tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
- name: Run Unit Tests Without Features
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
# Doesn't run unit tests with features as the tests workflow will
integration-tests:
runs-on: ubuntu-latest
# Test against all supported protocol versions
strategy:
matrix:
version: [v0.17.3.2, v0.18.3.4]
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
- name: Run Integration Tests
# Don't run if the tests workflow also will
if: ${{ matrix.version != 'v0.18.3.4' }}
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'

View File

@@ -33,3 +33,16 @@ jobs:
-p alloy-simple-request-transport \
-p ethereum-serai \
-p serai-ethereum-relayer \
-p monero-io \
-p monero-generators \
-p monero-primitives \
-p monero-mlsag \
-p monero-clsag \
-p monero-borromean \
-p monero-bulletproofs \
-p monero-serai \
-p monero-rpc \
-p monero-simple-request-rpc \
-p monero-address \
-p monero-wallet \
-p monero-serai-verify-chain

View File

@@ -46,16 +46,16 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
uses: actions/checkout@v3
- name: Setup Ruby
uses: ruby/setup-ruby@44511735964dcb71245e7e55f72539531f7bc0eb
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
cache-version: 0
working-directory: "${{ github.workspace }}/docs"
- name: Setup Pages
id: pages
uses: actions/configure-pages@983d7736d9b0ae728b81ab479565c72886d7745b
uses: actions/configure-pages@v3
- name: Build with Jekyll
run: cd ${{ github.workspace }}/docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
env:
@@ -69,12 +69,12 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Build Rust docs
run: |
rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-docs
RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --no-deps --all-features
rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-docs
RUSTDOCFLAGS="--cfg docsrs" cargo +${{ steps.nightly.outputs.version }} doc --workspace --all-features
mv target/doc docs/_site/rust
- name: Upload artifact
uses: actions/upload-pages-artifact@7b1f4a764d45c48632c6b24a0339c27f5614fb0b
uses: actions/upload-pages-artifact@v3
with:
path: "docs/_site/"
@@ -88,4 +88,4 @@ jobs:
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e
uses: actions/deploy-pages@v4

.gitignore (vendored, 7 changed lines)
View File

@@ -1,14 +1,7 @@
target
# Don't commit any `Cargo.lock` which aren't the workspace's
Cargo.lock
!./Cargo.lock
# Don't commit any `Dockerfile`, as they're auto-generated, except the only one which isn't
Dockerfile
Dockerfile.fast-epoch
!orchestration/runtime/Dockerfile
.test-logs
.vscode

Cargo.lock (generated, 6457 changed lines)

File diff suppressed because it is too large

View File

@@ -1,8 +1,15 @@
[workspace]
resolver = "2"
members = [
# Version patches
"patches/parking_lot_core",
"patches/parking_lot",
"patches/zstd",
"patches/rocksdb",
# std patches
"patches/matches",
"patches/is-terminal",
# Rewrites/redirects
"patches/option-ext",
@@ -21,18 +28,12 @@ members = [
"crypto/dalek-ff-group",
"crypto/ed448",
"crypto/ciphersuite",
"crypto/ciphersuite/kp256",
"crypto/multiexp",
"crypto/schnorr",
"crypto/dleq",
"crypto/dkg",
"crypto/dkg/recovery",
"crypto/dkg/dealer",
"crypto/dkg/promote",
"crypto/dkg/musig",
"crypto/dkg/pedpop",
"crypto/frost",
"crypto/schnorrkel",
@@ -42,6 +43,20 @@ members = [
"networks/ethereum",
"networks/ethereum/relayer",
"networks/monero/io",
"networks/monero/generators",
"networks/monero/primitives",
"networks/monero/ringct/mlsag",
"networks/monero/ringct/clsag",
"networks/monero/ringct/borromean",
"networks/monero/ringct/bulletproofs",
"networks/monero",
"networks/monero/rpc",
"networks/monero/rpc/simple-request",
"networks/monero/wallet/address",
"networks/monero/wallet",
"networks/monero/verify-chain",
"message-queue",
"processor/messages",
@@ -111,26 +126,29 @@ minimal-ed448 = { opt-level = 3 }
multiexp = { opt-level = 3 }
monero-oxide = { opt-level = 3 }
monero-serai = { opt-level = 3 }
[profile.release]
panic = "unwind"
overflow-checks = true
[patch.crates-io]
# Dependencies from monero-oxide which originate from within our own tree
std-shims = { path = "common/std-shims" }
simple-request = { path = "common/request" }
dalek-ff-group = { path = "crypto/dalek-ff-group" }
flexible-transcript = { path = "crypto/transcript" }
modular-frost = { path = "crypto/frost" }
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
# These have `std` alternatives
parking_lot_core = { path = "patches/parking_lot_core" }
parking_lot = { path = "patches/parking_lot" }
# wasmtime pulls in an old version for this
zstd = { path = "patches/zstd" }
# Needed for WAL compression
rocksdb = { path = "patches/rocksdb" }
# 1.0.1 was yanked due to a breaking change (an extra field)
# 2.0 has fewer dependencies and still works within our tree
tiny-bip39 = { path = "patches/tiny-bip39" }
# is-terminal now has an std-based solution with an equivalent API
is-terminal = { path = "patches/is-terminal" }
# So does matches
matches = { path = "patches/matches" }
home = { path = "patches/home" }
# directories-next was created because directories was unmaintained
# directories-next is now unmaintained while directories is maintained
@@ -141,10 +159,7 @@ option-ext = { path = "patches/option-ext" }
directories-next = { path = "patches/directories-next" }
[workspace.lints.clippy]
uninlined_format_args = "allow" # TODO
unwrap_or_default = "allow"
manual_is_multiple_of = "allow"
incompatible_msrv = "allow" # Manually verified with a GitHub workflow
borrow_as_ptr = "deny"
cast_lossless = "deny"
cast_possible_truncation = "deny"
@@ -169,14 +184,14 @@ large_stack_arrays = "deny"
linkedlist = "deny"
macro_use_imports = "deny"
manual_instant_elapsed = "deny"
# TODO manual_let_else = "deny"
manual_let_else = "deny"
manual_ok_or = "deny"
manual_string_new = "deny"
map_unwrap_or = "deny"
match_bool = "deny"
match_same_arms = "deny"
missing_fields_in_debug = "deny"
# TODO needless_continue = "deny"
needless_continue = "deny"
needless_pass_by_value = "deny"
ptr_cast_constness = "deny"
range_minus_one = "deny"
@@ -184,7 +199,8 @@ range_plus_one = "deny"
redundant_closure_for_method_calls = "deny"
redundant_else = "deny"
string_add_assign = "deny"
unchecked_time_subtraction = "deny"
unchecked_duration_subtraction = "deny"
uninlined_format_args = "deny"
unnecessary_box_returns = "deny"
unnecessary_join = "deny"
unnecessary_wraps = "deny"
@@ -192,21 +208,3 @@ unnested_or_patterns = "deny"
unused_async = "deny"
unused_self = "deny"
zero_sized_map_values = "deny"
# TODO: These were incurred when updating Rust as necessary for compilation, yet aren't being fixed
# at this time due to the impacts it'd have throughout the repository (when this isn't actively the
# primary branch, `next` is)
needless_continue = "allow"
needless_lifetimes = "allow"
useless_conversion = "allow"
empty_line_after_doc_comments = "allow"
manual_div_ceil = "allow"
manual_let_else = "allow"
unnecessary_map_or = "allow"
result_large_err = "allow"
unneeded_struct_pattern = "allow"
[workspace.lints.rust]
unused = "allow" # TODO: https://github.com/rust-lang/rust/issues/147648
mismatched_lifetime_syntaxes = "allow"
unused_attributes = "allow"
unused_parens = "allow"

View File

@@ -18,7 +18,7 @@ workspace = true
[dependencies]
parity-db = { version = "0.4", default-features = false, optional = true }
rocksdb = { version = "0.24", default-features = false, features = ["zstd"], optional = true }
rocksdb = { version = "0.21", default-features = false, features = ["zstd"], optional = true }
[features]
parity-db = ["dep:parity-db"]

View File

@@ -1,5 +1,5 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
// Obtain a variable from the Serai environment/secret store.
pub fn var(variable: &str) -> Option<String> {

View File

@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]

View File

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/simple-requ
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["http", "https", "async", "request", "ssl"]
edition = "2021"
rust-version = "1.70"
rust-version = "1.64"
[package.metadata.docs.rs]
all-features = true

View File

@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
use std::sync::Arc;

View File

@@ -1,13 +1,13 @@
[package]
name = "std-shims"
version = "0.1.4"
version = "0.1.1"
description = "A series of std shims to make alloc more feasible"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/common/std-shims"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["nostd", "no_std", "alloc", "io"]
edition = "2021"
rust-version = "1.64"
rust-version = "1.70"
[package.metadata.docs.rs]
all-features = true
@@ -17,8 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true
[dependencies]
rustversion = { version = "1", default-features = false }
spin = { version = "0.10", default-features = false, features = ["use_ticket_mutex", "once", "lazy"] }
spin = { version = "0.9", default-features = false, features = ["use_ticket_mutex", "lazy"] }
hashbrown = { version = "0.14", default-features = false, features = ["ahash", "inline-more"] }
[features]

View File

@@ -3,9 +3,4 @@
A crate which passes through to std when the default `std` feature is enabled,
yet provides a series of shims when it isn't.
No guarantee of one-to-one parity is provided. The shims provided aim to be sufficient for the
average case.
`HashSet` and `HashMap` are provided via `hashbrown`. Synchronization primitives are provided via
`spin` (avoiding a requirement on `critical-section`).
types are not guaranteed to be
`HashSet` and `HashMap` are provided via `hashbrown`.

View File

@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
@@ -11,64 +11,3 @@ pub mod io;
pub use alloc::vec;
pub use alloc::str;
pub use alloc::string;
pub mod prelude {
#[rustversion::before(1.73)]
#[doc(hidden)]
pub trait StdShimsDivCeil {
fn div_ceil(self, rhs: Self) -> Self;
}
#[rustversion::before(1.73)]
mod impl_divceil {
use super::StdShimsDivCeil;
impl StdShimsDivCeil for u8 {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
impl StdShimsDivCeil for u16 {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
impl StdShimsDivCeil for u32 {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
impl StdShimsDivCeil for u64 {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
impl StdShimsDivCeil for u128 {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
impl StdShimsDivCeil for usize {
fn div_ceil(self, rhs: Self) -> Self {
(self + (rhs - 1)) / rhs
}
}
}
#[cfg(feature = "std")]
#[rustversion::before(1.74)]
#[doc(hidden)]
pub trait StdShimsIoErrorOther {
fn other<E>(error: E) -> Self
where
E: Into<Box<dyn std::error::Error + Send + Sync>>;
}
#[cfg(feature = "std")]
#[rustversion::before(1.74)]
impl StdShimsIoErrorOther for std::io::Error {
fn other<E>(error: E) -> Self
where
E: Into<Box<dyn std::error::Error + Send + Sync>>,
{
std::io::Error::new(std::io::ErrorKind::Other, error)
}
}
}

View File

@@ -25,11 +25,7 @@ mod mutex_shim {
}
pub use mutex_shim::{ShimMutex as Mutex, MutexGuard};
#[cfg(not(feature = "std"))]
pub use spin::Lazy as LazyLock;
#[rustversion::before(1.80)]
#[cfg(feature = "std")]
pub use spin::Lazy as LazyLock;
#[rustversion::since(1.80)]
#[cfg(feature = "std")]
pub use std::sync::LazyLock;
#[cfg(not(feature = "std"))]
pub use spin::Lazy as LazyLock;

View File

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/common/zalloc"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
rust-version = "1.77"
rust-version = "1.77.0"
[package.metadata.docs.rs]
all-features = true

View File

@@ -1,5 +1,5 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(all(zalloc_rustc_nightly, feature = "allocator"), feature(allocator_api))]
//! Implementation of a Zeroizing Allocator, enabling zeroizing memory on deallocation.

View File

@@ -25,10 +25,8 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] }
blake2 = { version = "0.10", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../crypto/transcript", default-features = false, features = ["std", "recommended"] }
dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"] }
ciphersuite = { path = "../crypto/ciphersuite", default-features = false, features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std", "aggregate"] }
dkg-musig = { path = "../crypto/dkg/musig", default-features = false, features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../crypto/schnorr", default-features = false, features = ["std"] }
frost = { package = "modular-frost", path = "../crypto/frost" }
frost-schnorrkel = { path = "../crypto/schnorrkel" }
@@ -42,7 +40,7 @@ processor-messages = { package = "serai-processor-messages", path = "../processo
message-queue = { package = "serai-message-queue", path = "../message-queue" }
tributary = { package = "tributary-chain", path = "./tributary" }
sp-application-crypto = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "da19e1f8ca7a9e2cbf39fbfa493918eeeb45e10b", default-features = false, features = ["std"] }
sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
serai-client = { path = "../substrate/client", default-features = false, features = ["serai", "borsh"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
@@ -57,8 +55,8 @@ libp2p = { version = "0.52", default-features = false, features = ["tokio", "tcp
[dev-dependencies]
tributary = { package = "tributary-chain", path = "./tributary", features = ["tests"] }
sp-application-crypto = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "da19e1f8ca7a9e2cbf39fbfa493918eeeb45e10b", default-features = false, features = ["std"] }
sp-runtime = { git = "https://github.com/serai-dex/patch-polkadot-sdk", rev = "da19e1f8ca7a9e2cbf39fbfa493918eeeb45e10b", default-features = false, features = ["std"] }
sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
sp-runtime = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
[features]
longer-reattempts = []

View File

@@ -12,7 +12,7 @@ use tokio::{
use borsh::BorshSerialize;
use sp_application_crypto::RuntimePublic;
use serai_client::{
primitives::{ExternalNetworkId, EXTERNAL_NETWORKS},
primitives::{ExternalNetworkId, Signature, EXTERNAL_NETWORKS},
validator_sets::primitives::{ExternalValidatorSet, Session},
Serai, SeraiError, TemporalSerai,
};
@@ -164,7 +164,7 @@ impl<D: Db> CosignEvaluator<D> {
if !keys
.0
.verify(&cosign_block_msg(cosign.block_number, cosign.block), &cosign.signature.into())
.verify(&cosign_block_msg(cosign.block_number, cosign.block), &Signature(cosign.signature))
{
log::warn!("received cosigned block with an invalid signature");
return Ok(());

View File

@@ -1,5 +1,3 @@
#![expect(clippy::cast_possible_truncation)]
use core::ops::Deref;
use std::{
sync::{OnceLock, Arc},
@@ -10,13 +8,12 @@ use std::{
use zeroize::{Zeroize, Zeroizing};
use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{
ff::{Field, PrimeField},
GroupEncoding,
},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;
use frost::Participant;
@@ -271,15 +268,12 @@ async fn handle_processor_message<D: Db, P: P2p>(
coordinator::ProcessorMessage::SignedSlashReport { session, signature } => {
let set = ExternalValidatorSet { network, session: *session };
let signature: &[u8] = signature.as_ref();
let signature = <[u8; 64]>::try_from(signature).unwrap();
let signature: serai_client::Signature = signature.into();
let signature = serai_client::Signature(signature.try_into().unwrap());
let slashes = crate::tributary::SlashReport::get(&txn, set)
.expect("signed slash report despite not having slash report locally");
let slashes_pubs = slashes
.iter()
.map(|(address, points)| (Public::from(*address), *points))
.collect::<Vec<_>>();
let slashes_pubs =
slashes.iter().map(|(address, points)| (Public(*address), *points)).collect::<Vec<_>>();
let tx = serai_client::SeraiValidatorSets::report_slashes(
network,
@@ -289,7 +283,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
.collect::<Vec<_>>()
.try_into()
.unwrap(),
signature,
signature.clone(),
);
loop {
@@ -506,7 +500,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
&mut txn,
key,
spec,
&KeyPair(Public::from(substrate_key), network_key.try_into().unwrap()),
&KeyPair(Public(substrate_key), network_key.try_into().unwrap()),
id.attempt,
);

View File

@@ -14,8 +14,7 @@
use zeroize::Zeroizing;
use dalek_ff_group::Ristretto;
use ciphersuite::Ciphersuite;
use ciphersuite::{Ciphersuite, Ristretto};
use borsh::{BorshSerialize, BorshDeserialize};

View File

@@ -6,8 +6,7 @@ use std::{
use zeroize::Zeroizing;
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use serai_client::{
coins::CoinsEvent,

View File

@@ -7,10 +7,9 @@ use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng, OsRng};
use futures_util::{task::Poll, poll};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::Field, GroupEncoding},
Ciphersuite,
Ciphersuite, Ristretto,
};
use sp_application_crypto::sr25519;
@@ -55,9 +54,7 @@ pub fn new_spec<R: RngCore + CryptoRng>(
let set_participants = keys
.iter()
.map(|key| {
(sr25519::Public::from((<Ristretto as Ciphersuite>::generator() * **key).to_bytes()), 1)
})
.map(|key| (sr25519::Public((<Ristretto as Ciphersuite>::generator() * **key).to_bytes()), 1))
.collect::<Vec<_>>();
let res = TributarySpec::new(serai_block, start_time, set, set_participants);

View File

@@ -4,8 +4,7 @@ use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use frost::Participant;
use sp_runtime::traits::Verify;
@@ -316,8 +315,7 @@ async fn dkg_test() {
OsRng.fill_bytes(&mut substrate_key);
let mut network_key = vec![0; usize::try_from((OsRng.next_u64() % 32) + 32).unwrap()];
OsRng.fill_bytes(&mut network_key);
let key_pair =
KeyPair(serai_client::Public::from(substrate_key), network_key.try_into().unwrap());
let key_pair = KeyPair(serai_client::Public(substrate_key), network_key.try_into().unwrap());
let mut txs = vec![];
for (i, key) in keys.iter().enumerate() {
@@ -362,9 +360,9 @@ async fn dkg_test() {
assert_eq!(self.key_pair, key_pair);
assert!(signature.verify(
&*serai_client::validator_sets::primitives::set_keys_message(&set, &[], &key_pair),
&serai_client::Public::from(
dkg_musig::musig_key_vartime::<Ristretto>(
serai_client::validator_sets::primitives::musig_context(set.into()),
&serai_client::Public(
frost::dkg::musig::musig_key::<Ristretto>(
&serai_client::validator_sets::primitives::musig_context(set.into()),
&self.spec.validators().into_iter().map(|(validator, _)| validator).collect::<Vec<_>>()
)
.unwrap()

View File

@@ -2,8 +2,7 @@ use core::fmt::Debug;
use rand_core::{RngCore, OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::Group, Ciphersuite};
use ciphersuite::{group::Group, Ciphersuite, Ristretto};
use scale::{Encode, Decode};
use serai_client::{

View File

@@ -3,8 +3,7 @@ use std::{sync::Arc, collections::HashSet};
use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use tokio::{
sync::{mpsc, broadcast},

View File

@@ -3,8 +3,7 @@ use std::collections::HashMap;
use scale::Encode;
use borsh::{BorshSerialize, BorshDeserialize};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use frost::Participant;
use serai_client::validator_sets::primitives::{KeyPair, ExternalValidatorSet};

View File

@@ -4,12 +4,11 @@ use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use frost::dkg::Participant;
use scale::{Encode, Decode};
use serai_client::validator_sets::primitives::KeyPair;
use serai_client::{Signature, validator_sets::primitives::KeyPair};
use tributary::{Signed, TransactionKind, TransactionTrait};
@@ -554,7 +553,7 @@ impl<
self.spec.set(),
removed.into_iter().map(|key| key.to_bytes().into()).collect(),
key_pair,
sig.into(),
Signature(sig),
)
.await;
}

View File

@@ -1,5 +1,4 @@
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use serai_client::validator_sets::primitives::ExternalValidatorSet;

View File

@@ -3,8 +3,7 @@ use std::{sync::Arc, collections::HashSet};
use zeroize::Zeroizing;
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use tokio::sync::broadcast;

View File

@@ -63,13 +63,16 @@ use rand_core::OsRng;
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::PrimeField, GroupEncoding},
Ciphersuite,
Ciphersuite, Ristretto,
};
use frost::{
FrostError,
dkg::{Participant, musig::musig},
ThresholdKeys,
sign::*,
};
use dkg_musig::musig;
use frost::{FrostError, dkg::Participant, ThresholdKeys, sign::*};
use frost_schnorrkel::Schnorrkel;
use scale::Encode;
@@ -116,7 +119,7 @@ impl<T: DbTxn, C: Encode> SigningProtocol<'_, T, C> {
let algorithm = Schnorrkel::new(b"substrate");
let keys: ThresholdKeys<Ristretto> =
musig(musig_context(self.spec.set().into()), self.key.clone(), participants)
musig(&musig_context(self.spec.set().into()), self.key, participants)
.expect("signing for a set we aren't in/validator present multiple times")
.into();
@@ -295,7 +298,7 @@ impl<T: DbTxn> DkgConfirmer<'_, T> {
threshold_i_map_to_keys_and_musig_i_map(self.spec, &self.removed, self.key, preprocesses).1;
let msg = set_keys_message(
&self.spec.set(),
&self.removed.iter().map(|key| Public::from(key.to_bytes())).collect::<Vec<_>>(),
&self.removed.iter().map(|key| Public(key.to_bytes())).collect::<Vec<_>>(),
key_pair,
);
self.signing_protocol().share_internal(&participants, preprocesses, &msg)

View File

@@ -3,8 +3,7 @@ use std::{io, collections::HashMap};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use frost::Participant;
use scale::Encode;

View File

@@ -7,10 +7,9 @@ use rand_core::{RngCore, CryptoRng};
use blake2::{Digest, Blake2s256};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::Field, GroupEncoding},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;
use frost::Participant;

View File

@@ -27,8 +27,7 @@ rand_chacha = { version = "0.3", default-features = false, features = ["std"] }
blake2 = { version = "0.10", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["std", "recommended"] }
dalek-ff-group = { path = "../../crypto/dalek-ff-group" }
ciphersuite = { package = "ciphersuite", path = "../../crypto/ciphersuite", default-features = false, features = ["std"] }
ciphersuite = { package = "ciphersuite", path = "../../crypto/ciphersuite", default-features = false, features = ["std", "ristretto"] }
schnorr = { package = "schnorr-signatures", path = "../../crypto/schnorr", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] }

View File

@@ -1,7 +1,6 @@
use std::collections::{VecDeque, HashSet};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
use ciphersuite::{group::GroupEncoding, Ciphersuite, Ristretto};
use serai_db::{Get, DbTxn, Db};

View File

@@ -5,8 +5,7 @@ use async_trait::async_trait;
use zeroize::Zeroizing;
use dalek_ff_group::Ristretto;
use ciphersuite::Ciphersuite;
use ciphersuite::{Ciphersuite, Ristretto};
use scale::Decode;
use futures_channel::mpsc::UnboundedReceiver;

View File

@@ -1,7 +1,6 @@
use std::collections::HashMap;
use dalek_ff_group::Ristretto;
use ciphersuite::Ciphersuite;
use ciphersuite::{Ciphersuite, Ristretto};
use serai_db::{DbTxn, Db};

View File

@@ -11,13 +11,12 @@ use rand_chacha::ChaCha12Rng;
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{
GroupEncoding,
ff::{Field, PrimeField},
},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::{
SchnorrSignature,

View File

@@ -4,8 +4,7 @@ use scale::{Encode, Decode, IoReader};
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::Ciphersuite;
use ciphersuite::{Ciphersuite, Ristretto};
use crate::{
transaction::{Transaction, TransactionKind, TransactionError},

View File

@@ -1,11 +1,9 @@
use std::{sync::Arc, io, collections::HashMap, fmt::Debug};
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::Field, Group},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;

View File

@@ -10,8 +10,7 @@ use rand::rngs::OsRng;
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::ff::Field, Ciphersuite};
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use serai_db::{DbTxn, Db, MemDb};

View File

@@ -3,8 +3,7 @@ use std::{sync::Arc, collections::HashMap};
use zeroize::Zeroizing;
use rand::{RngCore, rngs::OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::ff::Field, Ciphersuite};
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use tendermint::ext::Commit;

View File

@@ -6,10 +6,9 @@ use rand::{RngCore, CryptoRng, rngs::OsRng};
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::Field, Group},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;

View File

@@ -2,8 +2,7 @@ use rand::rngs::OsRng;
use blake2::{Digest, Blake2s256};
use dalek_ff_group::Ristretto;
use ciphersuite::{group::ff::Field, Ciphersuite};
use ciphersuite::{group::ff::Field, Ciphersuite, Ristretto};
use crate::{
ReadWrite,

View File

@@ -3,8 +3,7 @@ use std::sync::Arc;
use zeroize::Zeroizing;
use rand::{RngCore, rngs::OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::{Ciphersuite, group::ff::Field};
use ciphersuite::{Ristretto, Ciphersuite, group::ff::Field};
use scale::Encode;

View File

@@ -6,10 +6,9 @@ use thiserror::Error;
use blake2::{Digest, Blake2b512};
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{Group, GroupEncoding},
Ciphersuite,
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;

View File

@@ -1,5 +1,3 @@
#![expect(clippy::cast_possible_truncation)]
use core::fmt::Debug;
use std::{

View File

@@ -1,13 +1,13 @@
[package]
name = "ciphersuite"
version = "0.4.2"
version = "0.4.1"
description = "Ciphersuites built around ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["ciphersuite", "ff", "group"]
edition = "2021"
rust-version = "1.66"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
@@ -24,12 +24,22 @@ rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["derive"] }
subtle = { version = "^2.4", default-features = false }
digest = { version = "0.10", default-features = false, features = ["core-api"] }
digest = { version = "0.10", default-features = false }
transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false }
sha2 = { version = "0.10", default-features = false, optional = true }
sha3 = { version = "0.10", default-features = false, optional = true }
ff = { version = "0.13", default-features = false, features = ["bits"] }
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../dalek-ff-group", version = "0.4", default-features = false, optional = true }
elliptic-curve = { version = "0.13", default-features = false, features = ["hash2curve"], optional = true }
p256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"], optional = true }
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"], optional = true }
minimal-ed448 = { path = "../ed448", version = "0.4", default-features = false, optional = true }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
@@ -38,7 +48,7 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] }
ff-group-tests = { version = "0.13", path = "../ff-group-tests" }
[features]
alloc = ["std-shims", "ff/alloc"]
alloc = ["std-shims"]
std = [
"std-shims/std",
@@ -49,8 +59,27 @@ std = [
"digest/std",
"transcript/std",
"sha2?/std",
"sha3?/std",
"ff/std",
"dalek-ff-group?/std",
"elliptic-curve?/std",
"p256?/std",
"k256?/std",
"minimal-ed448?/std",
]
dalek = ["sha2", "dalek-ff-group"]
ed25519 = ["dalek"]
ristretto = ["dalek"]
kp256 = ["sha2", "elliptic-curve"]
p256 = ["kp256", "dep:p256"]
secp256k1 = ["kp256", "k256"]
ed448 = ["sha3", "minimal-ed448"]
default = ["std"]

View File

@@ -21,8 +21,6 @@ Their `hash_to_F` is the
[IETF's hash to curve](https://www.ietf.org/archive/id/draft-irtf-cfrg-hash-to-curve-16.html),
yet applied to their scalar field.
Please see the [`ciphersuite-kp256`](https://docs.rs/ciphersuite-kp256) crate for more info.
### Ed25519/Ristretto
Ed25519/Ristretto are offered via
@@ -35,8 +33,6 @@ the draft
[RFC-RISTRETTO](https://www.ietf.org/archive/id/draft-irtf-cfrg-ristretto255-decaf448-05.html).
The domain-separation tag is naively prefixed to the message.
Please see the [`dalek-ff-group`](https://docs.rs/dalek-ff-group) crate for more info.
### Ed448
Ed448 is offered via [minimal-ed448](https://crates.io/crates/minimal-ed448), an
@@ -46,5 +42,3 @@ to its prime-order subgroup.
Its `hash_to_F` is the wide reduction of SHAKE256, with a 114-byte output, as
used in [RFC-8032](https://www.rfc-editor.org/rfc/rfc8032). The
domain-separation tag is naively prefixed to the message.
Please see the [`minimal-ed448`](https://docs.rs/minimal-ed448) crate for more info.

View File

@@ -1,55 +0,0 @@
[package]
name = "ciphersuite-kp256"
version = "0.4.0"
description = "Ciphersuites built around ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ciphersuite/kp256"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["ciphersuite", "ff", "group"]
edition = "2021"
rust-version = "1.66"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["derive"] }
sha2 = { version = "0.10", default-features = false }
elliptic-curve = { version = "0.13", default-features = false, features = ["hash2curve"] }
p256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"] }
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits", "hash2curve"] }
ciphersuite = { path = "../", version = "0.4", default-features = false }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["std"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
ff-group-tests = { version = "0.13", path = "../../ff-group-tests" }
[features]
alloc = ["ciphersuite/alloc"]
std = [
"rand_core/std",
"zeroize/std",
"sha2/std",
"elliptic-curve/std",
"p256/std",
"k256/std",
"ciphersuite/std",
]
default = ["std"]

View File

@@ -1,3 +0,0 @@
# Ciphersuite {k, p}256
SECP256k1 and P-256 Ciphersuites around k256 and p256.

View File

@@ -3,9 +3,9 @@ use zeroize::Zeroize;
use sha2::{Digest, Sha512};
use group::Group;
use crate::Scalar;
use dalek_ff_group::Scalar;
use ciphersuite::Ciphersuite;
use crate::Ciphersuite;
macro_rules! dalek_curve {
(
@@ -15,7 +15,7 @@ macro_rules! dalek_curve {
$Point: ident,
$ID: literal
) => {
use crate::$Point;
use dalek_ff_group::$Point;
impl Ciphersuite for $Ciphersuite {
type F = Scalar;
@@ -40,9 +40,12 @@ macro_rules! dalek_curve {
/// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition
/// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as
/// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other.
#[cfg(any(test, feature = "ristretto"))]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Ristretto;
#[cfg(any(test, feature = "ristretto"))]
dalek_curve!("ristretto", Ristretto, RistrettoPoint, b"ristretto");
#[cfg(any(test, feature = "ristretto"))]
#[test]
fn test_ristretto() {
ff_group_tests::group::test_prime_group_bits::<_, RistrettoPoint>(&mut rand_core::OsRng);
@@ -68,9 +71,12 @@ fn test_ristretto() {
/// hash_to_F is implemented with a naive concatenation of the dst and data, allowing transposition
/// between the two. This means `dst: b"abc", data: b"def"`, will produce the same scalar as
/// `dst: "abcdef", data: b""`. Please use carefully, not letting dsts be substrings of each other.
#[cfg(feature = "ed25519")]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Ed25519;
#[cfg(feature = "ed25519")]
dalek_curve!("ed25519", Ed25519, EdwardsPoint, b"edwards25519");
#[cfg(feature = "ed25519")]
#[test]
fn test_ed25519() {
ff_group_tests::group::test_prime_group_bits::<_, EdwardsPoint>(&mut rand_core::OsRng);

View File

@@ -1,17 +1,15 @@
use zeroize::Zeroize;
use sha3::{
digest::{
typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput,
ExtendableOutput, XofReader, HashMarker, Digest,
},
Shake256,
use digest::{
typenum::U114, core_api::BlockSizeUser, Update, Output, OutputSizeUser, FixedOutput,
ExtendableOutput, XofReader, HashMarker, Digest,
};
use sha3::Shake256;
use group::Group;
use crate::{Scalar, Point};
use minimal_ed448::{Scalar, Point};
use ciphersuite::Ciphersuite;
use crate::Ciphersuite;
/// Shake256, fixed to a 114-byte output, as used by Ed448.
#[derive(Clone, Default)]

View File

@@ -1,17 +1,16 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
use zeroize::Zeroize;
use sha2::Sha256;
use group::ff::PrimeField;
use elliptic_curve::{
generic_array::GenericArray,
bigint::{NonZero, CheckedAdd, Encoding, U384},
hash2curve::{Expander, ExpandMsg, ExpandMsgXmd},
};
use ciphersuite::{group::ff::PrimeField, Ciphersuite};
use crate::Ciphersuite;
macro_rules! kp_curve {
(
@@ -108,9 +107,12 @@ fn test_oversize_dst<C: Ciphersuite>() {
/// Ciphersuite for Secp256k1.
///
/// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16).
#[cfg(feature = "secp256k1")]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Secp256k1;
#[cfg(feature = "secp256k1")]
kp_curve!("secp256k1", k256, Secp256k1, b"secp256k1");
#[cfg(feature = "secp256k1")]
#[test]
fn test_secp256k1() {
ff_group_tests::group::test_prime_group_bits::<_, k256::ProjectivePoint>(&mut rand_core::OsRng);
@@ -143,9 +145,12 @@ fn test_secp256k1() {
/// Ciphersuite for P-256.
///
/// hash_to_F is implemented via the IETF draft for hash to curve's hash_to_field (v16).
#[cfg(feature = "p256")]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct P256;
#[cfg(feature = "p256")]
kp_curve!("p256", p256, P256, b"P-256");
#[cfg(feature = "p256")]
#[test]
fn test_p256() {
ff_group_tests::group::test_prime_group_bits::<_, p256::ProjectivePoint>(&mut rand_core::OsRng);

View File

@@ -2,7 +2,7 @@
Ciphersuites for elliptic curves premised on ff/group.
This library was
This library, except for the not recommended Ed448 ciphersuite, was
[audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),
culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06).

View File

@@ -1,12 +1,9 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("lib.md")]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::Debug;
#[cfg(any(feature = "alloc", feature = "std"))]
#[allow(unused_imports)]
use std_shims::prelude::*;
#[cfg(any(feature = "alloc", feature = "std"))]
use std_shims::io::{self, Read};
use rand_core::{RngCore, CryptoRng};
@@ -26,6 +23,25 @@ use group::{
#[cfg(any(feature = "alloc", feature = "std"))]
use group::GroupEncoding;
#[cfg(feature = "dalek")]
mod dalek;
#[cfg(feature = "ristretto")]
pub use dalek::Ristretto;
#[cfg(feature = "ed25519")]
pub use dalek::Ed25519;
#[cfg(feature = "kp256")]
mod kp256;
#[cfg(feature = "secp256k1")]
pub use kp256::Secp256k1;
#[cfg(feature = "p256")]
pub use kp256::P256;
#[cfg(feature = "ed448")]
mod ed448;
#[cfg(feature = "ed448")]
pub use ed448::*;
/// Unified trait defining a ciphersuite around an elliptic curve.
pub trait Ciphersuite:
'static + Send + Sync + Clone + Copy + PartialEq + Eq + Debug + Zeroize
@@ -83,9 +99,6 @@ pub trait Ciphersuite:
}
/// Read a canonical point from something implementing std::io::Read.
///
/// The provided implementation is safe so long as `GroupEncoding::to_bytes` always returns a
/// canonical serialization.
#[cfg(any(feature = "alloc", feature = "std"))]
#[allow(non_snake_case)]
fn read_G<R: Read>(reader: &mut R) -> io::Result<Self::G> {

View File

@@ -1,13 +1,13 @@
[package]
name = "dalek-ff-group"
version = "0.4.4"
version = "0.4.1"
description = "ff/group bindings around curve25519-dalek"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["curve25519", "ed25519", "ristretto", "dalek", "group"]
edition = "2021"
rust-version = "1.65"
rust-version = "1.66"
[package.metadata.docs.rs]
all-features = true
@@ -25,22 +25,18 @@ subtle = { version = "^2.4", default-features = false }
rand_core = { version = "0.6", default-features = false }
digest = { version = "0.10", default-features = false }
sha2 = { version = "0.10", default-features = false }
ff = { version = "0.13", default-features = false, features = ["bits"] }
group = { version = "0.13", default-features = false }
ciphersuite = { path = "../ciphersuite", default-features = false }
crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] }
curve25519-dalek = { version = ">= 4.0, < 4.2", default-features = false, features = ["alloc", "zeroize", "digest", "group", "precomputed-tables"] }
[dev-dependencies]
hex = "0.4"
rand_core = { version = "0.6", default-features = false, features = ["std"] }
ff-group-tests = { path = "../ff-group-tests" }
[features]
alloc = ["zeroize/alloc", "ciphersuite/alloc"]
std = ["alloc", "zeroize/std", "subtle/std", "rand_core/std", "digest/std", "sha2/std", "ciphersuite/std"]
std = ["zeroize/std", "subtle/std", "rand_core/std", "digest/std"]
default = ["std"]

View File

@@ -17,7 +17,7 @@ use crypto_bigint::{
impl_modulus,
};
use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits, FromUniformBytes};
use group::ff::{Field, PrimeField, FieldBits, PrimeFieldBits};
use crate::{u8_from_bool, constant_time, math_op, math};
@@ -35,8 +35,7 @@ impl_modulus!(
type ResidueType = Residue<FieldModulus, { FieldModulus::LIMBS }>;
/// A constant-time implementation of the Ed25519 field.
#[derive(Clone, Copy, PartialEq, Eq, Default, Debug, Zeroize)]
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, Default, Debug)]
pub struct FieldElement(ResidueType);
// Square root of -1.
@@ -93,7 +92,7 @@ impl Neg for FieldElement {
}
}
impl Neg for &FieldElement {
impl<'a> Neg for &'a FieldElement {
type Output = FieldElement;
fn neg(self) -> Self::Output {
(*self).neg()
@@ -217,18 +216,10 @@ impl PrimeFieldBits for FieldElement {
}
impl FieldElement {
/// Create a FieldElement from a `crypto_bigint::U256`.
///
/// This will reduce the `U256` by the modulus, into a member of the field.
pub const fn from_u256(u256: &U256) -> Self {
FieldElement(Residue::new(u256))
}
/// Create a `FieldElement` from the reduction of a 512-bit number.
///
/// The bytes are interpreted in little-endian format.
pub fn wide_reduce(value: [u8; 64]) -> Self {
FieldElement(reduce(U512::from_le_bytes(value)))
/// Interpret the value as a little-endian integer, square it, and reduce it into a FieldElement.
pub fn from_square(value: [u8; 32]) -> FieldElement {
let value = U256::from_le_bytes(value);
FieldElement(reduce(U512::from(value.mul_wide(&value))))
}
/// Perform an exponentiation.
@@ -306,12 +297,6 @@ impl FieldElement {
}
}
impl FromUniformBytes<64> for FieldElement {
fn from_uniform_bytes(bytes: &[u8; 64]) -> Self {
Self::wide_reduce(*bytes)
}
}
impl Sum<FieldElement> for FieldElement {
fn sum<I: Iterator<Item = FieldElement>>(iter: I) -> FieldElement {
let mut res = FieldElement::ZERO;

View File

@@ -1,5 +1,5 @@
#![allow(deprecated)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![no_std] // Prevents writing new code, in what should be a simple wrapper, which requires std
#![doc = include_str!("../README.md")]
#![allow(clippy::redundant_closure_call)]
@@ -30,7 +30,7 @@ use dalek::{
pub use constants::{ED25519_BASEPOINT_TABLE, RISTRETTO_BASEPOINT_TABLE};
use group::{
ff::{Field, PrimeField, FieldBits, PrimeFieldBits, FromUniformBytes},
ff::{Field, PrimeField, FieldBits, PrimeFieldBits},
Group, GroupEncoding,
prime::PrimeGroup,
};
@@ -38,24 +38,13 @@ use group::{
mod field;
pub use field::FieldElement;
mod ciphersuite;
pub use crate::ciphersuite::{Ed25519, Ristretto};
// Use black_box when possible
#[rustversion::since(1.66)]
mod black_box {
pub(crate) fn black_box<T>(val: T) -> T {
#[allow(clippy::incompatible_msrv)]
core::hint::black_box(val)
}
}
use core::hint::black_box;
#[rustversion::before(1.66)]
mod black_box {
pub(crate) fn black_box<T>(val: T) -> T {
val
}
fn black_box<T>(val: T) -> T {
val
}
use black_box::black_box;
fn u8_from_bool(bit_ref: &mut bool) -> u8 {
let bit_ref = black_box(bit_ref);
@@ -325,12 +314,6 @@ impl PrimeFieldBits for Scalar {
}
}
impl FromUniformBytes<64> for Scalar {
fn from_uniform_bytes(bytes: &[u8; 64]) -> Self {
Self::from_bytes_mod_order_wide(bytes)
}
}
impl Sum<Scalar> for Scalar {
fn sum<I: Iterator<Item = Scalar>>(iter: I) -> Scalar {
Self(DScalar::sum(iter))
@@ -368,12 +351,7 @@ macro_rules! dalek_group {
$BASEPOINT_POINT: ident,
$BASEPOINT_TABLE: ident
) => {
/// Wrapper around the dalek Point type.
///
/// All operations will be restricted to a prime-order subgroup (equivalent to the group itself
/// in the case of Ristretto). The exposure of the internal element does allow bypassing this
/// however, which may lead to undefined/computationally-unsafe behavior, and is entirely at
/// the user's risk.
/// Wrapper around the dalek Point type. For Ed25519, this is restricted to the prime subgroup.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct $Point(pub $DPoint);
deref_borrow!($Point, $DPoint);


@@ -1,13 +1,13 @@
[package]
name = "dkg"
version = "0.6.1"
version = "0.5.1"
description = "Distributed key generation over ff/group"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.66"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
@@ -17,25 +17,50 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true
[dependencies]
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive", "alloc"] }
thiserror = { version = "1", default-features = false, optional = true }
thiserror = { version = "2", default-features = false }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false }
borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true }
ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] }
transcript = { package = "flexible-transcript", path = "../transcript", version = "^0.3.2", default-features = false, features = ["recommended"] }
chacha20 = { version = "0.9", default-features = false, features = ["zeroize"] }
ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false }
multiexp = { path = "../multiexp", version = "0.4", default-features = false }
schnorr = { package = "schnorr-signatures", path = "../schnorr", version = "^0.5.1", default-features = false }
dleq = { path = "../dleq", version = "^0.4.1", default-features = false }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
ciphersuite = { path = "../ciphersuite", default-features = false, features = ["ristretto"] }
[features]
std = [
"thiserror/std",
"thiserror",
"rand_core/std",
"std-shims/std",
"borsh?/std",
"transcript/std",
"chacha20/std",
"ciphersuite/std",
"multiexp/std",
"multiexp/batch",
"schnorr/std",
"dleq/std",
"dleq/serialize"
]
borsh = ["dep:borsh"]
tests = ["rand_core/getrandom"]
default = ["std"]


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Copyright (c) 2021-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@@ -1,15 +1,16 @@
# Distributed Key Generation
A crate implementing a type for keys, presumably the result of a distributed
key generation protocol, and utilities from there.
A collection of implementations of various distributed key generation protocols.
This crate used to host implementations of distributed key generation protocols
as well (hence the name). Those have been smashed into their own crates, such
as [`dkg-musig`](https://docs.rs/dkg-musig) and
[`dkg-pedpop`](https://docs.rs/dkg-pedpop).
All included protocols resolve into the provided `Threshold` types, intended to
enable their modularity. Additional utilities around these types, such as
promotion from one generator to another, are also provided.
Before being smashed, this crate was [audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
Currently, the only included protocol is the two-round protocol from the
[FROST paper](https://eprint.iacr.org/2020/852).
This library was
[audited by Cypher Stack in March 2023](https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf),
culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06).
Any subsequent changes have not undergone auditing.
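A minimal sketch of the `Threshold` parameter types the README refers to, using only constructors and accessors that appear elsewhere in this diff (`Participant::new`, `ThresholdParams::new`, and the `t`/`n`/`i` accessors):

use dkg::{Participant, ThresholdParams};

fn params_example() {
  // A 3-of-5 multisig in which we are participant 2 (participants are 1-indexed and non-zero)
  let i = Participant::new(2).unwrap();
  let params = ThresholdParams::new(3, 5, i).unwrap();
  assert_eq!((params.t(), params.n(), params.i()), (3, 5, i));
}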


@@ -1,36 +0,0 @@
[package]
name = "dkg-dealer"
version = "0.6.0"
description = "Produce dkg::ThresholdKeys with a dealer key generation"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/dealer"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.66"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
zeroize = { version = "^1.5", default-features = false }
rand_core = { version = "0.6", default-features = false }
std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false }
dkg = { path = "../", version = "0.6", default-features = false }
[features]
std = [
"zeroize/std",
"rand_core/std",
"std-shims/std",
"ciphersuite/std",
"dkg/std",
]
default = ["std"]


@@ -1,13 +0,0 @@
# Distributed Key Generation - Dealer
This crate implements a dealer key generation protocol for the
[`dkg`](https://docs.rs/dkg) crate's types. This provides a single point of
failure when the key is being generated and is NOT recommended for use outside
of tests.
This crate was originally part of (in some form) the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.


@@ -1,68 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![no_std]
use core::ops::Deref;
use std_shims::{vec::Vec, collections::HashMap};
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use ciphersuite::{
group::ff::{Field, PrimeField},
Ciphersuite,
};
pub use dkg::*;
/// Create a key via a dealer key generation protocol.
pub fn key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
threshold: u16,
participants: u16,
) -> Result<HashMap<Participant, ThresholdKeys<C>>, DkgError> {
let mut coefficients = Vec::with_capacity(usize::from(participants));
// `.max(1)` so we always generate the 0th coefficient which we'll share
for _ in 0 .. threshold.max(1) {
coefficients.push(Zeroizing::new(C::F::random(&mut *rng)));
}
fn polynomial<F: PrimeField + Zeroize>(
coefficients: &[Zeroizing<F>],
l: Participant,
) -> Zeroizing<F> {
let l = F::from(u64::from(u16::from(l)));
// This should never be reached since Participant is explicitly non-zero
assert!(l != F::ZERO, "zero participant passed to polynomial");
let mut share = Zeroizing::new(F::ZERO);
for (idx, coefficient) in coefficients.iter().rev().enumerate() {
*share += coefficient.deref();
if idx != (coefficients.len() - 1) {
*share *= l;
}
}
share
}
let group_key = C::generator() * coefficients[0].deref();
let mut secret_shares = HashMap::with_capacity(participants as usize);
let mut verification_shares = HashMap::with_capacity(participants as usize);
for i in 1 ..= participants {
let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index");
let secret_share = polynomial(&coefficients, i);
secret_shares.insert(i, secret_share.clone());
verification_shares.insert(i, C::generator() * *secret_share);
}
let mut res = HashMap::with_capacity(participants as usize);
for (i, secret_share) in secret_shares {
let keys = ThresholdKeys::new(
ThresholdParams::new(threshold, participants, i)?,
Interpolation::Lagrange,
secret_share,
verification_shares.clone(),
)?;
debug_assert_eq!(keys.group_key(), group_key);
res.insert(i, keys);
}
Ok(res)
}
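A short usage sketch of `key_gen` above (test-only, as its README warns), assuming the `Ristretto` ciphersuite from `dalek-ff-group`, as the other test files in this diff use:

use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use dkg_dealer::key_gen;

fn dealer_example() {
  // Deal a 3-of-5 set of ThresholdKeys from a single dealer (a single point of failure)
  let keys = key_gen::<_, Ristretto>(&mut OsRng, 3, 5).unwrap();
  assert_eq!(keys.len(), 5);
  // Every participant's keys agree on the group key
  let group_key = keys.values().next().unwrap().group_key();
  assert!(keys.values().all(|keys| keys.group_key() == group_key));
}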


@@ -1,49 +0,0 @@
[package]
name = "dkg-musig"
version = "0.6.0"
description = "The MuSig key aggregation protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/musig"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
std-shims = { version = "0.1", path = "../../../common/std-shims", default-features = false }
multiexp = { path = "../../multiexp", version = "0.4", default-features = false }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false }
dkg = { path = "../", version = "0.6", default-features = false }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
dalek-ff-group = { path = "../../dalek-ff-group" }
dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] }
[features]
std = [
"thiserror/std",
"rand_core/std",
"std-shims/std",
"multiexp/std",
"ciphersuite/std",
"dkg/std",
]
default = ["std"]


@@ -1,12 +0,0 @@
# Distributed Key Generation - MuSig
This implements the MuSig key aggregation protocol for the
[`dkg`](https://docs.rs/dkg) crate's types.
This crate was originally part of (in some form) the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.


@@ -1,162 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
use core::ops::Deref;
use std_shims::{
vec,
vec::Vec,
collections::{HashSet, HashMap},
};
use zeroize::Zeroizing;
use ciphersuite::{group::GroupEncoding, Ciphersuite};
pub use dkg::*;
#[cfg(test)]
mod tests;
/// Errors encountered when working with threshold keys.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum MusigError<C: Ciphersuite> {
/// No keys were provided.
#[error("no keys provided")]
NoKeysProvided,
/// Too many keys were provided.
#[error("too many keys (allowed {max}, provided {provided})")]
TooManyKeysProvided {
/// The maximum amount of keys allowed.
max: u16,
/// The amount of keys provided.
provided: usize,
},
/// A participant was duplicated.
#[error("a participant was duplicated")]
DuplicatedParticipant(C::G),
/// Participating, yet our public key wasn't found in the list of keys.
#[error("private key's public key wasn't present in the list of public keys")]
NotPresent,
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
fn check_keys<C: Ciphersuite>(keys: &[C::G]) -> Result<u16, MusigError<C>> {
if keys.is_empty() {
Err(MusigError::NoKeysProvided)?;
}
let keys_len = u16::try_from(keys.len())
.map_err(|_| MusigError::TooManyKeysProvided { max: u16::MAX, provided: keys.len() })?;
let mut set = HashSet::with_capacity(keys.len());
for key in keys {
let bytes = key.to_bytes().as_ref().to_vec();
if !set.insert(bytes) {
Err(MusigError::DuplicatedParticipant(*key))?;
}
}
Ok(keys_len)
}
fn binding_factor_transcript<C: Ciphersuite>(
context: [u8; 32],
keys_len: u16,
keys: &[C::G],
) -> Vec<u8> {
debug_assert_eq!(usize::from(keys_len), keys.len());
let mut transcript = vec![];
transcript.extend(&context);
transcript.extend(keys_len.to_le_bytes());
for key in keys {
transcript.extend(key.to_bytes().as_ref());
}
transcript
}
fn binding_factor<C: Ciphersuite>(mut transcript: Vec<u8>, i: u16) -> C::F {
transcript.extend(i.to_le_bytes());
C::hash_to_F(b"dkg-musig", &transcript)
}
#[allow(clippy::type_complexity)]
fn musig_key_multiexp<C: Ciphersuite>(
context: [u8; 32],
keys: &[C::G],
) -> Result<Vec<(C::F, C::G)>, MusigError<C>> {
let keys_len = check_keys::<C>(keys)?;
let transcript = binding_factor_transcript::<C>(context, keys_len, keys);
let mut multiexp = Vec::with_capacity(keys.len());
for i in 1 ..= keys_len {
multiexp.push((binding_factor::<C>(transcript.clone(), i), keys[usize::from(i - 1)]));
}
Ok(multiexp)
}
/// The group key resulting from using this library's MuSig key aggregation.
///
/// This function executes in variable time and MUST NOT be used with secret data.
pub fn musig_key_vartime<C: Ciphersuite>(
context: [u8; 32],
keys: &[C::G],
) -> Result<C::G, MusigError<C>> {
Ok(multiexp::multiexp_vartime(&musig_key_multiexp(context, keys)?))
}
/// The group key resulting from using this library's MuSig key aggregation.
pub fn musig_key<C: Ciphersuite>(context: [u8; 32], keys: &[C::G]) -> Result<C::G, MusigError<C>> {
Ok(multiexp::multiexp(&musig_key_multiexp(context, keys)?))
}
/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key.
pub fn musig<C: Ciphersuite>(
context: [u8; 32],
private_key: Zeroizing<C::F>,
keys: &[C::G],
) -> Result<ThresholdKeys<C>, MusigError<C>> {
let our_pub_key = C::generator() * private_key.deref();
let Some(our_i) = keys.iter().position(|key| *key == our_pub_key) else {
Err(MusigError::DkgError(DkgError::NotParticipating))?
};
let keys_len: u16 = check_keys::<C>(keys)?;
let params = ThresholdParams::new(
keys_len,
keys_len,
// The `+ 1` won't fail as `keys.len() <= u16::MAX`, so any index is `< u16::MAX`
Participant::new(
u16::try_from(our_i).expect("keys.len() <= u16::MAX yet index of keys > u16::MAX?") + 1,
)
.expect("i + 1 != 0"),
)
.map_err(MusigError::DkgError)?;
let transcript = binding_factor_transcript::<C>(context, keys_len, keys);
let mut binding_factors = Vec::with_capacity(keys.len());
let mut multiexp = Vec::with_capacity(keys.len());
let mut verification_shares = HashMap::with_capacity(keys.len());
for (i, key) in (1 ..= keys_len).zip(keys.iter().copied()) {
let binding_factor = binding_factor::<C>(transcript.clone(), i);
binding_factors.push(binding_factor);
multiexp.push((binding_factor, key));
let i = Participant::new(i).expect("non-zero u16 wasn't a valid Participant index?");
verification_shares.insert(i, key);
}
let group_key = multiexp::multiexp(&multiexp);
debug_assert_eq!(our_pub_key, verification_shares[&params.i()]);
debug_assert_eq!(musig_key_vartime::<C>(context, keys), Ok(group_key));
ThresholdKeys::new(
params,
Interpolation::Constant(binding_factors),
private_key,
verification_shares,
)
.map_err(MusigError::DkgError)
}
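Restated as math, matching the code above: with the transcript T = context ‖ len(keys) ‖ K_1 ‖ … ‖ K_n and binding factors b_i = hash_to_F("dkg-musig", T ‖ i), the aggregated key is K = b_1·K_1 + … + b_n·K_n. Each participant's verification share is their raw key K_i, with the b_i weights handed to `ThresholdKeys::new` via `Interpolation::Constant`.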


@@ -1,71 +0,0 @@
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use ciphersuite::{group::ff::Field, Ciphersuite};
use dkg_recovery::recover_key;
use crate::*;
/// Tests MuSig key generation.
#[test]
pub fn test_musig() {
const PARTICIPANTS: u16 = 5;
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(<Ristretto as Ciphersuite>::F::random(&mut OsRng));
pub_keys.push(<Ristretto as Ciphersuite>::generator() * *key);
keys.push(key);
}
const CONTEXT: [u8; 32] = *b"MuSig Test ";
// Empty signing set
musig::<Ristretto>(CONTEXT, Zeroizing::new(<Ristretto as Ciphersuite>::F::ZERO), &[])
.unwrap_err();
// Signing set we're not part of
musig::<Ristretto>(
CONTEXT,
Zeroizing::new(<Ristretto as Ciphersuite>::F::ZERO),
&[<Ristretto as Ciphersuite>::generator()],
)
.unwrap_err();
// Test with n keys
{
let mut created_keys = HashMap::new();
let mut verification_shares = HashMap::new();
let group_key = musig_key::<Ristretto>(CONTEXT, &pub_keys).unwrap();
for (i, key) in keys.iter().enumerate() {
let these_keys = musig::<Ristretto>(CONTEXT, key.clone(), &pub_keys).unwrap();
assert_eq!(these_keys.params().t(), PARTICIPANTS);
assert_eq!(these_keys.params().n(), PARTICIPANTS);
assert_eq!(usize::from(u16::from(these_keys.params().i())), i + 1);
verification_shares.insert(
these_keys.params().i(),
<Ristretto as Ciphersuite>::generator() * **these_keys.original_secret_share(),
);
assert_eq!(these_keys.group_key(), group_key);
created_keys.insert(these_keys.params().i(), these_keys);
}
for keys in created_keys.values() {
for (l, verification_share) in &verification_shares {
assert_eq!(keys.original_verification_share(*l), *verification_share);
}
}
assert_eq!(
<Ristretto as Ciphersuite>::generator() *
*recover_key(&created_keys.values().cloned().collect::<Vec<_>>()).unwrap(),
group_key
);
}
}


@@ -1,37 +0,0 @@
[package]
name = "dkg-pedpop"
version = "0.6.0"
description = "The PedPoP distributed key generation protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/pedpop"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false, features = ["std"] }
zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.3", default-features = false, features = ["std", "recommended"] }
chacha20 = { version = "0.9", default-features = false, features = ["std", "zeroize"] }
multiexp = { path = "../../multiexp", version = "0.4", default-features = false, features = ["std"] }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] }
schnorr = { package = "schnorr-signatures", path = "../../schnorr", version = "^0.5.1", default-features = false, features = ["std"] }
dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] }
dkg = { path = "../", version = "0.6", default-features = false, features = ["std"] }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
dalek-ff-group = { path = "../../dalek-ff-group", default-features = false }


@@ -1,12 +0,0 @@
# Distributed Key Generation - PedPoP
This implements the PedPoP distributed key generation protocol for the
[`dkg`](https://docs.rs/dkg) crate's types.
This crate was originally part of the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.


@@ -1,346 +0,0 @@
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng, OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::Ciphersuite;
use crate::*;
const THRESHOLD: u16 = 3;
const PARTICIPANTS: u16 = 5;
/// Clone a map without a specific value.
fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
type PedPoPEncryptedMessage<C> = EncryptedMessage<C, SecretShare<<C as Ciphersuite>::F>>;
type PedPoPSecretShares<C> = HashMap<Participant, PedPoPEncryptedMessage<C>>;
const CONTEXT: [u8; 32] = *b"DKG Test Key Generation ";
// Commit, then return commitment messages, enc keys, and shares
#[allow(clippy::type_complexity)]
fn commit_enc_keys_and_shares<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> (
HashMap<Participant, KeyMachine<C>>,
HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
HashMap<Participant, C::G>,
HashMap<Participant, PedPoPSecretShares<C>>,
) {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
let mut enc_keys = HashMap::new();
for i in (1 ..= PARTICIPANTS).map(|i| Participant::new(i).unwrap()) {
let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap();
let machine = KeyGenMachine::<C>::new(params, CONTEXT);
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(
i,
EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params)
.unwrap(),
);
enc_keys.insert(i, commitments[&i].enc_key());
}
let mut secret_shares = HashMap::new();
let machines = machines
.drain()
.map(|(l, machine)| {
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
(
l,
EncryptedMessage::read::<&[u8]>(
&mut share.serialize().as_ref(),
// Only t/n actually matters, so hardcode i to 1 here
ThresholdParams::new(THRESHOLD, PARTICIPANTS, Participant::new(1).unwrap()).unwrap(),
)
.unwrap(),
)
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
(machines, commitments, enc_keys, secret_shares)
}
fn generate_secret_shares<C: Ciphersuite>(
shares: &HashMap<Participant, PedPoPSecretShares<C>>,
recipient: Participant,
) -> PedPoPSecretShares<C> {
let mut our_secret_shares = HashMap::new();
for (i, shares) in shares {
if recipient == *i {
continue;
}
our_secret_shares.insert(*i, shares[&recipient].clone());
}
our_secret_shares
}
/// Fully perform the PedPoP key generation algorithm.
fn pedpop_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng);
let mut verification_shares = None;
let mut group_key = None;
machines
.drain()
.map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(
these_keys
.params()
.all_participant_indexes()
.map(|i| (i, these_keys.original_verification_share(i)))
.collect::<HashMap<_, _>>(),
);
}
assert_eq!(
verification_shares.as_ref().unwrap(),
&these_keys
.params()
.all_participant_indexes()
.map(|i| (i, these_keys.original_verification_share(i)))
.collect::<HashMap<_, _>>()
);
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
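// Summary of the two-round flow exercised above: round one broadcasts
// `EncryptionKeyMessage<C, Commitments<C>>` (coefficient commitments plus an
// encryption key), round two sends each counterparty an
// `EncryptedMessage<C, SecretShare<C::F>>`, and `calculate_share(..).complete()`
// yields `ThresholdKeys` agreeing on a single group key. On a bad share,
// `calculate_share` errors with `PedPoPError::InvalidShare`, and the blame
// machinery tested below assigns fault to a specific participant.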
const ONE: Participant = Participant::new(1).unwrap();
const TWO: Participant = Participant::new(2).unwrap();
#[test]
fn test_pedpop() {
let _ = core::hint::black_box(pedpop_gen::<_, Ristretto>(&mut OsRng));
}
fn test_blame(
commitment_msgs: &HashMap<Participant, EncryptionKeyMessage<Ristretto, Commitments<Ristretto>>>,
machines: Vec<BlameMachine<Ristretto>>,
msg: &PedPoPEncryptedMessage<Ristretto>,
blame: &Option<EncryptionKeyProof<Ristretto>>,
) {
for machine in machines {
let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone());
assert_eq!(blamed, ONE);
// Verify additional blame also works
assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE);
// Verify machines constructed with AdditionalBlameMachine::new work
assert_eq!(
AdditionalBlameMachine::new(CONTEXT, PARTICIPANTS, commitment_msgs.clone()).unwrap().blame(
ONE,
TWO,
msg.clone(),
blame.clone()
),
ONE,
);
}
}
// TODO: Write a macro which expands to the following
#[test]
fn invalid_encryption_pop_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the PoP of the encrypted message from 1 to 2
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop();
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
assert_eq!(
machine.err(),
Some(PedPoPError::InvalidShare { participant: ONE, blame: None })
);
// Explicitly declare we have a blame object, which happens to be None since invalid PoP
// is self-explanatory
blame = Some(None);
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_ecdh_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the share to trigger a blame event
// Mutates the share from 2 to 1, as 1 is expected to end up blamed for test_blame to pass
// Here, 2 is the malicious party, so that 1 is the one who creates the blame proof
// We then malleate 1's blame proof, so 1 ends up blamed
// Doesn't simply invalidate the PoP as that won't have a blame statement
// By mutating the encrypted data, we do ensure a blame statement is created
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_key();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
// This should be largely equivalent to the prior test
#[test]
fn invalid_dleq_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_serialization_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_value_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(PedPoPError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}


@@ -1,34 +0,0 @@
[package]
name = "dkg-promote"
version = "0.6.1"
description = "Promotions for keys from the dkg crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/promote"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.80"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
thiserror = { version = "2", default-features = false, features = ["std"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../transcript", version = "^0.3.2", default-features = false, features = ["std", "recommended"] }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false, features = ["std"] }
dleq = { path = "../../dleq", version = "^0.4.1", default-features = false, features = ["std", "serialize"] }
dkg = { path = "../", version = "0.6.1", default-features = false, features = ["std"] }
[dev-dependencies]
zeroize = { version = "^1.5", default-features = false, features = ["std", "zeroize_derive"] }
rand_core = { version = "0.6", default-features = false, features = ["getrandom"] }
dalek-ff-group = { path = "../../dalek-ff-group" }
dkg-recovery = { path = "../recovery", default-features = false, features = ["std"] }


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,13 +0,0 @@
# Distributed Key Generation - Promote
This crate implements 'promotions' for keys from the
[`dkg`](https://docs.rs/dkg) crate. A promotion takes a set of keys and maps it
to a different `Ciphersuite`.
This crate was originally part of the `dkg` crate, which was
[audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit
[669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.


@@ -1,113 +0,0 @@
use core::marker::PhantomData;
use std::collections::HashMap;
use zeroize::{Zeroize, Zeroizing};
use rand_core::OsRng;
use dalek_ff_group::Ristretto;
use ciphersuite::{
group::{ff::Field, Group},
Ciphersuite,
};
use dkg::*;
use dkg_recovery::recover_key;
use crate::{GeneratorPromotion, GeneratorProof};
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Ciphersuite> {
_curve: PhantomData<C>,
}
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
type F = C::F;
type G = C::G;
type H = C::H;
const ID: &'static [u8] = b"Alternate Ciphersuite";
fn generator() -> Self::G {
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
<C as Ciphersuite>::hash_to_F(dst, data)
}
}
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
// Test promotion of threshold keys to another generator
#[test]
fn test_generator_promotion() {
// Generate a set of `ThresholdKeys`
const PARTICIPANTS: u16 = 5;
let keys: [ThresholdKeys<_>; PARTICIPANTS as usize] = {
let shares: [<Ristretto as Ciphersuite>::F; PARTICIPANTS as usize] =
core::array::from_fn(|_| <Ristretto as Ciphersuite>::F::random(&mut OsRng));
let verification_shares = (0 .. PARTICIPANTS)
.map(|i| {
(
Participant::new(i + 1).unwrap(),
<Ristretto as Ciphersuite>::generator() * shares[usize::from(i)],
)
})
.collect::<HashMap<_, _>>();
core::array::from_fn(|i| {
ThresholdKeys::new(
ThresholdParams::new(
PARTICIPANTS,
PARTICIPANTS,
Participant::new(u16::try_from(i + 1).unwrap()).unwrap(),
)
.unwrap(),
Interpolation::Constant(vec![<Ristretto as Ciphersuite>::F::ONE; PARTICIPANTS as usize]),
Zeroizing::new(shares[i]),
verification_shares.clone(),
)
.unwrap()
})
};
// Perform the promotion
let mut promotions = HashMap::new();
let mut proofs = HashMap::new();
for keys in &keys {
let i = keys.params().i();
let (promotion, proof) =
GeneratorPromotion::<_, AltGenerator<Ristretto>>::promote(&mut OsRng, keys.clone());
promotions.insert(i, promotion);
proofs.insert(
i,
GeneratorProof::<Ristretto>::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap(),
);
}
// Complete the promotion, and verify it worked
let new_group_key = AltGenerator::<Ristretto>::generator() * *recover_key(&keys).unwrap();
for (i, promoting) in promotions.drain() {
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
assert_eq!(keys[usize::from(u16::from(i) - 1)].params(), promoted.params());
assert_eq!(
keys[usize::from(u16::from(i) - 1)].original_secret_share(),
promoted.original_secret_share()
);
assert_eq!(new_group_key, promoted.group_key());
for l in 0 .. PARTICIPANTS {
let verification_share =
promoted.original_verification_share(Participant::new(l + 1).unwrap());
assert_eq!(
AltGenerator::<Ristretto>::generator() * **keys[usize::from(l)].original_secret_share(),
verification_share
);
}
}
}
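In other words, promotion re-targets the same secret sharing at a new generator: the scalar shares are unchanged, so the promoted group key is simply the new generator times the original joint secret, and each verification share becomes the new generator times that participant's original secret share, which is exactly what the assertions above check.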


@@ -1,34 +0,0 @@
[package]
name = "dkg-recovery"
version = "0.6.0"
description = "Recover a secret-shared key from a collection of dkg::ThresholdKeys"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dkg/recovery"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["dkg", "multisig", "threshold", "ff", "group"]
edition = "2021"
rust-version = "1.66"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
zeroize = { version = "^1.5", default-features = false }
thiserror = { version = "2", default-features = false }
ciphersuite = { path = "../../ciphersuite", version = "^0.4.1", default-features = false }
dkg = { path = "../", version = "0.6", default-features = false }
[features]
std = [
"zeroize/std",
"thiserror/std",
"ciphersuite/std",
"dkg/std",
]
default = ["std"]


@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021-2025 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,14 +0,0 @@
# Distributed Key Generation - Recovery
A utility function to recover a key from its secret shares.
Keys likely SHOULD NOT ever be recovered, making this primarily intended for
testing purposes. Instead, the shares of the key should be used to produce
shares for the desired action, allowing use of the key while never
reconstructing it.
Before being smashed, this crate was [audited by Cypher Stack in March 2023](
https://github.com/serai-dex/serai/raw/e1bb2c191b7123fd260d008e31656d090d559d21/audits/Cypher%20Stack%20crypto%20March%202023/Audit.pdf
), culminating in commit [669d2dbffc1dafb82a09d9419ea182667115df06](
https://github.com/serai-dex/serai/tree/669d2dbffc1dafb82a09d9419ea182667115df06
). Any subsequent changes have not undergone auditing.
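For reference, the recombination this crate performs is the standard Shamir one: given shares x_i held at indices i in a qualifying set S, the joint secret is x = Σ_{i∈S} λ_i·x_i, with the Lagrange coefficients (evaluated at zero) λ_i = Π_{j∈S, j≠i} j / (j − i); in the code below, applying the λ_i is delegated to `ThresholdKeys::view` before the shares are summed.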


@@ -1,85 +0,0 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
#![no_std]
use core::ops::{Deref, DerefMut};
extern crate alloc;
use alloc::vec::Vec;
use zeroize::Zeroizing;
use ciphersuite::Ciphersuite;
pub use dkg::*;
/// Errors encountered when recovering a secret-shared key from a collection of
/// `dkg::ThresholdKeys`.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum RecoveryError {
/// No keys were provided.
#[error("no keys provided")]
NoKeysProvided,
/// Not enough keys were provided.
#[error("not enough keys provided (threshold required {required}, provided {provided})")]
NotEnoughKeysProvided { required: u16, provided: usize },
/// The keys had inconsistent parameters.
#[error("keys had inconsistent parameters")]
InconsistentParameters,
/// The keys are from distinct secret-sharing sessions or otherwise corrupt.
#[error("recovery failed")]
Failure,
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
/// Recover a shared secret from a collection of `dkg::ThresholdKeys`.
pub fn recover_key<C: Ciphersuite>(
keys: &[ThresholdKeys<C>],
) -> Result<Zeroizing<C::F>, RecoveryError> {
let included = keys.iter().map(|keys| keys.params().i()).collect::<Vec<_>>();
let keys_len = keys.len();
let mut keys = keys.iter();
let first_keys = keys.next().ok_or(RecoveryError::NoKeysProvided)?;
{
let t = first_keys.params().t();
if keys_len < usize::from(t) {
Err(RecoveryError::NotEnoughKeysProvided { required: t, provided: keys_len })?;
}
}
{
let first_params = (
first_keys.params().t(),
first_keys.params().n(),
first_keys.group_key(),
first_keys.current_scalar(),
first_keys.current_offset(),
);
for keys in keys.clone() {
let params = (
keys.params().t(),
keys.params().n(),
keys.group_key(),
keys.current_scalar(),
keys.current_offset(),
);
if params != first_params {
Err(RecoveryError::InconsistentParameters)?;
}
}
}
let mut res: Zeroizing<_> =
first_keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().clone();
for keys in keys {
*res.deref_mut() +=
keys.view(included.clone()).map_err(RecoveryError::DkgError)?.secret_share().deref();
}
if (C::generator() * res.deref()) != first_keys.group_key() {
Err(RecoveryError::Failure)?;
}
Ok(res)
}
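A brief usage sketch mirroring how the MuSig and promotion tests in this diff call `recover_key`, assuming a slice of at least `t` matching `ThresholdKeys`:

use ciphersuite::Ciphersuite;
use dkg_recovery::{recover_key, ThresholdKeys};

fn recover_example<C: Ciphersuite>(keys: &[ThresholdKeys<C>]) {
  let secret = recover_key::<C>(keys).unwrap();
  // The recovered scalar reproduces the shared group key
  assert_eq!(C::generator() * *secret, keys[0].group_key());
}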


@@ -21,7 +21,7 @@ use multiexp::BatchVerifier;
use schnorr::SchnorrSignature;
use dleq::DLEqProof;
use dkg::{Participant, ThresholdParams};
use crate::{Participant, ThresholdParams};
mod sealed {
use super::*;
@@ -69,7 +69,7 @@ impl<C: Ciphersuite, M: Message> EncryptionKeyMessage<C, M> {
buf
}
#[cfg(test)]
#[cfg(any(test, feature = "tests"))]
pub(crate) fn enc_key(&self) -> C::G {
self.enc_key
}
@@ -98,11 +98,11 @@ fn ecdh<C: Ciphersuite>(private: &Zeroizing<C::F>, public: C::G) -> Zeroizing<C:
// Each ecdh must be distinct. Reuse of an ecdh for multiple ciphers will cause the messages to be
// leaked.
fn cipher<C: Ciphersuite>(context: [u8; 32], ecdh: &Zeroizing<C::G>) -> ChaCha20 {
fn cipher<C: Ciphersuite>(context: &str, ecdh: &Zeroizing<C::G>) -> ChaCha20 {
// Ideally, we'd box this transcript with ZAlloc, yet that's only possible on nightly
// TODO: https://github.com/serai-dex/serai/issues/151
let mut transcript = RecommendedTranscript::new(b"DKG Encryption v0.2");
transcript.append_message(b"context", context);
transcript.append_message(b"context", context.as_bytes());
transcript.domain_separate(b"encryption_key");
@@ -134,7 +134,7 @@ fn cipher<C: Ciphersuite>(context: [u8; 32], ecdh: &Zeroizing<C::G>) -> ChaCha20
fn encrypt<R: RngCore + CryptoRng, C: Ciphersuite, E: Encryptable>(
rng: &mut R,
context: [u8; 32],
context: &str,
from: Participant,
to: C::G,
mut msg: Zeroizing<E>,
@@ -197,7 +197,7 @@ impl<C: Ciphersuite, E: Encryptable> EncryptedMessage<C, E> {
pub(crate) fn invalidate_msg<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
context: [u8; 32],
context: &str,
from: Participant,
) {
// Invalidate the message by specifying a new key/Schnorr PoP
@@ -219,7 +219,7 @@ impl<C: Ciphersuite, E: Encryptable> EncryptedMessage<C, E> {
pub(crate) fn invalidate_share_serialization<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
context: [u8; 32],
context: &str,
from: Participant,
to: C::G,
) {
@@ -243,7 +243,7 @@ impl<C: Ciphersuite, E: Encryptable> EncryptedMessage<C, E> {
pub(crate) fn invalidate_share_value<R: RngCore + CryptoRng>(
&mut self,
rng: &mut R,
context: [u8; 32],
context: &str,
from: Participant,
to: C::G,
) {
@@ -300,14 +300,14 @@ impl<C: Ciphersuite> EncryptionKeyProof<C> {
// This still doesn't mean the DKG offers an authenticated channel. The per-message keys have no
// root of trust other than their existence in the assumed-to-exist external authenticated channel.
fn pop_challenge<C: Ciphersuite>(
context: [u8; 32],
context: &str,
nonce: C::G,
key: C::G,
sender: Participant,
msg: &[u8],
) -> C::F {
let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Proof of Possession v0.2");
transcript.append_message(b"context", context);
transcript.append_message(b"context", context.as_bytes());
transcript.domain_separate(b"proof_of_possession");
@@ -323,9 +323,9 @@ fn pop_challenge<C: Ciphersuite>(
C::hash_to_F(b"DKG-encryption-proof_of_possession", &transcript.challenge(b"schnorr"))
}
fn encryption_key_transcript(context: [u8; 32]) -> RecommendedTranscript {
fn encryption_key_transcript(context: &str) -> RecommendedTranscript {
let mut transcript = RecommendedTranscript::new(b"DKG Encryption Key Correctness Proof v0.2");
transcript.append_message(b"context", context);
transcript.append_message(b"context", context.as_bytes());
transcript
}
@@ -337,17 +337,58 @@ pub(crate) enum DecryptionError {
InvalidProof,
}
// A simple box for managing decryption.
#[derive(Clone, Debug)]
pub(crate) struct Decryption<C: Ciphersuite> {
context: [u8; 32],
// A simple box for managing encryption.
#[derive(Clone)]
pub(crate) struct Encryption<C: Ciphersuite> {
context: String,
i: Option<Participant>,
enc_key: Zeroizing<C::F>,
enc_pub_key: C::G,
enc_keys: HashMap<Participant, C::G>,
}
impl<C: Ciphersuite> Decryption<C> {
pub(crate) fn new(context: [u8; 32]) -> Self {
Self { context, enc_keys: HashMap::new() }
impl<C: Ciphersuite> fmt::Debug for Encryption<C> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt
.debug_struct("Encryption")
.field("context", &self.context)
.field("i", &self.i)
.field("enc_pub_key", &self.enc_pub_key)
.field("enc_keys", &self.enc_keys)
.finish_non_exhaustive()
}
}
impl<C: Ciphersuite> Zeroize for Encryption<C> {
fn zeroize(&mut self) {
self.enc_key.zeroize();
self.enc_pub_key.zeroize();
for (_, mut value) in self.enc_keys.drain() {
value.zeroize();
}
}
}
impl<C: Ciphersuite> Encryption<C> {
pub(crate) fn new<R: RngCore + CryptoRng>(
context: String,
i: Option<Participant>,
rng: &mut R,
) -> Self {
let enc_key = Zeroizing::new(C::random_nonzero_F(rng));
Self {
context,
i,
enc_pub_key: C::generator() * enc_key.deref(),
enc_key,
enc_keys: HashMap::new(),
}
}
pub(crate) fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
EncryptionKeyMessage { msg, enc_key: self.enc_pub_key }
}
pub(crate) fn register<M: Message>(
&mut self,
participant: Participant,
@@ -361,109 +402,13 @@ impl<C: Ciphersuite> Decryption<C> {
msg.msg
}
// Given a message, and the intended decryptor, and a proof for its key, decrypt the message.
// Returns None if the key was wrong.
pub(crate) fn decrypt_with_proof<E: Encryptable>(
&self,
from: Participant,
decryptor: Participant,
mut msg: EncryptedMessage<C, E>,
// There's no encryption key proof if the accusation is of an invalid signature
proof: Option<EncryptionKeyProof<C>>,
) -> Result<Zeroizing<E>, DecryptionError> {
if !msg.pop.verify(
msg.key,
pop_challenge::<C>(self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
) {
Err(DecryptionError::InvalidSignature)?;
}
if let Some(proof) = proof {
// Verify this is the decryption key for this message
proof
.dleq
.verify(
&mut encryption_key_transcript(self.context),
&[C::generator(), msg.key],
&[self.enc_keys[&decryptor], *proof.key],
)
.map_err(|_| DecryptionError::InvalidProof)?;
cipher::<C>(self.context, &proof.key).apply_keystream(msg.msg.as_mut().as_mut());
Ok(msg.msg)
} else {
Err(DecryptionError::InvalidProof)
}
}
}
// A simple box for managing encryption.
#[derive(Clone)]
pub(crate) struct Encryption<C: Ciphersuite> {
context: [u8; 32],
i: Participant,
enc_key: Zeroizing<C::F>,
enc_pub_key: C::G,
decryption: Decryption<C>,
}
impl<C: Ciphersuite> fmt::Debug for Encryption<C> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt
.debug_struct("Encryption")
.field("context", &self.context)
.field("i", &self.i)
.field("enc_pub_key", &self.enc_pub_key)
.field("decryption", &self.decryption)
.finish_non_exhaustive()
}
}
impl<C: Ciphersuite> Zeroize for Encryption<C> {
fn zeroize(&mut self) {
self.enc_key.zeroize();
self.enc_pub_key.zeroize();
for (_, mut value) in self.decryption.enc_keys.drain() {
value.zeroize();
}
}
}
impl<C: Ciphersuite> Encryption<C> {
pub(crate) fn new<R: RngCore + CryptoRng>(
context: [u8; 32],
i: Participant,
rng: &mut R,
) -> Self {
let enc_key = Zeroizing::new(C::random_nonzero_F(rng));
Self {
context,
i,
enc_pub_key: C::generator() * enc_key.deref(),
enc_key,
decryption: Decryption::new(context),
}
}
pub(crate) fn registration<M: Message>(&self, msg: M) -> EncryptionKeyMessage<C, M> {
EncryptionKeyMessage { msg, enc_key: self.enc_pub_key }
}
pub(crate) fn register<M: Message>(
&mut self,
participant: Participant,
msg: EncryptionKeyMessage<C, M>,
) -> M {
self.decryption.register(participant, msg)
}
pub(crate) fn encrypt<R: RngCore + CryptoRng, E: Encryptable>(
&self,
rng: &mut R,
participant: Participant,
msg: Zeroizing<E>,
) -> EncryptedMessage<C, E> {
encrypt(rng, self.context, self.i, self.decryption.enc_keys[&participant], msg)
encrypt(rng, &self.context, self.i.unwrap(), self.enc_keys[&participant], msg)
}
pub(crate) fn decrypt<R: RngCore + CryptoRng, I: Copy + Zeroize, E: Encryptable>(
@@ -481,18 +426,18 @@ impl<C: Ciphersuite> Encryption<C> {
batch,
batch_id,
msg.key,
pop_challenge::<C>(self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
pop_challenge::<C>(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
);
let key = ecdh::<C>(&self.enc_key, msg.key);
cipher::<C>(self.context, &key).apply_keystream(msg.msg.as_mut().as_mut());
cipher::<C>(&self.context, &key).apply_keystream(msg.msg.as_mut().as_mut());
(
msg.msg,
EncryptionKeyProof {
key,
dleq: DLEqProof::prove(
rng,
&mut encryption_key_transcript(self.context),
&mut encryption_key_transcript(&self.context),
&[C::generator(), msg.key],
&self.enc_key,
),
@@ -500,7 +445,38 @@ impl<C: Ciphersuite> Encryption<C> {
)
}
pub(crate) fn into_decryption(self) -> Decryption<C> {
self.decryption
// Given a message, and the intended decryptor, and a proof for its key, decrypt the message.
// Returns None if the key was wrong.
pub(crate) fn decrypt_with_proof<E: Encryptable>(
&self,
from: Participant,
decryptor: Participant,
mut msg: EncryptedMessage<C, E>,
// There's no encryption key proof if the accusation is of an invalid signature
proof: Option<EncryptionKeyProof<C>>,
) -> Result<Zeroizing<E>, DecryptionError> {
if !msg.pop.verify(
msg.key,
pop_challenge::<C>(&self.context, msg.pop.R, msg.key, from, msg.msg.deref().as_ref()),
) {
Err(DecryptionError::InvalidSignature)?;
}
if let Some(proof) = proof {
// Verify this is the decryption key for this message
proof
.dleq
.verify(
&mut encryption_key_transcript(&self.context),
&[C::generator(), msg.key],
&[self.enc_keys[&decryptor], *proof.key],
)
.map_err(|_| DecryptionError::InvalidProof)?;
cipher::<C>(&self.context, &proof.key).apply_keystream(msg.msg.as_mut().as_mut());
Ok(msg.msg)
} else {
Err(DecryptionError::InvalidProof)
}
}
}
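Taken together, the scheme above encrypts each secret share under a per-message key: the sender samples a fresh key, performs an ECDH against the recipient's registered encryption key, derives a ChaCha20 keystream from a transcript bound to the context, and attaches a Schnorr proof of possession over the ciphertext. On an accusation, the recipient reveals the per-message ECDH result alongside a DLEq proof that it was honestly derived from their registered encryption key, letting a third party (via `decrypt_with_proof`) decrypt and judge the disputed share without learning the recipient's long-term encryption key.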

File diff suppressed because it is too large.

crypto/dkg/src/musig.rs (new file, 141 lines)

@@ -0,0 +1,141 @@
#[cfg(feature = "std")]
use core::ops::Deref;
use std_shims::{vec, vec::Vec, collections::HashSet};
#[cfg(feature = "std")]
use std_shims::collections::HashMap;
#[cfg(feature = "std")]
use zeroize::Zeroizing;
#[cfg(feature = "std")]
use ciphersuite::group::ff::Field;
use ciphersuite::{
group::{Group, GroupEncoding},
Ciphersuite,
};
use crate::DkgError;
#[cfg(feature = "std")]
use crate::{Participant, ThresholdParams, ThresholdCore, lagrange};
fn check_keys<C: Ciphersuite>(keys: &[C::G]) -> Result<u16, DkgError<()>> {
if keys.is_empty() {
Err(DkgError::InvalidSigningSet)?;
}
// Too many signers
let keys_len = u16::try_from(keys.len()).map_err(|_| DkgError::InvalidSigningSet)?;
// Duplicated public keys
if keys.iter().map(|key| key.to_bytes().as_ref().to_vec()).collect::<HashSet<_>>().len() !=
keys.len()
{
Err(DkgError::InvalidSigningSet)?;
}
Ok(keys_len)
}
// This function panics if called with keys whose length exceeds u16::MAX.
// This is fine since it's internal and all calls occur after calling check_keys, which does check
// the keys' length.
fn binding_factor_transcript<C: Ciphersuite>(
context: &[u8],
keys: &[C::G],
) -> Result<Vec<u8>, DkgError<()>> {
let mut transcript = vec![];
transcript.push(u8::try_from(context.len()).map_err(|_| DkgError::InvalidSigningSet)?);
transcript.extend(context);
transcript.extend(u16::try_from(keys.len()).unwrap().to_le_bytes());
for key in keys {
transcript.extend(key.to_bytes().as_ref());
}
Ok(transcript)
}
fn binding_factor<C: Ciphersuite>(mut transcript: Vec<u8>, i: u16) -> C::F {
transcript.extend(i.to_le_bytes());
C::hash_to_F(b"musig", &transcript)
}
/// The group key resulting from using this library's MuSig key gen.
///
/// This function will return an error if the context is longer than 255 bytes.
///
/// Creating an aggregate key with a list containing duplicated public keys will return an error.
pub fn musig_key<C: Ciphersuite>(context: &[u8], keys: &[C::G]) -> Result<C::G, DkgError<()>> {
let keys_len = check_keys::<C>(keys)?;
let transcript = binding_factor_transcript::<C>(context, keys)?;
let mut res = C::G::identity();
for i in 1 ..= keys_len {
res += keys[usize::from(i - 1)] * binding_factor::<C>(transcript.clone(), i);
}
Ok(res)
}
/// A n-of-n non-interactive DKG which does not guarantee the usability of the resulting key.
///
/// Creating an aggregate key with a list containing duplicated public keys returns an error.
#[cfg(feature = "std")]
pub fn musig<C: Ciphersuite>(
context: &[u8],
private_key: &Zeroizing<C::F>,
keys: &[C::G],
) -> Result<ThresholdCore<C>, DkgError<()>> {
let keys_len = check_keys::<C>(keys)?;
let our_pub_key = C::generator() * private_key.deref();
let Some(pos) = keys.iter().position(|key| *key == our_pub_key) else {
// Not present in signing set
Err(DkgError::InvalidSigningSet)?
};
let params = ThresholdParams::new(
keys_len,
keys_len,
// These errors shouldn't be possible, as pos is bounded to len - 1
// Since len was already checked to be within u16::MAX, pos + 1 must be as well
Participant::new((pos + 1).try_into().map_err(|_| DkgError::InvalidSigningSet)?)
.ok_or(DkgError::InvalidSigningSet)?,
)?;
// Calculate the binding factor per-key
let transcript = binding_factor_transcript::<C>(context, keys)?;
let mut binding = Vec::with_capacity(keys.len());
for i in 1 ..= keys_len {
binding.push(binding_factor::<C>(transcript.clone(), i));
}
// Multiply our private key by our binding factor
let mut secret_share = private_key.clone();
*secret_share *= binding[pos];
// Calculate verification shares
let mut verification_shares = HashMap::new();
// When this library offers a ThresholdView for a specific signing set, it applies the lagrange
// factor
// Since this is a n-of-n scheme, there's only one possible signing set, and one possible
// lagrange factor
// In the name of simplicity, we define the group key as the sum of all bound keys
// Accordingly, the secret share must be multiplied by the inverse of the lagrange factor, along
// with all verification shares
// This is less performant than simply defining the group key as the sum of all post-lagrange
// bound keys, yet the simplicity is preferred
let included = (1 ..= keys_len)
// This error also shouldn't be possible, for the same reasons as documented above
.map(|l| Participant::new(l).ok_or(DkgError::InvalidSigningSet))
.collect::<Result<Vec<_>, _>>()?;
let mut group_key = C::G::identity();
for (l, p) in included.iter().enumerate() {
let bound = keys[l] * binding[l];
group_key += bound;
let lagrange_inv = lagrange::<C::F>(*p, &included).invert().unwrap();
if params.i() == *p {
*secret_share *= lagrange_inv;
}
verification_shares.insert(*p, bound * lagrange_inv);
}
debug_assert_eq!(C::generator() * secret_share.deref(), verification_shares[&params.i()]);
debug_assert_eq!(musig_key::<C>(context, keys).unwrap(), group_key);
Ok(ThresholdCore { params, secret_share, group_key, verification_shares })
}
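Working through the lagrange comment above: the group key is defined as K = Σ b_i·K_i. Since this is an n-of-n scheme there is exactly one signing set, hence one fixed lagrange coefficient λ_i per participant, so the stored secret share is λ_i⁻¹·b_i·x_i and the stored verification share is λ_i⁻¹·b_i·K_i. When a `ThresholdView` later applies λ_i, the effective share becomes b_i·x_i again, and the shares sum back to the group key K.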


@@ -1,20 +1,15 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
// This crate requires `dleq` which doesn't support no-std via std-shims
// #![cfg_attr(not(feature = "std"), no_std)]
use core::{marker::PhantomData, ops::Deref, fmt};
use std::{
io::{self, Read, Write},
collections::HashMap,
};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use transcript::{Transcript, RecommendedTranscript};
use multiexp::{multiexp_vartime, BatchVerifier};
use ciphersuite::{
group::{
ff::{Field, PrimeField},
@@ -22,75 +17,29 @@ use ciphersuite::{
},
Ciphersuite,
};
use multiexp::{multiexp_vartime, BatchVerifier};
use schnorr::SchnorrSignature;
pub use dkg::*;
use crate::{
Participant, DkgError, ThresholdParams, ThresholdCore, validate_map,
encryption::{
ReadWrite, EncryptionKeyMessage, EncryptedMessage, Encryption, EncryptionKeyProof,
DecryptionError,
},
};
mod encryption;
pub use encryption::*;
#[cfg(test)]
mod tests;
/// Errors possible during key generation.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum PedPoPError<C: Ciphersuite> {
/// An incorrect amount of participants was provided.
#[error("incorrect amount of participants (expected {expected}, found {found})")]
IncorrectAmountOfParticipants { expected: usize, found: usize },
/// An invalid proof of knowledge was provided.
#[error("invalid proof of knowledge (participant {0})")]
InvalidCommitments(Participant),
/// An invalid DKG share was provided.
#[error("invalid share (participant {participant}, blame {blame})")]
InvalidShare { participant: Participant, blame: Option<EncryptionKeyProof<C>> },
/// A participant was missing.
#[error("missing participant {0}")]
MissingParticipant(Participant),
/// An error propagated from the underlying `dkg` crate.
#[error("error from dkg ({0})")]
DkgError(DkgError),
}
// Validate a map of values to have the expected included participants
fn validate_map<T, C: Ciphersuite>(
map: &HashMap<Participant, T>,
included: &[Participant],
ours: Participant,
) -> Result<(), PedPoPError<C>> {
if (map.len() + 1) != included.len() {
Err(PedPoPError::IncorrectAmountOfParticipants {
expected: included.len(),
found: map.len() + 1,
})?;
}
for included in included {
if *included == ours {
if map.contains_key(included) {
Err(PedPoPError::DkgError(DkgError::DuplicatedParticipant(*included)))?;
}
continue;
}
if !map.contains_key(included) {
Err(PedPoPError::MissingParticipant(*included))?;
}
}
Ok(())
}
type FrostError<C> = DkgError<EncryptionKeyProof<C>>;
#[allow(non_snake_case)]
fn challenge<C: Ciphersuite>(context: [u8; 32], l: Participant, R: &[u8], Am: &[u8]) -> C::F {
let mut transcript = RecommendedTranscript::new(b"DKG PedPoP v0.2");
fn challenge<C: Ciphersuite>(context: &str, l: Participant, R: &[u8], Am: &[u8]) -> C::F {
let mut transcript = RecommendedTranscript::new(b"DKG FROST v0.2");
transcript.domain_separate(b"schnorr_proof_of_knowledge");
transcript.append_message(b"context", context);
transcript.append_message(b"context", context.as_bytes());
transcript.append_message(b"participant", l.to_bytes());
transcript.append_message(b"nonce", R);
transcript.append_message(b"commitments", Am);
C::hash_to_F(b"DKG-PedPoP-proof_of_knowledge-0", &transcript.challenge(b"schnorr"))
C::hash_to_F(b"DKG-FROST-proof_of_knowledge-0", &transcript.challenge(b"schnorr"))
}
/// The commitments message, intended to be broadcast to all other parties.
@@ -137,19 +86,19 @@ impl<C: Ciphersuite> ReadWrite for Commitments<C> {
#[derive(Debug, Zeroize)]
pub struct KeyGenMachine<C: Ciphersuite> {
params: ThresholdParams,
context: [u8; 32],
context: String,
_curve: PhantomData<C>,
}
impl<C: Ciphersuite> KeyGenMachine<C> {
/// Create a new machine to generate a key.
///
/// The context should be unique among multisigs.
pub fn new(params: ThresholdParams, context: [u8; 32]) -> KeyGenMachine<C> {
/// The context string should be unique among multisigs.
pub fn new(params: ThresholdParams, context: String) -> KeyGenMachine<C> {
KeyGenMachine { params, context, _curve: PhantomData }
}
/// Start generating a key according to the PedPoP DKG specification present in the FROST paper.
/// Start generating a key according to the FROST DKG spec.
///
/// Returns a commitments message to be sent to all parties over an authenticated channel. If any
/// party submits multiple sets of commitments, they MUST be treated as malicious.
@@ -157,7 +106,7 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
self,
rng: &mut R,
) -> (SecretShareMachine<C>, EncryptionKeyMessage<C, Commitments<C>>) {
let t = usize::from(self.params.t());
let t = usize::from(self.params.t);
let mut coefficients = Vec::with_capacity(t);
let mut commitments = Vec::with_capacity(t);
let mut cached_msg = vec![];
@@ -180,11 +129,11 @@ impl<C: Ciphersuite> KeyGenMachine<C> {
// There's no reason to spend the time and effort to make this deterministic besides a
// general obsession with canonicity and determinism though
r,
challenge::<C>(self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg),
challenge::<C>(&self.context, self.params.i(), nonce.to_bytes().as_ref(), &cached_msg),
);
// Additionally create an encryption mechanism to protect the secret shares
let encryption = Encryption::new(self.context, self.params.i(), rng);
let encryption = Encryption::new(self.context.clone(), Some(self.params.i), rng);
// Step 4: Broadcast
let msg =
@@ -276,7 +225,7 @@ impl<F: PrimeField> ReadWrite for SecretShare<F> {
#[derive(Zeroize)]
pub struct SecretShareMachine<C: Ciphersuite> {
params: ThresholdParams,
context: [u8; 32],
context: String,
coefficients: Vec<Zeroizing<C::F>>,
our_commitments: Vec<C::G>,
encryption: Encryption<C>,
@@ -301,21 +250,21 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
&mut self,
rng: &mut R,
mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result<HashMap<Participant, Vec<C::G>>, PedPoPError<C>> {
) -> Result<HashMap<Participant, Vec<C::G>>, FrostError<C>> {
validate_map(
&commitment_msgs,
&self.params.all_participant_indexes().collect::<Vec<_>>(),
&(1 ..= self.params.n()).map(Participant).collect::<Vec<_>>(),
self.params.i(),
)?;
let mut batch = BatchVerifier::<Participant, C::G>::new(commitment_msgs.len());
let mut commitments = HashMap::new();
for l in self.params.all_participant_indexes() {
for l in (1 ..= self.params.n()).map(Participant) {
let Some(msg) = commitment_msgs.remove(&l) else { continue };
let mut msg = self.encryption.register(l, msg);
if msg.commitments.len() != self.params.t().into() {
Err(PedPoPError::InvalidCommitments(l))?;
Err(FrostError::InvalidCommitments(l))?;
}
// Step 5: Validate each proof of knowledge
@@ -325,15 +274,15 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
&mut batch,
l,
msg.commitments[0],
challenge::<C>(self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg),
challenge::<C>(&self.context, l, msg.sig.R.to_bytes().as_ref(), &msg.cached_msg),
);
commitments.insert(l, msg.commitments.drain(..).collect::<Vec<_>>());
}
batch.verify_vartime_with_vartime_blame().map_err(PedPoPError::InvalidCommitments)?;
batch.verify_vartime_with_vartime_blame().map_err(FrostError::InvalidCommitments)?;
commitments.insert(self.params.i(), self.our_commitments.drain(..).collect());
commitments.insert(self.params.i, self.our_commitments.drain(..).collect());
Ok(commitments)
}
@@ -350,13 +299,13 @@ impl<C: Ciphersuite> SecretShareMachine<C> {
commitments: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result<
(KeyMachine<C>, HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>),
PedPoPError<C>,
FrostError<C>,
> {
let commitments = self.verify_r1(&mut *rng, commitments)?;
// Step 1: Generate secret shares for all other parties
let mut res = HashMap::new();
for l in self.params.all_participant_indexes() {
for l in (1 ..= self.params.n()).map(Participant) {
// Don't insert our own shares to the byte buffer which is meant to be sent around
// An app developer could accidentally send it. Best to keep this black boxed
if l == self.params.i() {
@@ -464,10 +413,10 @@ impl<C: Ciphersuite> KeyMachine<C> {
mut self,
rng: &mut R,
mut shares: HashMap<Participant, EncryptedMessage<C, SecretShare<C::F>>>,
) -> Result<BlameMachine<C>, PedPoPError<C>> {
) -> Result<BlameMachine<C>, FrostError<C>> {
validate_map(
&shares,
&self.params.all_participant_indexes().collect::<Vec<_>>(),
&(1 ..= self.params.n()).map(Participant).collect::<Vec<_>>(),
self.params.i(),
)?;
@@ -478,7 +427,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
self.encryption.decrypt(rng, &mut batch, BatchId::Decryption(l), l, share_bytes);
let share =
Zeroizing::new(Option::<C::F>::from(C::F::from_repr(share_bytes.0)).ok_or_else(|| {
PedPoPError::InvalidShare { participant: l, blame: Some(blame.clone()) }
FrostError::InvalidShare { participant: l, blame: Some(blame.clone()) }
})?);
share_bytes.zeroize();
*self.secret += share.deref();
@@ -495,7 +444,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
BatchId::Decryption(l) => (l, None),
BatchId::Share(l) => (l, Some(blames.remove(&l).unwrap())),
};
PedPoPError::InvalidShare { participant: l, blame }
FrostError::InvalidShare { participant: l, blame }
})?;
// Stripe commitments per t and sum them in advance. Calculating verification shares relies on
@@ -509,7 +458,7 @@ impl<C: Ciphersuite> KeyMachine<C> {
// Calculate each user's verification share
let mut verification_shares = HashMap::new();
for i in self.params.all_participant_indexes() {
for i in (1 ..= self.params.n()).map(Participant) {
verification_shares.insert(
i,
if i == self.params.i() {
@@ -523,11 +472,13 @@ impl<C: Ciphersuite> KeyMachine<C> {
let KeyMachine { commitments, encryption, params, secret } = self;
Ok(BlameMachine {
commitments,
encryption: encryption.into_decryption(),
result: Some(
ThresholdKeys::new(params, Interpolation::Lagrange, secret, verification_shares)
.map_err(PedPoPError::DkgError)?,
),
encryption,
result: Some(ThresholdCore {
params,
secret_share: secret,
group_key: stripes[0],
verification_shares,
}),
})
}
}
@@ -535,8 +486,8 @@ impl<C: Ciphersuite> KeyMachine<C> {
/// A machine capable of handling blame proofs.
pub struct BlameMachine<C: Ciphersuite> {
commitments: HashMap<Participant, Vec<C::G>>,
encryption: Decryption<C>,
result: Option<ThresholdKeys<C>>,
encryption: Encryption<C>,
result: Option<ThresholdCore<C>>,
}
impl<C: Ciphersuite> fmt::Debug for BlameMachine<C> {
@@ -554,6 +505,7 @@ impl<C: Ciphersuite> Zeroize for BlameMachine<C> {
for commitments in self.commitments.values_mut() {
commitments.zeroize();
}
self.encryption.zeroize();
self.result.zeroize();
}
}
@@ -568,7 +520,7 @@ impl<C: Ciphersuite> BlameMachine<C> {
/// territory of consensus protocols. This library does not handle that nor does it provide any
/// tooling to do so. This function is solely intended to force users to acknowledge they're
/// completing the protocol, not processing any blame.
pub fn complete(self) -> ThresholdKeys<C> {
pub fn complete(self) -> ThresholdCore<C> {
self.result.unwrap()
}
@@ -646,16 +598,17 @@ impl<C: Ciphersuite> AdditionalBlameMachine<C> {
/// authenticated as having come from the supposed party and verified as valid. Usage of invalid
/// commitments is considered undefined behavior, and may cause everything from inaccurate blame
/// to panics.
pub fn new(
context: [u8; 32],
pub fn new<R: RngCore + CryptoRng>(
rng: &mut R,
context: String,
n: u16,
mut commitment_msgs: HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
) -> Result<Self, PedPoPError<C>> {
) -> Result<Self, FrostError<C>> {
let mut commitments = HashMap::new();
let mut encryption = Decryption::new(context);
let mut encryption = Encryption::new(context, None, rng);
for i in 1 ..= n {
let i = Participant::new(i).unwrap();
let Some(msg) = commitment_msgs.remove(&i) else { Err(PedPoPError::MissingParticipant(i))? };
let Some(msg) = commitment_msgs.remove(&i) else { Err(DkgError::MissingParticipant(i))? };
commitments.insert(i, encryption.register(i, msg).commitments);
}
Ok(AdditionalBlameMachine(BlameMachine { commitments, encryption, result: None }))
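To make the intended call flow concrete, here is a minimal round-one sketch for a single party, assuming the `String`-context variant of the API shown in this diff, a Ristretto ciphersuite, and module paths matching the test imports later in this diff; networking and the later rounds are elided.

```rust
use rand_core::OsRng;
use dkg::{Participant, ThresholdParams, pedpop::KeyGenMachine};

fn round_one() {
  // A 3-of-5 multisig, acting as participant 1.
  let params = ThresholdParams::new(3, 5, Participant::new(1).unwrap()).unwrap();
  let machine =
    KeyGenMachine::<ciphersuite::Ristretto>::new(params, "example context".to_string());
  // Broadcast `commitments` to every other party over an authenticated channel.
  // Their commitments are then fed into `generate_secret_shares` for round two.
  let (_machine, commitments) = machine.generate_coefficients(&mut OsRng);
  let _ = commitments.serialize();
}
```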


@@ -1,11 +1,7 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![doc = include_str!("../README.md")]
// This crate requires `dleq` which doesn't support no-std via std-shims
// #![cfg_attr(not(feature = "std"), no_std)]
use core::{marker::PhantomData, ops::Deref};
use std::{
io::{self, Read, Write},
sync::Arc,
collections::HashMap,
};
@@ -16,37 +12,11 @@ use ciphersuite::{group::GroupEncoding, Ciphersuite};
use transcript::{Transcript, RecommendedTranscript};
use dleq::DLEqProof;
pub use dkg::*;
use crate::{Participant, DkgError, ThresholdCore, ThresholdKeys, validate_map};
#[cfg(test)]
mod tests;
/// Errors encountered when promoting keys.
#[derive(Clone, PartialEq, Eq, Debug, thiserror::Error)]
pub enum PromotionError {
/// Invalid participant identifier.
#[error("invalid participant (1 <= participant <= {n}, yet participant is {participant})")]
InvalidParticipant {
/// The total amount of participants.
n: u16,
/// The specified participant.
participant: Participant,
},
/// An incorrect amount of participants was specified.
#[error("incorrect amount of participants. {t} <= amount <= {n}, yet amount is {amount}")]
IncorrectAmountOfParticipants {
/// The threshold required.
t: u16,
/// The total amount of participants.
n: u16,
/// The amount of participants specified.
amount: usize,
},
/// Participant provided an invalid proof.
#[error("invalid proof {0}")]
InvalidProof(Participant),
/// Promote a set of keys to another Ciphersuite definition.
pub trait CiphersuitePromote<C2: Ciphersuite> {
fn promote(self) -> ThresholdKeys<C2>;
}
fn transcript<G: GroupEncoding>(key: &G, i: Participant) -> RecommendedTranscript {
@@ -95,21 +65,20 @@ pub struct GeneratorPromotion<C1: Ciphersuite, C2: Ciphersuite> {
}
impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<C1, C2> {
/// Begin promoting keys from one generator to another.
///
/// Returns a proof this share was properly promoted.
/// Begin promoting keys from one generator to another. Returns a proof this share was properly
/// promoted.
pub fn promote<R: RngCore + CryptoRng>(
rng: &mut R,
base: ThresholdKeys<C1>,
) -> (GeneratorPromotion<C1, C2>, GeneratorProof<C1>) {
// Do a DLEqProof for the new generator
let proof = GeneratorProof {
share: C2::generator() * base.original_secret_share().deref(),
share: C2::generator() * base.secret_share().deref(),
proof: DLEqProof::prove(
rng,
&mut transcript(&base.original_group_key(), base.params().i()),
&mut transcript(&base.core.group_key(), base.params().i),
&[C1::generator(), C2::generator()],
base.original_secret_share(),
base.secret_share(),
),
};
@@ -120,49 +89,34 @@ impl<C1: Ciphersuite, C2: Ciphersuite<F = C1::F, G = C1::G>> GeneratorPromotion<
pub fn complete(
self,
proofs: &HashMap<Participant, GeneratorProof<C1>>,
) -> Result<ThresholdKeys<C2>, PromotionError> {
) -> Result<ThresholdKeys<C2>, DkgError<()>> {
let params = self.base.params();
if proofs.len() != (usize::from(params.n()) - 1) {
Err(PromotionError::IncorrectAmountOfParticipants {
t: params.n(),
n: params.n(),
amount: proofs.len() + 1,
})?;
}
for i in proofs.keys().copied() {
if u16::from(i) > params.n() {
Err(PromotionError::InvalidParticipant { n: params.n(), participant: i })?;
}
}
validate_map(proofs, &(1 ..= params.n).map(Participant).collect::<Vec<_>>(), params.i)?;
let original_shares = self.base.verification_shares();
let mut verification_shares = HashMap::new();
verification_shares.insert(params.i(), self.proof.share);
for i in 1 ..= params.n() {
let i = Participant::new(i).unwrap();
if i == params.i() {
continue;
}
let proof = proofs.get(&i).unwrap();
verification_shares.insert(params.i, self.proof.share);
for (i, proof) in proofs {
let i = *i;
proof
.proof
.verify(
&mut transcript(&self.base.original_group_key(), i),
&mut transcript(&self.base.core.group_key(), i),
&[C1::generator(), C2::generator()],
&[self.base.original_verification_share(i), proof.share],
&[original_shares[&i], proof.share],
)
.map_err(|_| PromotionError::InvalidProof(i))?;
.map_err(|_| DkgError::InvalidCommitments(i))?;
verification_shares.insert(i, proof.share);
}
Ok(
ThresholdKeys::new(
Ok(ThresholdKeys {
core: Arc::new(ThresholdCore::new(
params,
self.base.interpolation().clone(),
self.base.original_secret_share().clone(),
self.base.secret_share().clone(),
verification_shares,
)
.unwrap(),
)
)),
offset: None,
})
}
}
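Restated as math (standard DLEq notation, an editorial gloss rather than text from this diff): each party holds a secret share $s$ with verification share $V_1 = s G_1$ under the original generator. Promotion publishes

$$\text{share} = s G_2, \qquad \pi = \mathrm{DLEq}\big((G_1, G_2),\ s\big),$$

where $\pi$ proves $\log_{G_1} V_1 = \log_{G_2} \text{share}$ without revealing $s$. Verifying every other party's proof against their existing verification share is what lets `complete` assemble keys for the new generator whose group key is the same group secret, now expressed over $G_2$.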

crypto/dkg/src/tests/mod.rs (new file)

@@ -0,0 +1,101 @@
use core::ops::Deref;
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::{group::ff::Field, Ciphersuite};
use crate::{Participant, ThresholdCore, ThresholdKeys, lagrange, musig::musig as musig_fn};
mod musig;
pub use musig::test_musig;
/// FROST key generation testing utility.
pub mod pedpop;
use pedpop::pedpop_gen;
// Promotion test.
mod promote;
use promote::test_generator_promotion;
/// Constant amount of participants to use when testing.
pub const PARTICIPANTS: u16 = 5;
/// Constant threshold of participants to use when testing.
pub const THRESHOLD: u16 = ((PARTICIPANTS * 2) / 3) + 1;
/// Clone a map without a specific value.
pub fn clone_without<K: Clone + core::cmp::Eq + core::hash::Hash, V: Clone>(
map: &HashMap<K, V>,
without: &K,
) -> HashMap<K, V> {
let mut res = map.clone();
res.remove(without).unwrap();
res
}
/// Recover the secret from a collection of keys.
///
/// This will panic if no keys, an insufficient amount of keys, or the wrong keys are provided.
pub fn recover_key<C: Ciphersuite>(keys: &HashMap<Participant, ThresholdKeys<C>>) -> C::F {
let first = keys.values().next().expect("no keys provided");
assert!(keys.len() >= first.params().t().into(), "not enough keys provided");
let included = keys.keys().copied().collect::<Vec<_>>();
let group_private = keys.iter().fold(C::F::ZERO, |accum, (i, keys)| {
accum + (lagrange::<C::F>(*i, &included) * keys.secret_share().deref())
});
assert_eq!(C::generator() * group_private, first.group_key(), "failed to recover keys");
group_private
}
/// Generate threshold keys for tests.
pub fn key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let res = pedpop_gen(rng)
.drain()
.map(|(i, core)| {
assert_eq!(
&ThresholdCore::<C>::read::<&[u8]>(&mut core.serialize().as_ref()).unwrap(),
&core
);
(i, ThresholdKeys::new(core))
})
.collect();
assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key());
res
}
/// Generate MuSig keys for tests.
pub fn musig_key_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdKeys<C>> {
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(C::F::random(&mut *rng));
pub_keys.push(C::generator() * *key);
keys.push(key);
}
let mut res = HashMap::new();
for key in keys {
let these_keys = musig_fn::<C>(b"Test MuSig Key Gen", &key, &pub_keys).unwrap();
res.insert(these_keys.params().i(), ThresholdKeys::new(these_keys));
}
assert_eq!(C::generator() * recover_key(&res), res[&Participant(1)].group_key());
res
}
/// Run the test suite on a ciphersuite.
pub fn test_ciphersuite<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
key_gen::<_, C>(rng);
test_generator_promotion::<_, C>(rng);
}
#[test]
fn test_with_ristretto() {
test_ciphersuite::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng);
}
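The interpolation performed by `recover_key` above, written out (standard Shamir notation, an editorial gloss): for the set $S$ of included participant indexes,

$$k = \sum_{i \in S} \lambda_i s_i, \qquad \lambda_i = \prod_{\substack{j \in S \\ j \neq i}} \frac{j}{j - i},$$

so the fold accumulates each secret share weighted by its Lagrange coefficient at zero, and the final assertion checks that $kG$ equals the group key.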


@@ -0,0 +1,61 @@
use std::collections::HashMap;
use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::{group::ff::Field, Ciphersuite};
use crate::{
ThresholdKeys,
musig::{musig_key, musig},
tests::{PARTICIPANTS, recover_key},
};
/// Tests MuSig key generation.
pub fn test_musig<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
let mut keys = vec![];
let mut pub_keys = vec![];
for _ in 0 .. PARTICIPANTS {
let key = Zeroizing::new(C::F::random(&mut *rng));
pub_keys.push(C::generator() * *key);
keys.push(key);
}
const CONTEXT: &[u8] = b"MuSig Test";
// Empty signing set
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[]).unwrap_err();
// Signing set we're not part of
musig::<C>(CONTEXT, &Zeroizing::new(C::F::ZERO), &[C::generator()]).unwrap_err();
// Test with n keys
{
let mut created_keys = HashMap::new();
let mut verification_shares = HashMap::new();
let group_key = musig_key::<C>(CONTEXT, &pub_keys).unwrap();
for (i, key) in keys.iter().enumerate() {
let these_keys = musig::<C>(CONTEXT, key, &pub_keys).unwrap();
assert_eq!(these_keys.params().t(), PARTICIPANTS);
assert_eq!(these_keys.params().n(), PARTICIPANTS);
assert_eq!(usize::from(these_keys.params().i().0), i + 1);
verification_shares
.insert(these_keys.params().i(), C::generator() * **these_keys.secret_share());
assert_eq!(these_keys.group_key(), group_key);
created_keys.insert(these_keys.params().i(), ThresholdKeys::new(these_keys));
}
for keys in created_keys.values() {
assert_eq!(keys.verification_shares(), verification_shares);
}
assert_eq!(C::generator() * recover_key(&created_keys), group_key);
}
}
#[test]
fn musig_literal() {
test_musig::<_, ciphersuite::Ristretto>(&mut rand_core::OsRng)
}


@@ -0,0 +1,333 @@
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use ciphersuite::Ciphersuite;
use crate::{
Participant, ThresholdParams, ThresholdCore,
pedpop::{Commitments, KeyGenMachine, SecretShare, KeyMachine},
encryption::{EncryptionKeyMessage, EncryptedMessage},
tests::{THRESHOLD, PARTICIPANTS, clone_without},
};
type PedPoPEncryptedMessage<C> = EncryptedMessage<C, SecretShare<<C as Ciphersuite>::F>>;
type PedPoPSecretShares<C> = HashMap<Participant, PedPoPEncryptedMessage<C>>;
const CONTEXT: &str = "DKG Test Key Generation";
// Commit, then return commitment messages, enc keys, and shares
#[allow(clippy::type_complexity)]
fn commit_enc_keys_and_shares<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> (
HashMap<Participant, KeyMachine<C>>,
HashMap<Participant, EncryptionKeyMessage<C, Commitments<C>>>,
HashMap<Participant, C::G>,
HashMap<Participant, PedPoPSecretShares<C>>,
) {
let mut machines = HashMap::new();
let mut commitments = HashMap::new();
let mut enc_keys = HashMap::new();
for i in (1 ..= PARTICIPANTS).map(Participant) {
let params = ThresholdParams::new(THRESHOLD, PARTICIPANTS, i).unwrap();
let machine = KeyGenMachine::<C>::new(params, CONTEXT.to_string());
let (machine, these_commitments) = machine.generate_coefficients(rng);
machines.insert(i, machine);
commitments.insert(
i,
EncryptionKeyMessage::read::<&[u8]>(&mut these_commitments.serialize().as_ref(), params)
.unwrap(),
);
enc_keys.insert(i, commitments[&i].enc_key());
}
let mut secret_shares = HashMap::new();
let machines = machines
.drain()
.map(|(l, machine)| {
let (machine, mut shares) =
machine.generate_secret_shares(rng, clone_without(&commitments, &l)).unwrap();
let shares = shares
.drain()
.map(|(l, share)| {
(
l,
EncryptedMessage::read::<&[u8]>(
&mut share.serialize().as_ref(),
// Only t/n actually matters, so hardcode i to 1 here
ThresholdParams { t: THRESHOLD, n: PARTICIPANTS, i: Participant(1) },
)
.unwrap(),
)
})
.collect::<HashMap<_, _>>();
secret_shares.insert(l, shares);
(l, machine)
})
.collect::<HashMap<_, _>>();
(machines, commitments, enc_keys, secret_shares)
}
fn generate_secret_shares<C: Ciphersuite>(
shares: &HashMap<Participant, PedPoPSecretShares<C>>,
recipient: Participant,
) -> PedPoPSecretShares<C> {
let mut our_secret_shares = HashMap::new();
for (i, shares) in shares {
if recipient == *i {
continue;
}
our_secret_shares.insert(*i, shares[&recipient].clone());
}
our_secret_shares
}
/// Fully perform the PedPoP key generation algorithm.
pub fn pedpop_gen<R: RngCore + CryptoRng, C: Ciphersuite>(
rng: &mut R,
) -> HashMap<Participant, ThresholdCore<C>> {
let (mut machines, _, _, secret_shares) = commit_enc_keys_and_shares::<_, C>(rng);
let mut verification_shares = None;
let mut group_key = None;
machines
.drain()
.map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let these_keys = machine.calculate_share(rng, our_secret_shares).unwrap().complete();
// Verify the verification_shares are agreed upon
if verification_shares.is_none() {
verification_shares = Some(these_keys.verification_shares());
}
assert_eq!(verification_shares.as_ref().unwrap(), &these_keys.verification_shares());
// Verify the group keys are agreed upon
if group_key.is_none() {
group_key = Some(these_keys.group_key());
}
assert_eq!(group_key.unwrap(), these_keys.group_key());
(i, these_keys)
})
.collect::<HashMap<_, _>>()
}
#[cfg(test)]
mod literal {
use rand_core::OsRng;
use ciphersuite::Ristretto;
use crate::{
DkgError,
encryption::EncryptionKeyProof,
pedpop::{BlameMachine, AdditionalBlameMachine},
};
use super::*;
const ONE: Participant = Participant(1);
const TWO: Participant = Participant(2);
fn test_blame(
commitment_msgs: &HashMap<Participant, EncryptionKeyMessage<Ristretto, Commitments<Ristretto>>>,
machines: Vec<BlameMachine<Ristretto>>,
msg: &PedPoPEncryptedMessage<Ristretto>,
blame: &Option<EncryptionKeyProof<Ristretto>>,
) {
for machine in machines {
let (additional, blamed) = machine.blame(ONE, TWO, msg.clone(), blame.clone());
assert_eq!(blamed, ONE);
// Verify additional blame also works
assert_eq!(additional.blame(ONE, TWO, msg.clone(), blame.clone()), ONE);
// Verify machines constructed with AdditionalBlameMachine::new work
assert_eq!(
AdditionalBlameMachine::new(
&mut OsRng,
CONTEXT.to_string(),
PARTICIPANTS,
commitment_msgs.clone()
)
.unwrap()
.blame(ONE, TWO, msg.clone(), blame.clone()),
ONE,
);
}
}
// TODO: Write a macro which expands to the following
#[test]
fn invalid_encryption_pop_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the PoP of the encrypted message from 1 to 2
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_pop();
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
assert_eq!(machine.err(), Some(DkgError::InvalidShare { participant: ONE, blame: None }));
// Explicitly declare we have a blame object, which happens to be None since invalid PoP
// is self-explainable
blame = Some(None);
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_ecdh_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
// Mutate the share to trigger a blame event
// Mutates from 2 to 1, as 1 is expected to end up malicious for test_blame to pass
// While here, 2 is malicious, this is so 1 creates the blame proof
// We then malleate 1's blame proof, so 1 ends up malicious
// Doesn't simply invalidate the PoP as that won't have a blame statement
// By mutating the encrypted data, we do ensure a blame statement is created
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_key();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
// This should be largely equivalent to the prior test
#[test]
fn invalid_dleq_blame() {
let (mut machines, commitment_msgs, _, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares
.get_mut(&TWO)
.unwrap()
.get_mut(&ONE)
.unwrap()
.invalidate_msg(&mut OsRng, CONTEXT, TWO);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == ONE {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: TWO, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
blame.as_mut().unwrap().as_mut().unwrap().invalidate_dleq();
test_blame(&commitment_msgs, machines, &secret_shares[&TWO][&ONE].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_serialization_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_serialization(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
#[test]
fn invalid_share_value_blame() {
let (mut machines, commitment_msgs, enc_keys, mut secret_shares) =
commit_enc_keys_and_shares::<_, Ristretto>(&mut OsRng);
secret_shares.get_mut(&ONE).unwrap().get_mut(&TWO).unwrap().invalidate_share_value(
&mut OsRng,
CONTEXT,
ONE,
enc_keys[&TWO],
);
let mut blame = None;
let machines = machines
.drain()
.filter_map(|(i, machine)| {
let our_secret_shares = generate_secret_shares(&secret_shares, i);
let machine = machine.calculate_share(&mut OsRng, our_secret_shares);
if i == TWO {
blame = Some(match machine.err() {
Some(DkgError::InvalidShare { participant: ONE, blame: Some(blame) }) => Some(blame),
_ => panic!(),
});
None
} else {
Some(machine.unwrap())
}
})
.collect::<Vec<_>>();
test_blame(&commitment_msgs, machines, &secret_shares[&ONE][&TWO].clone(), &blame.unwrap());
}
}


@@ -0,0 +1,62 @@
use core::{marker::PhantomData, ops::Deref};
use std::collections::HashMap;
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use ciphersuite::{group::Group, Ciphersuite};
use crate::{
promote::{GeneratorPromotion, GeneratorProof},
tests::{clone_without, key_gen, recover_key},
};
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
struct AltGenerator<C: Ciphersuite> {
_curve: PhantomData<C>,
}
impl<C: Ciphersuite> Ciphersuite for AltGenerator<C> {
type F = C::F;
type G = C::G;
type H = C::H;
const ID: &'static [u8] = b"Alternate Ciphersuite";
fn generator() -> Self::G {
C::G::generator() * <C as Ciphersuite>::hash_to_F(b"DKG Promotion Test", b"generator")
}
fn hash_to_F(dst: &[u8], data: &[u8]) -> Self::F {
<C as Ciphersuite>::hash_to_F(dst, data)
}
}
// Test promotion of threshold keys to another generator
pub(crate) fn test_generator_promotion<R: RngCore + CryptoRng, C: Ciphersuite>(rng: &mut R) {
let keys = key_gen::<_, C>(&mut *rng);
let mut promotions = HashMap::new();
let mut proofs = HashMap::new();
for (i, keys) in &keys {
let (promotion, proof) =
GeneratorPromotion::<_, AltGenerator<C>>::promote(&mut *rng, keys.clone());
promotions.insert(*i, promotion);
proofs.insert(*i, GeneratorProof::<C>::read::<&[u8]>(&mut proof.serialize().as_ref()).unwrap());
}
let new_group_key = AltGenerator::<C>::generator() * recover_key(&keys);
for (i, promoting) in promotions.drain() {
let promoted = promoting.complete(&clone_without(&proofs, &i)).unwrap();
assert_eq!(keys[&i].params(), promoted.params());
assert_eq!(keys[&i].secret_share(), promoted.secret_share());
assert_eq!(new_group_key, promoted.group_key());
for (l, verification_share) in promoted.verification_shares() {
assert_eq!(
AltGenerator::<C>::generator() * keys[&l].secret_share().deref(),
verification_share
);
}
}
}


@@ -18,7 +18,7 @@ workspace = true
[dependencies]
rustversion = "1"
thiserror = { version = "2", default-features = false, optional = true }
thiserror = { version = "1", optional = true }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
@@ -44,7 +44,7 @@ dalek-ff-group = { path = "../dalek-ff-group" }
transcript = { package = "flexible-transcript", path = "../transcript", features = ["recommended"] }
[features]
std = ["thiserror?/std", "rand_core/std", "zeroize/std", "digest/std", "transcript/std", "ff/std", "multiexp?/std"]
std = ["rand_core/std", "zeroize/std", "digest/std", "transcript/std", "ff/std", "multiexp?/std"]
serialize = ["std"]
# Needed for cross-group DLEqs


@@ -92,7 +92,7 @@ impl<G: PrimeGroup> Generators<G> {
}
/// Error for cross-group DLEq proofs.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Error, PartialEq, Eq, Debug)]
pub enum DLEqError {
/// Invalid proof length.
#[error("invalid proof length")]


@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![cfg_attr(not(feature = "std"), no_std)]
#![doc = include_str!("../README.md")]
@@ -37,11 +37,11 @@ pub(crate) fn challenge<T: Transcript, F: PrimeField>(transcript: &mut T) -> F {
// Get a wide amount of bytes to safely reduce without bias
// In most cases, <=1.5x bytes is enough. 2x is still standard and there are some theoretical
// groups which may technically require more than 1.5x bytes for this to work as intended
let target_bytes = usize::try_from(F::NUM_BITS).unwrap().div_ceil(8) * 2;
let target_bytes = ((usize::try_from(F::NUM_BITS).unwrap() + 7) / 8) * 2;
let mut challenge_bytes = transcript.challenge(b"challenge");
let challenge_bytes_len = challenge_bytes.as_ref().len();
// If the challenge is 32 bytes, and we need 64, we need two challenges
let needed_challenges = target_bytes.div_ceil(challenge_bytes_len);
let needed_challenges = (target_bytes + (challenge_bytes_len - 1)) / challenge_bytes_len;
// The following algorithm should be equivalent to a wide reduction of the challenges,
// interpreted as concatenated, big-endian byte string
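A standalone check of the arithmetic described in the comments above, using hypothetical numbers (a 255-bit scalar field and 32-byte transcript challenges; neither figure comes from this diff):

```rust
fn main() {
  let num_bits: usize = 255; // hypothetical F::NUM_BITS
  let challenge_bytes_len: usize = 32; // hypothetical transcript challenge size
  // Twice the field's byte length, so the final modular reduction is unbiased.
  let target_bytes = num_bits.div_ceil(8) * 2;
  // How many fixed-size challenges must be concatenated to reach that length.
  let needed_challenges = target_bytes.div_ceil(challenge_bytes_len);
  assert_eq!((target_bytes, needed_challenges), (64, 2));
}
```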


@@ -1,13 +1,13 @@
[package]
name = "minimal-ed448"
version = "0.4.2"
version = "0.4.0"
description = "Unaudited, inefficient implementation of Ed448 in Rust"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/ed448"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["ed448", "ff", "group"]
edition = "2021"
rust-version = "1.65"
rust-version = "1.66"
[package.metadata.docs.rs]
all-features = true
@@ -24,11 +24,8 @@ rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
ff = { version = "0.13", default-features = false, features = ["bits"] }
group = { version = "0.13", default-features = false }
ciphersuite = { path = "../ciphersuite", default-features = false }
generic-array = { version = "1", default-features = false }
crypto-bigint = { version = "0.5", default-features = false, features = ["zeroize"] }
@@ -41,6 +38,5 @@ rand_core = { version = "0.6", default-features = false, features = ["std"] }
ff-group-tests = { path = "../ff-group-tests" }
[features]
alloc = ["zeroize/alloc", "ciphersuite/alloc"]
std = ["alloc", "rand_core/std", "zeroize/std", "subtle/std", "sha3/std", "ff/std", "ciphersuite/std"]
std = ["rand_core/std", "zeroize/std", "subtle/std", "ff/std"]
default = ["std"]


@@ -2,19 +2,11 @@ use zeroize::Zeroize;
// Use black_box when possible
#[rustversion::since(1.66)]
mod black_box {
pub(crate) fn black_box<T>(val: T) -> T {
#[allow(clippy::incompatible_msrv)]
core::hint::black_box(val)
}
}
use core::hint::black_box;
#[rustversion::before(1.66)]
mod black_box {
pub(crate) fn black_box<T>(val: T) -> T {
val
}
fn black_box<T>(val: T) -> T {
val
}
use black_box::black_box;
pub(crate) fn u8_from_bool(bit_ref: &mut bool) -> u8 {
let bit_ref = black_box(bit_ref);


@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![no_std]
#![allow(clippy::redundant_closure_call)]
@@ -14,6 +14,3 @@ pub use field::FieldElement;
mod point;
pub use point::Point;
mod ciphersuite;
pub use crate::ciphersuite::Ed448;

Some files were not shown because too many files have changed in this diff.