Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-08 12:19:24 +00:00)

Compare commits: next...46b1f1b7ec (125 commits)
Commits in this range (SHA1):

46b1f1b7ec, 09113201e7, 556d294157, 82ca889ed3, cde0f753c2,
6ff0ef7aa6, f9e3d1b142, a793aa18ef, 5662beeb8a, 509bd58f4e,
367a5769e8, cb6eb6430a, 4f82e5912c, ac7af40f2e, 264bdd46ca,
c52f7634de, 21eaa5793d, c744a80d80, a34f9f6164, 353683cfd2,
d4f77159c4, 191bf4bdea, 06a4824aba, e65a37e639, 4653ef4a61,
ce08fad931, 1866bb7ae3, aff2065c31, 7300700108, 31874ceeae,
012b8fddae, d2f58232c8, 49794b6a75, 973287d0a1, 1b499edfe1,
642848bd24, f7fb78bdd6, 65613750e1, 56f6ba2dac, 08f6af8bb9,
3512b3832d, 1164f92ea1, 0a3ead0e19, ea66cd0d1a, 8b32fba458,
e63acf3f67, d373d2a4c9, cbf998ff30, ef07253a27, ffae6753ec,
a04215bc13, 28aea8a442, 7b46477ca0, e62b62ddfb, a2d8d0fd13,
b2b36b17c4, 9de8394efa, 3cb9432daa, 3f5150b3fa, d74b00b9e4,
3955f92cc2, a1ef18a039, bec806230a, 8bafeab5b3, 3722df7326,
ddb8e1398e, 2be69b23b1, a82ccadbb0, 1ff2934927, cd4ffa862f,
c0a4d85ae6, 55e845fe12, 5ea087d177, dd7dc0c1dc, c83fbb3e44,
befbbbfb84, d0f497dc68, 1b755a5d48, e5efcd56ba, 5d60b3c2ae,
ae923b24ff, d304cd97e1, 2b56dcdf3f, 90804c4c30, 46caca2f51,
2077e485bb, 28dbef8a1c, 3541197aa5, a2209dd6ff, 2032cf355f,
fe41b09fd4, 74bad049a7, 72fefb3d85, 200c1530a4, 5736b87b57,
ada94e8c5d, 75240ed327, 6177cf5c07, 0d38dc96b6, e8094523ff,
53a64bc7e2, 3c6e889732, 354efc0192, e20058feae, 09f0714894,
d3d539553c, b08ae8e6a7, 35db2924b4, bfff823bf7, 352af85498,
ecad89b269, 48f5ed71d7, ed9cbdd8e0, 0ac11defcc, 24e89316d5,
3f03dac050, 820b710928, 88c7ae3e7d, dd5e43760d, 776e417fd2,
2f8ce15a92, af56304676, 62a2c4f20e, c69841710a, 3158590675
2  .github/actions/bitcoin/action.yml (vendored)

@@ -5,7 +5,7 @@ inputs:
version:
description: "Version to download and run"
required: false
default: "29.1"
default: "30.0"

runs:
using: "composite"

@@ -52,9 +52,9 @@ runs:
- name: Install solc
shell: bash
run: |
cargo +1.90 install svm-rs --version =0.5.19
svm install 0.8.26
svm use 0.8.26
cargo +1.91 install svm-rs --version =0.5.19
svm install 0.8.29
svm use 0.8.29

- name: Remove preinstalled Docker
shell: bash
2  .github/actions/monero-wallet-rpc/action.yml (vendored)

@@ -5,7 +5,7 @@ inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.3.4
default: v0.18.4.3

runs:
using: "composite"
2  .github/actions/monero/action.yml (vendored)

@@ -5,7 +5,7 @@ inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.3.4
default: v0.18.4.3

runs:
using: "composite"
4  .github/actions/test-dependencies/action.yml (vendored)

@@ -5,12 +5,12 @@ inputs:
monero-version:
description: "Monero version to download and run as a regtest node"
required: false
default: v0.18.3.4
default: v0.18.4.3

bitcoin-version:
description: "Bitcoin version to download and run as a regtest node"
required: false
default: "29.1"
default: "30.0"

runs:
using: "composite"
2  .github/nightly-version (vendored)

@@ -1 +1 @@
nightly-2025-11-01
nightly-2025-11-11
2  .github/workflows/daily-deny.yml (vendored)

@@ -18,7 +18,7 @@ jobs:
key: rust-advisory-db

- name: Install cargo deny
run: cargo +1.90 install cargo-deny --version =0.18.4
run: cargo +1.91 install cargo-deny --version =0.18.5

- name: Run cargo deny
run: cargo deny -L error --all-features check --hide-inclusion-graph
20  .github/workflows/lint.yml (vendored)

@@ -26,7 +26,7 @@ jobs:
uses: ./.github/actions/build-dependencies

- name: Install nightly rust
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c rust-src -c clippy
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32v1-none -c clippy

- name: Run Clippy
run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module

@@ -52,7 +52,7 @@ jobs:
key: rust-advisory-db

- name: Install cargo deny
run: cargo +1.90 install cargo-deny --version =0.18.4
run: cargo +1.91 install cargo-deny --version =0.18.5

- name: Run cargo deny
run: cargo deny -L error --all-features check --hide-inclusion-graph

@@ -88,8 +88,8 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Verify all dependencies are in use
run: |
cargo +1.90 install cargo-machete --version =0.9.1
cargo +1.90 machete
cargo +1.91 install cargo-machete --version =0.9.1
cargo +1.91 machete

msrv:
runs-on: ubuntu-latest

@@ -98,7 +98,7 @@ jobs:
- name: Verify claimed `rust-version`
shell: bash
run: |
cargo +1.90 install cargo-msrv --version =0.18.4
cargo +1.91 install cargo-msrv --version =0.18.4

function check_msrv {
# We `cd` into the directory passed as the first argument, but will return to the

@@ -155,8 +155,6 @@ jobs:
# Correct the last line, which was malleated to "],"
members=$(echo "$members" | sed "$(echo "$members" | wc -l)s/\]\,/\]/")

# Don't check the patches
members=$(echo "$members" | grep -v "patches")
# Don't check the following
# Most of these are binaries, with the exception of the Substrate runtime which has a
# bespoke build pipeline

@@ -192,12 +190,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

- name: Build Dependencies
uses: ./.github/actions/build-dependencies

- name: Slither
run: |
python3 -m pip install solc-select
solc-select install 0.8.26
solc-select use 0.8.26

python3 -m pip install slither-analyzer

slither --include-paths ./networks/ethereum/schnorr/contracts/Schnorr.sol
32  .github/workflows/tests.yml (vendored)

@@ -61,6 +61,7 @@ jobs:
-p serai-monero-processor \
-p tendermint-machine \
-p tributary-sdk \
-p serai-cosign-types \
-p serai-cosign \
-p serai-coordinator-substrate \
-p serai-coordinator-tributary \

@@ -82,23 +83,19 @@ jobs:
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
-p serai-primitives \
-p serai-coins-primitives \
-p serai-coins-pallet \
-p serai-dex-pallet \
-p serai-validator-sets-primitives \
-p serai-validator-sets-pallet \
-p serai-genesis-liquidity-primitives \
-p serai-genesis-liquidity-pallet \
-p serai-emissions-primitives \
-p serai-emissions-pallet \
-p serai-economic-security-pallet \
-p serai-in-instructions-primitives \
-p serai-in-instructions-pallet \
-p serai-signals-primitives \
-p serai-signals-pallet \
-p serai-abi \
-p serai-core-pallet \
-p serai-coins-pallet \
-p serai-validator-sets-pallet \
-p serai-signals-pallet \
-p serai-dex-pallet \
-p serai-genesis-liquidity-pallet \
-p serai-economic-security-pallet \
-p serai-emissions-pallet \
-p serai-in-instructions-pallet \
-p serai-runtime \
-p serai-node
-p serai-substrate-tests

test-serai-client:
runs-on: ubuntu-latest

@@ -109,4 +106,9 @@ jobs:
uses: ./.github/actions/build-dependencies

- name: Run Tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-bitcoin
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-ethereum
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-monero
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client-serai
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
3  .gitignore (vendored)

@@ -2,11 +2,10 @@ target

# Don't commit any `Cargo.lock` which aren't the workspace's
Cargo.lock
!./Cargo.lock
!/Cargo.lock

# Don't commit any `Dockerfile`, as they're auto-generated, except the only one which isn't
Dockerfile
Dockerfile.fast-epoch
!orchestration/runtime/Dockerfile

.test-logs
1186  Cargo.lock (generated)

File diff suppressed because it is too large.
73  Cargo.toml

@@ -62,13 +62,14 @@ members = [
"processor/ethereum/primitives",
"processor/ethereum/test-primitives",
"processor/ethereum/deployer",
"processor/ethereum/router",
"processor/ethereum/erc20",
"processor/ethereum/router",
"processor/ethereum",
"processor/monero",

"coordinator/tributary-sdk/tendermint",
"coordinator/tributary-sdk",
"coordinator/cosign/types",
"coordinator/cosign",
"coordinator/substrate",
"coordinator/tributary",

@@ -77,34 +78,25 @@ members = [
"coordinator",

"substrate/primitives",

"substrate/coins/primitives",
"substrate/coins/pallet",

"substrate/dex/pallet",

"substrate/validator-sets/primitives",
"substrate/validator-sets/pallet",

"substrate/genesis-liquidity/primitives",
"substrate/genesis-liquidity/pallet",

"substrate/emissions/primitives",
"substrate/emissions/pallet",

"substrate/economic-security/pallet",

"substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",

"substrate/signals/primitives",
"substrate/signals/pallet",

"substrate/abi",

"substrate/core",
"substrate/coins",
"substrate/validator-sets",
"substrate/signals",
"substrate/dex",
"substrate/genesis-liquidity",
"substrate/economic-security",
"substrate/emissions",
"substrate/in-instructions",

"substrate/runtime",
"substrate/node",

"substrate/client/bitcoin",
"substrate/client/ethereum",
"substrate/client/monero",
"substrate/client/serai",
"substrate/client",

"orchestration",

@@ -117,6 +109,7 @@ members = [
"tests/message-queue",
# TODO "tests/processor",
# TODO "tests/coordinator",
"tests/substrate",
# TODO "tests/full-stack",
"tests/reproducible-runtime",
]
@@ -172,22 +165,34 @@ panic = "unwind"
overflow-checks = true

[patch.crates-io]
# Point to empty crates for unused crates in our tree
ark-ff-3 = { package = "ark-ff", path = "patches/ethereum/ark-ff-0.3" }
ark-ff-4 = { package = "ark-ff", path = "patches/ethereum/ark-ff-0.4" }
c-kzg = { path = "patches/ethereum/c-kzg" }
secp256k1-30 = { package = "secp256k1", path = "patches/ethereum/secp256k1-30" }

# Dependencies from monero-oxide which originate from within our own tree
std-shims = { path = "patches/std-shims" }
simple-request = { path = "patches/simple-request" }
multiexp = { path = "crypto/multiexp" }
flexible-transcript = { path = "crypto/transcript" }
ciphersuite = { path = "patches/ciphersuite" }
dalek-ff-group = { path = "patches/dalek-ff-group" }
dalek-ff-group = { path = "crypto/dalek-ff-group" }
minimal-ed448 = { path = "crypto/ed448" }
modular-frost = { path = "crypto/frost" }

# Patch due to `std` now including the required functionality
is_terminal_polyfill = { path = "./patches/is_terminal_polyfill" }
# This has a non-deprecated `std` alternative since Rust's 2024 edition
home = { path = "patches/home" }

# Updates to the latest version
darling = { path = "patches/darling" }
thiserror = { path = "patches/thiserror" }

# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }

# These has an `std` alternative since Rust's 2024 edition
home = { path = "patches/home" }

# directories-next was created because directories was unmaintained
# directories-next is now unmaintained while directories is maintained
# The directories author pulls in ridiculously pointless crates and prefers

@@ -196,16 +201,22 @@ home = { path = "patches/home" }
option-ext = { path = "patches/option-ext" }
directories-next = { path = "patches/directories-next" }

# Patch to include `FromUniformBytes<64>` over Scalar
# Patch from a fork back to upstream
parity-bip39 = { path = "patches/parity-bip39" }

# Patch to include `FromUniformBytes<64>` over `Scalar`
k256 = { git = "https://github.com/kayabaNerve/elliptic-curves", rev = "4994c9ab163781a88cd4a49beae812a89a44e8c3" }
p256 = { git = "https://github.com/kayabaNerve/elliptic-curves", rev = "4994c9ab163781a88cd4a49beae812a89a44e8c3" }

# `jemalloc` conflicts with `mimalloc`, so patch to a `rocksdb` which never uses `jemalloc`
librocksdb-sys = { path = "patches/librocksdb-sys" }

[workspace.lints.clippy]
incompatible_msrv = "allow" # Manually verified with a GitHub workflow
manual_is_multiple_of = "allow"
unwrap_or_default = "allow"
map_unwrap_or = "allow"
needless_continue = "allow"
manual_is_multiple_of = "allow"
incompatible_msrv = "allow" # Manually verified with a GitHub workflow
borrow_as_ptr = "deny"
cast_lossless = "deny"
cast_possible_truncation = "deny"
@@ -17,7 +17,7 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true

[dependencies]
parity-db = { version = "0.5", default-features = false, optional = true }
parity-db = { version = "0.5", default-features = false, features = ["arc"], optional = true }
rocksdb = { version = "0.24", default-features = false, features = ["zstd"], optional = true }

[features]
@@ -15,7 +15,7 @@ pub fn serai_db_key(
///
/// Creates a unit struct and a default implementation for the `key`, `get`, and `set`. The macro
/// uses a syntax similar to defining a function. Parameters are concatenated to produce a key,
/// they must be `scale` encodable. The return type is used to auto encode and decode the database
/// they must be `borsh` serializable. The return type is used to auto (de)serialize the database
/// value bytes using `borsh`.
///
/// # Arguments

@@ -54,11 +54,10 @@ macro_rules! create_db {
)?;
impl$(<$($generic_name: $generic_type),+>)? $field_name$(<$($generic_name),+>)? {
pub(crate) fn key($($arg: $arg_type),*) -> Vec<u8> {
use scale::Encode;
$crate::serai_db_key(
stringify!($db_name).as_bytes(),
stringify!($field_name).as_bytes(),
($($arg),*).encode()
&borsh::to_vec(&($($arg),*)).unwrap(),
)
}
pub(crate) fn set(
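The change above swaps the key's argument encoding from SCALE to borsh. As a minimal illustration of what that argument encoding amounts to outside the macro (the function and parameter names below are hypothetical; only the `borsh::to_vec` call over a tuple is taken from the diff):

```rust
// Illustrative only: `create_db!` serializes the key arguments as a tuple with
// borsh, then hands the bytes to `serai_db_key` along with the database and
// field names.
fn example_key_args(session: u32, attempt: u16) -> Vec<u8> {
  // Tuples of `BorshSerialize` types are encoded field by field, so the
  // parameters end up concatenated in declaration order, as the documentation
  // above describes.
  borsh::to_vec(&(session, attempt)).unwrap()
}
```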
@@ -35,6 +35,9 @@ mod mutex_shim {
pub use mutex_shim::{ShimMutex as Mutex, MutexGuard};

#[rustversion::before(1.80)]
pub use spin::Lazy as LazyLock;

#[rustversion::since(1.80)]
#[cfg(not(feature = "std"))]
pub use spin::Lazy as LazyLock;
#[rustversion::since(1.80)]
@@ -31,7 +31,6 @@ frost = { package = "modular-frost", path = "../crypto/frost" }
frost-schnorrkel = { path = "../crypto/schnorrkel" }

hex = { version = "0.4", default-features = false, features = ["std"] }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive", "bit-vec"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }

zalloc = { path = "../common/zalloc" }

@@ -43,7 +42,7 @@ messages = { package = "serai-processor-messages", path = "../processor/messages
message-queue = { package = "serai-message-queue", path = "../message-queue" }
tributary-sdk = { path = "./tributary-sdk" }

serai-client = { path = "../substrate/client", default-features = false, features = ["serai", "borsh"] }
serai-client = { path = "../substrate/client", default-features = false, features = ["serai"] }

log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
@@ -19,11 +19,9 @@ workspace = true

[dependencies]
blake2 = { version = "0.11.0-rc.0", default-features = false, features = ["alloc"] }
schnorrkel = { version = "0.11", default-features = false, features = ["std"] }

scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
serai-client = { path = "../../substrate/client", default-features = false, features = ["serai", "borsh"] }
serai-abi = { path = "../../substrate/abi", default-features = false, features = ["std"] }

log = { version = "0.4", default-features = false, features = ["std"] }

@@ -31,3 +29,5 @@ tokio = { version = "1", default-features = false }

serai-db = { path = "../../common/db", version = "0.1.1" }
serai-task = { path = "../../common/task", version = "0.1" }

serai-cosign-types = { path = "./types" }
@@ -1,11 +1,10 @@
use core::future::Future;
use std::{sync::Arc, collections::HashMap};

use serai_client::{
primitives::{SeraiAddress, Amount},
validator_sets::primitives::ExternalValidatorSet,
Serai,
use serai_abi::primitives::{
balance::Amount, validator_sets::ExternalValidatorSet, address::SeraiAddress,
};
use serai_client::Serai;

use serai_db::*;
use serai_task::ContinuallyRan;

@@ -37,7 +36,7 @@ async fn block_has_events_justifying_a_cosign(
block_number: u64,
) -> Result<(Block, HasEvents), String> {
let block = serai
.finalized_block_by_number(block_number)
.block_by_number(block_number)
.await
.map_err(|e| format!("{e:?}"))?
.ok_or_else(|| "couldn't get block which should've been finalized".to_string())?;

@@ -67,7 +66,7 @@ impl<D: Db> ContinuallyRan for CosignIntendTask<D> {
async move {
let start_block_number = ScanCosignFrom::get(&self.db).unwrap_or(1);
let latest_block_number =
self.serai.latest_finalized_block().await.map_err(|e| format!("{e:?}"))?.number();
self.serai.latest_finalized_block_number().await.map_err(|e| format!("{e:?}"))?.number();

for block_number in start_block_number ..= latest_block_number {
let mut txn = self.db.txn();
@@ -7,18 +7,24 @@ use std::{sync::Arc, collections::HashMap, time::Instant};

use blake2::{Digest, Blake2s256};

use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize};

use serai_client::{
primitives::{ExternalNetworkId, SeraiAddress},
validator_sets::primitives::{Session, ExternalValidatorSet, KeyPair},
Public, Block, Serai, TemporalSerai,
use serai_abi::{
primitives::{
crypto::{Public, KeyPair},
network_id::ExternalNetworkId,
validator_sets::{Session, ExternalValidatorSet},
address::SeraiAddress,
},
Block,
};
use serai_client::{Serai, TemporalSerai};

use serai_db::*;
use serai_task::*;

use serai_cosign_types::*;

/// The cosigns which are intended to be performed.
mod intend;
/// The evaluator of the cosigns.

@@ -28,9 +34,6 @@ mod delay;
pub use delay::BROADCAST_FREQUENCY;
use delay::LatestCosignedBlockNumber;

/// The schnorrkel context to used when signing a cosign.
pub const COSIGN_CONTEXT: &[u8] = b"/serai/coordinator/cosign";

/// A 'global session', defined as all validator sets used for cosigning at a given moment.
///
/// We evaluate cosign faults within a global session. This ensures even if cosigners cosign

@@ -78,68 +81,6 @@ enum HasEvents {
No,
}

/// An intended cosign.
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct CosignIntent {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// If this cosign must be handled before further cosigns are.
pub notable: bool,
}

/// A cosign.
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, BorshSerialize, BorshDeserialize)]
pub struct Cosign {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// The actual cosigner.
pub cosigner: ExternalNetworkId,
}

impl CosignIntent {
/// Convert this into a `Cosign`.
pub fn into_cosign(self, cosigner: ExternalNetworkId) -> Cosign {
let CosignIntent { global_session, block_number, block_hash, notable: _ } = self;
Cosign { global_session, block_number, block_hash, cosigner }
}
}

impl Cosign {
/// The message to sign to sign this cosign.
///
/// This must be signed with schnorrkel, the context set to `COSIGN_CONTEXT`.
pub fn signature_message(&self) -> Vec<u8> {
// We use a schnorrkel context to domain-separate this
self.encode()
}
}

/// A signed cosign.
#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
pub struct SignedCosign {
/// The cosign.
pub cosign: Cosign,
/// The signature for the cosign.
pub signature: [u8; 64],
}

impl SignedCosign {
fn verify_signature(&self, signer: serai_client::Public) -> bool {
let Ok(signer) = schnorrkel::PublicKey::from_bytes(&signer.0) else { return false };
let Ok(signature) = schnorrkel::Signature::from_bytes(&self.signature) else { return false };

signer.verify_simple(COSIGN_CONTEXT, &self.cosign.signature_message(), &signature).is_ok()
}
}

create_db! {
Cosign {
// The following are populated by the intend task and used throughout the library

@@ -219,8 +160,8 @@ async fn keys_for_network(
async fn cosigning_sets(
serai: &TemporalSerai<'_>,
) -> Result<Vec<(ExternalValidatorSet, Public)>, String> {
let mut sets = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
for network in serai_client::primitives::EXTERNAL_NETWORKS {
let mut sets = vec![];
for network in ExternalNetworkId::all() {
let Some((session, keys)) = keys_for_network(serai, network).await? else {
// If this network doesn't have usable keys, move on
continue;

@@ -346,8 +287,8 @@ impl<D: Db> Cosigning<D> {
/// If this global session hasn't produced any notable cosigns, this will return the latest
/// cosigns for this session.
pub fn notable_cosigns(getter: &impl Get, global_session: [u8; 32]) -> Vec<SignedCosign> {
let mut cosigns = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
for network in serai_client::primitives::EXTERNAL_NETWORKS {
let mut cosigns = vec![];
for network in ExternalNetworkId::all() {
if let Some(cosign) = NetworksLatestCosignedBlock::get(getter, global_session, network) {
cosigns.push(cosign);
}

@@ -364,7 +305,7 @@ impl<D: Db> Cosigning<D> {
let mut cosigns = Faults::get(&self.db, faulted).expect("faulted with no faults");
// Also include all of our recognized-as-honest cosigns in an attempt to induce fault
// identification in those who see the faulty cosigns as honest
for network in serai_client::primitives::EXTERNAL_NETWORKS {
for network in ExternalNetworkId::all() {
if let Some(cosign) = NetworksLatestCosignedBlock::get(&self.db, faulted, network) {
if cosign.cosign.global_session == faulted {
cosigns.push(cosign);

@@ -376,8 +317,8 @@ impl<D: Db> Cosigning<D> {
let Some(global_session) = evaluator::currently_evaluated_global_session(&self.db) else {
return vec![];
};
let mut cosigns = Vec::with_capacity(serai_client::primitives::EXTERNAL_NETWORKS.len());
for network in serai_client::primitives::EXTERNAL_NETWORKS {
let mut cosigns = vec![];
for network in ExternalNetworkId::all() {
if let Some(cosign) = NetworksLatestCosignedBlock::get(&self.db, global_session, network) {
cosigns.push(cosign);
}
25  coordinator/cosign/types/Cargo.toml (new file)

@@ -0,0 +1,25 @@
[package]
name = "serai-cosign-types"
version = "0.1.0"
description = "Evaluator of cosigns for the Serai network"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coordinator/cosign"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
rust-version = "1.85"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
schnorrkel = { version = "0.11", default-features = false, features = ["std"] }

borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }

serai-primitives = { path = "../../../substrate/primitives", default-features = false, features = ["std"] }
72  coordinator/cosign/types/src/lib.rs (new file)

@@ -0,0 +1,72 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![deny(missing_docs)]
//! Types used when cosigning Serai. For more info, please see `serai-cosign`.
use borsh::{BorshSerialize, BorshDeserialize};

use serai_primitives::{crypto::Public, network_id::ExternalNetworkId};

/// The schnorrkel context to used when signing a cosign.
pub const COSIGN_CONTEXT: &[u8] = b"/serai/coordinator/cosign";

/// An intended cosign.
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct CosignIntent {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// If this cosign must be handled before further cosigns are.
pub notable: bool,
}

/// A cosign.
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct Cosign {
/// The global session this cosign is being performed under.
pub global_session: [u8; 32],
/// The number of the block to cosign.
pub block_number: u64,
/// The hash of the block to cosign.
pub block_hash: [u8; 32],
/// The actual cosigner.
pub cosigner: ExternalNetworkId,
}

impl CosignIntent {
/// Convert this into a `Cosign`.
pub fn into_cosign(self, cosigner: ExternalNetworkId) -> Cosign {
let CosignIntent { global_session, block_number, block_hash, notable: _ } = self;
Cosign { global_session, block_number, block_hash, cosigner }
}
}

impl Cosign {
/// The message to sign to sign this cosign.
///
/// This must be signed with schnorrkel, the context set to `COSIGN_CONTEXT`.
pub fn signature_message(&self) -> Vec<u8> {
// We use a schnorrkel context to domain-separate this
borsh::to_vec(self).unwrap()
}
}

/// A signed cosign.
#[derive(Clone, Debug, BorshSerialize, BorshDeserialize)]
pub struct SignedCosign {
/// The cosign.
pub cosign: Cosign,
/// The signature for the cosign.
pub signature: [u8; 64],
}

impl SignedCosign {
/// Verify a cosign's signature.
pub fn verify_signature(&self, signer: Public) -> bool {
let Ok(signer) = schnorrkel::PublicKey::from_bytes(&signer.0) else { return false };
let Ok(signature) = schnorrkel::Signature::from_bytes(&self.signature) else { return false };

signer.verify_simple(COSIGN_CONTEXT, &self.cosign.signature_message(), &signature).is_ok()
}
}
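As a usage sketch of these types: producing a `SignedCosign` is signing `signature_message()` under `COSIGN_CONTEXT` with schnorrkel, and `verify_signature` is its mirror image. The helper below is not part of the crate; it assumes a local `schnorrkel::Keypair` stands in for the cosigning key, which in practice is produced threshold-wise by the validator set.

```rust
use schnorrkel::Keypair;

// Hypothetical helper: sign a `Cosign` with a local schnorrkel keypair and
// wrap it as a `SignedCosign`.
fn sign_cosign(cosign: Cosign, keypair: &Keypair) -> SignedCosign {
  // `signature_message` is the borsh encoding of the cosign; domain separation
  // comes from the schnorrkel signing context, `COSIGN_CONTEXT`.
  let signature =
    keypair.sign_simple(COSIGN_CONTEXT, &cosign.signature_message()).to_bytes();
  SignedCosign { cosign, signature }
}
```

Verification then parses the signer's `Public` key and the 64-byte signature and calls `verify_simple` with the same context and message, as `SignedCosign::verify_signature` above does.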
@@ -22,7 +22,7 @@ borsh = { version = "1", default-features = false, features = ["std", "derive",

serai-db = { path = "../../common/db", version = "0.1" }

serai-client = { path = "../../substrate/client", default-features = false, features = ["serai", "borsh"] }
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
serai-cosign = { path = "../cosign" }
tributary-sdk = { path = "../tributary-sdk" }

@@ -29,7 +29,7 @@ schnorrkel = { version = "0.11", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }

serai-client = { path = "../../../substrate/client", default-features = false, features = ["serai", "borsh"] }
serai-client = { path = "../../../substrate/client", default-features = false, features = ["serai"] }
serai-cosign = { path = "../../cosign" }
tributary-sdk = { path = "../../tributary-sdk" }
@@ -1,7 +1,7 @@
use core::future::Future;
use std::time::{Duration, SystemTime};

use serai_client::validator_sets::primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet};
use serai_primitives::{MAX_KEY_SHARES_PER_SET, ExternalValidatorSet};

use futures_lite::FutureExt;

@@ -7,7 +7,7 @@ use std::collections::HashMap;

use borsh::{BorshSerialize, BorshDeserialize};

use serai_client::{primitives::ExternalNetworkId, validator_sets::primitives::ExternalValidatorSet};
use serai_primitives::{network_id::ExternalNetworkId, validator_sets::ExternalValidatorSet};

use serai_db::Db;
use tributary_sdk::{ReadWrite, TransactionTrait, Tributary, TributaryReader};

@@ -11,7 +11,6 @@ use tokio::sync::mpsc;

use serai_db::{Get, DbTxn, Db as DbTrait, create_db, db_channel};

use scale::Encode;
use serai_client::validator_sets::primitives::ExternalValidatorSet;

use tributary_sdk::{TransactionKind, TransactionError, ProvidedError, TransactionTrait, Tributary};

@@ -479,7 +478,8 @@ pub(crate) async fn spawn_tributary<P: P2p>(
return;
}

let genesis = <[u8; 32]>::from(Blake2s::<U32>::digest((set.serai_block, set.set).encode()));
let genesis =
<[u8; 32]>::from(Blake2s::<U32>::digest(borsh::to_vec(&(set.serai_block, set.set)).unwrap()));

// Since the Serai block will be finalized, then cosigned, before we handle this, this time will
// be a couple of minutes stale. While the Tributary will still function with a start time in the
@@ -20,12 +20,11 @@ workspace = true
[dependencies]
bitvec = { version = "1", default-features = false, features = ["std"] }

scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive", "bit-vec"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }

dkg = { path = "../../crypto/dkg", default-features = false, features = ["std"] }

serai-client = { path = "../../substrate/client", version = "0.1", default-features = false, features = ["serai", "borsh"] }
serai-client = { path = "../../substrate/client", version = "0.1", default-features = false, features = ["serai"] }

log = { version = "0.4", default-features = false, features = ["std"] }

@@ -4,7 +4,6 @@

use std::collections::HashMap;

use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize};

use dkg::Participant;

@@ -178,14 +177,13 @@ impl Keys {
signature_participants,
signature,
);
_public_db::Keys::set(txn, set.network, &(set.session, tx.encode()));
_public_db::Keys::set(txn, set.network, &(set.session, tx));
}
pub(crate) fn take(
txn: &mut impl DbTxn,
network: ExternalNetworkId,
) -> Option<(Session, Transaction)> {
let (session, tx) = _public_db::Keys::take(txn, network)?;
Some((session, <_>::decode(&mut tx.as_slice()).unwrap()))
_public_db::Keys::take(txn, network)
}
}

@@ -226,13 +224,12 @@ impl SlashReports {
slash_report,
signature,
);
_public_db::SlashReports::set(txn, set.network, &(set.session, tx.encode()));
_public_db::SlashReports::set(txn, set.network, &(set.session, tx));
}
pub(crate) fn take(
txn: &mut impl DbTxn,
network: ExternalNetworkId,
) -> Option<(Session, Transaction)> {
let (session, tx) = _public_db::SlashReports::take(txn, network)?;
Some((session, <_>::decode(&mut tx.as_slice()).unwrap()))
_public_db::SlashReports::take(txn, network)
}
}
@@ -36,7 +36,7 @@ log = { version = "0.4", default-features = false, features = ["std"] }

serai-db = { path = "../../common/db", version = "0.1" }

scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
futures-util = { version = "0.3", default-features = false, features = ["std", "sink", "channel"] }
futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] }
tendermint = { package = "tendermint-machine", path = "./tendermint", version = "0.2" }

@@ -5,7 +5,7 @@ use ciphersuite::{group::GroupEncoding, *};

use serai_db::{Get, DbTxn, Db};

use scale::Decode;
use borsh::BorshDeserialize;

use tendermint::ext::{Network, Commit};

@@ -62,7 +62,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
D::key(
b"tributary_blockchain",
b"next_nonce",
[genesis.as_ref(), signer.to_bytes().as_ref(), order].concat(),
[genesis.as_slice(), signer.to_bytes().as_slice(), order].concat(),
)
}

@@ -109,7 +109,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {

pub(crate) fn block_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Block<T>> {
db.get(Self::block_key(&genesis, block))
.map(|bytes| Block::<T>::read::<&[u8]>(&mut bytes.as_ref()).unwrap())
.map(|bytes| Block::<T>::read::<&[u8]>(&mut bytes.as_slice()).unwrap())
}

pub(crate) fn commit_from_db(db: &D, genesis: [u8; 32], block: &[u8; 32]) -> Option<Vec<u8>> {

@@ -169,7 +169,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
// we must have a commit per valid hash
let commit = Self::commit_from_db(db, genesis, &hash).unwrap();
// commit has to be valid if it is coming from our db
Some(Commit::<N::SignatureScheme>::decode(&mut commit.as_ref()).unwrap())
Some(Commit::<N::SignatureScheme>::deserialize_reader(&mut commit.as_slice()).unwrap())
};
let unsigned_in_chain =
|hash: [u8; 32]| db.get(Self::unsigned_included_key(&self.genesis, &hash)).is_some();

@@ -244,7 +244,7 @@ impl<D: Db, T: TransactionTrait> Blockchain<D, T> {
let commit = |block: u64| -> Option<Commit<N::SignatureScheme>> {
let commit = self.commit_by_block_number(block)?;
// commit has to be valid if it is coming from our db
Some(Commit::<N::SignatureScheme>::decode(&mut commit.as_ref()).unwrap())
Some(Commit::<N::SignatureScheme>::deserialize_reader(&mut commit.as_slice()).unwrap())
};

let mut txn_db = db.clone();
@@ -3,10 +3,11 @@ use std::{sync::Arc, io};

use zeroize::Zeroizing;

use borsh::BorshDeserialize;

use ciphersuite::*;
use dalek_ff_group::Ristretto;

use scale::Decode;
use futures_channel::mpsc::UnboundedReceiver;
use futures_util::{StreamExt, SinkExt};
use ::tendermint::{

@@ -177,7 +178,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {
let block_number = BlockNumber(blockchain.block_number());

let start_time = if let Some(commit) = blockchain.commit(&blockchain.tip()) {
Commit::<Validators>::decode(&mut commit.as_ref()).unwrap().end_time
Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap().end_time
} else {
start_time
};

@@ -276,8 +277,8 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {
}

let block = TendermintBlock(block.serialize());
let mut commit_ref = commit.as_ref();
let Ok(commit) = Commit::<Arc<Validators>>::decode(&mut commit_ref) else {
let mut commit_ref = commit.as_slice();
let Ok(commit) = Commit::<Arc<Validators>>::deserialize_reader(&mut commit_ref) else {
log::error!("sent an invalidly serialized commit");
return false;
};

@@ -327,7 +328,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Tributary<D, T, P> {

Some(&TENDERMINT_MESSAGE) => {
let Ok(msg) =
SignedMessageFor::<TendermintNetwork<D, T, P>>::decode::<&[u8]>(&mut &msg[1 ..])
SignedMessageFor::<TendermintNetwork<D, T, P>>::deserialize_reader(&mut &msg[1 ..])
else {
log::error!("received invalid tendermint message");
return false;

@@ -367,15 +368,17 @@ impl<D: Db, T: TransactionTrait> TributaryReader<D, T> {
Blockchain::<D, T>::commit_from_db(&self.0, self.1, hash)
}
pub fn parsed_commit(&self, hash: &[u8; 32]) -> Option<Commit<Validators>> {
self.commit(hash).map(|commit| Commit::<Validators>::decode(&mut commit.as_ref()).unwrap())
self
.commit(hash)
.map(|commit| Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap())
}
pub fn block_after(&self, hash: &[u8; 32]) -> Option<[u8; 32]> {
Blockchain::<D, T>::block_after(&self.0, self.1, hash)
}
pub fn time_of_block(&self, hash: &[u8; 32]) -> Option<u64> {
self
.commit(hash)
.map(|commit| Commit::<Validators>::decode(&mut commit.as_ref()).unwrap().end_time)
self.commit(hash).map(|commit| {
Commit::<Validators>::deserialize_reader(&mut commit.as_slice()).unwrap().end_time
})
}

pub fn locally_provided_txs_in_block(&self, hash: &[u8; 32], order: &str) -> bool {
@@ -21,7 +21,7 @@ use schnorr::{

use serai_db::Db;

use scale::{Encode, Decode};
use borsh::{BorshSerialize, BorshDeserialize};
use tendermint::{
SignedMessageFor,
ext::{

@@ -248,7 +248,7 @@ impl Weights for Validators {
}
}

#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)]
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
pub struct TendermintBlock(pub Vec<u8>);
impl BlockTrait for TendermintBlock {
type Id = [u8; 32];

@@ -300,7 +300,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Network for TendermintNetwork<D, T, P>
fn broadcast(&mut self, msg: SignedMessageFor<Self>) -> impl Send + Future<Output = ()> {
async move {
let mut to_broadcast = vec![TENDERMINT_MESSAGE];
to_broadcast.extend(msg.encode());
msg.serialize(&mut to_broadcast).unwrap();
self.p2p.broadcast(self.genesis, to_broadcast).await
}
}

@@ -390,7 +390,7 @@ impl<D: Db, T: TransactionTrait, P: P2p> Network for TendermintNetwork<D, T, P>
return invalid_block();
};

let encoded_commit = commit.encode();
let encoded_commit = borsh::to_vec(&commit).unwrap();
loop {
let block_res = self.blockchain.write().await.add_block::<Self>(
&block,
@@ -1,6 +1,6 @@
use std::io;

use scale::{Encode, Decode, IoReader};
use borsh::BorshDeserialize;

use blake2::{Digest, Blake2s256};

@@ -27,14 +27,14 @@ pub enum TendermintTx {

impl ReadWrite for TendermintTx {
fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
Evidence::decode(&mut IoReader(reader))
Evidence::deserialize_reader(reader)
.map(TendermintTx::SlashEvidence)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid evidence format"))
}

fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
TendermintTx::SlashEvidence(ev) => writer.write_all(&ev.encode()),
TendermintTx::SlashEvidence(ev) => writer.write_all(&borsh::to_vec(&ev).unwrap()),
}
}
}
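The `ReadWrite` rewrite above is the migration pattern applied throughout this compare: SCALE's `Encode`/`Decode` pair is replaced by `borsh::to_vec` for writing and `BorshDeserialize::deserialize_reader` for reading. A self-contained round trip of that pattern, using a hypothetical payload type for illustration:

```rust
use borsh::{BorshSerialize, BorshDeserialize};

// Hypothetical stand-in for a transaction payload.
#[derive(Debug, PartialEq, BorshSerialize, BorshDeserialize)]
struct Payload {
  round: u32,
  bytes: Vec<u8>,
}

fn round_trip(payload: &Payload) -> std::io::Result<Payload> {
  // Writing: `borsh::to_vec` replaces SCALE's `.encode()`.
  let serialized = borsh::to_vec(payload)?;
  // Reading: `deserialize_reader` accepts any `io::Read`, here a byte slice.
  Payload::deserialize_reader(&mut serialized.as_slice())
}
```

Round-tripping a value this way returns an equal `Payload`, which is the property the rewritten `read`/`write` pair relies on.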
@@ -10,8 +10,6 @@ use dalek_ff_group::Ristretto;
use ciphersuite::*;
use schnorr::SchnorrSignature;

use scale::Encode;

use ::tendermint::{
ext::{Network, Signer as SignerTrait, SignatureScheme, BlockNumber, RoundNumber},
SignedMessageFor, DataFor, Message, SignedMessage, Data, Evidence,

@@ -200,7 +198,7 @@ pub async fn signed_from_data<N: Network>(
round: RoundNumber(round_number),
data,
};
let sig = signer.sign(&msg.encode()).await;
let sig = signer.sign(&borsh::to_vec(&msg).unwrap()).await;
SignedMessage { msg, sig }
}

@@ -213,5 +211,5 @@ pub async fn random_evidence_tx<N: Network>(
let data = Data::Proposal(Some(RoundNumber(0)), b);
let signer_id = signer.validator_id().await.unwrap();
let signed = signed_from_data::<N>(signer, signer_id, 0, 0, data).await;
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode()))
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap()))
}
@@ -6,8 +6,6 @@ use rand::{RngCore, rngs::OsRng};
use dalek_ff_group::Ristretto;
use ciphersuite::*;

use scale::Encode;

use tendermint::{
time::CanonicalInstant,
round::RoundData,

@@ -52,7 +50,10 @@ async fn invalid_valid_round() {
async move {
let data = Data::Proposal(valid_round, TendermintBlock(vec![]));
let signed = signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, data).await;
(signed.clone(), TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode())))
(
signed.clone(),
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap())),
)
}
};

@@ -70,7 +71,8 @@ async fn invalid_valid_round() {
let mut random_sig = [0u8; 64];
OsRng.fill_bytes(&mut random_sig);
signed.sig = random_sig;
let tx = TendermintTx::SlashEvidence(Evidence::InvalidValidRound(signed.encode()));
let tx =
TendermintTx::SlashEvidence(Evidence::InvalidValidRound(borsh::to_vec(&signed).unwrap()));

// should fail
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());

@@ -90,7 +92,10 @@ async fn invalid_precommit_signature() {
let signed =
signed_from_data::<N>(signer.clone().into(), signer_id, 1, 0, Data::Precommit(precommit))
.await;
(signed.clone(), TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(signed.encode())))
(
signed.clone(),
TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap())),
)
}
};

@@ -120,7 +125,8 @@ async fn invalid_precommit_signature() {
let mut random_sig = [0u8; 64];
OsRng.fill_bytes(&mut random_sig);
signed.sig = random_sig;
let tx = TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(signed.encode()));
let tx =
TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap()));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
}
}

@@ -138,24 +144,32 @@ async fn evidence_with_prevote() {
// it should fail for all reasons.
let mut txs = vec![];
txs.push(TendermintTx::SlashEvidence(Evidence::InvalidPrecommit(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await
.encode(),
borsh::to_vec(
&&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await,
)
.unwrap(),
)));
txs.push(TendermintTx::SlashEvidence(Evidence::InvalidValidRound(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await
.encode(),
borsh::to_vec(
&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await,
)
.unwrap(),
)));
// Since these require a second message, provide this one again
// ConflictingMessages can be fired for actually conflicting Prevotes however
txs.push(TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await
.encode(),
signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await
.encode(),
borsh::to_vec(
&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await,
)
.unwrap(),
borsh::to_vec(
&signed_from_data::<N>(signer.clone().into(), signer_id, 0, 0, Data::Prevote(block_id))
.await,
)
.unwrap(),
)));
txs
}

@@ -189,16 +203,16 @@ async fn conflicting_msgs_evidence_tx() {
// non-conflicting data should fail
let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x11]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_1.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_1).unwrap(),
));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());

// conflicting data should pass
let signed_2 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap();

@@ -206,16 +220,16 @@ async fn conflicting_msgs_evidence_tx() {
// (except for Precommit)
let signed_2 = signed_for_b_r(0, 1, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();

// Proposals for different block numbers should also fail as evidence
let signed_2 = signed_for_b_r(1, 0, Data::Proposal(None, TendermintBlock(vec![0x22]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
}

@@ -225,16 +239,16 @@ async fn conflicting_msgs_evidence_tx() {
// non-conflicting data should fail
let signed_1 = signed_for_b_r(0, 0, Data::Prevote(Some([0x11; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_1.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_1).unwrap(),
));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());

// conflicting data should pass
let signed_2 = signed_for_b_r(0, 0, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap();

@@ -242,16 +256,16 @@ async fn conflicting_msgs_evidence_tx() {
// (except for Precommit)
let signed_2 = signed_for_b_r(0, 1, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();

// Proposals for different block numbers should also fail as evidence
let signed_2 = signed_for_b_r(1, 0, Data::Prevote(Some([0x22; 32]))).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
verify_tendermint_tx::<N>(&tx, &validators, commit).unwrap_err();
}

@@ -273,8 +287,8 @@ async fn conflicting_msgs_evidence_tx() {
.await;

let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));

// update schema so that we don't fail due to invalid signature

@@ -292,8 +306,8 @@ async fn conflicting_msgs_evidence_tx() {
let signed_1 = signed_for_b_r(0, 0, Data::Proposal(None, TendermintBlock(vec![]))).await;
let signed_2 = signed_for_b_r(0, 0, Data::Prevote(None)).await;
let tx = TendermintTx::SlashEvidence(Evidence::ConflictingMessages(
signed_1.encode(),
signed_2.encode(),
borsh::to_vec(&signed_1).unwrap(),
borsh::to_vec(&signed_2).unwrap(),
));
assert!(verify_tendermint_tx::<N>(&tx, &validators, commit).is_err());
}
@@ -21,7 +21,7 @@ thiserror = { version = "2", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
log = { version = "0.4", default-features = false, features = ["std"] }

parity-scale-codec = { version = "3", default-features = false, features = ["std", "derive"] }
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }

futures-util = { version = "0.3", default-features = false, features = ["std", "async-await-macro", "sink", "channel"] }
futures-channel = { version = "0.3", default-features = false, features = ["std", "sink"] }
@@ -3,33 +3,41 @@ use std::{sync::Arc, collections::HashSet};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use parity_scale_codec::{Encode, Decode};
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use crate::{SignedMessageFor, SlashEvent, commit_msg};
|
||||
|
||||
/// An alias for a series of traits required for a type to be usable as a validator ID,
|
||||
/// automatically implemented for all types satisfying those traits.
|
||||
pub trait ValidatorId:
|
||||
Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + Encode + Decode
|
||||
Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + BorshSerialize + BorshDeserialize
|
||||
{
|
||||
}
|
||||
impl<V: Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + Encode + Decode> ValidatorId
|
||||
for V
|
||||
#[rustfmt::skip]
|
||||
impl<
|
||||
V: Send + Sync + Clone + Copy + PartialEq + Eq + Hash + Debug + BorshSerialize + BorshDeserialize,
|
||||
> ValidatorId for V
|
||||
{
|
||||
}
|
||||
|
||||
/// An alias for a series of traits required for a type to be usable as a signature,
|
||||
/// automatically implemented for all types satisfying those traits.
|
||||
pub trait Signature: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode {}
|
||||
impl<S: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode> Signature for S {}
|
||||
pub trait Signature:
|
||||
Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize
|
||||
{
|
||||
}
|
||||
impl<S: Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize> Signature
|
||||
for S
|
||||
{
|
||||
}
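Both aliases are blanket-implemented, so a downstream type only needs to derive the Borsh traits alongside the usual marker traits. A minimal sketch (the type below is hypothetical, not part of this diff):

use borsh::{BorshSerialize, BorshDeserialize};

// Hypothetical identifier type: deriving the Borsh traits plus Copy/Eq/Hash/Debug
// is enough for the blanket impls above to make it a `ValidatorId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
struct ExampleValidatorId(u16);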
|
||||
|
||||
// Type aliases which are distinct according to the type system
|
||||
|
||||
/// A struct containing a Block Number, wrapped to have a distinct type.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub struct BlockNumber(pub u64);
|
||||
/// A struct containing a round number, wrapped to have a distinct type.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub struct RoundNumber(pub u32);
|
||||
|
||||
/// A signer for a validator.
|
||||
@@ -127,7 +135,7 @@ impl<S: SignatureScheme> SignatureScheme for Arc<S> {
|
||||
/// A commit for a specific block.
|
||||
///
|
||||
/// The list of validators has weight exceeding the threshold for a valid commit.
|
||||
#[derive(PartialEq, Debug, Encode, Decode)]
|
||||
#[derive(PartialEq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub struct Commit<S: SignatureScheme> {
|
||||
/// End time of the round which created this commit, used as the start time of the next block.
|
||||
pub end_time: u64,
|
||||
@@ -185,7 +193,7 @@ impl<W: Weights> Weights for Arc<W> {
|
||||
}
|
||||
|
||||
/// Simplified error enum representing a block's validity.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error, Encode, Decode)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error, BorshSerialize, BorshDeserialize)]
|
||||
pub enum BlockError {
|
||||
/// Malformed block which is wholly invalid.
|
||||
#[error("invalid block")]
|
||||
@@ -197,9 +205,20 @@ pub enum BlockError {
|
||||
}
|
||||
|
||||
/// Trait representing a Block.
|
||||
pub trait Block: Send + Sync + Clone + PartialEq + Eq + Debug + Encode + Decode {
|
||||
pub trait Block:
|
||||
Send + Sync + Clone + PartialEq + Eq + Debug + BorshSerialize + BorshDeserialize
|
||||
{
|
||||
// Type used to identify blocks. Presumably a cryptographic hash of the block.
|
||||
type Id: Send + Sync + Copy + Clone + PartialEq + Eq + AsRef<[u8]> + Debug + Encode + Decode;
|
||||
type Id: Send
|
||||
+ Sync
|
||||
+ Copy
|
||||
+ Clone
|
||||
+ PartialEq
|
||||
+ Eq
|
||||
+ AsRef<[u8]>
|
||||
+ Debug
|
||||
+ BorshSerialize
|
||||
+ BorshDeserialize;
|
||||
|
||||
/// Return the deterministic, unique ID for this block.
|
||||
fn id(&self) -> Self::Id;
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
#![expect(clippy::cast_possible_truncation)]
|
||||
|
||||
use core::fmt::Debug;
|
||||
|
||||
use std::{
|
||||
@@ -8,7 +6,7 @@ use std::{
|
||||
collections::{VecDeque, HashMap},
|
||||
};
|
||||
|
||||
use parity_scale_codec::{Encode, Decode, IoReader};
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use futures_channel::mpsc;
|
||||
use futures_util::{
|
||||
@@ -43,14 +41,14 @@ pub fn commit_msg(end_time: u64, id: &[u8]) -> Vec<u8> {
|
||||
[&end_time.to_le_bytes(), id].concat()
|
||||
}
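For illustration (not in the diff), the commit message is simply the little-endian end time followed by the raw block ID bytes:

// Sketch: 8 bytes of little-endian end time, then the block ID.
assert_eq!(commit_msg(1, &[0xaa]), vec![1, 0, 0, 0, 0, 0, 0, 0, 0xaa]);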
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Encode, Decode)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum Step {
|
||||
Propose,
|
||||
Prevote,
|
||||
Precommit,
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum Data<B: Block, S: Signature> {
|
||||
Proposal(Option<RoundNumber>, B),
|
||||
Prevote(Option<B::Id>),
|
||||
@@ -92,7 +90,7 @@ impl<B: Block, S: Signature> Data<B, S> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub struct Message<V: ValidatorId, B: Block, S: Signature> {
|
||||
pub sender: V,
|
||||
pub block: BlockNumber,
|
||||
@@ -102,7 +100,7 @@ pub struct Message<V: ValidatorId, B: Block, S: Signature> {
|
||||
}
|
||||
|
||||
/// A signed Tendermint consensus message to be broadcast to the other validators.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub struct SignedMessage<V: ValidatorId, B: Block, S: Signature> {
|
||||
pub msg: Message<V, B, S>,
|
||||
pub sig: S,
|
||||
@@ -119,18 +117,18 @@ impl<V: ValidatorId, B: Block, S: Signature> SignedMessage<V, B, S> {
|
||||
&self,
|
||||
signer: &Scheme,
|
||||
) -> bool {
|
||||
signer.verify(self.msg.sender, &self.msg.encode(), &self.sig)
|
||||
signer.verify(self.msg.sender, &borsh::to_vec(&self.msg).unwrap(), &self.sig)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum SlashReason {
|
||||
FailToPropose,
|
||||
InvalidBlock,
|
||||
InvalidProposer,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum Evidence {
|
||||
ConflictingMessages(Vec<u8>, Vec<u8>),
|
||||
InvalidPrecommit(Vec<u8>),
|
||||
@@ -161,7 +159,7 @@ pub type SignedMessageFor<N> = SignedMessage<
|
||||
>;
|
||||
|
||||
pub fn decode_signed_message<N: Network>(mut data: &[u8]) -> Option<SignedMessageFor<N>> {
|
||||
SignedMessageFor::<N>::decode(&mut data).ok()
|
||||
SignedMessageFor::<N>::deserialize_reader(&mut data).ok()
|
||||
}
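The same serialize-with-`borsh::to_vec` / `deserialize_reader` pairing recurs throughout this commit; a generic round-trip helper, shown only as a sketch (the helper name is ours):

// Sketch of the Borsh round-trip pattern these hunks rely on.
fn borsh_roundtrip<T: borsh::BorshSerialize + borsh::BorshDeserialize>(value: &T) -> Option<T> {
  let bytes = borsh::to_vec(value).ok()?;
  T::deserialize_reader(&mut bytes.as_slice()).ok()
}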
|
||||
|
||||
fn decode_and_verify_signed_message<N: Network>(
|
||||
@@ -341,7 +339,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
target: "tendermint",
|
||||
"proposer for block {}, round {round:?} was {} (me: {res})",
|
||||
self.block.number.0,
|
||||
hex::encode(proposer.encode()),
|
||||
hex::encode(borsh::to_vec(&proposer).unwrap()),
|
||||
);
|
||||
res
|
||||
}
|
||||
@@ -422,7 +420,11 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
// TODO: If the new slash event has evidence, emit to prevent a low-importance slash from
|
||||
// cancelling emission of high-importance slashes
|
||||
if !self.block.slashes.contains(&validator) {
|
||||
log::info!(target: "tendermint", "Slashing validator {}", hex::encode(validator.encode()));
|
||||
log::info!(
|
||||
target: "tendermint",
|
||||
"Slashing validator {}",
|
||||
hex::encode(borsh::to_vec(&validator).unwrap()),
|
||||
);
|
||||
self.block.slashes.insert(validator);
|
||||
self.network.slash(validator, slash_event).await;
|
||||
}
|
||||
@@ -672,7 +674,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
self
|
||||
.slash(
|
||||
msg.sender,
|
||||
SlashEvent::WithEvidence(Evidence::InvalidPrecommit(signed.encode())),
|
||||
SlashEvent::WithEvidence(Evidence::InvalidPrecommit(borsh::to_vec(&signed).unwrap())),
|
||||
)
|
||||
.await;
|
||||
Err(TendermintError::Malicious)?;
|
||||
@@ -743,7 +745,10 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
self.broadcast(Data::Prevote(None));
|
||||
}
|
||||
self
|
||||
.slash(msg.sender, SlashEvent::WithEvidence(Evidence::InvalidValidRound(msg.encode())))
|
||||
.slash(
|
||||
msg.sender,
|
||||
SlashEvent::WithEvidence(Evidence::InvalidValidRound(borsh::to_vec(&msg).unwrap())),
|
||||
)
|
||||
.await;
|
||||
Err(TendermintError::Malicious)?;
|
||||
}
|
||||
@@ -1034,7 +1039,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
|
||||
while !messages.is_empty() {
|
||||
self.network.broadcast(
|
||||
SignedMessageFor::<N>::decode(&mut IoReader(&mut messages))
|
||||
SignedMessageFor::<N>::deserialize_reader(&mut messages)
|
||||
.expect("saved invalid message to DB")
|
||||
).await;
|
||||
}
|
||||
@@ -1059,7 +1064,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
} {
|
||||
if our_message {
|
||||
assert!(sig.is_none());
|
||||
sig = Some(self.signer.sign(&msg.encode()).await);
|
||||
sig = Some(self.signer.sign(&borsh::to_vec(&msg).unwrap()).await);
|
||||
}
|
||||
let sig = sig.unwrap();
|
||||
|
||||
@@ -1079,7 +1084,7 @@ impl<N: Network + 'static> TendermintMachine<N> {
|
||||
let message_tape_key = message_tape_key(self.genesis);
|
||||
let mut txn = self.db.txn();
|
||||
let mut message_tape = txn.get(&message_tape_key).unwrap_or(vec![]);
|
||||
message_tape.extend(signed_msg.encode());
|
||||
signed_msg.serialize(&mut message_tape).unwrap();
|
||||
txn.put(&message_tape_key, message_tape);
|
||||
txn.commit();
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
use std::{sync::Arc, collections::HashMap};
|
||||
|
||||
use parity_scale_codec::Encode;
|
||||
|
||||
use crate::{ext::*, RoundNumber, Step, DataFor, SignedMessageFor, Evidence};
|
||||
|
||||
type RoundLog<N> = HashMap<<N as Network>::ValidatorId, HashMap<Step, SignedMessageFor<N>>>;
|
||||
@@ -39,7 +37,10 @@ impl<N: Network> MessageLog<N> {
|
||||
target: "tendermint",
|
||||
"Validator sent multiple messages for the same block + round + step"
|
||||
);
|
||||
Err(Evidence::ConflictingMessages(existing.encode(), signed.encode()))?;
|
||||
Err(Evidence::ConflictingMessages(
|
||||
borsh::to_vec(&existing).unwrap(),
|
||||
borsh::to_vec(&signed).unwrap(),
|
||||
))?;
|
||||
}
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::{
|
||||
time::{UNIX_EPOCH, SystemTime, Duration},
|
||||
};
|
||||
|
||||
use parity_scale_codec::{Encode, Decode};
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use futures_util::sink::SinkExt;
|
||||
use tokio::{sync::RwLock, time::sleep};
|
||||
@@ -89,7 +89,7 @@ impl Weights for TestWeights {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)]
|
||||
#[derive(Clone, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
struct TestBlock {
|
||||
id: TestBlockId,
|
||||
valid: Result<(), BlockError>,
|
||||
|
||||
@@ -21,7 +21,6 @@ workspace = true
|
||||
zeroize = { version = "^1.5", default-features = false, features = ["std"] }
|
||||
rand_core = { version = "0.6", default-features = false, features = ["std"] }
|
||||
|
||||
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] }
|
||||
borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] }
|
||||
|
||||
blake2 = { version = "0.11.0-rc.0", default-features = false, features = ["alloc"] }
|
||||
@@ -30,7 +29,7 @@ dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = fals
|
||||
dkg = { path = "../../crypto/dkg", default-features = false, features = ["std"] }
|
||||
schnorr = { package = "schnorr-signatures", path = "../../crypto/schnorr", default-features = false, features = ["std"] }
|
||||
|
||||
serai-client = { path = "../../substrate/client", default-features = false, features = ["serai", "borsh"] }
|
||||
serai-primitives = { path = "../../substrate/primitives", default-features = false, features = ["std"] }
|
||||
|
||||
serai-db = { path = "../../common/db" }
|
||||
serai-task = { path = "../../common/task", version = "0.1" }
|
||||
|
||||
@@ -2,10 +2,9 @@
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use scale::Encode;
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use serai_client::{primitives::SeraiAddress, validator_sets::primitives::ExternalValidatorSet};
|
||||
use serai_primitives::{address::SeraiAddress, validator_sets::primitives::ExternalValidatorSet};
|
||||
|
||||
use messages::sign::{VariantSignId, SignId};
|
||||
|
||||
@@ -16,7 +15,7 @@ use serai_cosign::CosignIntent;
|
||||
use crate::transaction::SigningProtocolRound;
|
||||
|
||||
/// A topic within the database which the group participates in
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, BorshSerialize, BorshDeserialize)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum Topic {
|
||||
/// Vote to remove a participant
|
||||
RemoveParticipant {
|
||||
@@ -125,7 +124,7 @@ impl Topic {
|
||||
Topic::DkgConfirmation { attempt, round: _ } => Some({
|
||||
let id = {
|
||||
let mut id = [0; 32];
|
||||
let encoded_set = set.encode();
|
||||
let encoded_set = borsh::to_vec(set).unwrap();
|
||||
id[.. encoded_set.len()].copy_from_slice(&encoded_set);
|
||||
VariantSignId::Batch(id)
|
||||
};
|
||||
|
||||
@@ -8,9 +8,9 @@ use std::collections::HashMap;
|
||||
use ciphersuite::group::GroupEncoding;
|
||||
use dkg::Participant;
|
||||
|
||||
use serai_client::{
|
||||
primitives::SeraiAddress,
|
||||
validator_sets::primitives::{ExternalValidatorSet, Slash},
|
||||
use serai_primitives::{
|
||||
address::SeraiAddress,
|
||||
validator_sets::{ExternalValidatorSet, Slash},
|
||||
};
|
||||
|
||||
use serai_db::*;
|
||||
|
||||
@@ -12,10 +12,9 @@ use ciphersuite::{
|
||||
use dalek_ff_group::Ristretto;
|
||||
use schnorr::SchnorrSignature;
|
||||
|
||||
use scale::Encode;
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use serai_client::{primitives::SeraiAddress, validator_sets::primitives::MAX_KEY_SHARES_PER_SET};
|
||||
use serai_primitives::{address::SeraiAddress, validator_sets::MAX_KEY_SHARES_PER_SET};
|
||||
|
||||
use messages::sign::VariantSignId;
|
||||
|
||||
@@ -29,7 +28,7 @@ use tributary_sdk::{
|
||||
use crate::db::Topic;
|
||||
|
||||
/// The round this data is for, within a signing protocol.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Encode, BorshSerialize, BorshDeserialize)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum SigningProtocolRound {
|
||||
/// A preprocess.
|
||||
Preprocess,
|
||||
@@ -242,19 +241,20 @@ impl TransactionTrait for Transaction {
|
||||
fn kind(&self) -> TransactionKind {
|
||||
match self {
|
||||
Transaction::RemoveParticipant { participant, signed } => TransactionKind::Signed(
|
||||
(b"RemoveParticipant", participant).encode(),
|
||||
borsh::to_vec(&(b"RemoveParticipant".as_slice(), participant)).unwrap(),
|
||||
signed.to_tributary_signed(0),
|
||||
),
|
||||
|
||||
Transaction::DkgParticipation { signed, .. } => {
|
||||
TransactionKind::Signed(b"DkgParticipation".encode(), signed.to_tributary_signed(0))
|
||||
}
|
||||
Transaction::DkgParticipation { signed, .. } => TransactionKind::Signed(
|
||||
borsh::to_vec(b"DkgParticipation".as_slice()).unwrap(),
|
||||
signed.to_tributary_signed(0),
|
||||
),
|
||||
Transaction::DkgConfirmationPreprocess { attempt, signed, .. } => TransactionKind::Signed(
|
||||
(b"DkgConfirmation", attempt).encode(),
|
||||
borsh::to_vec(b"DkgConfirmation".as_slice(), attempt).unwrap(),
|
||||
signed.to_tributary_signed(0),
|
||||
),
|
||||
Transaction::DkgConfirmationShare { attempt, signed, .. } => TransactionKind::Signed(
|
||||
(b"DkgConfirmation", attempt).encode(),
|
||||
borsh::to_vec(b"DkgConfirmation".as_slice(), attempt).unwrap(),
|
||||
signed.to_tributary_signed(1),
|
||||
),
|
||||
|
||||
@@ -264,13 +264,14 @@ impl TransactionTrait for Transaction {
|
||||
Transaction::Batch { .. } => TransactionKind::Provided("Batch"),
|
||||
|
||||
Transaction::Sign { id, attempt, round, signed, .. } => TransactionKind::Signed(
|
||||
(b"Sign", id, attempt).encode(),
|
||||
borsh::to_vec(b"Sign".as_slice(), id, attempt).unwrap(),
|
||||
signed.to_tributary_signed(round.nonce()),
|
||||
),
|
||||
|
||||
Transaction::SlashReport { signed, .. } => {
|
||||
TransactionKind::Signed(b"SlashReport".encode(), signed.to_tributary_signed(0))
|
||||
}
|
||||
Transaction::SlashReport { signed, .. } => TransactionKind::Signed(
|
||||
borsh::to_vec(b"SlashReport".as_slice()).unwrap(),
|
||||
signed.to_tributary_signed(0),
|
||||
),
|
||||
}
|
||||
}
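The signed-transaction keys above are now derived by Borsh-encoding a domain label plus any distinguishing fields; a hedged sketch of the resulting bytes (values assumed):

// Sketch: Borsh length-prefixes a slice with a u32 (LE), so the key starts with
// the label length, then the label bytes, then the extra fields.
let attempt: u32 = 0;
let key = borsh::to_vec(&(b"DkgConfirmation".as_slice(), attempt)).unwrap();
assert_eq!(&key[.. 4], &15u32.to_le_bytes());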
|
||||
|
||||
|
||||
@@ -25,6 +25,7 @@ rand_core = { version = "0.6", default-features = false }
|
||||
sha2 = { version = "0.11.0-rc.2", default-features = false, features = ["zeroize"] }
|
||||
blake2 = { version = "0.11.0-rc.2", default-features = false, features = ["zeroize"] }
|
||||
|
||||
crypto-bigint = { version = "0.5", default-features = false }
|
||||
prime-field = { path = "../prime-field", default-features = false }
|
||||
ciphersuite = { version = "0.4.2", path = "../ciphersuite", default-features = false }
|
||||
|
||||
|
||||
@@ -286,3 +286,21 @@ prime_field::odd_prime_field_with_specific_repr!(
|
||||
false,
|
||||
crate::ThirtyTwoArray
|
||||
);
|
||||
|
||||
impl FieldElement {
|
||||
/// This method is hidden as it's not part of our API commitment and has no guarantees made for
|
||||
/// it. It MAY panic for an undefined class of inputs.
|
||||
// TODO: `monero-oxide` requires this. PR `monero-oxide` to not require this.
|
||||
#[doc(hidden)]
|
||||
pub const fn from_u256(value: &crypto_bigint::U256) -> Self {
|
||||
let mut bytes = [0; 32];
|
||||
|
||||
let mut i = 0;
|
||||
while i < 256 {
|
||||
// Pack bit `i` into byte `i / 8`, at bit position `i % 8` (little-endian)
bytes[i / 8] |= (value.bit_vartime(i) as u8) << (i % 8);
|
||||
i += 1;
|
||||
}
|
||||
|
||||
FieldElement::from_bytes(&bytes).unwrap()
|
||||
}
|
||||
}
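A hedged usage sketch of the constructor above, only for values already reduced below the modulus per the caveat in its docs:

// Sketch: build the field element for 2 from a crypto_bigint::U256 constant.
const TWO: FieldElement = FieldElement::from_u256(&crypto_bigint::U256::from_u8(2));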
|
||||
|
||||
@@ -23,19 +23,12 @@ thiserror = { version = "2", default-features = false }
|
||||
|
||||
std-shims = { version = "0.1", path = "../../common/std-shims", default-features = false, features = ["alloc"] }
|
||||
|
||||
borsh = { version = "1", default-features = false, features = ["derive", "de_strict_order"], optional = true }
|
||||
|
||||
ciphersuite = { path = "../ciphersuite", version = "^0.4.1", default-features = false, features = ["alloc"] }
|
||||
|
||||
[features]
|
||||
std = [
|
||||
"thiserror/std",
|
||||
|
||||
"std-shims/std",
|
||||
|
||||
"borsh?/std",
|
||||
|
||||
"ciphersuite/std",
|
||||
]
|
||||
borsh = ["dep:borsh"]
|
||||
default = ["std"]
|
||||
|
||||
@@ -22,7 +22,6 @@ use ciphersuite::{
|
||||
|
||||
/// The ID of a participant, defined as a non-zero u16.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Zeroize)]
|
||||
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))]
|
||||
pub struct Participant(u16);
|
||||
impl Participant {
|
||||
/// Create a new Participant identifier from a u16.
|
||||
@@ -129,18 +128,8 @@ pub enum DkgError {
|
||||
NotParticipating,
|
||||
}
|
||||
|
||||
// Manually implements BorshDeserialize so we can enforce it's a valid index
|
||||
#[cfg(feature = "borsh")]
|
||||
impl borsh::BorshDeserialize for Participant {
|
||||
fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||
Participant::new(u16::deserialize_reader(reader)?)
|
||||
.ok_or_else(|| io::Error::other("invalid participant"))
|
||||
}
|
||||
}
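Because of the manual impl above, a zero index fails to deserialize. A small sketch, assuming the `borsh` feature is enabled and that `Participant::new` rejects zero, as the non-zero invariant suggests:

// Sketch: index 0 is not a valid Participant, so deserialization must error.
let zero = borsh::to_vec(&0u16).unwrap();
assert!(<Participant as borsh::BorshDeserialize>::deserialize_reader(&mut zero.as_slice()).is_err());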
|
||||
|
||||
/// Parameters for a multisig.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
|
||||
#[cfg_attr(feature = "borsh", derive(borsh::BorshSerialize))]
|
||||
pub struct ThresholdParams {
|
||||
/// Participants needed to sign on behalf of the group.
|
||||
t: u16,
|
||||
@@ -210,16 +199,6 @@ impl ThresholdParams {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "borsh")]
|
||||
impl borsh::BorshDeserialize for ThresholdParams {
|
||||
fn deserialize_reader<R: io::Read>(reader: &mut R) -> io::Result<Self> {
|
||||
let t = u16::deserialize_reader(reader)?;
|
||||
let n = u16::deserialize_reader(reader)?;
|
||||
let i = Participant::deserialize_reader(reader)?;
|
||||
ThresholdParams::new(t, n, i).map_err(|e| io::Error::other(format!("{e:?}")))
|
||||
}
|
||||
}
|
||||
|
||||
/// A method of interpolation.
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
|
||||
pub enum Interpolation<F: Zeroize + PrimeField> {
|
||||
|
||||
deny.toml
@@ -10,7 +10,6 @@ ignore = [
|
||||
"RUSTSEC-2022-0061", # https://github.com/serai-dex/serai/227
|
||||
"RUSTSEC-2024-0370", # proc-macro-error is unmaintained
|
||||
"RUSTSEC-2024-0436", # paste is unmaintained
|
||||
"RUSTSEC-2025-0057", # fxhash is unmaintained, fixed with bytecodealliance/wasmtime/pull/11634
|
||||
]
|
||||
|
||||
[licenses]
|
||||
@@ -29,7 +28,6 @@ allow = [
|
||||
"ISC",
|
||||
"Zlib",
|
||||
"Unicode-3.0",
|
||||
# "OpenSSL", # Commented as it's not currently in-use within the Serai tree
|
||||
"CDLA-Permissive-2.0",
|
||||
|
||||
# Non-invasive copyleft
|
||||
@@ -73,6 +71,7 @@ exceptions = [
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-monero-processor" },
|
||||
|
||||
{ allow = ["AGPL-3.0-only"], name = "tributary-sdk" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-cosign-types" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-cosign" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-coordinator-substrate" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-coordinator-tributary" },
|
||||
@@ -82,6 +81,7 @@ exceptions = [
|
||||
|
||||
{ allow = ["AGPL-3.0-only"], name = "pallet-session" },
|
||||
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-core-pallet" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-coins-pallet" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-dex-pallet" },
|
||||
|
||||
@@ -107,6 +107,7 @@ exceptions = [
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-message-queue-tests" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-processor-tests" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-coordinator-tests" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-substrate-tests" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-full-stack-tests" },
|
||||
{ allow = ["AGPL-3.0-only"], name = "serai-reproducible-runtime-tests" },
|
||||
]
|
||||
@@ -124,12 +125,22 @@ multiple-versions = "warn"
|
||||
wildcards = "warn"
|
||||
highlight = "all"
|
||||
deny = [
|
||||
# Contains a non-reproducible binary blob
|
||||
# https://github.com/serde-rs/serde/pull/2514
|
||||
# https://github.com/serde-rs/serde/issues/2575
|
||||
{ name = "serde_derive", version = ">=1.0.172, <1.0.185" },
|
||||
# Introduced an insecure implementation of `borsh` removed with `0.15.1`
|
||||
# https://github.com/rust-lang/hashbrown/issues/576
|
||||
{ name = "hashbrown", version = "=0.15.0" },
|
||||
|
||||
# Legacy which _no one_ should use anymore
|
||||
{ name = "is-terminal", version = "*" },
|
||||
# Stop introduction into the tree without realizing it
|
||||
{ name = "once_cell_polyfill", version = "*" },
|
||||
|
||||
# Conflicts with our usage of mimalloc
|
||||
# https://github.com/serai-dex/serai/issues/690
|
||||
{ name = "tikv-jemalloc-sys", version = "*" },
|
||||
]
|
||||
|
||||
[sources]
|
||||
@@ -141,5 +152,7 @@ allow-git = [
|
||||
"https://github.com/kayabaNerve/elliptic-curves",
|
||||
"https://github.com/monero-oxide/monero-oxide",
|
||||
"https://github.com/kayabaNerve/monero-oxide",
|
||||
"https://github.com/rust-bitcoin/rust-bip39",
|
||||
"https://github.com/rust-rocksdb/rust-rocksdb",
|
||||
"https://github.com/serai-dex/patch-polkadot-sdk",
|
||||
]
|
||||
|
||||
@@ -46,7 +46,7 @@ serai-db = { path = "../common/db", optional = true }
|
||||
|
||||
serai-env = { path = "../common/env" }
|
||||
|
||||
serai-primitives = { path = "../substrate/primitives", features = ["borsh"] }
|
||||
serai-primitives = { path = "../substrate/primitives", default-features = false, features = ["std"] }
|
||||
|
||||
[features]
|
||||
parity-db = ["serai-db/parity-db"]
|
||||
|
||||
@@ -7,7 +7,7 @@ use dalek_ff_group::Ristretto;
|
||||
pub(crate) use ciphersuite::{group::GroupEncoding, WrappedGroup, GroupCanonicalEncoding};
|
||||
pub(crate) use schnorr_signatures::SchnorrSignature;
|
||||
|
||||
pub(crate) use serai_primitives::ExternalNetworkId;
|
||||
pub(crate) use serai_primitives::network_id::ExternalNetworkId;
|
||||
|
||||
pub(crate) use tokio::{
|
||||
io::{AsyncReadExt, AsyncWriteExt},
|
||||
@@ -198,7 +198,7 @@ async fn main() {
|
||||
KEYS.write().unwrap().insert(service, key);
|
||||
let mut queues = QUEUES.write().unwrap();
|
||||
if service == Service::Coordinator {
|
||||
for network in serai_primitives::EXTERNAL_NETWORKS {
|
||||
for network in ExternalNetworkId::all() {
|
||||
queues.insert(
|
||||
(service, Service::Processor(network)),
|
||||
RwLock::new(Queue(db.clone(), service, Service::Processor(network))),
|
||||
@@ -213,12 +213,13 @@ async fn main() {
|
||||
};
|
||||
|
||||
// Make queues for each ExternalNetworkId
|
||||
for network in serai_primitives::EXTERNAL_NETWORKS {
|
||||
for network in ExternalNetworkId::all() {
|
||||
// Use a match so we error if the list of NetworkIds changes
|
||||
let Some(key) = read_key(match network {
|
||||
ExternalNetworkId::Bitcoin => "BITCOIN_KEY",
|
||||
ExternalNetworkId::Ethereum => "ETHEREUM_KEY",
|
||||
ExternalNetworkId::Monero => "MONERO_KEY",
|
||||
_ => panic!("unrecognized network"),
|
||||
}) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
@@ -4,7 +4,7 @@ use ciphersuite::{group::GroupEncoding, FromUniformBytes, WrappedGroup, WithPref
|
||||
|
||||
use borsh::{BorshSerialize, BorshDeserialize};
|
||||
|
||||
use serai_primitives::ExternalNetworkId;
|
||||
use serai_primitives::network_id::ExternalNetworkId;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, BorshSerialize, BorshDeserialize)]
|
||||
pub enum Service {
|
||||
|
||||
@@ -6,7 +6,7 @@ license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/networks/bitcoin"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.85"
|
||||
rust-version = "1.89"
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
@@ -30,8 +30,8 @@ k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic"
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.11", default-features = false, features = ["secp256k1"] }
|
||||
|
||||
hex = { version = "0.4", default-features = false, optional = true }
|
||||
serde = { version = "1", default-features = false, features = ["derive"], optional = true }
|
||||
serde_json = { version = "1", default-features = false, optional = true }
|
||||
core-json-traits = { version = "0.4", default-features = false, features = ["alloc"], optional = true }
|
||||
core-json-derive = { version = "0.4", default-features = false, optional = true }
|
||||
simple-request = { path = "../../common/request", version = "0.3", default-features = false, features = ["tokio", "tls", "basic-auth"], optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
@@ -52,15 +52,16 @@ std = [
|
||||
"rand_core/std",
|
||||
|
||||
"bitcoin/std",
|
||||
"bitcoin/serde",
|
||||
|
||||
"k256/std",
|
||||
"frost/std",
|
||||
|
||||
]
|
||||
rpc = [
|
||||
"std",
|
||||
"hex/std",
|
||||
"serde/std",
|
||||
"serde_json/std",
|
||||
"core-json-traits",
|
||||
"core-json-derive",
|
||||
"simple-request",
|
||||
]
|
||||
hazmat = []
|
||||
default = ["std"]
|
||||
default = ["std", "rpc"]
|
||||
|
||||
@@ -14,7 +14,7 @@ pub(crate) mod crypto;
|
||||
/// Wallet functionality to create transactions.
|
||||
pub mod wallet;
|
||||
/// A minimal asynchronous Bitcoin RPC client.
|
||||
#[cfg(feature = "std")]
|
||||
#[cfg(feature = "rpc")]
|
||||
pub mod rpc;
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
use core::fmt::Debug;
|
||||
use std::collections::HashSet;
|
||||
use core::{str::FromStr, fmt::Debug};
|
||||
use std::{io::Read, collections::HashSet};
|
||||
|
||||
use thiserror::Error;
|
||||
|
||||
use serde::{Deserialize, de::DeserializeOwned};
|
||||
use serde_json::json;
|
||||
|
||||
use simple_request::{hyper, Request, TokioClient as Client};
|
||||
|
||||
use bitcoin::{
|
||||
@@ -14,19 +11,12 @@ use bitcoin::{
|
||||
Txid, Transaction, BlockHash, Block,
|
||||
};
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Deserialize)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Error {
|
||||
code: isize,
|
||||
message: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum RpcResponse<T> {
|
||||
Ok { result: T },
|
||||
Err { error: Error },
|
||||
}
|
||||
|
||||
/// A minimal asynchronous Bitcoin RPC client.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Rpc {
|
||||
@@ -34,14 +24,14 @@ pub struct Rpc {
|
||||
url: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Error)]
|
||||
#[derive(Clone, Debug, Error)]
|
||||
pub enum RpcError {
|
||||
#[error("couldn't connect to node")]
|
||||
ConnectionError,
|
||||
#[error("request had an error: {0:?}")]
|
||||
RequestError(Error),
|
||||
#[error("node replied with invalid JSON")]
|
||||
InvalidJson(serde_json::error::Category),
|
||||
InvalidJson,
|
||||
#[error("node sent an invalid response ({0})")]
|
||||
InvalidResponse(&'static str),
|
||||
#[error("node was missing expected methods")]
|
||||
@@ -66,7 +56,7 @@ impl Rpc {
|
||||
Rpc { client: Client::with_connection_pool().map_err(|_| RpcError::ConnectionError)?, url };
|
||||
|
||||
// Make an RPC request to verify the node is reachable and sane
|
||||
let res: String = rpc.rpc_call("help", json!([])).await?;
|
||||
let res: String = rpc.call("help", "[]").await?;
|
||||
|
||||
// Verify all methods we expect are present
|
||||
// If we had a more expanded RPC, due to differences in RPC versions, it wouldn't make sense to
|
||||
@@ -103,18 +93,16 @@ impl Rpc {
|
||||
}
|
||||
|
||||
/// Perform an arbitrary RPC call.
|
||||
pub async fn rpc_call<Response: DeserializeOwned + Debug>(
|
||||
pub async fn call<Response: 'static + Default + core_json_traits::JsonDeserialize>(
|
||||
&self,
|
||||
method: &str,
|
||||
params: serde_json::Value,
|
||||
params: &str,
|
||||
) -> Result<Response, RpcError> {
|
||||
let mut request = Request::from(
|
||||
hyper::Request::post(&self.url)
|
||||
.header("Content-Type", "application/json")
|
||||
.body(
|
||||
serde_json::to_vec(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
|
||||
.unwrap()
|
||||
.into(),
|
||||
format!(r#"{{ "method": "{method}", "params": {params} }}"#).as_bytes().to_vec().into(),
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
@@ -129,11 +117,53 @@ impl Rpc {
|
||||
.await
|
||||
.map_err(|_| RpcError::ConnectionError)?;
|
||||
|
||||
let res: RpcResponse<Response> =
|
||||
serde_json::from_reader(&mut res).map_err(|e| RpcError::InvalidJson(e.classify()))?;
|
||||
#[derive(Default, core_json_derive::JsonDeserialize)]
|
||||
struct InternalError {
|
||||
code: Option<i64>,
|
||||
message: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(core_json_derive::JsonDeserialize)]
|
||||
struct RpcResponse<T: core_json_traits::JsonDeserialize> {
|
||||
result: Option<T>,
|
||||
error: Option<InternalError>,
|
||||
}
|
||||
impl<T: core_json_traits::JsonDeserialize> Default for RpcResponse<T> {
|
||||
fn default() -> Self {
|
||||
Self { result: None, error: None }
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: `core_json::ReadAdapter`
|
||||
let mut res_vec = vec![];
|
||||
res.read_to_end(&mut res_vec).map_err(|_| RpcError::ConnectionError)?;
|
||||
let res = <RpcResponse<Response> as core_json_traits::JsonStructure>::deserialize_structure::<
|
||||
_,
|
||||
core_json_traits::ConstStack<32>,
|
||||
>(res_vec.as_slice())
|
||||
.map_err(|_| RpcError::InvalidJson)?;
|
||||
|
||||
match res {
|
||||
RpcResponse::Ok { result } => Ok(result),
|
||||
RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
|
||||
RpcResponse { result: Some(result), error: None } => Ok(result),
|
||||
RpcResponse { result: None, error: Some(error) } => {
|
||||
let code =
|
||||
error.code.ok_or_else(|| RpcError::InvalidResponse("error was missing `code`"))?;
|
||||
let code = isize::try_from(code)
|
||||
.map_err(|_| RpcError::InvalidResponse("error code exceeded isize::MAX"))?;
|
||||
let message =
|
||||
error.message.ok_or_else(|| RpcError::InvalidResponse("error was missing `message`"))?;
|
||||
Err(RpcError::RequestError(Error { code, message }))
|
||||
}
|
||||
// `invalidateblock` yields this edge case
|
||||
// TODO: https://github.com/core-json/core-json/issues/18
|
||||
RpcResponse { result: None, error: None } => {
|
||||
if core::any::TypeId::of::<Response>() == core::any::TypeId::of::<()>() {
|
||||
Ok(Default::default())
|
||||
} else {
|
||||
Err(RpcError::InvalidResponse("response lacked both a result and an error"))
|
||||
}
|
||||
}
|
||||
_ => Err(RpcError::InvalidResponse("response contained both a result and an error")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -146,14 +176,15 @@ impl Rpc {
|
||||
// tip block of the current chain. The "height" of a block is defined as the number of blocks
// present when the block was created. Accordingly, the genesis block has height 0, and
// getblockcount returns 0 when the genesis block is the only block, despite there being one block.
|
||||
self.rpc_call("getblockcount", json!([])).await
|
||||
usize::try_from(self.call::<u64>("getblockcount", "[]").await?)
|
||||
.map_err(|_| RpcError::InvalidResponse("latest block number exceeded usize::MAX"))
|
||||
}
|
||||
|
||||
/// Get the hash of a block by the block's number.
|
||||
pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
|
||||
let mut hash = self
|
||||
.rpc_call::<BlockHash>("getblockhash", json!([number]))
|
||||
.await?
|
||||
let mut hash =
|
||||
BlockHash::from_str(&self.call::<String>("getblockhash", &format!("[{number}]")).await?)
|
||||
.map_err(|_| RpcError::InvalidResponse("block hash was not valid hex"))?
|
||||
.as_raw_hash()
|
||||
.to_byte_array();
|
||||
// bitcoin stores the inner bytes in reverse order.
|
||||
@@ -163,16 +194,25 @@ impl Rpc {
|
||||
|
||||
/// Get a block's number by its hash.
|
||||
pub async fn get_block_number(&self, hash: &[u8; 32]) -> Result<usize, RpcError> {
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[derive(Default, core_json_derive::JsonDeserialize)]
|
||||
struct Number {
|
||||
height: usize,
|
||||
height: Option<u64>,
|
||||
}
|
||||
Ok(self.rpc_call::<Number>("getblockheader", json!([hex::encode(hash)])).await?.height)
|
||||
usize::try_from(
|
||||
self
|
||||
.call::<Number>("getblockheader", &format!(r#"["{}"]"#, hex::encode(hash)))
|
||||
.await?
|
||||
.height
|
||||
.ok_or_else(|| {
|
||||
RpcError::InvalidResponse("`getblockheader` did not include `height` field")
|
||||
})?,
|
||||
)
|
||||
.map_err(|_| RpcError::InvalidResponse("block number exceeded usize::MAX"))
|
||||
}
|
||||
|
||||
/// Get a block by its hash.
|
||||
pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
|
||||
let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
|
||||
let hex = self.call::<String>("getblock", &format!(r#"["{}", 0]"#, hex::encode(hash))).await?;
|
||||
let bytes: Vec<u8> = FromHex::from_hex(&hex)
|
||||
.map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the block"))?;
|
||||
let block: Block = encode::deserialize(&bytes)
|
||||
@@ -189,8 +229,13 @@ impl Rpc {
|
||||
|
||||
/// Publish a transaction.
|
||||
pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
|
||||
let txid = match self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await {
|
||||
Ok(txid) => txid,
|
||||
let txid = match self
|
||||
.call::<String>("sendrawtransaction", &format!(r#"["{}"]"#, encode::serialize_hex(tx)))
|
||||
.await
|
||||
{
|
||||
Ok(txid) => {
|
||||
Txid::from_str(&txid).map_err(|_| RpcError::InvalidResponse("TXID was not valid hex"))?
|
||||
}
|
||||
Err(e) => {
|
||||
// A const from Bitcoin's bitcoin/src/rpc/protocol.h
|
||||
const RPC_VERIFY_ALREADY_IN_CHAIN: isize = -27;
|
||||
@@ -211,7 +256,8 @@ impl Rpc {
|
||||
|
||||
/// Get a transaction by its hash.
|
||||
pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
|
||||
let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
|
||||
let hex =
|
||||
self.call::<String>("getrawtransaction", &format!(r#"["{}"]"#, hex::encode(hash))).await?;
|
||||
let bytes: Vec<u8> = FromHex::from_hex(&hex)
|
||||
.map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the transaction"))?;
|
||||
let tx: Transaction = encode::deserialize(&bytes)
|
||||
|
||||
@@ -14,9 +14,9 @@ pub(crate) async fn rpc() -> Rpc {
|
||||
// If this node has already been interacted with, clear its chain
|
||||
if rpc.get_latest_block_number().await.unwrap() > 0 {
|
||||
rpc
|
||||
.rpc_call(
|
||||
.call(
|
||||
"invalidateblock",
|
||||
serde_json::json!([hex::encode(rpc.get_block_hash(1).await.unwrap())]),
|
||||
&format!(r#"["{}"]"#, hex::encode(rpc.get_block_hash(1).await.unwrap())),
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
|
||||
@@ -41,21 +41,21 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
|
||||
let block_number = rpc.get_latest_block_number().await.unwrap() + 1;
|
||||
|
||||
rpc
|
||||
.rpc_call::<Vec<String>>(
|
||||
.call::<Vec<String>>(
|
||||
"generatetoaddress",
|
||||
serde_json::json!([
|
||||
1,
|
||||
&format!(
|
||||
r#"[1, "{}"]"#,
|
||||
Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap()
|
||||
]),
|
||||
),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Mine until maturity
|
||||
rpc
|
||||
.rpc_call::<Vec<String>>(
|
||||
.call::<Vec<String>>(
|
||||
"generatetoaddress",
|
||||
serde_json::json!([100, Address::p2sh(Script::new(), Network::Regtest).unwrap()]),
|
||||
&format!(r#"[100, "{}"]"#, Address::p2sh(Script::new(), Network::Regtest).unwrap()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -6,7 +6,7 @@ use std::{path::PathBuf, fs, process::Command};
|
||||
|
||||
/// Build contracts from the specified path, outputting the artifacts to the specified path.
|
||||
///
|
||||
/// Requires solc 0.8.26.
|
||||
/// Requires solc 0.8.29.
|
||||
pub fn build(
|
||||
include_paths: &[&str],
|
||||
contracts_path: &str,
|
||||
@@ -35,8 +35,8 @@ pub fn build(
|
||||
if let Some(version) = line.strip_prefix("Version: ") {
|
||||
let version =
|
||||
version.split('+').next().ok_or_else(|| "no value present on line".to_string())?;
|
||||
if version != "0.8.26" {
|
||||
Err(format!("version was {version}, 0.8.26 required"))?
|
||||
if version != "0.8.29" {
|
||||
Err(format!("version was {version}, 0.8.29 required"))?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,10 +16,12 @@ rustdoc-args = ["--cfg", "docsrs"]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
subtle = { version = "2", default-features = false, features = ["std"] }
|
||||
sha3 = { version = "0.10", default-features = false, features = ["std"] }
|
||||
group = { version = "0.13", default-features = false, features = ["alloc"] }
|
||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic"] }
|
||||
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
|
||||
|
||||
subtle = { version = "2", default-features = false }
|
||||
sha3 = { version = "0.10", default-features = false }
|
||||
group = { version = "0.13", default-features = false }
|
||||
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic"] }
|
||||
|
||||
[build-dependencies]
|
||||
build-solidity-contracts = { path = "../build-contracts", version = "0.1" }
|
||||
@@ -40,3 +42,8 @@ alloy-provider = { version = "1", default-features = false }
|
||||
alloy-node-bindings = { version = "1", default-features = false }
|
||||
|
||||
tokio = { version = "1", default-features = false, features = ["macros"] }
|
||||
|
||||
[features]
|
||||
alloc = ["std-shims/alloc", "group/alloc"]
|
||||
std = ["alloc", "std-shims/std", "subtle/std", "sha3/std", "k256/std"]
|
||||
default = ["std"]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
pragma solidity ^0.8.26;
|
||||
pragma solidity ^0.8.29;
|
||||
|
||||
/// @title A library for verifying Schnorr signatures
|
||||
/// @author Luke Parker <lukeparker@serai.exchange>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
pragma solidity ^0.8.26;
|
||||
pragma solidity ^0.8.29;
|
||||
|
||||
import "../Schnorr.sol";
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
#![doc = include_str!("../README.md")]
|
||||
#![deny(missing_docs)]
|
||||
#![allow(non_snake_case)]
|
||||
#![cfg_attr(not(feature = "std"), no_std)]
|
||||
|
||||
mod public_key;
|
||||
pub use public_key::PublicKey;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use std::io;
|
||||
use std_shims::io;
|
||||
|
||||
use sha3::{Digest, Keccak256};
|
||||
|
||||
@@ -77,6 +77,7 @@ impl Signature {
|
||||
}
|
||||
|
||||
/// Write the signature.
|
||||
#[cfg(feature = "alloc")]
|
||||
pub fn write(&self, writer: &mut impl io::Write) -> io::Result<()> {
|
||||
writer.write_all(&self.to_bytes())
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
# This GPG-signed message exists to confirm the SHA256 sums of Monero binaries.
|
||||
#
|
||||
# Please verify the signature against the key for binaryFate in the
|
||||
# source code repository (/utils/gpg_keys).
|
||||
#
|
||||
#
|
||||
## CLI
|
||||
6122f0bcaca12d5badd92002338847d16032f6d52d86155c203bcb67d4fe1518 monero-android-armv7-v0.18.4.2.tar.bz2
|
||||
3b248c3201f028205915403b4b2f173df0dd8bf47eeb268fd67a4661251469d3 monero-android-armv8-v0.18.4.2.tar.bz2
|
||||
b4e2b7de80107a1b4613b878d8e2114244b3fb16397821d69baa72d9b0f8c8d5 monero-freebsd-x64-v0.18.4.2.tar.bz2
|
||||
ecb2577499a3b0901d731e11d462d3fadcd70095f3ab0def0c27ee64dc56b061 monero-linux-armv7-v0.18.4.2.tar.bz2
|
||||
a39530054dac348b219f1048a24ca629da26990f72cf9c1f6b6853e3d8c39a79 monero-linux-armv8-v0.18.4.2.tar.bz2
|
||||
18492ace80bf8ef2f44aa9a99b4f20adf00fd59c675a6a496211a720088d5d1a monero-linux-riscv64-v0.18.4.2.tar.bz2
|
||||
41d023f2357244ea43ee0a74796f5705ce75ce7373a5865d4959fefa13ecab06 monero-linux-x64-v0.18.4.2.tar.bz2
|
||||
03e77a4836861a47430664fa703dd149a355b3b214bc400b04ed38eb064a3ef0 monero-linux-x86-v0.18.4.2.tar.bz2
|
||||
9b98da6911b4769abef229c20e21f29d919b11db156965d6f139d2e1ad6625c2 monero-mac-armv8-v0.18.4.2.tar.bz2
|
||||
b1b1b580320118d3b6eaa5575fdbd73cf4db90fcc025b7abf875c5e5b4e335c1 monero-mac-x64-v0.18.4.2.tar.bz2
|
||||
14dd5aa11308f106183dd7834aa200e74ce6f3497103973696b556e893a4fef2 monero-win-x64-v0.18.4.2.zip
|
||||
934d9dbeb06ff5610d2c96ebe34fa480e74f78eaeb3fa3e47d89b7961c9bc5e0 monero-win-x86-v0.18.4.2.zip
|
||||
e9ec2062b3547db58f00102e6905621116ab7f56a331e0bc9b9e892607b87d24 monero-source-v0.18.4.2.tar.bz2
|
||||
#
|
||||
## GUI
|
||||
9d6e87add7e3ac006ee34c13c4f629252595395f54421db768f72dc233e94ea8 monero-gui-install-win-x64-v0.18.4.2.exe
|
||||
e4fcdea3f0ff27c3616a8a75545f42a4e4866ea374fa2eeaa9c87027573358ea monero-gui-linux-x64-v0.18.4.2.tar.bz2
|
||||
3dfee5c5d8e000c72eb3755bf0eb03ca7c5928b69c3a241e147ad22d144e00a7 monero-gui-mac-armv8-v0.18.4.2.dmg
|
||||
16abadcbd608d4f7ba20d17a297f2aa2c9066d33f6f22bf3fcdca679ab603990 monero-gui-mac-x64-v0.18.4.2.dmg
|
||||
4daff8850280173d46464ba9a9de7f712228ad1ef76a1c4954531e4fd2b86d86 monero-gui-win-x64-v0.18.4.2.zip
|
||||
691085e61ece6c56738431f3cfd395536ca0675214e5991e0dbfab85025e82d7 monero-gui-source-v0.18.4.2.tar.bz2
|
||||
#
|
||||
#
|
||||
# ~binaryFate
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEgaxZH+nEtlxYBq/D8K9NRioL35IFAmitx+kACgkQ8K9NRioL
|
||||
35J6cQ/7ByvGstg/a5lIYbB+Lz5bNiPozCILD9/offvC7GgOvna9rkHuofuLS+pX
|
||||
qhYEMrjFjmp03XMY+i68M83qkBEZ+yU5iNDbwRuHUNMMWaaGlhnhm3nyUVtDpjjr
|
||||
4xwVsee+dzi0JZhVQG7HJFURiP2Ub5Ua6bSaATDoT/aUYdhmrOnQiH2+VxogiCv3
|
||||
JStDqXq6LpFjzw7UkAfxxu1PW+AQFNBzi3L0qWfzb5WWL7xuK63wXGmEkYBlvult
|
||||
qt3LUhDUzMrfZ5GiiOYDEw44Y2atD4ibOYtBnllCX9CKNb0o2KKU6Qkj+CYqqtnE
|
||||
uGNOt1oT09VPOtE7OUkBLVkALjef7ZXRibE7tN4wSnsrG39DP795/52L6CGJbl4n
|
||||
UDnHzLCUbuvhnoAu5U+rUP5nUEDYS9ANNyj610ogNCo7YjfzLH641WSQ/UnuXKkA
|
||||
RmK8xIiKoOnUeOanX99zqeXqV7gQdQMlfwLUr3pQzCI2YjdvxdRoedSEi5nX5KvO
|
||||
Snf3BcCYMBemGYqVMdo95tc0Gmsw12/O8WwrBbTea+PeAXJuLaBxrLNn+RNZLfF/
|
||||
UJYq2VcEwxG6vXb3cJ5lDKmRDDRI8Fxu6Amdab+6ponhM8Zy3eAynVIO952pLA7N
|
||||
dtl72RsimM+sgHXP4ERYL4c6WARSHE5sAiog43dr56l3PPmM8pE=
|
||||
=SoHG
|
||||
-----END PGP SIGNATURE-----
|
||||
orchestration/dev/networks/monero/hashes-v0.18.4.3.txt (new file)
@@ -0,0 +1,50 @@
|
||||
-----BEGIN PGP SIGNED MESSAGE-----
|
||||
Hash: SHA256
|
||||
|
||||
# This GPG-signed message exists to confirm the SHA256 sums of Monero binaries.
|
||||
#
|
||||
# Please verify the signature against the key for binaryFate in the
|
||||
# source code repository (/utils/gpg_keys).
|
||||
#
|
||||
#
|
||||
## CLI
|
||||
4e1481835824b9233f204553d4a19645274824f3f6185d8a4b50198470752f54 monero-android-armv7-v0.18.4.3.tar.bz2
|
||||
1aebd24aaaec3d1e87a64163f2e30ab2cd45f3902a7a859413f6870944775c21 monero-android-armv8-v0.18.4.3.tar.bz2
|
||||
ff7b9c5cf2cb3d602c3dff1902ac0bc3394768cefc260b6003a9ad4bcfb7c6a4 monero-freebsd-x64-v0.18.4.3.tar.bz2
|
||||
3ac83049bc565fb5238501f0fa629cdd473bbe94d5fb815088af8e6ff1d761cd monero-linux-armv7-v0.18.4.3.tar.bz2
|
||||
b1cc5f135de3ba8512d56deb4b536b38c41addde922b2a53bf443aeaf2a5a800 monero-linux-armv8-v0.18.4.3.tar.bz2
|
||||
95baaa6e8957b92caeaed7fb19b5c2659373df8dd5f4de2601ed3dae7b17ce2f monero-linux-riscv64-v0.18.4.3.tar.bz2
|
||||
3a7b36ae4da831a4e9913e0a891728f4c43cd320f9b136cdb6686b1d0a33fafa monero-linux-x64-v0.18.4.3.tar.bz2
|
||||
e0b51ca71934c33cb83cfa8535ffffebf431a2fc9efe3acf2baad96fb6ce21ec monero-linux-x86-v0.18.4.3.tar.bz2
|
||||
bab9a6d3c2ca519386cff5ff0b5601642a495ed1a209736acaf354468cba1145 monero-mac-armv8-v0.18.4.3.tar.bz2
|
||||
a8d8273b14f31569f5b7aa3063fbd322e3caec3d63f9f51e287dfc539c7f7d61 monero-mac-x64-v0.18.4.3.tar.bz2
|
||||
bd9f615657c35d2d7dd9a5168ad54f1547dbf9a335dee7f12fab115f6f394e36 monero-win-x64-v0.18.4.3.zip
|
||||
e642ed7bbfa34c30b185387fa553aa9c3ea608db1f3fc0e9332afa9b522c9c1a monero-win-x86-v0.18.4.3.zip
|
||||
6ba5e082c8fa25216aba7aea8198f3e23d4b138df15c512457081e1eb3d03ff6 monero-source-v0.18.4.3.tar.bz2
|
||||
#
|
||||
## GUI
|
||||
7b9255c696a462a00a810d9c8f94e60400a9e7d6438e8d6a8b693e9c13dca9ab monero-gui-install-win-x64-v0.18.4.3.exe
|
||||
0bd84de0a7c18b2a3ea8e8eff2194ae000cf1060045badfd4ab48674bc1b9325 monero-gui-linux-x64-v0.18.4.3.tar.bz2
|
||||
68ea30db32efb4a0671ec723297b6629d932fa188edf76edb38a37adaa3528e6 monero-gui-mac-armv8-v0.18.4.3.dmg
|
||||
27243b01f030fdae68c59cae1daf21f530bbadeaf10579d2908db9a834191cee monero-gui-mac-x64-v0.18.4.3.dmg
|
||||
dc9531cb4319b37b2c2dea4126e44a0fe6e7b6f34d278ccf5dd9ba693e3031e0 monero-gui-win-x64-v0.18.4.3.zip
|
||||
0d44687644db9b1824f324416e98f4a46b3bb0a5ed09af54b2835b6facaa0cdd monero-gui-source-v0.18.4.3.tar.bz2
|
||||
#
|
||||
#
|
||||
# ~binaryFate
|
||||
-----BEGIN PGP SIGNATURE-----
|
||||
|
||||
iQIzBAEBCAAdFiEEgaxZH+nEtlxYBq/D8K9NRioL35IFAmjntxwACgkQ8K9NRioL
|
||||
35J57g//dUOY1KAoLNaV7XLJyGbNk1lT6c2+A8h1wkK6iNQhXsmnc6rcigsHXrG0
|
||||
LQyVUuZJ6ELhNb6BnH5V0zbcB72t8XjkSEqlYhStUfMnaUvj1VdXtL/OnSs3fEvt
|
||||
Zwz6QBTxIKDDYEYyXrvCK96cYaYlIOgK3IVn/zoHdrHRUqTXqRJkFoTHum5l783y
|
||||
vr9BwMFFWYePUrilphjIiLyJDl+eB5al8PaJkqK2whxBUHoA2jF1edJOSq2mZajI
|
||||
+L2fBYClePS8oqwoKGquqCh2RVcmdtXtQTVzRIoNx14qFzP8ymqa+6z1Ygkri7bV
|
||||
qMCJk7KQ8ND7uU9NShpaCIqrZpr5GZ4Al6SRkcpK/7mipQcy2QpKJR3iOpcfiTX1
|
||||
YmYGVmLB3zmHu2kiS0kogZv6Ob7+tVFzOQ8NZX4FVnpB0N0phqMfNFOfHzdQZrsZ
|
||||
qg29HNc9sHlUmsOVmE5w+7Oq+s79yvQB3034XXi/9wQu+f8fKRhqZboe0fe77FLf
|
||||
QXoAYrZZ7LnGz0Z75Q9O4RB7uxM0Ug5imvyEFus4iuBVyBWjgcfyLnbkKJtbXmfn
|
||||
BZBbTProhPJfVa/VffBxW9HZB27W7O14oGWVpUkGWnVMZfVY/78XTUHwxaScQsPO
|
||||
SGawjobQsB3pTMNr/kra1XTjkti70si8Fcs5ueYWGB3yfc6r3hU=
|
||||
=5HRY
|
||||
-----END PGP SIGNATURE-----
|
||||
@@ -1,14 +1,7 @@
|
||||
# rust:1.89.0-slim-bookworm as of August 1st, 2025 (GMT)
|
||||
FROM --platform=linux/amd64 rust@sha256:703cfb0f80db8eb8a3452bf5151162472039c1b37fe4fb2957b495a6f0104ae7 AS deterministic
|
||||
# rust:1.91.1-alpine as of November 11th, 2025 (GMT)
|
||||
FROM --platform=linux/amd64 rust@sha256:700c0959b23445f69c82676b72caa97ca4359decd075dca55b13339df27dc4d3 AS deterministic
|
||||
|
||||
# Move to a Debian package snapshot
|
||||
RUN rm -rf /etc/apt/sources.list.d/debian.sources && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
echo "deb [arch=amd64] http://snapshot.debian.org/archive/debian/20250801T000000Z bookworm main" > /etc/apt/sources.list && \
|
||||
apt update
|
||||
|
||||
# Install dependencies
|
||||
RUN apt update -y && apt upgrade -y && apt install -y clang
|
||||
RUN apk add musl-dev=1.2.5-r10
|
||||
|
||||
# Add the wasm toolchain
|
||||
RUN rustup target add wasm32v1-none
|
||||
@@ -35,6 +28,7 @@ ADD AGPL-3.0 /serai
|
||||
WORKDIR /serai
|
||||
|
||||
# Build the runtime, copying it to the volume if it exists
|
||||
ENV RUSTFLAGS="-Ctarget-feature=-crt-static"
|
||||
CMD cargo build --release -p serai-runtime && \
|
||||
mkdir -p /volume && \
|
||||
cp /serai/target/release/wbuild/serai-runtime/serai_runtime.wasm /volume/serai.wasm
|
||||
|
||||
@@ -19,6 +19,7 @@ pub fn coordinator(
|
||||
let setup = mimalloc(Os::Debian).to_string() +
|
||||
&build_serai_service(
|
||||
"",
|
||||
Os::Debian,
|
||||
network.release(),
|
||||
&format!("{db} {longer_reattempts}"),
|
||||
"serai-coordinator",
|
||||
|
||||
@@ -4,7 +4,13 @@ use crate::{Network, Os, mimalloc, os, build_serai_service, write_dockerfile};
|
||||
|
||||
pub fn ethereum_relayer(orchestration_path: &Path, network: Network) {
|
||||
let setup = mimalloc(Os::Debian).to_string() +
|
||||
&build_serai_service("", network.release(), network.db(), "serai-ethereum-relayer");
|
||||
&build_serai_service(
|
||||
"",
|
||||
Os::Debian,
|
||||
network.release(),
|
||||
network.db(),
|
||||
"serai-ethereum-relayer",
|
||||
);
|
||||
|
||||
let env_vars = [
|
||||
("DB_PATH", "/volume/ethereum-relayer-db".to_string()),
|
||||
|
||||
@@ -143,13 +143,20 @@ WORKDIR /home/{user}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_serai_service(prelude: &str, release: bool, features: &str, package: &str) -> String {
|
||||
fn build_serai_service(
|
||||
prelude: &str,
|
||||
os: Os,
|
||||
release: bool,
|
||||
features: &str,
|
||||
package: &str,
|
||||
) -> String {
|
||||
let profile = if release { "release" } else { "debug" };
|
||||
let profile_flag = if release { "--release" } else { "" };
|
||||
|
||||
format!(
|
||||
(match os {
|
||||
Os::Debian => {
|
||||
r#"
|
||||
FROM rust:1.90-slim-trixie AS builder
|
||||
FROM rust:1.91-slim-trixie AS builder
|
||||
|
||||
COPY --from=mimalloc-debian libmimalloc.so /usr/lib
|
||||
RUN echo "/usr/lib/libmimalloc.so" >> /etc/ld.so.preload
|
||||
@@ -161,7 +168,25 @@ RUN apt install -y pkg-config libclang-dev clang
|
||||
|
||||
# Dependencies for the Serai node
|
||||
RUN apt install -y make protobuf-compiler
|
||||
"#
|
||||
}
|
||||
Os::Alpine => {
|
||||
r#"
|
||||
FROM rust:1.91-alpine AS builder
|
||||
|
||||
COPY --from=mimalloc-alpine libmimalloc.so /usr/lib
|
||||
ENV LD_PRELOAD=libmimalloc.so
|
||||
|
||||
RUN apk update && apk upgrade
|
||||
|
||||
# Add dev dependencies
|
||||
RUN apk add clang-dev
|
||||
"#
|
||||
}
|
||||
})
|
||||
.to_owned() +
|
||||
&format!(
|
||||
r#"
|
||||
# Add the wasm toolchain
|
||||
RUN rustup target add wasm32v1-none
|
||||
|
||||
|
||||
@@ -13,8 +13,8 @@ pub fn message_queue(
|
||||
ethereum_key: <Ristretto as WrappedGroup>::G,
|
||||
monero_key: <Ristretto as WrappedGroup>::G,
|
||||
) {
|
||||
let setup = mimalloc(Os::Debian).to_string() +
|
||||
&build_serai_service("", network.release(), network.db(), "serai-message-queue");
|
||||
let setup = mimalloc(Os::Alpine).to_string() +
|
||||
&build_serai_service("", Os::Alpine, network.release(), network.db(), "serai-message-queue");
|
||||
|
||||
let env_vars = [
|
||||
("COORDINATOR_KEY", hex::encode(coordinator_key.to_bytes())),
|
||||
@@ -41,7 +41,7 @@ CMD {env_vars_str} serai-message-queue
|
||||
"#
|
||||
);
|
||||
|
||||
let run = os(Os::Debian, "", "messagequeue") + &run_message_queue;
|
||||
let run = os(Os::Alpine, "", "messagequeue") + &run_message_queue;
|
||||
let res = setup + &run;
|
||||
|
||||
let mut message_queue_path = orchestration_path.to_path_buf();
|
||||
|
||||
@@ -7,10 +7,10 @@ FROM alpine:latest AS mimalloc-alpine
|
||||
RUN apk update && apk upgrade && apk --no-cache add gcc g++ libc-dev make cmake git
|
||||
RUN git clone https://github.com/microsoft/mimalloc && \
|
||||
cd mimalloc && \
|
||||
git checkout 43ce4bd7fd34bcc730c1c7471c99995597415488 && \
|
||||
git checkout fbd8b99c2b828428947d70fdc046bb55609be93e && \
|
||||
mkdir -p out/secure && \
|
||||
cd out/secure && \
|
||||
cmake -DMI_SECURE=ON ../.. && \
|
||||
cmake -DMI_SECURE=ON -DMI_GUARDED=on ../.. && \
|
||||
make && \
|
||||
cp ./libmimalloc-secure.so ../../../libmimalloc.so
|
||||
"#;
|
||||
@@ -21,10 +21,10 @@ FROM debian:trixie-slim AS mimalloc-debian
|
||||
RUN apt update && apt upgrade -y && apt install -y gcc g++ make cmake git
|
||||
RUN git clone https://github.com/microsoft/mimalloc && \
|
||||
cd mimalloc && \
|
||||
git checkout 43ce4bd7fd34bcc730c1c7471c99995597415488 && \
|
||||
git checkout fbd8b99c2b828428947d70fdc046bb55609be93e && \
|
||||
mkdir -p out/secure && \
|
||||
cd out/secure && \
|
||||
cmake -DMI_SECURE=ON ../.. && \
|
||||
cmake -DMI_SECURE=ON -DMI_GUARDED=on ../.. && \
|
||||
make && \
|
||||
cp ./libmimalloc-secure.so ../../../libmimalloc.so
|
||||
"#;
|
||||
|
||||
@@ -7,7 +7,7 @@ pub fn bitcoin(orchestration_path: &Path, network: Network) {
|
||||
const DOWNLOAD_BITCOIN: &str = r#"
|
||||
FROM alpine:latest AS bitcoin
|
||||
|
||||
ENV BITCOIN_VERSION=29.1
|
||||
ENV BITCOIN_VERSION=30.0
|
||||
|
||||
RUN apk --no-cache add wget git gnupg
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ fn monero_internal(
|
||||
monero_binary: &str,
|
||||
ports: &str,
|
||||
) {
|
||||
const MONERO_VERSION: &str = "0.18.4.2";
|
||||
const MONERO_VERSION: &str = "0.18.4.3";
|
||||
|
||||
let arch = match std::env::consts::ARCH {
|
||||
// We probably would run this without issues yet it's not worth needing to provide support for
|
||||
|
||||
@@ -22,12 +22,13 @@ pub fn processor(
|
||||
if coin == "ethereum" {
|
||||
r#"
|
||||
RUN cargo install svm-rs
|
||||
RUN svm install 0.8.26
|
||||
RUN svm use 0.8.26
|
||||
RUN svm install 0.8.29
|
||||
RUN svm use 0.8.29
|
||||
"#
|
||||
} else {
|
||||
""
|
||||
},
|
||||
Os::Debian,
|
||||
network.release(),
|
||||
&format!("binaries {} {coin}", network.db()),
|
||||
"serai-processor",
|
||||
|
||||
@@ -12,9 +12,8 @@ pub fn serai(
|
||||
serai_key: &Zeroizing<<Ristretto as WrappedGroup>::F>,
|
||||
) {
|
||||
// Always builds in release for performance reasons
|
||||
let setup = mimalloc(Os::Debian).to_string() + &build_serai_service("", true, "", "serai-node");
|
||||
let setup_fast_epoch =
|
||||
mimalloc(Os::Debian).to_string() + &build_serai_service("", true, "fast-epoch", "serai-node");
|
||||
let setup =
|
||||
mimalloc(Os::Debian).to_string() + &build_serai_service("", Os::Debian, true, "", "serai-node");
|
||||
|
||||
let env_vars = [("KEY", hex::encode(serai_key.to_repr()))];
|
||||
let mut env_vars_str = String::new();
|
||||
@@ -39,16 +38,9 @@ CMD {env_vars_str} "/run.sh"
|
||||
|
||||
let run = os(Os::Debian, "", "serai") + &run_serai;
|
||||
let res = setup + &run;
|
||||
let res_fast_epoch = setup_fast_epoch + &run;
|
||||
|
||||
let mut serai_path = orchestration_path.to_path_buf();
|
||||
serai_path.push("serai");
|
||||
|
||||
let mut serai_fast_epoch_path = serai_path.clone();
|
||||
|
||||
serai_path.push("Dockerfile");
|
||||
serai_fast_epoch_path.push("Dockerfile.fast-epoch");
|
||||
|
||||
write_dockerfile(serai_path, &res);
|
||||
write_dockerfile(serai_fast_epoch_path, &res_fast_epoch);
|
||||
}
|
||||
|
||||
@@ -12,8 +12,7 @@ edition = "2021"
|
||||
all-features = true
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
[workspace]
|
||||
|
||||
[dependencies]
|
||||
std-shims = { path = "../../common/std-shims", version = "0.1.4", default-features = false, optional = true }
|
||||
|
||||
@@ -1,29 +0,0 @@
[package]
name = "dalek-ff-group"
version = "0.5.99"
description = "ff/group bindings around curve25519-dalek"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/crypto/dalek-ff-group"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["curve25519", "ed25519", "ristretto", "dalek", "group"]
edition = "2021"
rust-version = "1.85"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", default-features = false }

crypto-bigint-05 = { package = "crypto-bigint", version = "0.5", default-features = false, features = ["zeroize"] }
crypto-bigint = { version = "0.6", default-features = false, features = ["zeroize"] }
prime-field = { path = "../../crypto/prime-field", default-features = false }

[features]
alloc = ["dalek-ff-group/alloc", "prime-field/alloc"]
std = ["alloc", "dalek-ff-group/std", "prime-field/std"]
default = ["std"]

@@ -1,4 +0,0 @@
# Dalek FF/Group

Patch for the `crates.io` `dalek-ff-group` to use the in-tree `dalek-ff-group`,
resolving relevant breaking changes made since.

@@ -1,44 +0,0 @@
#![allow(deprecated)]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![no_std] // Prevents writing new code, in what should be a simple wrapper, which requires std
#![doc = include_str!("../README.md")]
#![allow(clippy::redundant_closure_call)]

pub use dalek_ff_group::{Scalar, EdwardsPoint, RistrettoPoint, Ed25519, Ristretto};

type ThirtyTwoArray = [u8; 32];
prime_field::odd_prime_field_with_specific_repr!(
  FieldElement,
  "0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed",
  "02",
  false,
  crate::ThirtyTwoArray
);

impl FieldElement {
  /// Create a FieldElement from a `crypto_bigint::U256`.
  ///
  /// This will reduce the `U256` by the modulus, into a member of the field.
  #[deprecated]
  pub const fn from_u256(u256: &crypto_bigint_05::U256) -> Self {
    const MODULUS: crypto_bigint::U256 = crypto_bigint::U256::from_be_hex(
      "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffed",
    );
    let mut u256 = crypto_bigint::U256::from_words(*u256.as_words());
    loop {
      let result = FieldElement::from_bytes(&u256.to_le_bytes());
      if let Some(result) = result {
        return result;
      }
      u256 = u256.wrapping_sub(&MODULUS);
    }
  }

  /// Create a `FieldElement` from the reduction of a 512-bit number.
  ///
  /// The bytes are interpreted in little-endian format.
  #[deprecated]
  pub fn wide_reduce(value: [u8; 64]) -> Self {
    <FieldElement as prime_field::ff::FromUniformBytes<_>>::from_uniform_bytes(&value)
  }
}

patches/darling/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "darling"
version = "0.20.99"
description = "Patch to the latest version"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/darling"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[dependencies]
darling = { version = "0.21" }

patches/darling/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
pub use darling::*;

@@ -1,6 +1,6 @@
[package]
name = "directories-next"
-version = "2.0.0"
+version = "2.0.99"
description = "Patch from directories-next back to directories"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/directories-next"
@@ -12,5 +12,7 @@ edition = "2021"
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

+[workspace]
+
[dependencies]
-directories = "5"
+directories = "6"

patches/ethereum/ark-ff-0.3/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "ark-ff"
version = "0.3.99"
description = "Patch to an empty crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/ethereum/ark-ff-0.3"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[features]
std = []

patches/ethereum/ark-ff-0.3/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
const _NEVER_COMPILED: [(); 0 - 1] = [(); 0 - 1];

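The `lib.rs` of these "patch to an empty crate" packages (this one and the similar ones below) is a single const whose array length underflows during const evaluation: the crate satisfies Cargo's dependency resolution, yet any attempt to actually compile it fails immediately. A minimal sketch of that guard, not part of the diff, wrapped in a `compile_fail` doctest so the sketch itself builds while demonstrating that the guarded line cannot:

/// The one-line patch crates above reduce to this guard.
///
/// ```compile_fail
/// // `0 - 1` underflows `usize` while evaluating the array length, so rustc
/// // rejects any crate containing this item at compile time.
/// const _NEVER_COMPILED: [(); 0 - 1] = [(); 0 - 1];
/// ```
pub struct NeverCompiledGuard; // illustrative item only; it does not exist in the tree
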
patches/ethereum/ark-ff-0.4/Cargo.toml (new file, 18 lines)
@@ -0,0 +1,18 @@
[package]
name = "ark-ff"
version = "0.4.99"
description = "Patch to an empty crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/ethereum/ark-ff-0.4"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[features]
std = []

patches/ethereum/ark-ff-0.4/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
const _NEVER_COMPILED: [(); 0 - 1] = [(); 0 - 1];

patches/ethereum/c-kzg/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[package]
name = "c-kzg"
version = "2.99.99"
description = "Patch to an empty crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/ethereum/c-kzg"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[features]
std = []
serde = []
ethereum_kzg_settings = []

patches/ethereum/c-kzg/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
const _NEVER_COMPILED: [(); 0 - 1] = [(); 0 - 1];

patches/ethereum/secp256k1-30/Cargo.toml (new file, 23 lines)
@@ -0,0 +1,23 @@
[package]
name = "secp256k1"
version = "0.30.99"
description = "Patch to an empty crate"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/ethereum/secp256k1-30"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[features]
alloc = []
std = []
rand = []
serde = []
global-context = []
recovery = []

patches/ethereum/secp256k1-30/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
const _NEVER_COMPILED: [(); 0 - 1] = [(); 0 - 1];

patches/is_terminal_polyfill/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
[package]
name = "is_terminal_polyfill"
version = "1.70.1"
description = "is_terminal_polyfill written around std::io::IsTerminal"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/is_terminal_polyfill"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
rust-version = "1.70"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

patches/is_terminal_polyfill/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
pub use std::io::IsTerminal;

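The `is_terminal_polyfill` patch above simply re-exports `std::io::IsTerminal`, which the standard library has provided since Rust 1.70 (matching the crate's `rust-version`). A short sketch of how downstream code then ends up calling the std trait; the colour-detection use case is illustrative and not taken from this repository:

use std::io::IsTerminal;

fn main() {
  // With the patch applied, the polyfill's `IsTerminal` resolves to the std
  // trait, so this is the call downstream crates actually execute.
  if std::io::stdout().is_terminal() {
    println!("stdout is a terminal; interactive output is safe");
  } else {
    println!("stdout is piped or redirected; emitting plain output");
  }
}
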
patches/librocksdb-sys/Cargo.toml (new file, 35 lines)
@@ -0,0 +1,35 @@
[package]
name = "librocksdb-sys"
version = "0.17.99"
description = "Replacement for `librocksdb-sys` which removes the `jemalloc` feature"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/librocksdb-sys"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2018"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[dependencies]
# librocksdb-sys 0.17.3+10.4.2, with the commit provided by crates.io in `cargo_vcs_info.json`
librocksdb-sys = { git = "https://github.com/rust-rocksdb/rust-rocksdb", commit = "bb7d2168eab1bc7849f23adbcb825e3aba1bd2f4", default-features = false }

[features]
default = ["librocksdb-sys/default"]
jemalloc = []
static = ["librocksdb-sys/static"]
bindgen-runtime = ["librocksdb-sys/bindgen-runtime"]
bindgen-static = ["librocksdb-sys/bindgen-static"]
mt_static = ["librocksdb-sys/mt_static"]
io-uring = ["librocksdb-sys/io-uring"]
snappy = ["librocksdb-sys/snappy"]
lz4 = ["librocksdb-sys/lz4"]
zstd = ["librocksdb-sys/zstd"]
zlib = ["librocksdb-sys/zlib"]
bzip2 = ["librocksdb-sys/bzip2"]
rtti = ["librocksdb-sys/rtti"]
lto = ["librocksdb-sys/lto"]

patches/librocksdb-sys/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
pub use librocksdb_sys::*;

@@ -11,3 +11,5 @@ edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
+
+[workspace]

patches/parity-bip39/Cargo.toml (new file, 30 lines)
@@ -0,0 +1,30 @@
[package]
name = "parity-bip39"
version = "2.99.99"
description = "Patch back to the upstream library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/patches/parity-bip39"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[dependencies]
bip39 = { git = "https://github.com/rust-bitcoin/rust-bip39", commit = "f735e2559f30049f6738d1bf68c69a0b7bd7b858", default-features = false }

[features]
default = ["bip39/default"]
alloc = ["bip39/alloc"]
std = ["bip39/std"]

unicode-normalization = ["bip39/unicode-normalization"]
all-languages = ["bip39/all-languages"]

zeroize = ["bip39/zeroize"]
rand_core = ["bip39/rand_core"]
rand = ["bip39/rand"]

patches/parity-bip39/src/lib.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
#![cfg_attr(not(feature = "std"), no_std)]

pub use bip39::*;

@@ -7,16 +7,17 @@ repository = "https://github.com/serai-dex/serai/tree/develop/patches/simple-req
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["nostd", "no_std", "alloc", "io"]
edition = "2021"
-rust-version = "1.71"
+rust-version = "1.65"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[workspace]

[dependencies]
hyper-util = { version = "0.1", default-features = false, features = ["tokio"] }
-simple-request = { path = "../../common/request", default-features = false, features = ["tokio"] }
+simple-request = { path = "../../common/request", features = ["tokio"] }

[features]
tls = ["simple-request/tls"]
basic-auth = ["simple-request/basic-auth"]

@@ -1,2 +1,21 @@
-pub use simple_request::{hyper, Error, Request, TokioClient as Client};
-pub type Response<'a> = simple_request::Response<'a, hyper_util::rt::tokio::TokioExecutor>;
+use hyper_util::rt::tokio::TokioExecutor;
+pub use simple_request::{hyper, Error, Request};
+
+#[derive(Clone, Debug)]
+pub struct Client(simple_request::Client<TokioExecutor>);
+
+pub type Response<'a> = simple_request::Response<'a, TokioExecutor>;
+
+impl Client {
+  pub fn with_connection_pool() -> Client {
+    Self(simple_request::Client::with_connection_pool().unwrap())
+  }
+
+  pub fn without_connection_pool(host: &str) -> Result<Client, Error> {
+    simple_request::Client::without_connection_pool(host).map(Self)
+  }
+
+  pub async fn request<R: Into<Request>>(&self, request: R) -> Result<Response<'_>, Error> {
+    self.0.request(request).await
+  }
+}

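For context on the wrapper added above, here is a hedged usage sketch. It relies only on the items visible in this hunk (`Client::with_connection_pool`, `Client::without_connection_pool`, `Client::request`, and the re-exported `Request`, `Response`, and `Error` types); how a `Request` is constructed is deliberately left to the caller, and the host string is a placeholder.

// Sketch of downstream usage for the `Client` wrapper; nothing beyond the
// items defined or re-exported in the hunk above is assumed.
async fn fetch(req: impl Into<Request>) -> Result<(), Error> {
  // A pooled client, suitable for repeated requests to arbitrary hosts.
  let pooled = Client::with_connection_pool();
  let _response: Response<'_> = pooled.request(req).await?;

  // Alternatively, a client bound to a single host, without a connection pool.
  let _single_host = Client::without_connection_pool("example.com")?;

  Ok(())
}
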
Some files were not shown because too many files have changed in this diff.