16 Commits

Author SHA1 Message Date
Luke Parker
35265c615b Remove sp-crypto-ec-utils patch reintroduced in latest rebase 2023-12-17 05:40:21 -05:00
Luke Parker
bbad7738d9 Correct addition of rust-src component in Dockerfiles 2023-12-17 05:40:21 -05:00
Luke Parker
d149d8a08a Split the components back up 2023-12-17 05:40:21 -05:00
Luke Parker
ffae13b3be Patch matches, mach 2023-12-17 05:40:21 -05:00
Luke Parker
5e42fd2848 Combine -c flags in lint workflow 2023-12-17 05:40:21 -05:00
Luke Parker
d62f311b93 Include rust-src, remove simple-mermaid from tree 2023-12-17 05:40:21 -05:00
Luke Parker
ec0d38a90d cargo update to polkadot-sdk/experimental rebased over polkadot-sdk 1.5.0 2023-12-17 05:40:21 -05:00
Luke Parker
d8c7465722 Rebase to the latest develop 2023-12-17 05:40:21 -05:00
Luke Parker
0983a49b29 Patch is-terminal to the std-included IsTerminal 2023-12-17 05:40:21 -05:00
Luke Parker
08adb3a199 Remove TX_VERSION from substrate/client, as CheckTxVersion was removed from the runtime
See b892b0122e for reasoning.
2023-12-17 05:40:21 -05:00
Luke Parker
f03ca53509 Correct justification_generation_period from 0 to 1
Avoids a divide by zero in sc-consensus-grandpa.
2023-12-17 05:40:21 -05:00
Luke Parker
1820e916ca Add patches for directories/directories-next/option-ext
The rationale is detailed in the root Cargo.toml.

While I don't personally mind MPL dependencies, even if I don't prefer them
(they're allowed in the deny.toml for a reason), I do mind the pointless scope
creep, and wish to highlight how little is actually used from the crate by
re-defining it as the single function actually used (see the sketch after the
commit list).

We could also fork directories-next, or directories, and remove the usage of
option-ext per https://github.com/dirs-dev/dirs-sys-rs/issues/24, yet that'd be
a much larger task than what was done here.

In the future, it may be beneficial to submit a PR to wasmtime replacing
directories-next with home, a cargo-team maintained library to get the home
directory and associated folders. An example migration can be found at
https://github.com/harryfei/which-rs/pull/80.
2023-12-17 05:40:21 -05:00
Luke Parker
a5065e52e9 Correct rebase re: Coordinator Cargo.toml 2023-12-17 05:40:21 -05:00
Luke Parker
e78fd12da6 Set the justification generation period to 1 2023-12-17 05:40:21 -05:00
Luke Parker
b8e5fa7ff2 Add empty crates to clean up the Cargo.lock
Also removes a now unused RUSTSEC allow in deny.
2023-12-17 05:40:21 -05:00
Luke Parker
d038aa95fd Move to polkadot-sdk
Still a WIP, and using an experimental branch with 2 PRs merged which have yet to be merged upstream. 2023-12-17 05:40:20 -05:00
2023-12-17 05:40:20 -05:00
364 changed files with 7597 additions and 11733 deletions
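
The directories/directories-next/option-ext commit above works by pointing Cargo at local shim crates which re-implement only the single item actually used, then redirecting the published packages to those shims. A minimal sketch of the redirection, mirroring the [patch] entries visible in the root Cargo.toml diff further down (the shim crates' contents are not reproduced here):

# Root Cargo.toml (sketch): route the crates-io packages to local shims
# exposing only what the tree actually consumes.
[patch.crates-io]
option-ext = { path = "patches/option-ext" }
directories-next = { path = "patches/directories-next" }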

View File

@@ -12,7 +12,7 @@ runs:
steps:
- name: Bitcoin Daemon Cache
id: cache-bitcoind
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
with:
path: bitcoin.tar.gz
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
@@ -37,4 +37,11 @@ runs:
- name: Bitcoin Regtest Daemon
shell: bash
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/bitcoin/run.sh -daemon
run: |
RPC_USER=serai
RPC_PASS=seraidex
bitcoind -txindex -regtest \
-rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
-rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
-daemon

View File

@@ -1,6 +1,12 @@
name: build-dependencies
description: Installs build dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
require: true
default:
runs:
using: "composite"
steps:
@@ -14,36 +20,22 @@ runs:
sudo apt autoremove -y
sudo apt clean
docker system prune -a --volumes
if: runner.os == 'Linux'
- name: Remove unused packages
- name: Install apt dependencies
shell: bash
run: |
(gem uninstall -aIx) || (exit 0)
brew uninstall --force "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
brew uninstall --force "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
brew uninstall --force "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
brew uninstall --force "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
brew cleanup
if: runner.os == 'macOS'
run: sudo apt install -y ca-certificates
- name: Install dependencies
shell: bash
run: |
if [ "$RUNNER_OS" == "Linux" ]; then
sudo apt install -y ca-certificates protobuf-compiler
elif [ "$RUNNER_OS" == "Windows" ]; then
choco install protoc
elif [ "$RUNNER_OS" == "macOS" ]; then
brew install protobuf
fi
- name: Install Protobuf
uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6
with:
repo-token: ${{ inputs.github-token }}
- name: Install solc
shell: bash
run: |
cargo install svm-rs
svm install 0.8.25
svm use 0.8.25
svm install 0.8.16
svm use 0.8.16
# - name: Cache Rust
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43

View File

@@ -5,14 +5,14 @@ inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.3.1
default: v0.18.2.0
runs:
using: "composite"
steps:
- name: Monero Wallet RPC Cache
id: cache-monero-wallet-rpc
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
with:
path: monero-wallet-rpc
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
@@ -41,9 +41,4 @@ runs:
- name: Monero Wallet RPC
shell: bash
run: |
./monero-wallet-rpc --allow-mismatched-daemon-version \
--daemon-address 0.0.0.0:18081 --daemon-login serai:seraidex \
--disable-rpc-login --rpc-bind-port 18082 \
--wallet-dir ./ \
--detach
run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach

View File

@@ -5,16 +5,16 @@ inputs:
version:
description: "Version to download and run"
required: false
default: v0.18.3.1
default: v0.18.2.0
runs:
using: "composite"
steps:
- name: Monero Daemon Cache
id: cache-monerod
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
with:
path: /usr/bin/monerod
path: monerod
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
- name: Download the Monero Daemon
@@ -37,10 +37,8 @@ runs:
wget https://downloads.getmonero.org/cli/$FILE
tar -xvf $FILE
sudo mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod /usr/bin/monerod
sudo chmod 777 /usr/bin/monerod
sudo chmod +x /usr/bin/monerod
mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
- name: Monero Regtest Daemon
shell: bash
run: PATH=$PATH:/usr/bin ./orchestration/dev/coins/monero/run.sh --detach
run: ./monerod --regtest --offline --fixed-difficulty=1 --detach

View File

@@ -2,10 +2,15 @@ name: test-dependencies
description: Installs test dependencies for Serai
inputs:
github-token:
description: "GitHub token to install Protobuf with"
require: true
default:
monero-version:
description: "Monero version to download and run as a regtest node"
required: false
default: v0.18.3.1
default: v0.18.2.0
bitcoin-version:
description: "Bitcoin version to download and run as a regtest node"
@@ -17,6 +22,8 @@ runs:
steps:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf

View File

@@ -1 +1 @@
nightly-2024-02-07
nightly-2023-12-04

View File

@@ -25,6 +25,8 @@ jobs:
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: |

View File

@@ -21,6 +21,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: |

View File

@@ -9,8 +9,9 @@ on:
- "crypto/**"
- "coins/**"
- "message-queue/**"
- "orchestration/message-queue/**"
- "coordinator/**"
- "orchestration/**"
- "orchestration/coordinator/**"
- "tests/docker/**"
- "tests/coordinator/**"
@@ -20,8 +21,9 @@ on:
- "crypto/**"
- "coins/**"
- "message-queue/**"
- "orchestration/message-queue/**"
- "coordinator/**"
- "orchestration/**"
- "orchestration/coordinator/**"
- "tests/docker/**"
- "tests/coordinator/**"
@@ -35,6 +37,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Run coordinator Docker tests
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -23,6 +23,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: |

View File

@@ -12,7 +12,7 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Advisory Cache
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db

View File

@@ -17,6 +17,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Run Full Stack Docker tests
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -9,24 +9,21 @@ on:
jobs:
clippy:
strategy:
matrix:
os: [ubuntu-latest, macos-13, macos-14, windows-latest]
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Get nightly version to use
id: nightly
shell: bash
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install nightly rust
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c rust-src -c clippy
- name: Run Clippy
run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
@@ -37,8 +34,7 @@ jobs:
# The above clippy run will cause it to be updated, so checking there's
# no differences present now performs the desired check
- name: Verify lockfile
shell: bash
run: git diff | wc -l | LC_ALL="en_US.utf8" grep -x -e "^[ ]*0"
run: git diff | wc -l | grep -x "0"
deny:
runs-on: ubuntu-latest
@@ -46,7 +42,7 @@ jobs:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Advisory Cache
uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
with:
path: ~/.cargo/advisory-db
key: rust-advisory-db
@@ -64,7 +60,6 @@ jobs:
- name: Get nightly version to use
id: nightly
shell: bash
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
- name: Install nightly rust
@@ -73,6 +68,14 @@ jobs:
- name: Run rustfmt
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
dockerfiles:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
- name: Verify Dockerfiles are up to date
# Runs the file which generates them and checks the diff has no lines
run: cd orchestration && ./dockerfiles.sh && git diff | wc -l | grep -x "0"
machete:
runs-on: ubuntu-latest
steps:

View File

@@ -8,7 +8,7 @@ on:
- "common/**"
- "crypto/**"
- "message-queue/**"
- "orchestration/**"
- "orchestration/message-queue/**"
- "tests/docker/**"
- "tests/message-queue/**"
@@ -17,7 +17,7 @@ on:
- "common/**"
- "crypto/**"
- "message-queue/**"
- "orchestration/**"
- "orchestration/message-queue/**"
- "tests/docker/**"
- "tests/message-queue/**"
@@ -31,6 +31,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Run message-queue Docker tests
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -21,6 +21,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai

View File

@@ -24,6 +24,8 @@ jobs:
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Unit Tests Without Features
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
@@ -43,6 +45,7 @@ jobs:
- name: Test Dependencies
uses: ./.github/actions/test-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features

View File

@@ -27,6 +27,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Install RISC-V Toolchain
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf

View File

@@ -1,90 +0,0 @@
# MIT License
#
# Copyright (c) 2022 just-the-docs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll site to Pages
on:
push:
branches:
- "develop"
paths:
- "docs/**"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow one concurrent deployment
concurrency:
group: "pages"
cancel-in-progress: true
jobs:
# Build job
build:
runs-on: ubuntu-latest
defaults:
run:
working-directory: docs
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
bundler-cache: true
cache-version: 0
working-directory: "${{ github.workspace }}/docs"
- name: Setup Pages
id: pages
uses: actions/configure-pages@v3
- name: Build with Jekyll
run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
env:
JEKYLL_ENV: production
- name: Upload artifact
uses: actions/upload-pages-artifact@v1
with:
path: "docs/_site/"
# Deployment job
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
needs: build
steps:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v2

View File

@@ -9,8 +9,9 @@ on:
- "crypto/**"
- "coins/**"
- "message-queue/**"
- "orchestration/message-queue/**"
- "processor/**"
- "orchestration/**"
- "orchestration/processor/**"
- "tests/docker/**"
- "tests/processor/**"
@@ -20,8 +21,9 @@ on:
- "crypto/**"
- "coins/**"
- "message-queue/**"
- "orchestration/message-queue/**"
- "processor/**"
- "orchestration/**"
- "orchestration/processor/**"
- "tests/docker/**"
- "tests/processor/**"
@@ -35,6 +37,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Run processor Docker tests
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -31,6 +31,8 @@ jobs:
- name: Install Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ inputs.github-token }}
- name: Run Reproducible Runtime tests
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -33,6 +33,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: |
@@ -52,6 +54,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: |
@@ -75,6 +79,8 @@ jobs:
- name: Build Dependencies
uses: ./.github/actions/build-dependencies
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Run Tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client

.gitignore (vendored): 6 lines changed
View File

@@ -1,7 +1,3 @@
target
Dockerfile
Dockerfile.fast-epoch
!orchestration/runtime/Dockerfile
.test-logs
.vscode
.test-logs

Cargo.lock (generated): 2486 lines changed. File diff suppressed because it is too large.

View File

@@ -1,19 +1,6 @@
[workspace]
resolver = "2"
members = [
# Version patches
"patches/zstd",
"patches/rocksdb",
"patches/proc-macro-crate",
# std patches
"patches/matches",
"patches/is-terminal",
# Rewrites/redirects
"patches/option-ext",
"patches/directories-next",
"common/std-shims",
"common/zalloc",
"common/db",
@@ -49,6 +36,12 @@ members = [
"coordinator/tributary",
"coordinator",
"substrate/tree-cleanup/is-terminal",
"substrate/tree-cleanup/option-ext",
"substrate/tree-cleanup/directories-next",
"substrate/tree-cleanup/bandersnatch_vrfs",
"substrate/tree-cleanup/w3f-bls",
"substrate/primitives",
"substrate/coins/primitives",
@@ -70,8 +63,6 @@ members = [
"substrate/client",
"orchestration",
"mini",
"tests/no-std",
@@ -108,28 +99,35 @@ panic = "unwind"
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
# wasmtime pulls in an old version for this
zstd = { path = "patches/zstd" }
# Needed for WAL compression
rocksdb = { path = "patches/rocksdb" }
# proc-macro-crate 2 binds to an old version of toml for msrv so we patch to 3
proc-macro-crate = { path = "patches/proc-macro-crate" }
# subxt *can* pull these off crates.io yet there's no benefit to this
sp-core-hashing = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "experimental" }
sp-std = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "experimental" }
# is-terminal now has an std-based solution with an equivalent API
is-terminal = { path = "patches/is-terminal" }
is-terminal = { path = "substrate/tree-cleanup/is-terminal" }
# So does matches
matches = { path = "patches/matches" }
matches = { path = "substrate/tree-cleanup/matches" }
# directories-next was created because directories was unmaintained
# directories-next is now unmaintained while directories is maintained
# The directories author pulls in ridiculously pointless crates and prefers
# copyleft licenses
# Serai's polkadot-sdk consolidated to directories-next, as directories-next is
# acceptable and because we couldn't consolidate to directories without forking
# wasmtime
# The following two patches resolve everything
option-ext = { path = "patches/option-ext" }
directories-next = { path = "patches/directories-next" }
option-ext = { path = "substrate/tree-cleanup/option-ext" }
directories-next = { path = "substrate/tree-cleanup/directories-next" }
# mach is unmaintained, so this wraps mach2 as mach
mach = { path = "substrate/tree-cleanup/mach" }
# cargo believes the following are in-tree despite no features activating them
# We provide empty crates to not only prove they're unused, yet also clean up
# our Cargo.lock
w3f-bls = { path = "substrate/tree-cleanup/w3f-bls" }
[patch."https://github.com/w3f/ring-vrf"]
bandersnatch_vrfs = { path = "substrate/tree-cleanup/bandersnatch_vrfs" }
[workspace.lints.clippy]
unwrap_or_default = "allow"
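
Regarding the empty crates mentioned in the comments above (and in the "Add empty crates to clean up the Cargo.lock" commit): they are stub packages whose only purpose is to prove the dependency is unused and to prune it from the Cargo.lock. A minimal sketch of such a stub; the manifest contents below are assumed for illustration, not taken from the tree:

# substrate/tree-cleanup/w3f-bls/Cargo.toml (hypothetical contents)
[package]
name = "w3f-bls"
version = "0.1.0"  # assumed; it still has to satisfy the version requirement being patched
edition = "2021"

[lib]
path = "lib.rs"  # an empty lib.rs; the crate intentionally exports nothing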

View File

@@ -5,16 +5,13 @@ Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
experience. Funds are stored in an economically secured threshold-multisig
wallet.
[Getting Started](spec/Getting%20Started.md)
[Getting Started](docs/Getting%20Started.md)
### Layout
- `audits`: Audits for various parts of Serai.
- `spec`: The specification of the Serai protocol, both internally and as
networked.
- `docs`: User-facing documentation on the Serai protocol.
- `docs`: Documentation on the Serai protocol.
- `common`: Crates containing utilities common to a variety of areas under
Serai, none neatly fitting under another category.

View File

@@ -12,7 +12,7 @@ pub fn SEQUENTIAL() -> &'static Mutex<()> {
#[allow(dead_code)]
pub(crate) async fn rpc() -> Rpc {
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:8332".to_string()).await.unwrap();
let rpc = Rpc::new("http://serai:seraidex@127.0.0.1:18443".to_string()).await.unwrap();
// If this node has already been interacted with, clear its chain
if rpc.get_latest_block_number().await.unwrap() > 0 {

View File

@@ -1,7 +1,3 @@
# Solidity build outputs
# solidity build outputs
cache
artifacts
# Auto-generated ABI files
src/abi/schnorr.rs
src/abi/router.rs

View File

@@ -30,9 +30,6 @@ ethers-core = { version = "2", default-features = false }
ethers-providers = { version = "2", default-features = false }
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
[build-dependencies]
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["std"] }

View File

@@ -1,20 +1,6 @@
use std::process::Command;
use ethers_contract::Abigen;
fn main() {
println!("cargo:rerun-if-changed=contracts/*");
println!("cargo:rerun-if-changed=artifacts/*");
for line in String::from_utf8(Command::new("solc").args(["--version"]).output().unwrap().stdout)
.unwrap()
.lines()
{
if let Some(version) = line.strip_prefix("Version: ") {
let version = version.split('+').next().unwrap();
assert_eq!(version, "0.8.25");
}
}
println!("cargo:rerun-if-changed=contracts");
println!("cargo:rerun-if-changed=artifacts");
#[rustfmt::skip]
let args = [
@@ -22,21 +8,8 @@ fn main() {
"-o", "./artifacts", "--overwrite",
"--bin", "--abi",
"--optimize",
"./contracts/Schnorr.sol", "./contracts/Router.sol",
"./contracts/Schnorr.sol"
];
assert!(Command::new("solc").args(args).status().unwrap().success());
Abigen::new("Schnorr", "./artifacts/Schnorr.abi")
.unwrap()
.generate()
.unwrap()
.write_to_file("./src/abi/schnorr.rs")
.unwrap();
Abigen::new("Router", "./artifacts/Router.abi")
.unwrap()
.generate()
.unwrap()
.write_to_file("./src/abi/router.rs")
.unwrap();
assert!(std::process::Command::new("solc").args(args).status().unwrap().success());
}

View File

@@ -1,90 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "./Schnorr.sol";
contract Router is Schnorr {
// Contract initializer
// TODO: Replace with a MuSig of the genesis validators
address public initializer;
// Nonce is incremented for each batch of transactions executed
uint256 public nonce;
// fixed parity for the public keys used in this contract
uint8 constant public KEY_PARITY = 27;
// current public key's x-coordinate
// note: this key must always use the fixed parity defined above
bytes32 public seraiKey;
struct OutInstruction {
address to;
uint256 value;
bytes data;
}
struct Signature {
bytes32 c;
bytes32 s;
}
// success is a uint256 representing a bitfield of transaction successes
event Executed(uint256 nonce, bytes32 batch, uint256 success);
// error types
error NotInitializer();
error AlreadyInitialized();
error InvalidKey();
error TooManyTransactions();
constructor() {
initializer = msg.sender;
}
// initSeraiKey can be called by the contract initializer to set the first
// public key, only if the public key has yet to be set.
function initSeraiKey(bytes32 _seraiKey) external {
if (msg.sender != initializer) revert NotInitializer();
if (seraiKey != 0) revert AlreadyInitialized();
if (_seraiKey == bytes32(0)) revert InvalidKey();
seraiKey = _seraiKey;
}
// updateSeraiKey validates the given Schnorr signature against the current public key,
// and if successful, updates the contract's public key to the given one.
function updateSeraiKey(
bytes32 _seraiKey,
Signature memory sig
) public {
if (_seraiKey == bytes32(0)) revert InvalidKey();
bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey));
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
seraiKey = _seraiKey;
}
// execute accepts a list of transactions to execute as well as a Schnorr signature.
// if signature verification passes, the given transactions are executed.
// if signature verification fails, this function will revert.
function execute(
OutInstruction[] calldata transactions,
Signature memory sig
) public {
if (transactions.length > 256) revert TooManyTransactions();
bytes32 message = keccak256(abi.encode("execute", nonce, transactions));
// This prevents re-entrancy from causing double spends yet does allow
// out-of-order execution via re-entrancy
nonce++;
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
uint256 successes;
for(uint256 i = 0; i < transactions.length; i++) {
(bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data);
assembly {
successes := or(successes, shl(i, success))
}
}
emit Executed(nonce, message, successes);
}
}

View File

@@ -1,4 +1,4 @@
// SPDX-License-Identifier: AGPLv3
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
// see https://github.com/noot/schnorr-verify for implementation details
@@ -7,32 +7,29 @@ contract Schnorr {
uint256 constant public Q =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
error InvalidSOrA();
error InvalidSignature();
// parity := public key y-coord parity (27 or 28)
// px := public key x-coord
// message := 32-byte hash of the message
// c := schnorr signature challenge
// message := 32-byte message
// s := schnorr signature
// e := schnorr signature challenge
function verify(
uint8 parity,
bytes32 px,
bytes32 message,
bytes32 c,
bytes32 s
bytes32 s,
bytes32 e
) public view returns (bool) {
// ecrecover = (m, v, r, s);
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));
if (sa == 0) revert InvalidSOrA();
require(sp != 0);
// the ecrecover precompile implementation checks that the `r` and `s`
// inputs are non-zero (in this case, `px` and `ca`), thus we don't need to
// check if they're zero.
address R = ecrecover(sa, parity, px, ca);
if (R == address(0)) revert InvalidSignature();
return c == keccak256(
// inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
// check if they're zero.
address R = ecrecover(sp, parity, px, ep);
require(R != address(0), "ecrecover failed");
return e == keccak256(
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
);
}
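
For reference, the reason this contract can verify a Schnorr signature with the ecrecover precompile (under either parameter naming shown in the diff, c/s or s/e; they differ only in what the challenge is called) is the algebra below; a sketch assuming the signing equation s = k + c·x, so that s·G − c·A = R:

ecrecover(z, v, r, s') = addr(r^-1 · (s'·R' − z·G)), where R' is the point with x-coordinate r and parity v
with z = −s·px, v = parity, r = px (so R' = A), and s' = −c·px:
addr(px^-1 · ((−c·px)·A − (−s·px)·G)) = addr(s·G − c·A) = addr(R)
after which the contract checks c == keccak256(addr(R) || parity || px || chainid || message)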

View File

@@ -1,6 +0,0 @@
#[rustfmt::skip]
#[allow(clippy::all)]
pub(crate) mod schnorr;
#[rustfmt::skip]
#[allow(clippy::all)]
pub(crate) mod router;

View File

@@ -0,0 +1,36 @@
use thiserror::Error;
use eyre::{eyre, Result};
use ethers_providers::{Provider, Http};
use ethers_contract::abigen;
use crate::crypto::ProcessedSignature;
#[derive(Error, Debug)]
pub enum EthereumError {
#[error("failed to verify Schnorr signature")]
VerificationError,
}
abigen!(Schnorr, "./artifacts/Schnorr.abi");
pub async fn call_verify(
contract: &Schnorr<Provider<Http>>,
params: &ProcessedSignature,
) -> Result<()> {
if contract
.verify(
params.parity + 27,
params.px.to_bytes().into(),
params.message,
params.s.to_bytes().into(),
params.e.to_bytes().into(),
)
.call()
.await?
{
Ok(())
} else {
Err(eyre!(EthereumError::VerificationError))
}
}

View File

@@ -1,54 +1,50 @@
use sha3::{Digest, Keccak256};
use group::ff::PrimeField;
use group::Group;
use k256::{
elliptic_curve::{
bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint,
bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
},
ProjectivePoint, Scalar, U256,
AffinePoint, ProjectivePoint, Scalar, U256,
};
use frost::{
algorithm::{Hram, SchnorrSignature},
curve::Secp256k1,
};
use frost::{algorithm::Hram, curve::Secp256k1};
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
pub fn keccak256(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
}
pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] {
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
let encoded_point = point.to_encoded_point(false);
// Last 20 bytes of the hash of the concatenated x and y coordinates
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}
#[allow(non_snake_case)]
pub struct PublicKey {
pub A: ProjectivePoint,
pub px: Scalar,
pub parity: u8,
}
pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
if r.is_zero().into() || s.is_zero().into() {
return None;
}
impl PublicKey {
#[allow(non_snake_case)]
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
let affine = A.to_affine();
let parity = u8::from(bool::from(affine.y_is_odd())) + 27;
if parity != 27 {
None?;
let R = AffinePoint::decompress(&r.to_bytes(), v.into());
#[allow(non_snake_case)]
if let Some(R) = Option::<AffinePoint>::from(R) {
#[allow(non_snake_case)]
let R = ProjectivePoint::from(R);
let r = r.invert().unwrap();
let u1 = ProjectivePoint::GENERATOR * (-message * r);
let u2 = R * (s * r);
let key: ProjectivePoint = u1 + u2;
if !bool::from(key.is_identity()) {
return Some(address(&key));
}
}
let x_coord = affine.x();
let x_coord_scalar = <Scalar as Reduce<U256>>::reduce_bytes(&x_coord);
// Return None if a reduction would occur
if x_coord_scalar.to_repr() != x_coord {
None?;
}
Some(PublicKey { A, px: x_coord_scalar, parity })
}
None
}
#[derive(Clone, Default)]
@@ -59,33 +55,53 @@ impl Hram<Secp256k1> for EthereumHram {
let a_encoded_point = A.to_encoded_point(true);
let mut a_encoded = a_encoded_point.as_ref().to_owned();
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
assert!((a_encoded[0] == 27) || (a_encoded[0] == 28));
let mut data = address(R).to_vec();
data.append(&mut a_encoded);
data.extend(m);
data.append(&mut m.to_vec());
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
}
}
pub struct Signature {
pub(crate) c: Scalar,
pub(crate) s: Scalar,
pub struct ProcessedSignature {
pub s: Scalar,
pub px: Scalar,
pub parity: u8,
pub message: [u8; 32],
pub e: Scalar,
}
impl Signature {
pub fn new(
public_key: &PublicKey,
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
m: &[u8],
signature: SchnorrSignature<Secp256k1>,
) -> Option<Signature> {
let c = EthereumHram::hram(
&signature.R,
&public_key.A,
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
);
if !signature.verify(public_key.A, c) {
None?;
}
Some(Signature { c, s: signature.s })
) -> (Scalar, Scalar) {
let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
let sr = processed_sig.s.mul(&processed_sig.px).negate();
let er = processed_sig.e.mul(&processed_sig.px).negate();
(sr, er)
}
#[allow(non_snake_case)]
pub fn process_signature_for_contract(
m: [u8; 32],
R: &ProjectivePoint,
s: Scalar,
A: &ProjectivePoint,
chain_id: U256,
) -> ProcessedSignature {
let encoded_pk = A.to_encoded_point(true);
let px = &encoded_pk.as_ref()[1 .. 33];
let px_scalar = Scalar::reduce(U256::from_be_slice(px));
let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
ProcessedSignature {
s,
px: px_scalar,
parity: &encoded_pk.as_ref()[0] - 2,
#[allow(non_snake_case)]
message: m,
e,
}
}

View File

@@ -1,16 +1,2 @@
use thiserror::Error;
pub mod contract;
pub mod crypto;
pub(crate) mod abi;
pub mod schnorr;
pub mod router;
#[cfg(test)]
mod tests;
#[derive(Error, Debug)]
pub enum Error {
#[error("failed to verify Schnorr signature")]
InvalidSignature,
}

View File

@@ -1,30 +0,0 @@
pub use crate::abi::router::*;
/*
use crate::crypto::{ProcessedSignature, PublicKey};
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::Result;
use std::{convert::From, fs::File, sync::Arc};
pub async fn router_update_public_key<M: Middleware + 'static>(
contract: &Router<M>,
public_key: &PublicKey,
signature: &ProcessedSignature,
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into());
let pending_tx = tx.send().await?;
let receipt = pending_tx.await?;
Ok(receipt)
}
pub async fn router_execute<M: Middleware + 'static>(
contract: &Router<M>,
txs: Vec<Rtransaction>,
signature: &ProcessedSignature,
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
let tx = contract.execute(txs, signature.into()).send();
let pending_tx = tx.send().await?;
let receipt = pending_tx.await?;
Ok(receipt)
}
*/

View File

@@ -1,34 +0,0 @@
use eyre::{eyre, Result};
use group::ff::PrimeField;
use ethers_providers::{Provider, Http};
use crate::{
Error,
crypto::{keccak256, PublicKey, Signature},
};
pub use crate::abi::schnorr::*;
pub async fn call_verify(
contract: &Schnorr<Provider<Http>>,
public_key: &PublicKey,
message: &[u8],
signature: &Signature,
) -> Result<()> {
if contract
.verify(
public_key.parity,
public_key.px.to_repr().into(),
keccak256(message),
signature.c.to_repr().into(),
signature.s.to_repr().into(),
)
.call()
.await?
{
Ok(())
} else {
Err(eyre!(Error::InvalidSignature))
}
}

View File

@@ -1,132 +0,0 @@
use rand_core::OsRng;
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use group::Group;
use k256::{
ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey},
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint},
U256, Scalar, AffinePoint, ProjectivePoint,
};
use frost::{
curve::Secp256k1,
algorithm::{Hram, IetfSchnorr},
tests::{algorithm_machines, sign},
};
use crate::{crypto::*, tests::key_gen};
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}
pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
if r.is_zero().into() || s.is_zero().into() || !((v == 27) || (v == 28)) {
return None;
}
#[allow(non_snake_case)]
let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into());
#[allow(non_snake_case)]
if let Some(R) = Option::<AffinePoint>::from(R) {
#[allow(non_snake_case)]
let R = ProjectivePoint::from(R);
let r = r.invert().unwrap();
let u1 = ProjectivePoint::GENERATOR * (-message * r);
let u2 = R * (s * r);
let key: ProjectivePoint = u1 + u2;
if !bool::from(key.is_identity()) {
return Some(address(&key));
}
}
None
}
#[test]
fn test_ecrecover() {
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
// Sign the signature
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
.unwrap();
// Sanity check the signature verifies
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
}
// Perform the ecrecover
assert_eq!(
ecrecover(
hash_to_scalar(MESSAGE),
u8::from(recovery_id.unwrap().is_y_odd()) + 27,
*sig.r(),
*sig.s()
)
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
// Run the sign test with the EthereumHram
#[test]
fn test_signing() {
let (keys, _) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
}
#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
R: ProjectivePoint,
public_key: &PublicKey,
chain_id: U256,
m: &[u8],
s: Scalar,
) -> (u8, Scalar, Scalar) {
let c = EthereumHram::hram(
&R,
&public_key.A,
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
);
let sa = -(s * public_key.px);
let ca = -(c * public_key.px);
(public_key.parity, sa, ca)
}
#[test]
fn test_ecrecover_hack() {
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let (parity, sa, ca) =
preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s);
let q = ecrecover(sa, parity, public_key.px, ca).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,92 +0,0 @@
use std::{sync::Arc, time::Duration, fs::File, collections::HashMap};
use rand_core::OsRng;
use group::ff::PrimeField;
use k256::{Scalar, ProjectivePoint};
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
use ethers_core::{
types::{H160, Signature as EthersSignature},
abi::Abi,
};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};
use crate::crypto::PublicKey;
mod crypto;
mod schnorr;
mod router;
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
let mut keys = frost_key_gen::<_, Secp256k1>(&mut OsRng);
let mut group_key = keys[&Participant::new(1).unwrap()].group_key();
let mut offset = Scalar::ZERO;
while PublicKey::new(group_key).is_none() {
offset += Scalar::ONE;
group_key += ProjectivePoint::GENERATOR;
}
for keys in keys.values_mut() {
*keys = keys.offset(offset);
}
let public_key = PublicKey::new(group_key).unwrap();
(keys, public_key)
}
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
// TODO: Deterministic deployments across chains
pub async fn deploy_contract(
chain_id: u32,
client: Arc<Provider<Http>>,
wallet: &k256::ecdsa::SigningKey,
name: &str,
) -> eyre::Result<H160> {
let abi: Abi =
serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap();
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
let hex_bin =
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
let bin = hex::decode(hex_bin).unwrap();
let factory = ContractFactory::new(abi, bin.into(), client.clone());
let mut deployment_tx = factory.deploy(())?.tx;
deployment_tx.set_chain_id(chain_id);
deployment_tx.set_gas(1_000_000);
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
let sig_hash = deployment_tx.sighash();
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
// EIP-155 v
let mut v = u64::from(rid.to_byte());
assert!((v == 0) || (v == 1));
v += u64::from((chain_id * 2) + 35);
let r = sig.r().to_repr();
let r_ref: &[u8] = r.as_ref();
let s = sig.s().to_repr();
let s_ref: &[u8] = s.as_ref();
let deployment_tx =
deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v });
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
let mut receipt;
while {
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
receipt.is_none()
} {
tokio::time::sleep(Duration::from_secs(6)).await;
}
let receipt = receipt.unwrap();
assert!(receipt.status == Some(1.into()));
Ok(receipt.contract_address.unwrap())
}

View File

@@ -1,109 +0,0 @@
use std::{convert::TryFrom, sync::Arc, collections::HashMap};
use rand_core::OsRng;
use group::ff::PrimeField;
use frost::{
curve::Secp256k1,
Participant, ThresholdKeys,
algorithm::IetfSchnorr,
tests::{algorithm_machines, sign},
};
use ethers_core::{
types::{H160, U256, Bytes},
abi::AbiEncode,
utils::{Anvil, AnvilInstance},
};
use ethers_providers::{Middleware, Provider, Http};
use crate::{
crypto::{keccak256, PublicKey, EthereumHram, Signature},
router::{self, *},
tests::{key_gen, deploy_contract},
};
async fn setup_test() -> (
u32,
AnvilInstance,
Router<Provider<Http>>,
HashMap<Participant, ThresholdKeys<Secp256k1>>,
PublicKey,
) {
let anvil = Anvil::new().spawn();
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
let contract_address =
deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap();
let contract = Router::new(contract_address, client.clone());
let (keys, public_key) = key_gen();
// Set the key to the threshold keys
let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000);
let pending_tx = tx.send().await.unwrap();
let receipt = pending_tx.await.unwrap().unwrap();
assert!(receipt.status == Some(1.into()));
(chain_id, anvil, contract, keys, public_key)
}
#[tokio::test]
async fn test_deploy_contract() {
setup_test().await;
}
pub fn hash_and_sign(
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
public_key: &PublicKey,
chain_id: U256,
message: &[u8],
) -> Signature {
let hashed_message = keccak256(message);
let mut chain_id_bytes = [0; 32];
chain_id.to_big_endian(&mut chain_id_bytes);
let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, keys),
full_message,
);
Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap()
}
#[tokio::test]
async fn test_router_execute() {
let (chain_id, _anvil, contract, keys, public_key) = setup_test().await;
let to = H160([0u8; 20]);
let value = U256([0u64; 4]);
let data = Bytes::from([0]);
let tx = OutInstruction { to, value, data: data.clone() };
let nonce_call = contract.nonce();
let nonce = nonce_call.call().await.unwrap();
let encoded =
("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode();
let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded);
let tx = contract
.execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() })
.gas(300_000);
let pending_tx = tx.send().await.unwrap();
let receipt = dbg!(pending_tx.await.unwrap().unwrap());
assert!(receipt.status == Some(1.into()));
println!("gas used: {:?}", receipt.cumulative_gas_used);
println!("logs: {:?}", receipt.logs);
}

View File

@@ -1,67 +0,0 @@
use std::{convert::TryFrom, sync::Arc};
use rand_core::OsRng;
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar};
use ethers_core::utils::{keccak256, Anvil, AnvilInstance};
use ethers_providers::{Middleware, Provider, Http};
use frost::{
curve::Secp256k1,
algorithm::IetfSchnorr,
tests::{algorithm_machines, sign},
};
use crate::{
crypto::*,
schnorr::*,
tests::{key_gen, deploy_contract},
};
async fn setup_test() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
let anvil = Anvil::new().spawn();
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
let contract_address =
deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap();
let contract = Schnorr::new(contract_address, client.clone());
(chain_id, anvil, contract)
}
#[tokio::test]
async fn test_deploy_contract() {
setup_test().await;
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (chain_id, _anvil, contract) = setup_test().await;
let chain_id = U256::from(chain_id);
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let sig = Signature::new(&public_key, chain_id, MESSAGE, sig).unwrap();
call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap();
// Test an invalid signature fails
let mut sig = sig;
sig.s += Scalar::ONE;
assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err());
}

View File

@@ -0,0 +1,128 @@
use std::{convert::TryFrom, sync::Arc, time::Duration, fs::File};
use rand_core::OsRng;
use ::k256::{
elliptic_curve::{bigint::ArrayEncoding, PrimeField},
U256,
};
use ethers_core::{
types::Signature,
abi::Abi,
utils::{keccak256, Anvil, AnvilInstance},
};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};
use frost::{
curve::Secp256k1,
Participant,
algorithm::IetfSchnorr,
tests::{key_gen, algorithm_machines, sign},
};
use ethereum_serai::{
crypto,
contract::{Schnorr, call_verify},
};
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
pub async fn deploy_schnorr_verifier_contract(
chain_id: u32,
client: Arc<Provider<Http>>,
wallet: &k256::ecdsa::SigningKey,
) -> eyre::Result<Schnorr<Provider<Http>>> {
let abi: Abi = serde_json::from_reader(File::open("./artifacts/Schnorr.abi").unwrap()).unwrap();
let hex_bin_buf = std::fs::read_to_string("./artifacts/Schnorr.bin").unwrap();
let hex_bin =
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
let bin = hex::decode(hex_bin).unwrap();
let factory = ContractFactory::new(abi, bin.into(), client.clone());
let mut deployment_tx = factory.deploy(())?.tx;
deployment_tx.set_chain_id(chain_id);
deployment_tx.set_gas(500_000);
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
let sig_hash = deployment_tx.sighash();
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
// EIP-155 v
let mut v = u64::from(rid.to_byte());
assert!((v == 0) || (v == 1));
v += u64::from((chain_id * 2) + 35);
let r = sig.r().to_repr();
let r_ref: &[u8] = r.as_ref();
let s = sig.s().to_repr();
let s_ref: &[u8] = s.as_ref();
let deployment_tx = deployment_tx.rlp_signed(&Signature { r: r_ref.into(), s: s_ref.into(), v });
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
let mut receipt;
while {
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
receipt.is_none()
} {
tokio::time::sleep(Duration::from_secs(6)).await;
}
let receipt = receipt.unwrap();
assert!(receipt.status == Some(1.into()));
let contract = Schnorr::new(receipt.contract_address.unwrap(), client.clone());
Ok(contract)
}
async fn deploy_test_contract() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
let anvil = Anvil::new().spawn();
let provider =
Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
(chain_id, anvil, deploy_schnorr_verifier_contract(chain_id, client, &wallet).await.unwrap())
}
#[tokio::test]
async fn test_deploy_contract() {
deploy_test_contract().await;
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (chain_id, _anvil, contract) = deploy_test_contract().await;
let chain_id = U256::from(chain_id);
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let mut processed_sig =
crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);
call_verify(&contract, &processed_sig).await.unwrap();
// test invalid signature fails
processed_sig.message[0] = 0;
assert!(call_verify(&contract, &processed_sig).await.is_err());
}

View File

@@ -0,0 +1,87 @@
use k256::{
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256,
};
use frost::{curve::Secp256k1, Participant};
use ethereum_serai::crypto::*;
#[test]
fn test_ecrecover() {
use rand_core::OsRng;
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};
let private = SigningKey::random(&mut OsRng);
let public = VerifyingKey::from(&private);
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
.unwrap();
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
}
assert_eq!(
ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
.unwrap(),
address(&ProjectivePoint::from(public.as_affine()))
);
}
#[test]
fn test_signing() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let _group_key = keys[&Participant::new(1).unwrap()].group_key();
const MESSAGE: &[u8] = b"Hello, World!";
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let _sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
}
#[test]
fn test_ecrecover_hack() {
use frost::{
algorithm::IetfSchnorr,
tests::{algorithm_machines, key_gen, sign},
};
use rand_core::OsRng;
let keys = key_gen::<_, Secp256k1>(&mut OsRng);
let group_key = keys[&Participant::new(1).unwrap()].group_key();
let group_key_encoded = group_key.to_encoded_point(true);
let group_key_compressed = group_key_encoded.as_ref();
let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let (sr, er) =
preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -0,0 +1,2 @@
mod contract;
mod crypto;

View File

@@ -26,9 +26,6 @@ curve25519-dalek = { version = "4", default-features = false, features = ["alloc
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
[dev-dependencies]
hex = "0.4"
[features]
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
default = ["std"]

View File

@@ -7,16 +7,7 @@ use dalek_ff_group::FieldElement;
use crate::hash;
/// Decompress canonically encoded ed25519 point
/// It does not check if the point is in the prime order subgroup
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
}
/// Monero's hash to point function, as named `hash_to_ec`.
/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
#[allow(non_snake_case)]
let A = FieldElement::from(486662u64);
@@ -56,5 +47,5 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
let mut bytes = Y.to_repr();
bytes[31] |= sign.unwrap_u8() << 7;
decompress_point(bytes).unwrap().mul_by_cofactor()
CompressedEdwardsY(bytes).decompress().unwrap().mul_by_cofactor()
}

View File

@@ -9,7 +9,7 @@ use std_shims::{sync::OnceLock, vec::Vec};
use sha3::{Digest, Keccak256};
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};
use group::{Group, GroupEncoding};
use dalek_ff_group::EdwardsPoint;
@@ -18,10 +18,7 @@ mod varint;
use varint::write_varint;
mod hash_to_point;
pub use hash_to_point::{hash_to_point, decompress_point};
#[cfg(test)]
mod tests;
pub use hash_to_point::hash_to_point;
fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
@@ -32,7 +29,10 @@ static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
#[allow(non_snake_case)]
pub fn H() -> DalekPoint {
*H_CELL.get_or_init(|| {
decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
.decompress()
.unwrap()
.mul_by_cofactor()
})
}

View File

@@ -1,38 +0,0 @@
use crate::{decompress_point, hash_to_point};
#[test]
fn crypto_tests() {
// tests.txt file copied from monero repo
// https://github.com/monero-project/monero/
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
let reader = include_str!("./tests.txt");
for line in reader.lines() {
let mut words = line.split_whitespace();
let command = words.next().unwrap();
match command {
"check_key" => {
let key = words.next().unwrap();
let expected = match words.next().unwrap() {
"true" => true,
"false" => false,
_ => unreachable!("invalid result"),
};
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
assert_eq!(actual.is_some(), expected);
}
"hash_to_ec" => {
let bytes = words.next().unwrap();
let expected = words.next().unwrap();
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
}
_ => unreachable!("unknown command"),
}
}
}

View File

@@ -1 +0,0 @@
mod hash_to_point;

View File

@@ -1,628 +0,0 @@
check_key c2cb3cf3840aa9893e00ec77093d3d44dba7da840b51c48462072d58d8efd183 false
check_key bd85a61bae0c101d826cbed54b1290f941d26e70607a07fc6f0ad611eb8f70a6 true
check_key 328f81cad4eba24ab2bad7c0e56b1e2e7346e625bcb06ae649aef3ffa0b8bef3 false
check_key 6016a5463b9e5a58c3410d3f892b76278883473c3f0b69459172d3de49e85abe true
check_key 4c71282b2add07cdc6898a2622553f1ca4eb851e5cb121181628be5f3814c5b1 false
check_key 69393c25c3b50e177f81f20f852dd604e768eb30052e23108b3cfa1a73f2736e true
check_key 3d5a89b676cb84c2be3428d20a660dc6a37cae13912e127888a5132e8bac2163 true
check_key 78cd665deb28cebc6208f307734c56fccdf5fa7e2933fadfcdd2b6246e9ae95c false
check_key e03b2414e260580f86ee294cd4c636a5b153e617f704e81dad248fbf715b2ee4 true
check_key 28c3503ce82d7cdc8e0d96c4553bcf0352bbcfc73925495dbe541e7e1df105fc false
check_key 06855c3c3e0d03fec354059bda319b39916bdc10b6581e3f41b335ee7b014fd5 false
check_key 556381485df0d7d5a268ab5ecfb2984b060acc63471183fcf538bf273b0c0cb5 true
check_key c7f76d82ac64b1e7fdc32761ff00d6f0f7ada4cf223aa5a11187e3a02e1d5319 true
check_key cfa85d8bdb6f633fcf031adee3a299ac42eeb6bd707744049f652f6322f5aa47 true
check_key 91e9b63ced2b08979fee713365464cc3417c4f238f9bdd3396efbb3c58e195ee true
check_key 7b56e76fe94bd30b3b2f2c4ba5fe4c504821753a8965eb1cbcf8896e2d6aba19 true
check_key 7338df494bc416cf5edcc02069e067f39cb269ce67bd9faba956021ce3b3de3a false
check_key f9a1f27b1618342a558379f4815fa5039a8fe9d98a09f45c1af857ba99231dc1 false
check_key b2a1f37718180d4448a7fcb5f788048b1a7132dde1cfd25f0b9b01776a21c687 true
check_key 0d3a0f9443a8b24510ad1e76a8117cca03bce416edfe35e3c2a2c2712454f8dc false
check_key d8d3d806a76f120c4027dc9c9d741ad32e06861b9cfbc4ce39289c04e251bb3c false
check_key 1e9e3ba7bc536cd113606842835d1f05b4b9e65875742f3a35bfb2d63164b5d5 true
check_key 5c52d0087997a2cdf1d01ed0560d94b4bfd328cb741cb9a8d46ff50374b35a57 true
check_key bb669d4d7ffc4b91a14defedcdbd96b330108b01adc63aa685e2165284c0033b false
check_key d2709ae751a0a6fd796c98456fa95a7b64b75a3434f1caa3496eeaf5c14109b4 true
check_key e0c238cba781684e655b10a7d4af04ab7ff2e7022182d7ed2279d6adf36b3e7a false
check_key 34ebb4bf871572cee5c6935716fab8c8ec28feef4f039763d8f039b84a50bf4c false
check_key 4730d4f38ec3f3b83e32e6335d2506df4ee39858848842c5a0184417fcc639e4 true
check_key d42cf7fdf5e17e0a8a7f88505a2b7a3d297113bd93d3c20fa87e11509ec905a2 true
check_key b757c95059cefabb0080d3a8ebca82e46efecfd29881be3121857f9d915e388c false
check_key bbe777aaf04d02b96c0632f4b1c6f35f1c7bcbc5f22af192f92c077709a2b50b false
check_key 73518522aabd28566f858c33fccb34b7a4de0e283f6f783f625604ee647afad9 true
check_key f230622c4a8f6e516590466bd10f86b64fbef61695f6a054d37604e0b024d5af false
check_key bc6b9a8379fd6c369f7c3bd9ddce58db6b78f27a41d798bb865c3920824d0943 false
check_key 45a4f87c25898cd6be105fa1602b85c4d862782adaac8b85c996c4a2bcd8af47 true
check_key eb4ad3561d21c4311affbd7cc2c7ff5fd509f72f88ba67dc097a75c31fdbd990 false
check_key 2f34f4630c09a23b7ecc19f02b4190a26df69e07e13de8069ae5ff80d23762fc true
check_key 2ea4e4fb5085eb5c8adee0d5ab7d35c67d74d343bd816cd13924536cffc2527c true
check_key 5d35467ee6705a0d35818aa9ae94e4603c3e5500bfc4cf4c4f77a7160a597aa6 true
check_key 8ff42bc76796e20c99b6e879369bd4b46a256db1366416291de9166e39d5a093 true
check_key 0262ba718850df6c621e8a24cd9e4831c047e38818a89e15c7a06a489a4558e1 false
check_key 58b29b2ba238b534b08fb46f05f430e61cb77dc251b0bb50afec1b6061fd9247 false
check_key 153170e3dc2b0e1b368fc0d0e31053e872f094cdace9a2846367f0d9245a109b false
check_key 40419d309d07522d493bb047ca9b5fb6c401aae226eefae6fd395f5bb9114200 true
check_key 713068818d256ef69c78cd6082492013fbd48de3c9e7e076415dd0a692994504 true
check_key a7218ee08e50781b0c87312d5e0031467e863c10081668e3792d96cbcee4e474 true
check_key 356ce516b00e674ef1729c75b0a68090e7265cef675bbf32bf809495b67e9342 false
check_key 52a5c053293675e3efd2c585047002ea6d77931cbf38f541b9070d319dc0d237 false
check_key 77c0080bf157e069b18c4c604cc9505c5ec6f0f9930e087592d70507ca1b5534 false
check_key e733bc41f880a4cfb1ca6f397916504130807289cacfca10b15f5b8d058ed1bf false
check_key c4f1d3c884908a574ecea8be10e02277de35ef84a1d10f105f2be996f285161f true
check_key aed677f7f69e146aa0863606ac580fc0bbdc22a88c4b4386abaa4bdfff66bcc9 false
check_key 6ad0edf59769599af8caa986f502afc67aecbebb8107aaf5e7d3ae51d5cf8dd8 false
check_key 64a0a70e99be1f775c222ee9cd6f1bee6f632cb9417899af398ff9aff70661c6 true
check_key c63afaa03bb5c4ed7bc77aac175dbfb73f904440b2e3056a65850ac1bd261332 false
check_key a4e89cd2471c26951513b1cfbdcf053a86575e095af52495276aa56ede8ce344 false
check_key 2ce935d97f7c3ddb973de685d20f58ee39938fe557216328045ec2b83f3132be true
check_key 3e3d38b1fca93c1559ac030d586616354c668aa76245a09e3fa6de55ac730973 true
check_key 8b81b9681f76a4254007fd07ed1ded25fc675973ccb23afd06074805194733a4 false
check_key 26d1c15dfc371489439e29bcef2afcf7ed01fac24960fdc2e7c20847a8067588 true
check_key 85c1199b5a4591fc4cc36d23660648c1b9cfbb0e9c47199fa3eea33299a3dcec false
check_key 60830ba5449c1f04ac54675dfc7cac7510106c4b7549852551f8fe65971123e2 false
check_key 3e43c28c024597b3b836e4bc16905047cbf6e841b80e0b8cd6a325049070c2a5 false
check_key 474792c16a0032343a6f28f4cb564747c3b1ea0b6a6b9a42f7c71d7cc3dd3b44 true
check_key c8ec5e67cb5786673085191881950a3ca20dde88f46851b01dd91c695cfbad16 true
check_key 861c4b24b24a87b8559e0bb665f84dcc506c147a909f335ae4573b92299f042f false
check_key 2c9e0fe3e4983d79f86c8c36928528f1bc90d94352ce427032cdef6906d84d0b true
check_key 9293742822c2dff63fdc1bf6645c864fd527cea2ddba6d4f3048d202fc340c9a true
check_key 3956422ad380ef19cb9fe360ef09cc7aaec7163eea4114392a7a0b2e2671914e true
check_key 5ae8e72cadda85e525922fec11bd53a261cf26ee230fe85a1187f831b1b2c258 false
check_key 973feca43a0baf450c30ace5dc19015e19400f0898316e28d9f3c631da31f99a true
check_key dd946c91a2077f45c5c16939e53859d9beabaf065e7b1b993d5e5cd385f8716e true
check_key b3928f2d67e47f6bd6da81f72e64908d8ff391af5689f0202c4c6fec7666ffe8 true
check_key 313382e82083697d7f9d256c3b3800b099b56c3ef33cacdccbd40a65622e25fc false
check_key 7d65380c12144802d39ed9306eed79fe165854273700437c0b4b50559800c058 true
check_key 4db5c20a49422fd27739c9ca80e2271a8a125dfcead22cb8f035d0e1b7b163be true
check_key dd76a9f565ef0e44d1531349ec4c5f7c3c387c2f5823e693b4952f4b0b70808c true
check_key 66430bf628eae23918c3ed17b42138db1f98c24819e55fc4a07452d0c85603eb true
check_key 9f0b677830c3f089c27daf724bb10be848537f8285de83ab0292d35afb617f77 false
check_key cbf98287391fb00b1e68ad64e9fb10198025864c099b8b9334d840457e673874 true
check_key a42552e9446e49a83aed9e3370506671216b2d1471392293b8fc2b81c81a73ee false
check_key fb3de55ac81a923d506a514602d65d004ec9d13e8b47e82d73af06da73006673 false
check_key e17abb78e58a4b72ff4ad7387b290f2811be880b394b8bcaae7748ac09930169 false
check_key 9ffbda7ace69753761cdb5eb01f75433efa5cdb6a4f1b664874182c6a95adcba true
check_key 507123c979179ea0a3f7f67fb485f71c8636ec4ec70aa47b92f3c707e7541a54 false
check_key f1d0b156571994ef578c61cb6545d34f834eb30e4357539a5633c862d4dffa91 false
check_key 3de62311ec14f9ee95828c190b2dc3f03059d6119e8dfccb7323efc640e07c75 false
check_key 5e50bb48bc9f6dd11d52c1f0d10d8ae5674d7a4af89cbbce178dafc8a562e5fe false
check_key 20b2c16497be101995391ceefb979814b0ea76f1ed5b6987985bcdcd17b36a81 false
check_key d63bff73b914ce791c840e99bfae0d47afdb99c2375e33c8f149d0df03d97873 false
check_key 3f24b3d94b5ddd244e4c4e67a6d9f533f0396ca30454aa0ca799f21328b81d47 true
check_key 6a44c016f09225a6d2e830290719d33eb29b53b553eea7737ed3a6e297b2e7d2 true
check_key ff0f34df0c76c207b8340be2009db72f730c69c2bbfeea2013105eaccf1d1f8e true
check_key 4baf559869fe4e915e219c3c8d9a2330fc91e542a5a2a7311d4d59fee996f807 true
check_key 1632207dfef26e97d13b0d0035ea9468fc5a8a89b0990fce77bb143c9d7f3b67 true
check_key fcb3dee3993d1a47630f29410903dd03706bd5e81c5802e6f1b9095cbdb404d3 true
check_key fb527092b9809e3d27d7588c7ef89915a769b99c1e03e7f72bbead9ed837daae false
check_key 902b118d27d40ab9cbd55edd375801ce302cdb59e09c8659a3ea1401918d8bba false
check_key 4d6fbf25ca51e263a700f1abf84f758dde3d11b632e908b3093d64fe2e70ea0a true
check_key f4c3211ec70affc1c9a94a6589460ee8360dad5f8c679152f16994038532e3fc true
check_key c2b3d73ac14956d7fdf12fa92235af1bb09e1566a6a6ffd0025682c750abdd69 false
check_key b7e68c12207d2e2104fb2ca224829b6fccc1c0e2154e8a931e3c837a945f4430 false
check_key 56ca0ca227708f1099bda1463db9559541c8c11ffad7b3d95c717471f25a01bf true
check_key 3eef3a46833e4d851671182a682e344e36bea7211a001f3b8af1093a9c83f1b2 true
check_key bd1f4a4f26cab7c1cbc0e17049b90854d6d28d2d55181e1b5f7a8045fcdfa06e true
check_key 8537b01c87e7c184d9555e8d93363dcd9b60a8acc94cd3e41eb7525fd3e1d35a false
check_key 68ace49179d549bad391d98ab2cc8afee65f98ce14955c3c1b16e850fabec231 true
check_key f9922f8a660e7c3e4f3735a817d18b72f59166a0be2d99795f953cf233a27e24 true
check_key 036b6be3da26e80508d5a5a6a5999a1fe0db1ac4e9ade8f1ea2eaf2ea9b1a70e true
check_key 5e595e886ce16b5ea31f53bcb619f16c8437276618c595739fece6339731feb0 false
check_key 4ee2cebae3476ed2eeb7efef9d20958538b3642f938403302682a04115c0f8ed false
check_key 519eedbd0da8676063ce7d5a605b3fc27afeecded857afa24b894ad248c87b5d false
check_key ce2b627c0accf4a3105796680c37792b30c6337d2d4fea11678282455ff82ff7 false
check_key aa26ed99071a8416215e8e7ded784aa7c2b303aab67e66f7539905d7e922eb4d false
check_key 435ae49c9ca26758aa103bdcca8d51393b1906fe27a61c5245361e554f335ec2 true
check_key 42568af395bd30024f6ccc95205c0e11a6ad1a7ee100f0ec46fcdf0af88e91fb false
check_key 0b4a78d1fde56181445f04ca4780f0725daa9c375b496fab6c037d6b2c2275db true
check_key 2f82d2a3c8ce801e1ad334f9e074a4fbf76ffac4080a7331dc1359c2b4f674a4 false
check_key 24297d8832d733ed052dd102d4c40e813f702006f325644ccf0cb2c31f77953f false
check_key 5231a53f6bea7c75b273bde4a9f673044ed87796f20e0909978f29d98fc8d4f0 true
check_key 94b5affcf78be5cf62765c32a0794bc06b4900e8a47ddba0e166ec20cec05935 true
check_key c14b4d846ea52ffbbb36aa62f059453af3cfae306280dada185d2d385ef8f317 true
check_key cceb34fddf01a6182deb79c6000a998742d4800d23d1d8472e3f43cd61f94508 true
check_key 1faffa33407fba1634d4136cf9447896776c16293b033c6794f06774b514744c true
check_key faaac98f644a2b77fb09ba0ebf5fcddf3ff55f6604c0e9e77f0278063e25113a true
check_key 09e8525b00bea395978279ca979247a76f38f86dce4465eb76c140a7f904c109 true
check_key 2d797fc725e7fb6d3b412694e7386040effe4823cdf01f6ec7edea4bc0e77e20 false
check_key bbb74dabee651a65f46bca472df6a8a749cc4ba5ca35078df5f6d27a772f922a false
check_key 77513ca00f3866607c3eff5c2c011beffa775c0022c5a4e7de1120a27e6687fd true
check_key 10064c14ace2a998fc2843eeeb62884fe3f7ab331ca70613d6a978f44d9868eb false
check_key 026ae84beb5e54c62629a7b63702e85044e38cadfc9a1fcabee6099ba185005c false
check_key aef91536292b7ba34a3e787fb019523c2fa7a0d56fca069cc82ccb6b02a45b14 false
check_key 147bb1a82c623c722540feaad82b7adf4b85c6ec0cbcef3ca52906f3e85617ac true
check_key fc9fb281a0847d58dc9340ef35ef02f7d20671142f12bdd1bfb324ab61d03911 false
check_key b739801b9455ac617ca4a7190e2806669f638d4b2f9288171afb55e1542c8d71 false
check_key 494cc1e2ee997eb1eb051f83c4c89968116714ddf74e460d4fa1c6e7c72e3eb3 true
check_key ed2fbdf2b727ed9284db90ec900a942224787a880bc41d95c4bc4cf136260fd7 true
check_key 02843d3e6fc6835ad03983670a592361a26948eb3e31648d572416a944d4909e true
check_key c14fea556a7e1b6b6c3d4e2e38a4e7e95d834220ff0140d3f7f561a34e460801 true
check_key 5f8f82a35452d0b0d09ffb40a1154641916c31e161ad1a6ab8cfddc2004efdf6 false
check_key 7b93d72429fab07b49956007eba335bb8c5629fbf9e7a601eaa030f196934a56 true
check_key 6a63ed96d2e46c2874beaf82344065d94b1e5c04406997f94caf4ccd97cfbab9 false
check_key c915f409e1e0f776d1f440aa6969cfec97559ef864b07d8c0d7c1163871b4603 true
check_key d06bc33630fc94303c2c369481308f805f5ce53c40141160aa4a1f072967617e false
check_key 1aafb14ca15043c2589bcd32c7c5f29479216a1980e127e9536729faf1c40266 true
check_key 58c115624a20f4b0c152ccd048c54a28a938556863ab8521b154d3165d3649cd false
check_key 9001ba086e8aa8a67e128f36d700cc641071556306db7ec9b8ac12a6256b27b7 false
check_key 898c468541634fb0def11f82c781341fce0def7b15695af4e642e397218c730c true
check_key 47ea6539e65b7b611b0e1ae9ee170adf7c31581ca9f78796d8ebbcc5cd74b712 false
check_key 0c60952a64eeac446652f5d3c136fd36966cf66310c15ee6ab2ecbf981461257 false
check_key 682264c4686dc7736b6e46bdc8ab231239bc5dac3f5cb9681a1e97a527945e8e true
check_key 276006845ca0ea4238b231434e20ad8b8b2a36876effbe1d1e3ffb1f14973397 true
check_key eecd3a49e55e32446f86c045dce123ef6fe2e5c57db1d850644b3c56ec689fce true
check_key a4dced63589118db3d5aebf6b5670e71250f07485ca4bb6dddf9cce3e4c227a1 false
check_key b8ade608ba43d55db7ab481da88b74a9be513fca651c03e04d30cc79f50e0276 false
check_key 0d91de88d007a03fe782f904808b036ff63dec6b73ce080c55231afd4ed261c3 true
check_key 87c59becb52dd16501edadbb0e06b0406d69541c4d46115351e79951a8dd9c28 true
check_key 9aee723be2265171fe10a86d1d3e9cf5a4e46178e859db83f86d1c6db104a247 false
check_key 509d34ae5bf56db011845b8cdf0cc7729ed602fce765e9564cb433b4d4421a43 false
check_key 06e766d9a6640558767c2aab29f73199130bfdc07fd858a73e6ae8e7b7ba23ba false
check_key 801c4fe5ab3e7cf13f7aa2ca3bc57cc8eba587d21f8bc4cd40b1e98db7aec8d9 false
check_key d85ad63aeb7d2faa22e5c9b87cd27f45b01e6d0fdc4c3ddf105584ac0a021465 false
check_key a7ca13051eb2baeb5befa5e236e482e0bb71803ad06a6eae3ae48742393329d2 true
check_key 5a9ba3ec20f116173d933bf5cf35c320ed3751432f3ab453e4a6c51c1d243257 false
check_key a4091add8a6710c03285a422d6e67863a48b818f61c62e989b1e9b2ace240a87 false
check_key bdee0c6442e6808f25bb18e21b19032cf93a55a5f5c6426fba2227a41c748684 true
check_key d4aeb6cdad9667ec3b65c7fbc5bfd1b82bba1939c6bb448a86e40aec42be5f25 false
check_key 73525b30a77f1212f7e339ec11f48c453e476f3669e6e70bebabc2fe9e37c160 true
check_key 45501f2dc4d0a3131f9e0fe37a51c14869ab610abd8bf0158111617924953629 false
check_key 07d0e4c592aa3676adf81cca31a95d50c8c269d995a78cde27b2a9a7a93083a6 false
check_key a1797d6178c18add443d22fdbf45ca5e49ead2f78b70bdf1500f570ee90adca5 true
check_key 0961e82e6e7855d7b7bf96777e14ae729f91c5bbd20f805bd7daac5ccbec4bab false
check_key 57f5ba0ad36e997a4fb585cd2fc81b9cc5418db702c4d1e366639bb432d37c73 true
check_key 82b005be61580856841e042ee8be74ae4ca66bb6733478e81ca1e56213de5c05 false
check_key d7733dcae1874c93e9a2bd46385f720801f913744d60479930dad7d56c767cdc false
check_key b8b8b698609ac3f1bd8f4965151b43b362e6c5e3d1c1feae312c1d43976d59ab true
check_key 4bba7815a9a1b86a5b80b17ac0b514e2faa7a24024f269b330e5b7032ae8c04e true
check_key 0f70da8f8266b58acda259935ef1a947c923f8698622c5503520ff31162e877b false
check_key 233eaa3db80f314c6c895d1328a658a9175158fa2483ed216670c288a04b27bc false
check_key a889f124fabfd7a1e2d176f485be0cbd8b3eeaafeee4f40e99e2a56befb665be true
check_key 2b7b8abc198b11cf7efa21bc63ec436f790fe1f9b8c044440f183ab291af61d6 true
check_key 2491804714f7938cf501fb2adf07597b4899b919cabbaab49518b8f8767fdc6a true
check_key 52744a54fcb00dc930a5d7c2bc866cbfc1e75dd38b38021fd792bb0ca9f43164 true
check_key e42cbf70b81ba318419104dffbb0cdc3b7e7d4698e422206b753a4e2e6fc69bb false
check_key 2faff73e4fed62965f3dbf2e6446b5fea0364666cc8c9450b6ed63bbb6f5f0e7 true
check_key 8b963928d75be661c3c18ddd4f4d1f37ebc095ce1edc13fe8b23784c8f416dfd false
check_key b1162f952808434e4d2562ffda98bd311613d655d8cf85dc86e0a6c59f7158bc true
check_key 5a69adcd9e4f5b0020467e968d85877cb3aa04fa86088d4499b57ca65a665836 true
check_key 61ab47da432c829d0bc9d4fdb59520b135428eec665ad509678188b81c7adf49 false
check_key 154bb547f22f65a87c0c3f56294f5791d04a3c14c8125d256aeed8ec54c4a06e true
check_key 0a78197861c30fd3547b5f2eabd96d3ac22ac0632f03b7afd9d5d2bfc2db352f true
check_key 8bdeadcca1f1f8a4a67b01ed2f10ef31aba7b034e8d1df3a69fe9aebf32454e0 false
check_key f4b17dfca559be7d5cea500ac01e834624fed9befae3af746b39073d5f63190d true
check_key 622c52821e16ddc63b58f3ec2b959fe8c6ea6b1a596d9a58fd81178963f41c01 true
check_key 07bedd5d55c937ef5e23a56c6e58f31adb91224d985285d7fef39ede3a9efb17 false
check_key 5179bf3b7458648e57dc20f003c6bbfd55e8cd7c0a6e90df6ef8e8183b46f99d true
check_key 683c80c3f304f10fdd53a84813b5c25b1627ebd14eb29b258b41cd14396ef41f true
check_key c266244ed597c438170875fe7874f81258a830105ca1108131e6b8fea95eb8ba true
check_key 0c1cdc693df29c2d1e66b2ce3747e34a30287d5eb6c302495634ec856593fe8e true
check_key 28950f508f6a0d4c20ab5e4d55b80565a6a539092e72b7eb0ed9fa5017ecef88 false
check_key 8328a2a5fcfc4433b1c283539a8943e6eb8cc16c59f29dedc3af2c77cfd56f25 true
check_key 5d0f82319676d4d3636ff5dc2a38ea5ec8aeaac4835fdcab983ab35d76b7967b false
check_key cafcc75e94a014115f25c23aaae86e67352f928f468d4312b92240ff0f3a4481 false
check_key 3e5fdd8072574218f389d018e959669e8ca4ef20b114ea7dce7bfb32339f9f42 true
check_key 591763e3390a78ccb529ceea3d3a97165878b179ad2edaa166fd3c78ec69d391 true
check_key 7a0a196935bf79dc2b1c3050e8f2bf0665f7773fc07511b828ec1c4b1451d317 false
check_key 9cf0c034162131fbaa94a608f58546d0acbcc2e67b62a0b2be2ce75fc8c25b9a false
check_key e3840846e3d32644d45654b96def09a5d6968caca9048c13fcaab7ae8851c316 false
check_key a4e330253739af588d70fbda23543f6df7d76d894a486d169e5fedf7ed32d2e2 false
check_key cfb41db7091223865f7ecbdda92b9a6fb08887827831451de5bcb3165395d95d true
check_key 3d10bd023cef8ae30229fdbfa7446a3c218423d00f330857ff6adde080749015 false
check_key 4403b53b8d4112bb1727bb8b5fd63d1f79f107705ffe17867704e70a61875328 false
check_key 121ef0813a9f76b7a9c045058557c5072de6a102f06a9b103ead6af079420c29 true
check_key 386204cf473caf3854351dda55844a41162eb9ce4740e1e31cfef037b41bc56e false
check_key eb5872300dc658161df469364283e4658f37f6a1349976f8973bd6b5d1d57a39 true
check_key b8f32188f0fc62eeb38a561ff7b7f3c94440e6d366a05ef7636958bc97834d02 false
check_key a817f129a8292df79eef8531736fdebb2e985304653e7ef286574d0703b40fb4 false
check_key 2c06595bc103447b9c20a71cd358c704cb43b0b34c23fb768e6730ac9494f39e true
check_key dd84bc4c366ced4f65c50c26beb8a9bc26c88b7d4a77effbb0f7af1b28e25734 false
check_key 76b4d33810eed637f90d49a530ac5415df97cafdac6f17eda1ba7eb9a14e5886 true
check_key 926ce5161c4c92d90ec4efc58e5f449a2c385766c42d2e60af16b7362097aef5 false
check_key 20c661f1e95e94a745eb9ec7a4fa719eff2f64052968e448d4734f90952aefee false
check_key 671b50abbd119c756010416e15fcdcc9a8e92eed0f67cbca240c3a9154db55c0 false
check_key df7aeee8458433e5c68253b8ef006a1c74ce3aef8951056f1fa918a8eb855213 false
check_key 70c81a38b92849cf547e3d5a6570d78e5228d4eaf9c8fdd15959edc9eb750daf false
check_key 55a512100b72d4ae0cfc16c75566fcaa3a7bb9116840db1559c71fd0e961cc36 false
check_key dbfbec4d0d2433a794ad40dc0aea965b6582875805c9a7351b47377403296acd true
check_key 0a7fe09eb9342214f98b38964f72ae3c787c19e5d7e256af9216f108f88b00a3 true
check_key a82e54681475f53ced9730ee9e3a607e341014d9403f5a42f3dbdbe8fc52e842 true
check_key 4d1f90059f7895a3f89abf16162e8d69b399c417f515ccb43b83144bbe8105f6 true
check_key 94e5c5b8486b1f2ff4e98ddf3b9295787eb252ba9b408ca4d7724595861da834 false
check_key d16e3e8dfa6d33d1d2db21c651006ccddbf4ce2e556594de5a22ae433e774ae6 false
check_key a1b203ec5e36098a3af08d6077068fec57eab3a754cbb5f8192983f37191c2df false
check_key 5378bb3ec8b4e49849bd7477356ed86f40757dd1ea3cee1e5183c7e7be4c3406 false
check_key 541a4162edeb57130295441dc1cb604072d7323b6c7dffa02ea5e4fed1d2ee9e true
check_key d8e86e189edcc4b5c262c26004691edd7bd909090997f886b00ed4b6af64d547 false
check_key 18a8731d1983d1df2ce2703b4c85e7357b6356634ac1412e6c2ac33ad35f8364 false
check_key b21212eac1eb11e811022514c5041233c4a07083a5b20acd7d632a938dc627de true
check_key 50efcfac1a55e9829d89334513d6d921abeb237594174015d154512054e4f9d1 true
check_key 9c44e8bcba31ddb4e67808422e42062540742ebd73439da0ba7837bf26649ec4 true
check_key b068a4f90d5bd78fd350daa129de35e5297b0ad6be9c85c7a6f129e3760a1482 false
check_key e9df93932f0096fcf2055564457c6dc685051673a4a6cd87779924be5c4abead true
check_key eddab2fc52dac8ed12914d1eb5b0da9978662c4d35b388d64ddf8f065606acaf true
check_key 54d3e6b3f2143d9083b4c98e4c22d98f99d274228050b2dc11695bf86631e89f true
check_key 6da1d5ef1827de8bbf886623561b058032e196d17f983cbc52199b31b2acc75b true
check_key e2a2df18e2235ebd743c9714e334f415d4ca4baf7ad1b335fb45021353d5117f true
check_key f34cb7d6e861c8bfe6e15ac19de68e74ccc9b345a7b751a10a5c7f85a99dfeb6 false
check_key f36e2f5967eb56244f9e4981a831f4d19c805e31983662641fe384e68176604a true
check_key c7e2dc9e8aa6f9c23d379e0f5e3057a69b931b886bbb74ded9f660c06d457463 true
check_key b97324364941e06f2ab4f5153a368f9b07c524a89e246720099042ad9e8c1c5b false
check_key eff75c70d425f5bba0eef426e116a4697e54feefac870660d9cf24c685078d75 false
check_key 161f3cd1a5873788755437e399136bcbf51ff5534700b3a8064f822995a15d24 false
check_key 63d6d3d2c21e88b06c9ff856809572024d86c85d85d6d62a52105c0672d92e66 false
check_key 1dc19b610b293de602f43dca6c204ce304702e6dc15d2a9337da55961bd26834 false
check_key 28a16d02405f509e1cfef5236c0c5f73c3bcadcd23c8eff377253941f82769db true
check_key 682d9cc3b65d149b8c2e54d6e20101e12b7cf96be90c9458e7a69699ec0c8ed7 false
check_key 0000000000000000000000000000000000000000000000000000000000000000 true
check_key 0000000000000000000000000000000000000000000000000000000000000080 true
check_key 0100000000000000000000000000000000000000000000000000000000000000 true
check_key 0100000000000000000000000000000000000000000000000000000000000080 false
check_key 0200000000000000000000000000000000000000000000000000000000000000 false
check_key 0200000000000000000000000000000000000000000000000000000000000080 false
check_key 0300000000000000000000000000000000000000000000000000000000000000 true
check_key 0300000000000000000000000000000000000000000000000000000000000080 true
check_key 0400000000000000000000000000000000000000000000000000000000000000 true
check_key 0400000000000000000000000000000000000000000000000000000000000080 true
check_key 0500000000000000000000000000000000000000000000000000000000000000 true
check_key 0500000000000000000000000000000000000000000000000000000000000080 true
check_key 0600000000000000000000000000000000000000000000000000000000000000 true
check_key 0600000000000000000000000000000000000000000000000000000000000080 true
check_key 0700000000000000000000000000000000000000000000000000000000000000 false
check_key 0700000000000000000000000000000000000000000000000000000000000080 false
check_key 0800000000000000000000000000000000000000000000000000000000000000 false
check_key 0800000000000000000000000000000000000000000000000000000000000080 false
check_key 0900000000000000000000000000000000000000000000000000000000000000 true
check_key 0900000000000000000000000000000000000000000000000000000000000080 true
check_key 0a00000000000000000000000000000000000000000000000000000000000000 true
check_key 0a00000000000000000000000000000000000000000000000000000000000080 true
check_key 0b00000000000000000000000000000000000000000000000000000000000000 false
check_key 0b00000000000000000000000000000000000000000000000000000000000080 false
check_key 0c00000000000000000000000000000000000000000000000000000000000000 false
check_key 0c00000000000000000000000000000000000000000000000000000000000080 false
check_key 0d00000000000000000000000000000000000000000000000000000000000000 false
check_key 0d00000000000000000000000000000000000000000000000000000000000080 false
check_key 0e00000000000000000000000000000000000000000000000000000000000000 true
check_key 0e00000000000000000000000000000000000000000000000000000000000080 true
check_key 0f00000000000000000000000000000000000000000000000000000000000000 true
check_key 0f00000000000000000000000000000000000000000000000000000000000080 true
check_key 1000000000000000000000000000000000000000000000000000000000000000 true
check_key 1000000000000000000000000000000000000000000000000000000000000080 true
check_key 1100000000000000000000000000000000000000000000000000000000000000 false
check_key 1100000000000000000000000000000000000000000000000000000000000080 false
check_key 1200000000000000000000000000000000000000000000000000000000000000 true
check_key 1200000000000000000000000000000000000000000000000000000000000080 true
check_key 1300000000000000000000000000000000000000000000000000000000000000 true
check_key 1300000000000000000000000000000000000000000000000000000000000080 true
check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key daffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key dbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key dcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key ddffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key deffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key dfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key e0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key e1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key e2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key e3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key e4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key e5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key e6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key e7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key e8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key e9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key eaffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff true
check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key ebffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f true
check_key ecffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key edffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key eeffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key efffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f0ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f3ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f4ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f5ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f6ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f7ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f8ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key f9ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key faffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key fbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key fcffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key fdffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key feffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7f false
check_key ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff false
hash_to_ec da66e9ba613919dec28ef367a125bb310d6d83fb9052e71034164b6dc4f392d0 52b3f38753b4e13b74624862e253072cf12f745d43fcfafbe8c217701a6e5875
hash_to_ec a7fbdeeccb597c2d5fdaf2ea2e10cbfcd26b5740903e7f6d46bcbf9a90384fc6 f055ba2d0d9828ce2e203d9896bfda494d7830e7e3a27fa27d5eaa825a79a19c
hash_to_ec ed6e6579368caba2cc4851672972e949c0ee586fee4d6d6a9476d4a908f64070 da3ceda9a2ef6316bf9272566e6dffd785ac71f57855c0202f422bbb86af4ec0
hash_to_ec 9ae78e5620f1c4e6b29d03da006869465b3b16dae87ab0a51f4e1b74bc8aa48b 72d8720da66f797f55fbb7fa538af0b4a4f5930c8289c991472c37dc5ec16853
hash_to_ec ab49eb4834d24db7f479753217b763f70604ecb79ed37e6c788528720f424e5b 45914ba926a1a22c8146459c7f050a51ef5f560f5b74bae436b93a379866e6b8
hash_to_ec 5b79158ef2341180b8327b976efddbf364620b7e88d2e0707fa56f3b902c34b3 eac991dcbba39cb3bd166906ab48e2c3c3f4cd289a05e1c188486d348ede7c2e
hash_to_ec f21daa7896c81d3a7a2e9df721035d3c3902fe546c9d739d0c334ed894fb1d21 a6bedc5ffcc867d0c13a88a03360c8c83a9e4ddf339851bd3768c53a124378ec
hash_to_ec 3dae79aaca1abe6aecea7b0d38646c6b013d40053c7cdde2bed094497d925d2b 1a442546a35860a4ab697a36b158ded8e001bbfe20aef1c63e2840e87485c613
hash_to_ec 3d219463a55c24ac6f55706a6e46ade3fcd1edc87bade7b967129372036aca63 b252922ab64e32968735b8ade861445aa8dc02b763bd249bff121d10829f7c52
hash_to_ec bc5db69aced2b3197398eaf7cf60fd782379874b5ca27cb21bd23692c3c885cc ae072a43f78a0f29dc9822ae5e70865bbd151236a6d7fe4ae3e8f8961e19b0e5
hash_to_ec 98a6ed760b225976f8ada0579540e35da643089656695b5d0b8c7265a37e2342 6a99dbfa8ead6228910498cc3ff3fb18cb8627c5735e4b8657da846c16d2dcad
hash_to_ec e9cdc9fd9425a4a2389a5d60f76a2d839f0afbf66330f079a88fe23d73eae930 8aa518d091928668f3ca40e71e14b2698f6cae097b8120d7f6ae9afba8fd3d60
hash_to_ec a50c026c0af2f9f9884c2e9b8464724ac83bef546fec2c86b7de0880980d24fb b07433f8df39da2453a1e13fd413123a158feae602d822b724d42ef6c8e443bf
hash_to_ec bf180e20d160fa23ccfa6993febe22b920160efc5a9614245f1a3a360076e87a 9d6454ff69779ce978ea5fb3be88576dc8feaedf151e93b70065f92505f2e800
hash_to_ec b2b64dfeb1d58c6afbf5a56d8c0c42012175ebb4b7df30f26a67b66be8c34614 0523b22e7f220c939b604a15780abc5816709b91b81d9ee1541d44bd2586bbd8
hash_to_ec 463fc877f4279740020d10652c950f088ebdebeae34aa7a366c92c9c8773f63a daa5fa72e70c4d3af407b8f2f3364708029b2d4863bbdde54bd67bd08db0fcad
hash_to_ec 721842f3809982e7b96a806ae1f162d98ae6911d476307ad1e4f24522fd26f55 4397c300a8cfcb42e7cc310bc975dc975ec2d191eaa7e0462998eb2830c34126
hash_to_ec 384da8d9b83972af8cbefc2da5efc744037c8ef40efa4b3bacc3238a6232963d 3c80f107e6868f73ef600ab9229a3f4bbe24f4adce52e6ab3a66d5d510e0670d
hash_to_ec e26f8adef5b6fe5bb01466bff0455ca23fda07e200133697b3b6430ca3332bde e262a58bcc1f8baf1980e00d5d40ba00803690174d14fb4c0f608429ce3df773
hash_to_ec 6e275b4ea4f085a5d3151aa08cf16a8c60b078e70be7ce5dac75b5d7b0eebe7c cb21b5a7744b4fcdc92ead4be0b04bcb9145e7bb4b06eff3bb2f0fe429b85108
hash_to_ec a0dde4561ad9daa796d9cd8a3c34fd41687cee76d128bf2e2252466e3ef3b068 79a2eb06bb7647f5d0aae5da7cf2e2b2d2ce890f25f2b1f81bfc5fef8c87a7d3
hash_to_ec dbaf63830e037b4c329969d1d85e58cb6c4f56014fd08eb38219bd20031ae27c 079c93ae27cd98075a487fd3f7457ad2fb57cdf12ec8651fedd944d765d07549
hash_to_ec 1e87ba8a9acf96948bc199ae55c83ab3277be152c6d0b1d68a07955768d81171 5c6339f834116791f9ea22fcc3970346aaeddacf13fbd0a7d4005fbd469492ca
hash_to_ec 5a544088e63ddf5b9f444ed75a75bc9315c4c50439522f06b4823ecaf5e8a08d e95ca0730d57c6469be3a0f3c94382f8490257e2e546de86c650bdbc6482eaee
hash_to_ec e4e06d92ebb036a5e4bb547dbaa43fd70db3929eef2702649455c86d7e59aa46 e26210ff8ee28e24ef2613df40aa8a874b5e3c1d07ae14acc59220615aa334dc
hash_to_ec 5793b8b32dcc0f204501647f2976493c4f8f1fa5132315226f99f29a5a6fdfce 656e390086906d99852c9696e831f62cb56fc8f85f9a5c936c327f23c7faf4fe
hash_to_ec 84f56fa4d7f12e0efd48b1f7c81c15d6e3843ebb419f4a27ec97028d4f9da19e 0cbd4f0cd288e1e071cce800877de6aef97b63fff867424a4f2b2bab25602608
hash_to_ec 242683ddf0a9fc55f6585de3aa64ea17c9c544896ff7677cd82c98f833bdf2ca 38c36d52314549213df7c7201ab7749a4724cbea92812f583bb48cabc20816ad
hash_to_ec a93ee320dc030aa382168c2eb6d75fce6e5a63a81f15632d514c6de8a7cfa5ee bd0a2facaa95bc95215a94be21996e46f789ee8beb38e75a1173b75fc686c505
hash_to_ec e36136601d84475d25c3f14efe030363d646658937a8a8a19a812d5e6deb5944 2fb93d78fae299c9f6b22346acfb829796ee7a47ec71db5456d8201bec6c35a3
hash_to_ec ba4b67d3d387c66baa4a32ec8b1db7681087e85076e71bab10036388c3aeb011 cc01329ce56f963bf444a124751c45b2c779ccb6dea16ca05251baca246b5401
hash_to_ec 3fbc91896a2585154d6f7094c5ab9c487e29a27951c226eec1235f618e44946b 7d983acbb901bf5497d0708392e5e742ec8c8036cbb0d03403e9929da8cc85a7
hash_to_ec a2da289fed650e9901f69a5f33535eb47c6bd07798633cbf6c00ce3172df76ac dca8a4d30ec2d657fefd0dba9c1c5fd45a79f665048b3cf72ac2c3b7363da1ac
hash_to_ec 99025d2d493f768e273ed66cacd3a5b392761e6bd158ca09c8fba84631ea1534 7ef5af79ab155ab7e1770a47fcd7f194aca43d79ec6e303c7ce18c6a20279b04
hash_to_ec 3cf1d01d0b70fb31f2a2f979c1bae812381430f474247d0b018167f2a2cd9a9f 7c53d799ec938a21bb305a6b5ca0a7a355fa9a68b01d289c4f22b36ce3738f95
hash_to_ec 639c421b49636b2a1f8416c5d6e64425fe51e3b52584c265502379189895668e 0b47216ae5e6e03667143a6cf8894d9d73e3152c64fb455631d81a424410e871
hash_to_ec 4ccf2c973348b7cc4b14f846f9bfcdcb959b7429accf6dede96248946841d990 7fd41f5b97ba42ed03947dd953f8e69770c92cc34b16236edad7ab3c78cbbb2e
hash_to_ec f76ae09fff537f8919fd1a43ff9b8922b6a77e9e30791c82cf2c4b8acb51363e 8e2c6bf86461ad2c230c496ee3896da33c11cc020fd4c70faa3645b329049234
hash_to_ec 98932da7450f15db6c1eef78359904915c31c2aa7572366ec8855180edb81e3a 86180adddfac0b4d1fb41d58e98445dde1da605b380d392e9386bd445f1d821c
hash_to_ec ab26a1660988ec7aba91fc01f7aa9a157bbc12927f5b197062b922a5c0c7f8dd 2c44a43eda0d0aad055f18333e761f2f2ec11c585ec7339081c19266af918e4f
hash_to_ec 4465d0c1b4930cc718252efd87d11d04162d2a321b9b850c4a19a6acdfca24f4 b03806287d804188a4d679a0ecee66f399d7bdc3bd1494f9b2b0772bbb5a034f
hash_to_ec 0f2a7867864ed00e5c40082df0a0b031c89fa5f978d9beb2fde75153f51cfb75 5c471e1b118ef9d76c93aec70e0578f46e8db1d55affd447c1f64c0ad9a5caa5
hash_to_ec 5c2808c07d8175f332cae050ce13bec4254870d76abff68faf34b0b8d3ad5000 eeff1d9a5aa428b7aecc575e63dde17294072eb246568493e1ed88ce5c95b779
hash_to_ec 36300a21601fad00d00da45e27b36c11923b857f97e50303bd01f21998eaef95 b33b077871e6f5dad8ff6bc621c1b6dedcf700777d996c8c02d73f7297108b7e
hash_to_ec 9e1afb76d6c480816d2cedd7f2ab08a36c309efaa3764dcdb51bad6049683805 4cd96ba7b543b1a224b8670bf20b3733e3910711d32456d3e58e920215788adf
hash_to_ec 685f152704664495459b76c81567a4b571e8b307dd0e3c9b08ee95651a006047 80dd6b637580cb3be76025867f1525852b65a7a66066993fda3af7eb187dc1a5
hash_to_ec 0b216444391a1163c14f7b27f9135e9747978c0e426dce1fa65c657f3e9146be 021259695a6854a4a03e8c74d09ab9630a401bfca06172a733fe122f01af90b4
hash_to_ec cfcb35e98f71226c3558eaa9cf620db5ae207ece081ab13ddea4b1f122850a5a 46763d2742e2cdffe80bb3d056f4d3a1565aa83f19aab0a1f89e54ad81ae0814
hash_to_ec 07e7292da8cdcdb58ee30c3fa16f1d609e9b3b1110dd6fa9b2cc18f4103a1c12 fe949ca251ac66f13a8925ae624a09cdbf6696d3c110442338d37700536e8ec7
hash_to_ec 813bc7e3749e658190cf2a4e358bc07a6671f262e2c4eef9f44c66066a72e6a7 6b92fbda984bd0e6f4af7a5e04c2b66b6f0f9d197a9694362a8556e5b7439f8a
hash_to_ec 89c50a1e5497156e0fae20d99f5e33e330362b962c9ca00eaf084fe91aaec71d ef36cb75eb95fb761a8fa8c376e9c4447bcd61421250f7a711bd289e6ed78a9b
hash_to_ec d9bd9ff2dd807eb25de7c5de865dbc43cce2466389cedbc92b90aab0eb014f81 30104771ff961cd1861cd053689feab888c57b8a4a2e3989646ea7dea40f3c04
hash_to_ec b8c837501b6ca3e118db9848717c847c062bf0ebeca5a7c211726c1426878af5 19a1e204b4a32ce9cccf5d96a541eb76a78789dceaf4fe69964e58ff96c29b63
hash_to_ec 84376c5350a42c07ac9f96e8d5c35a8c7f62c639a1834b09e4331b5962ecace8 ba1e4437d5048bd1294eadc502092eafc470b99fde82649e84a52225e68e88f2
hash_to_ec a3345e4a4cfc369bf0e7d11f49aed0d2a6ded00e3ff8c7605db9a919cf730640 0d318705c16e943c0fdcde134aaf6e4ccce9f3d9161d001861656fc7ea77a0b1
hash_to_ec 3c994dfb9c71e4f401e65fd552dc9f49885f88b8b3588e24e1d2e9b8870ffab1 984157de5d7c2c4b43b2bffea171809165d7bb442baea88e83b27f839ebdb939
hash_to_ec 153674c1c1b18a646f564af77c5bd7de452dc3f3e1e2326bfe9c57745b69ec5c e9a4a1e225ae472d1b3168c99f8ba1943ad2ed84ef29598f3f96314f22db9ef2
hash_to_ec 2d46a705d4fe5d8b5a1f4e9ef46d9e06467450eb357b6d39faa000995314e871 b9d1aec540bf6a9c0e1b325ab87d4fbe66b1df48986dde3cb62e66e136eba107
hash_to_ec 6764c3767f16ec8faecc62f9f76735f76b11d7556aeb61066aeaeaad4fc9042f 3a5c68fb94b023488fb5940e07d1005e7c18328e7a84f673ccd536c07560a57b
hash_to_ec c99c6ee5804d4b13a445bc03eaa07a6ef5bcb2fff0f71678dd3bd66b822f8be8 a9e1ce91deed4136e6e53e143d1c0af106abde9d77c066c78ebbf5d227f9dde0
hash_to_ec 3009182e1efac085c7eba24a7d9ef28ace98ebafa72211e73a41c935c37e6768 e55431a4c89d38bd95f8092cdf6e44d164ad5855677aba17ec262abc8c217c86
hash_to_ec e7153acd114a7636a207be0b67fa86fee56dd318f2808a81e35dd13d4251b2d0 ff2b98d257e4d4ff7379e8871441ca7d26e73f78f3f5afcf421d78c9799ba677
hash_to_ec 6378586744b721c5003976e3e18351c49cd28154c821bc45338892e5efedd197 3d765fb7bb4e165a3fa6ea00b5b5e22250f3861f0db0099626d9a9020443dda2
hash_to_ec 5be49aba389b7e3ad6def3ba3c7dbec0a11a3c36fc9d441130ef370b8a8d29c2 2d61faf38062dc98ae1aaafec05e90a925c9769df5b8b8f7090d9e91b2a11151
hash_to_ec f7bc382178d38e1b9a1a995bd8347c1283d8a2e8d150379faa53fd125e903d2b 544c815da65c3c5994b0ac7d6455578d03a2bc7cf558b788bcdb3430e231635a
hash_to_ec c28b5c4b6662eebb3ec358600644849ebeb59d827ed589c161d900ca18715fa8 a2d64db3c0e0353c257aadf9abc12ac779654d364f348b9f8e429aa7571203db
hash_to_ec 3a4792e5df9b2416a785739b9cf4e0d68aef600fa756a399cc949dd1fff5033a 4b54591bd79c30640b700dfb7f20158f692f467b6af70bd8a4e739c14a66c86a
hash_to_ec 002e70f25e1ceaf35cc14b2c6975a4c777b284a695550541e6f5424b962c19f5 73987e9342e338eb57a7a9e03bd33144db37c1091e952a10bd243c5bb295c18a
hash_to_ec 7eb671319f212c9cae0975571b6af109124724ba182937a9066546c92bdeff0c 49b46da3be0df1d141d2a323d5af82202afa2947a95b9f3df47722337f0d5798
hash_to_ec ca093712559c8edd5c51689e2ddcb8641c2960e5d9c8b03a44926bb798a0c8dc b9ef9cf0f8e4a3d123db565afafb1102338bfb75498444ac0a25c5ed70d615da
hash_to_ec cfea0a08a72777ff3aa7be0d8934587fa4127cd49a1a938232815dc3fd8b23ac b4de604b3d712f1ef578195fb0e53c865d41e2dfe425202c6cfe6f10e4404eb5
hash_to_ec aa0122ae258d6db21a26a31c0c92d8a0e3fdb46594aed41d561e069687dedcd6 5247eaec346de1c6cddf0ab04c12cd1d85cdb6d3a2fba2a5f9a5fe461abef5eb
hash_to_ec b3941734f4d3ba34ccaf03c4c737ac5a1e036eb74309300ce44d73aca24fef08 535938985c936e3780c61fe29a4121d6cb89a05080b6c2147031ea0c2b5b9829
hash_to_ec 8c2ee1041a2743b30dcbf413cc9232099b9268f82a5a21a09b63e7aff750882f 6ad0d4b3a65b522dfad0e9ac814b1fb939bc4910bd780943c72f57f362754cca
hash_to_ec 4b6829a2a2d46c8f0d0c23db0f735fcf976524bf39ccb623b919dd3b28ad5193 2e0097d7f92993bc45ba06baf4ca63d64899d86760adc4eb5eeefb4a78561050
hash_to_ec 9c1407cb6bba11e7b4c1d274d772f074f410d6fe9a1ee7a22cddf379257877d9 692261c7d6a9a7031c67d033f6d82a68ef3c27bd51a5666e55972238769821cd
hash_to_ec 638c42e4997abf8a4a9bffd040e31bd695d590cde8afbd7efd16ffdbae63bf66 793024c8ce196a2419f761dde8734734af6bd9eb772b30cc78f2cb89598dce97
hash_to_ec 1fb60d79600de151a1cf8a2334deb5828632cbd91cb5b3d45ae06e08187ae23d ff2542cde5bc2562e69471a31cfc3d0c26e2f6ccc1891a633b07a3968e42521c
hash_to_ec d2fdbbae4e38a1b734151c3df52540feb2d3ff74edfef2f740e49a5c363406ee 344c83ba6ff4e38b257077623d298d2f2b52002645021241bc9389f81b29ad12
hash_to_ec 836c27a6ddfe1a24aba3d6022dff6dfe970f142d8b4ac6afb8efcba5a051942f b8af481d33726b3f875268282d621e4c63f891a09f920b8f2f49080f3a507387
hash_to_ec 46281153ddcdf2e79d459693b6fe318c1969538dd59a750b790bfff6e9481abf 8eaf534919ab6573ba4e0fbde0e370ae01eae0763335177aa429f61c4295e9d4
hash_to_ec d57b789e050bf3db462b79a997dac76aa048d4be05f133c66edee56afd3dbe66 0c5a294cb2cbb6d9d1c0a1d57d938278f674867f612ed89dcbe4533449f1a131
hash_to_ec 548d524d03ac22da18ff4201ce8dbee83ad9af54ee4e26791d26ed2ab8f9bfc7 c6609d9e7d9fd982dec8a166ff4fb6f7d195b413aad2df85f73d555349134f3b
hash_to_ec cc920690422e307357f573b87a6e0e65f432c6ec12a604eb718b66ba18897a56 6f11c466d1c72fccd81e51d9bda03b6e8d6a395e1d931b2a84e392dc9a3efa18
hash_to_ec c7fb8a51f5fcd8824fc0875d4eb57ab4917cb97090a6e2288f852f2bb449edd9 45543fea6eed461016e48598b521f18ff70178afea18032b188deea3e56052fc
hash_to_ec c681bb1b829e24b1c52cb890036b89f0029d261c6a15e5b2c684ee7dfe91e746 263006fe2c6b08f1ab29cdf442472c298e2faf225bbf5c32399d3745cd3904bd
hash_to_ec e06411c542312fdd305e17e46be14c63bab5836dc8751da06164b1ae22d4e20f 901871be7a7ff5aecade2acff869846f3c50de69307ac155f2aa3a74d5472ef2
hash_to_ec 9c725a2acb80fa712f9781da510e5163b1b30f4e1c064c26b5185e537f0614ea 02420d49257846eb39fddd196d3171679f6be21d9adac667786b65a6e90f57b1
hash_to_ec 22792772820feafa85c5cb3fa8f876105251bef08617d389619697f47dff54f2 a3ad444e7811693687f3925e7c315ae55d08d9f4b0a29876bc2a891ab941c1c3
hash_to_ec 0587b790121395d0f4f39093d10b4817f58a1e80621a24eea22b3c127d6ac5a2 86c417c695c64c7becaad0d59ddbb2bca4cb2b409a21253d680aac1a08617095
hash_to_ec fa0b5f28399bef0cd87bfe6b8a2b69e9c5506fb4bacd22deba8049615a5db526 ede0ea240036ff75d075258a053f3ce5d6f77925d358dbe33c06509fc9b12111
hash_to_ec 62a3274fc0bed109d5057b865c2ba6b6a5a417cb90a3425674102fcd457ede2d ff7e46751bb4dcd1e800a8feab7cf6771f42dc0cfed7084c23b8a5d255a6f34e
hash_to_ec a6fcd4aecaaaf281563b9b7cd6fbc7b1829654f644f4165942669a2ef632b2bf 28f136be0eb957a5b36f8ec294399c9f73ad3a3c9bb953ad191758ced554a233
hash_to_ec 01baa4c06d6676c9b286cda76ed949fd80a408b3309500ba84a5bb7e3dce58e2 a943d1afa2efce284740e7db21ea02db70b124808be2ff80cbf9b9cb96c7b73e
hash_to_ec dd9aff9c006ba514cef8fae665657bc9813fe2715467cf479643ea4c4e365d6d 68de2f7d49de4004286ce0989a06a686b15d0f463a02ffd448a18914e1ddf713
hash_to_ec 3df3513d5e539161761ce7992ab9935f649bc934bed0da3c5e1095344b733bb9 e9c2dd747d7b2482474325943cd850102b8093164678362c7621993a790e2a8a
hash_to_ec 7680cfb244dc8ef37c671fff176be1a3dad00e5d283f93145d0cbee74cca2df4 a0fd8c3cca16a130eaa5864cbe8152b7adfbf09e8cf72244b2fc8364c3b20bf4
hash_to_ec 8a547c38bd6b219ea0d612d4a155eba9c56034a1405dcf4b608de787f37e0fd8 76bf0dc40fd0a5508c5e091d8bb7eccfa28b331e72c6a0d4ac0e05a3d651850b
hash_to_ec dd93901621f58465e9791012afa76908f1e80ad80e52b809dc7fc32bb004f0a8 09a0b7ecfe8058b1e9ee01c9b523826867ca97a32efad29ac8ceebca67a4ea00
hash_to_ec b643010220f1f4ee6c7565f6e1b3dc84c18274ede363ac36b6af3707e69a1542 233c9ff8de59e5f96c2f91892a71d9d93fa7316319f30d1615f10ac1e01f9285
hash_to_ec c2637b2299dfc1fd7e953e39a582bafd19e6e7fff3642978eb092b900dbfea80 339587ba1c05e2cba44196a4be1fd218b772199e2c61c3c0ff21dcd54b570c43
hash_to_ec 1f36d3a7e7c468eb000937de138809e381ad2e23414cbbaac49b7f33533ed486 7e5b0a96051c77237a027a79764c2763487af88121c7774645e97827fb744888
hash_to_ec 8c142a55f60b2edbe03335b7f90aa2bd63e567048a65d61c70cb28779c5200af d3d6d5563b3d81c8c91cf9806bb13b2850fb7c162c610fd2f5b83c464add8182
hash_to_ec 99e7b98293c9de1f81aff1376485a990014b8b176521b2a68cdbde6300190398 119cbc01a1d9b9fb4759031d3a70685aebea0f01bc5ee082ce824265fd21b3b4
hash_to_ec 9753bd38be072b51490290be6207ca4545e3541bdf194e0850ae0a9f9e64b8ba 1ad3aa759863153606fa6570f0e1290baded4c8c1f2ba0f67c1911bfc8ccd7a0
hash_to_ec 322703864ceee19b7f17cec2a822f310f0c4da3ff98b0be61a6fd30ac4db649c 89d9e7a5947e1cde874e4030de278070aae363063cd3592ce5411821474f0816
hash_to_ec c1acd01e1e535fad273a8b757d981470f43dd7d95af732901fbba16b6e245761 57e80445248111150da5e63c706b4abbf3eef2cc508bd0347ff6b81e8c59f5bc
hash_to_ec 492473559f181bbe78f60215bc6d3a5168435ea2fc0a508372d6f5ca126e9767 df3965f137cf6f60c56ebd7c8f246281fd6dc92ce23a37e9f846f8452c884e01
hash_to_ec afa9d6e0e2fb972ee806beb450c2c0165e58234b0676a4ec0ca19b6e710d7c35 669a57e69dd2845a5e50ed8e5d8423ac9ae792a43c7738554d6c5e765a7b088a
hash_to_ec 094de050bdadef3b7dbaeeca29381c667e63e71220970149d97b95db8f4db61b 0cf5d03530c5e97850d0964c6a394de9cde1e8e498f8c0e173c518242c07f99a
hash_to_ec 2ce583724bc699ad800b33176a1d983512fe3cb3afa65d99224b23dae223efb7 e1548fd563c75ae5b5366dbab4cb73c54e7d5e087c9e5453125ff8fbe6c83a5c
hash_to_ec 8064974b976ff5ef6adaade6196ab69cda6970cd74f7f5899181805f691ad970 98ae63c47331a4ac433cb2f17230c525982d89d21e2838515a36ec5744ec2d15
hash_to_ec 384911047de609c6ae8438c745897357989363885cef2381a8a00a090cf04a58 4692ec3a0a03263620841c108538d584322fdd24d221a74bf1e1f407f83828af
hash_to_ec 0e1b1ced5ae997ef9c10b72cfc6d8c36d7433c01fc04f4083447f87243282528 6ee443ab0637702b7340bd4a908b9e2e63df0cc423c409fb320eb3f383118b80
hash_to_ec 5a7aea70c85c040af6ff3384bcaa63ec45c015b55b44fffa37ab982a00dc57c5 2df2e20137cefd166c767646ecd2e386d28f405aebe43d739aa55beba04ed407
hash_to_ec 3e878a3567487f20f7c98ea0488a40b87f1ba99e50bbfe9f00a423f927cbd898 697c7e60e4bf8c429ba7ac22b11a4b248d7465fc6abe597ec6d1e1c973330688
hash_to_ec c0bb08350d8a4bb6bf8745f6440e9bd254653102a81c79d6528da2810da758e4 396a872ac9147a69b27223bf4ec4198345b26576b3690f233b832395f2598235
hash_to_ec 6c3026a9284053a4ddb754818f9ae306ffa96eb7003bd03826eeccc9a0cf656e bef73da51d3ba9972a33d1afb7d263094b66ab6dbe3988161b08c17f8c69c2d5
hash_to_ec f80b7d8f5a80d321af3a42130db199d9edcb8f5a82507d8bfca6d002d65458b6 aa59c167ea60ee024421bfbd00adbb3cbfc20e16bd3c9b172a6bef4d47ca7f57
hash_to_ec bc0ffc24615aa02fafef447f17e7b776489cd2cc909f71e8344e01cad9f1610d 5c4195cc8dc3518143f06a9c228ae59ec9a6425a8fab89bfc638ad997cf35220
hash_to_ec b15fad558737229f8816fcba8fbef805bd420c03e392d118c69bdf01890c4924 f5810477e37554728837f097e1b170d1d8c95351c7fff8abbbfc624e1a50c1b9
hash_to_ec ec8c1f10d8e9da9cf0d57c4a1f2c402771bed7970109f3cf21ad32111f1f198f a697e0a3f09827b0cf3a4ffb6386388feda80d30ffffcbd54443dafcba162b28
hash_to_ec a989647bf0d70fdb7533b8c303a2a07f5e42e26a45ffc4e48cff5ba88643a201 450fd73e636f94d0d232600dd39031386b0e2ecde4105124fc451341da9803db
hash_to_ec 7159971b03c365480d91d625a0fadc8e3a632c518acf0dbec87dd659da70e168 377bc43c038ac46cf6565aa0a6d6bf39968c0c1142755dba3141eeebf0acdf5d
hash_to_ec e39089a64fedac4b2c25e36312b33f79d02bf75a883f450f910915b8560a3b06 77efa7db1be020e77596f550de45626824a8268095d56a0991696b211cb329cc
hash_to_ec 2056b3c6347611bb0929dad00ec932a4d9bec0f06b2d57f17e01ffa1528a719e b6072c2be2ce928e8cbbb87e8eb7e06975c0f93b309dd3b6a29edaad2b56f99b
hash_to_ec 2c026793146e81b889fc741d62e06c341ce263560d57cd46d0376f5b29174489 8f1f64b67762aa784969e954c196a2c6610addc3604aa3291eb0b80304dfe9ef
hash_to_ec be6026d6704379c489fa7749832b58bdb1a9685a5ffb68c438537f2f76e0011f 0072569a4090a9ad383a205bb092196c9de871c22506e3bb63d6b9d1b2357c96
hash_to_ec f4db802d5c6b7d7b53663b03d988b4cd0c7cad6c26612c5307754a93ebdc9710 f21bc9be4cb28761f6fe1d0a555ad5e9748375a2e9faea25a1df75cc8d273e18
hash_to_ec c27d79a564c56b00956a55090481e85fbc837fd5fb5e8311ecb436e300c07e3a 1b1891e6abec74621501450cd68bb1eeaa5b2fffff4ec441a55d1235ff3a0842
hash_to_ec a1e2f93c717cad32af386efa624198973df5a710963dd19d4c3ac40032a3a286 69c60571e3f9f63d2bfb359386ae3b8cd9e49a2e9127753002866e85c0443573
hash_to_ec 76920d7b1763474bc94a16433c3c28241a9acdee3ff2b2cb0e6757ba415310aa c1b409169f102b696fc7fa1aa9c48631e58e08b5132b6aadf43407627bb1b499
hash_to_ec 57ac654b29fa227c181fff2121491fcb283af6cbe932c8199c946862c0e90cb2 a204e8d327ea93b0b1bd74a78ffc370b20cea6455e209f2bc258114baa16d728
hash_to_ec 88e66cfaef6432b759c50efce885097d1752252b479dac5ed822fa6c85d56427 6fb84790d3749a5c1088209ee3823848d9c19bf1524215c44031143dd8080d70
hash_to_ec c1e55da929c4f8f793696fc77ff4e1c317c34852d98403bfd15dd388ee7df0df 2f41e76f15c5b480665bd84067e3b543b85ce6de02be9da7a550b5e1ead94d34
hash_to_ec 29e9ace5aa3c5a572b13f4b62b738a764d90c8c293ccb062ad798acbab7c5ef4 bce791aba1edc2a66079628fd838799489ab16b0a475ce7fe62e24cc56fe131c
hash_to_ec f25b2340689dadacaa9a0ef08aee8447d80b982e8a1ea42cf0500a1b9d85b37d f7f53aa117e6772a9abc452b3931b0a99405ac45147e7c550ac9fcf7ffe377b5
hash_to_ec 0cb6c47fc8478063b33f5aed615a05bcc84d782c497b6cc8e76ec1fa11edbfdb 7a0b58b03147e7c9be1d98de49ead2ce738d0071b0af8ca03cc92ceb26fc2246
hash_to_ec 7bd7287d7c4b596fe46fe57a6982c959653487bea843a77dd47d40986200d576 343084618c58284c64a5ff076f891be64885dc2ac73fa1567f7b39fde6b91542
hash_to_ec e4984bf330708152254fb18ecef12d546afd24898a3cf00fba866957b6ee1b82 c70e88b061656181fbd6ff12aca578fb66de5553c756ea4698a248b177185bc6
hash_to_ec cefd6c3cb9754ea632d6aea140af017de5ea12e5184f868936b74d9aa349d603 4b476502a8a483aadd50667f262f95351901628dd3a2aac1a5a41c4ea03f1647
hash_to_ec da5d0f33344ee7f3345204badf183491b9452b84bccc907602c7bad43e5cf43e 9561b9e61241625e028361494d4fa5cd78df4c7219fa64c8fede6d8421b8904a
hash_to_ec d6f0a4f8c770a1274a76fd7ae4e5faf7779249263e1aaecc6f815cf376f5c302 cd5c55820be10f0d38feb81363ede3716a9168601a0dd1ce3109aab81367d698
hash_to_ec b6bf32491d12a41c275d8518fc534d9a0d17aade509e7e8b8409a95c86167307 4aae534abbd67a9a8f2974154606c0e9be8932e920c7a5e931b46a92859acf82
hash_to_ec 0f930beaad041f9cefd867bc194027dd651fb3c9bda5944ececdba8a7136b6d3 521708f8149891b418d0920369569a9d578029c78f8e41c68a0bb68d3ad5df60
hash_to_ec 49b1fe0f97be74b81e0b047027b3e9f726fa5e90a67dafa877309397291c06c5 0852e59dfae5ec32cce606c119376597bce5cd4d04879d329f74e3ec66414cd3
hash_to_ec 4d57647d03f2cfbd4782fcc933e0683b52d35fc8d37283e6c7de522ddfa7e698 cbeb9ebfbbc49ec81fac3b7b063fecac1bb40ea686d3ffb08f82b291715cd87f
hash_to_ec 4ea3238c06fc9346c7421ff85bc0244b893860b94bc437378472814d09b2e99f a1fbae941adc344031bbdf53385dfdc012311490a4eb5e9a2749a21b27ce917a
hash_to_ec 0cd3609f5c78b318cb853d189b73b1ee2d00edd4e5fce2812027daa3fcb1fed1 0c7a7241b16e3c47d41f5abbf205797bd4b63fc425a7120cb2a4bf324e08ae74
hash_to_ec d74ab71428e36943c9868f70d3243469babd27988a1666a06f499a5741a52e3e 65b7c259f3b4547c082b2a7669b2b363668c4d87ac14e80471317b03b34e5216
hash_to_ec f6b151998365e7d69bcbce383dd2e8b5bf93b8b72f029ff942588208c1619591 6ce840ce5dfbca238665c1e6eddb8b045aa85c69b5976fc55ab57e66d3d0a791
hash_to_ec 207751de234b2bd7ec20bdd8326210c23aa68f04875c94ad7e256a96520f25d6 fc8f79ab3af317c38bfb88f40fb84422995a0479cfa6b03fa6df7f4e5f2813fb
hash_to_ec 62291e2873f38c0a234b77d1964205f3f91905c261d3c06f81051a9b0cb787cb 076d1d767457518e6777cb3bd4df22c8a19eb617e4bbccd1b0bd37522d6597a5
hash_to_ec 4b060df2d2854036751d00190ee821cb0066d256d4172539fdfa6fbd1cdfe1f9 59866e927c69e7de5df00dc46c0d2a1ddf799d901128ff040cebb8fd61b95da4
hash_to_ec ac8daf73f9c609bb36bce4fdeec1e50be5f22de38c3904fabcf758f0fc180bc7 7d8dc4e956363b652468a5fecafd7c08d48a2297e93b8edcb38e595fdd5a1fde
hash_to_ec fef7b6563fd27f3aab1d659806b26b8f2ec38bc8feefad50288383c001d1c20f e6e42547f12df431439d45103d2c5a583248f44554a98a3a433cf8c38b11805d
hash_to_ec 40a3d6871c76ecc6bb7b28324478733e196cc11d062dd4c9265cf31be5cf5a97 8c55a3811c241a020b1be202a58d5defbc4c8945d73b132570b47dd7c019ccf0
hash_to_ec 0cd71e7e562b2b47f4bc8640caf20e69d3a62f10231b4c7a372c9691cff9ac3c fb8e4e3de479b3bf1f4f13b4ed5507df1e80bd9250567b9d021b03339d6e7197
hash_to_ec 40a4e62800a99b7a26e0b507ffb29592e5bdba25284dc473048f24b27d25b40a 90ae131d29ee4a71cd764ab26f1ca4e6d09a40db98f8692b345c3a0e130dc860
hash_to_ec 1ddf35193cf52860bfe3e41060a7f44281241c6ae49cd541d24c1aca679b7501 3b4f50013895c522776ced456329c4e727de03575f6b99ae7d238a9f70862121
hash_to_ec 014e0fa8ce9d5df262b9a1765725fde354a855de8aef3fc23684e05dd1ba8d34 3857f57776a3cb68721bcb7f1533a5f9fb416a1dc8824d719399b63a142d24de
hash_to_ec 09987979b0e98d1d5355df8a8698b8f54d3a037d12745c0a4317fe519c3df9cc 32a181e2b754aeced214c73ac459c97d99e63317be3eb923344c64a396173bca
hash_to_ec 51e9e8ec4413e92dbaaba067824c32b018487a8d16412ed310507b4741e18eed 0356b209156b4993fd5d5630308298429a1b0021c19bedecb7719ac607cfa644
hash_to_ec 14d91313dfe46e353310e6a4a23ee15d7a4e1f431700a444be8520e6043d08d9 6f345f4018b5d178d9f61894d9f46ac09ff639483727b0d113943507cee88cfd
hash_to_ec 0d5af9ace87382acfffb9ab1a34b6e921881aa015d4f6d9c73171b2b0a97600d a8dbf36c85bebe6a7b3733e70cd3cd9ed0eb282ca470f344e5fcf9fe959f2e6e
hash_to_ec 996690caac7328b19d20ed28eb0003d675b1a9ff79055ab530e3bf170eb22a94 14340d7d935cffce74b8b2f325c9d92ce0238b51807ef2c1512935bb843194ce
hash_to_ec ad839c4b4c278c8ebe16ff137a558255a1f74646aa87c6cd99e994c7bb97ce8a d4f2da327ffded913b50577be0e583db2b237b5ca74da648e9b985c247073b76
hash_to_ec 26fc2eeeee983e1300d72362fdff42edf08038e4eee277a6e2dbd1bd8c9d6560 3468b8269728c2c0bfc2e53b1575415124798bc0f59b60ea2f14967fc0ca19ce
hash_to_ec db33cecaf4ee6f0ceba338cc5fabfb7462cd952a9c9007357ff3f0ca8336f8bc 0bab38f58686d0ff770f770a297971510bc83e2ff2dfead34823d1c4d67f11af
hash_to_ec a0ee84b3c646526fb8787d26dcd9b7fe9dc713c8a6c1a4ea640465a9f36a64df 4d7a638f6759d3ec45339cd1300e1239cca5f0f658ca3cd29bc9bdb32f44faf0
hash_to_ec 6a702e7899fcf3988e2b6b55654c22e54f43d3fa29de19177bdff5b2295fe27f 145d5748d6054fb586568e276f6925aef593a5b9c8249ad3dbef510af99b4307
hash_to_ec 30ce0fd4f1fac8b62d613b8ee4a66deef6eb7094bd8466531050b837460f6971 f3aa850d593ba7cef01389f7e1916e57617f1d75cd42f64ce8f5f272384b148c
hash_to_ec 3aa31d4ad7046ad13d83eb11c9a6e90eb8483a374a77a9a7b2a7cc0978fefa76 2fe0827dc080d9c1e7ec475a78aa7ae3c86d1a35f4c3f25f4a1f7299cacf018a
hash_to_ec 8562a5a91e763b98014523ebb6e49120979098f89c31df1fde9eb3a49a15b20f ae223bf85e2009a9daf5fd8a14685e2e1e625fc88818b2fd437dd7e109a48f59
hash_to_ec ccf9c313a47b8dbf7ce42c94b785818bc24134d95b6d22acc53c1ec2be29cf27 3e79fce6fe5aa14251b6560df4b76e811d7739eec097f27052c4403a283be71d
hash_to_ec d1e33cd6f8918618d5fb6d67ad8de939db8beaec4f115551eac64479b739b773 613fffcbe1bf48bb2d7bfd64fd97790a06025f8f2429edddb9ac145707847ecf
hash_to_ec 81eaeced34dd44e448d5dafa5715225e4956c90911c964a96ff7aa5b86b969bc 8f81177495d120a1357380164d677509b167f2958eb8b962b616c3951d426d8c
hash_to_ec 2bc001a29f8eab1c7377de69957ba365fb5bdaf9c2c220889709af920dfe27d3 9bcb3010038f366fa4c280eed6e914a23bfc402594d0b83d0e66730a465a565b
hash_to_ec 6feeb703c05e86c58d9fc5623f1af8657ecd1e75a14d18c4eedb642a8a393d16 6544628ba67ed0e14854961739c4d467fcf49d6361e39d32ea73dabeae51e6c3
hash_to_ec e8ff145a7c26897f2c1639edd333a5412f87752f110079f581ccdc87fcce208c d4b5a6e06069c7e012e32119f8eda08ff04a8dfa784e1cf1bced455a4d41d905
hash_to_ec 80488131dcb2018527908dbf8cdf4b823ef0806dc1d360f4da671004ef7ff74d 9984a79d9fd4f317768b442161116eef84e2ca49e938642b268fd64312d59a27
hash_to_ec d8c4ca60446849a784d1462aa26a3b93073ff6841cb2da3ef52ab9785b00b1fd da5ec1562e7de2382d35728312f4eea3608d4dba775c1c108de510e1ce97d059
hash_to_ec 68645728dfc6b9358dfb426493238ba38f24a2f46a3e89edb47d212549939cb7 d3253aa7235113dcc1b577d3bb80be34f528398815a653dbdbacbcbdfd5887a1
hash_to_ec 4e8eb97ba2d1046e1b42e67530a61441e31c84e5e5e448d8e8dbe75d104eaccb de94f73e83222aa0e39b559d4fef70387b0815b9b2f6beff5da67262d8f0eb3e
hash_to_ec 104ff03122ffdf59b22b8c0fe3d8f2ef67d02328e4d5181916d3d2a92f9a0bb7 1517ccf69c0328327e1cf581f16944ff66bc91c37e1cd68a99525415e00b7c9f
hash_to_ec 80f23aae7356ae9a2f9f7504495a731214d26f870fb7df68fdc00b233494156f 7aef046b0a70f84e8d239aa95e192b5a3fffa0fae5090c91273e8996beca9e38
hash_to_ec 2424b33235955a737ebddbf1c6c59cd8778af74da3bd3e658447666a2ab2f557 d19e2be8d482950fbdae429618da7a9daedb8c5944dea19cd1b6b274e792231b
hash_to_ec 0adc839d2b8f099e4341a4763b074c06318d6bcbd1ec558d20a9820c4a426463 cea5da12a84e5c20011726d9224a9930bec30f9571762dd7ca857b86bd37d056
hash_to_ec 46c84d53951f1ba23c46a23d5d96bf019c559aa5d2d79e4535cfcdb36f38ce25 2a913a01a6f7dd78a43cdd5354d1160d9a5f0d824c489a892c80eba798a77567
hash_to_ec 99bdaaf68555ccdc93d97c3a0fb4c126a1aa8b1202194a1a753401a6cae21055 1f645efe173577a092f2d847cc966e28ba3b36397fe84c96dfa4724ed4fcfdf9
hash_to_ec c540ff78f1e063ad26ffa69febb8818c9f2a325072c566091ad816e40fe39af4 de7a762262c91ab4beccc0713233cb91163aec43e34de0dbcfad0c431e8a9722
hash_to_ec de8b1ff8978cd5e02681521542b7b6c3c2f8f4602065059f83594809d04e3dda 290601e75207085bff3e016746e55a80310a76dea9ef566c24181079c76da11c
hash_to_ec d555994c8a022e52602d2a8bdd01fc1bfa6b9ab6734ff72a1bd5f937de4627f8 5f6794e874f48c4b362d0a24207374c2d274e28de86351afc6ddb95d8cc2fd62
hash_to_ec 19db72f703fe6f1b73f21b6ba133ae6b111ae8cc496d3aa32e02411e34c0d8d7 42f159f43d2d62b8cf8a47d5f1340c5cf070e9860fc60de647c55d50fe9f5607
hash_to_ec 23a87a258c2a5d1353aa2d5946f9e5749b92f85e3c58e1d177c3b6c3dcac809c e5685016f79d5e87d1fecb3e2a0fe64e4875f7accd2f6649d7f6b16317549cb1
hash_to_ec 43e1738d7d1b5b565f5fc78e81480f7edf9a4dc18f104fc4be95135b98931b17 650f5b682e45f2d0c5d5e8bcfd9e0cda7d9071b55ecbfaf5e3b59941cd7479f2
hash_to_ec a9d644de0804edf62dee613efa2547e510990a9b7a987ebe55ec74c23873a878 52ad329f88499a4f110e6a6cba1f820012d8db6ccb8f6495ab1e3eb5a24786e1
hash_to_ec 11f2b5d89a0350d7c8727becf0f4dd19bd90f8c94ff207132ab13282dd9b94e6 b798a47bb98dc2a8f99deaf64d27638e33a0d504c5d2fbee477a2bc9b89e2838
hash_to_ec 5e206e3190b3b715d125f1a11fff424fb33e36e534c99ddde2a3517068b7dcc4 2738e9571c96b2ddf93cb5f4a72b1ea78d3731d9555b830494513c0683c950ca
hash_to_ec efc3d65a43d4f10795c7265a76671348f80173e0f507c812f7ae76793b99c529 cf4434d18ce8167b51f117fe930860143c46e1739a8db1fba73b6b0de830d707
hash_to_ec 81f00469788aad6631cf75b585ae06d43ec81c20479925a2009afac9687dff60 c335b5889b36ba4b4175bb0d986807e8eedb6f6b7329b70b922e2ab729c4202a
hash_to_ec 9ef5ff329b525ee8f5c3ac38e1dba7cb19985617341d356707c67ff273aed02d bef9f9e051ba0e24d1fdf72099cf43ecdd250d047fb329855b5372d5c422db9e
hash_to_ec 3fa1401bd63132cf8b385c0fa65f0715ba1fe6161e41d59f8033ae2b22f63fa1 8289a1cb3c2dae48879bb8913fafe2d196cc2fdab5f2a77607910efd33eae6df
hash_to_ec 6559836fd0081fa38a3f8d8408b564e5698b9797cf5e15f7f12a7d2c84511989 28d405a6687d2ecc90c1c66bf0454d58f3fa38835743075e1db58c658e15a104
hash_to_ec 8e0882d45f0e4c2fb2839d3be86ff699d4b2242f5b25ac5a3c2f65297c7d2032 2771fdcf9135a62007adb5f0004d8222f0e42f819c81710aa4dc3ab2042bebf3
hash_to_ec 1d91dc4dd9bd82646029d13aca1af96830c1d8a0400ddebeb14b00c93501c039 7792c62e897f32cbc9c4229f0d28f7882ceeae120329a1cd35f76a75ac704e93
hash_to_ec 09527f9052acbbdd7676cbbd9534780865f04a27aaadad2b7d4f1dac68883cf0 b934220cde1327f2dc6af67bcb4124bf424d5084ef4da945e4daad1717cd0bb8
hash_to_ec 2362e1abe73e64cdd2ca7f6c5ea9f467213747dd3f2b7c6e5df9cb21e03307d7 676b7122b96564358bbaaf77e3a5a4db1767e4f9a50f6ddd1c69df4566755af9
hash_to_ec 26c2dd2356e9b6c68a415b25f91d18614dc8500c66f346d28489da543ee75a94 0f4fd7086acd68eb7c9fa2410e2ecf18e34654eb44e979bc03ce436e992d5feb
hash_to_ec 422dc0a09d6a45a8e0b563eeb6a5ee84b08abd3a8cb34ff93f77ba3b163f4042 631f1b412ff5a0fccbe53a02b4a3deaa93a0418ed9874df401eb698ef75d7441
hash_to_ec ceecdf46f57ef3f36ff30a1a3579b609340282d1b26ab5ddef2f53514e91bab1 9bc6f981fe98d14a2fc5b01a8134b6d35e123ec9ab8a3f303e0a5abb28150e2e
hash_to_ec 024a9e6e0d73f28aa6207fb1e02ce86d444d2d46f8211e8aaab54f459db91a5a 5fb0c1d2c3b30f399102104ea1874099fa83110b3d9c1fcfffb2981c98bf8cdf
hash_to_ec 5b8e45e269c9ccac4c68e532a72b29346d218f4606f37a14064826a62050e3a8 c7be46a871b77fc05ce891d24bd6bd54d9775b7ef573c6bc2d92b67f3604c1d1
hash_to_ec 9a6593a385c266389eef14237874b97bdcd1823c3199311667d4853c2d12aa81 9f55ee9d94102d2b9c5670f30586cf9823bf205b4d4fe088c323e87c4e10f26f
hash_to_ec 27377e2811598c3569b92990865d39b72c7a5533e1be30f77330863187c11875 abd82bc726f2710a8b87e4c1cf5a069f0ae800de614468d3ff35639983020197
hash_to_ec 7cacfaa135fb7d568b8dce8ea9136498b1b28c6d1020af45d376288d78d411f0 229fccd49744c0692508af329224553d21561ee6062b2b8a21f080f73da5bd97
hash_to_ec 52abd90a5542d6496b8dec9567b020f30058e29458d64f2d4f3ad6f3bfc1a5a0 874e82ced7cf77577b3374087fb08a2300b7f403de628310c26bdb3be869d309
hash_to_ec 5c8eebe9d12309187afa8d0d5191de3fdb84e5a05485d7cd62e8804ce7fdc0bc 12b7537643488aa8b9dcc4bae040cd491f8b466163b7988157b0502fb6c9177f
hash_to_ec 6ca3dd5c7a21a6bf65d6eefbe20a66e9b1d6b64196344be0c075f47aea48e3aa 5e1d0705ee24675238293b73ab1d98359119d4b328275be2460cc6ee4d19cc88
hash_to_ec d7e6cd0d39b4308c2a5ee547c4569c8bb3887e49cedece62d218d7c3c5277797 793dc4397112dfd9a8f4e061f457eb6d6fbb1d7a58c40bad5f16002c64914186
hash_to_ec 9cb6de8ba967cca0f0f861c6e20546f8958446595c01c28dae7ba6cfa09d6b14 ba1a2f7502b58fee3499c20e35fa01bb932e7a7c4a925dc04fbf5d90f33cfb5e
hash_to_ec 8ef9c7366733a1edcd116238cdbd177d61222d5c3e05b30ef6b85014cbcb6b79 8fc89664722947164ac9b77086aed319897612068f56ecd57f47029f14671603
hash_to_ec 7f317a34e4fb7de9f69cb107ffc0e57fd9f5c85b85ccb5319d05cebfc169924a 4b71c42339c73db7d710cd63f374d478a6c13bdc352cff40e967282268965ba7
hash_to_ec 15beef8d9687b92918a903b01d594859db4e7128263c8db0cae9d423ff962c1e cd75e6323952f6ac88f138f391b69f38c46d70b7eda61f9e431725b6f1d514a5
hash_to_ec 7a1c04c9af8fc6649833fe81e96f0199fcfe94959256cbe1490075fc5be0904e 0368270cd979439ae0a9552a5d6c9f959e4247fcf920d9e071464582e79c04b1
hash_to_ec c854c583d338615f85f69061e0fa9c9d7c5bbbfe562e8774fef3be556fe8bb63 061620171d7320f64bee98414ff7200a1f481521d202fb281cab06be73b80402
hash_to_ec 0fb8af5aba05ad2503edf1cfad5a451da088e7e974772057cd991a4e0601a3eb d3cbc20384a4420143fcce2cb763b0c15bec4f3267d1bdad3c34c1ee6b790f5e
hash_to_ec 9a251cf59e84a9da5630642f9671c732440caa8fcf4c92446a7e5f5ef99da46c 9b9679086a433f2077f40bcd4c7545fb5cc87e7dbb8bba468d53cb04a74361a0
hash_to_ec 8c632e357cef00e0911eb566f8cc809136b3f5ac1e82d183e4d645cef89fa155 5e06b0f4f278fa1ccb5431866e0b35171cdb814e2e82b9189ce01d8d8a1b2408
hash_to_ec 4aa4c31463475086a5d96b3ff550340567ab3b4a86fa3f01cfe9be18bc4dcb54 76a2916cfc093f27992e1f07b50f431d61d58e255507e208cd29ea4d3bc56623
hash_to_ec 1d33d9aadb949346e3c78d065a0f5262374524f4cb97a7390c8cdaede7ca6578 9ad2f757f499359903031adea6126c577469c4e834a2959e3ac08ee74b13783c
hash_to_ec d9217b9a070df20c4d2f0db42ff0bb36bfba9f51b0b6df8fdfe150405dce4934 65a843c522b4b8ec081a696a0d2dd8dfdfea45db201de7a5889a1446c6dff8c7
hash_to_ec b665b2ca8a285e44ba84e785533b56496a5319730dbb95bc14d3bdfece7544dc 8a804cd13457497b0a29eeca2cecfaa858766ec1d270a0e0c6785b43fd49b824
hash_to_ec 43b5cbcc21b3404bca97fa9a661940fe64d40f3ca569310e50b1bb0173c4d5ee 6c12fffb540d536060bb8b96cf635c1b2cbaa4d875a8d2fb0bf79a690363df19
hash_to_ec 11c58f20562c00dec5bb4456be07cd98186837e9af38d50d45f5e7b6f0f9000d cee76b567586f66dadd38c01213bfc1a17d38e96a495efb4c26063dc498ba209
hash_to_ec b069a980b51d8e030262db0b30069e660f4a3f6f8075d1790c153ba12b879f8b 262391b00bdee71d1d827b2cfe50b46c29e265934dc91959bd369aca0cc6444e
hash_to_ec 75274bfd79bf33eb2f9ab046d34528af9a71811e7e3d55c20eb049c81ac692d8 cb93c850e36896fe6626e97c53652af6736ec3ba0641c7765d0cca2bad2352de
hash_to_ec 5cdb6a24d9736a00f197d9707949fedc5405f367744fe8c83b7cff650302b589 8b4ac03123fab9275dcf340345a1b11fba48ef106d410ba2e0e6f6457037a419
hash_to_ec 07fdc85f809f95a07b59b084402bf91c512ebbe05c7657d6ba27a9e7e121e3e2 61182b3def063630e11de648a278032bcb75949f3a24ef5a133da87830ae5c4e
hash_to_ec a4188ca634cbb796f9927822e343d7b267e0a609c1a0ffa4dcf3726b9ffcc8a2 a911e4899fda28fd6337d708d34553ac5e810ee4938f6f7d9d6e521cab069edb
hash_to_ec 3c128ec5c955ea189a5789df2c892e94193a534a9d5801b8f75df870bc492a69 59eef5ee9df0f681df5b5c67ead1f06b059a8a843837b67f20cce15779608170
hash_to_ec 51a4cc7ec4a14a98c0731e9de7f3ce0779123222d95455e940f2014a23729ec8 105863ccda076af7290d1bf9ec828651dc5811159839044d23f1c3e31a11c5e2
hash_to_ec 1b901a31acbb7807c3309facdc7d04bc3b5a4aa714e6e346bd1c6ad4634e6534 01b3c0000b6c6b471c67c6ab3f9c7a500beaea5edb5c8f2b34df91b69ff67f21
hash_to_ec d2f2c8d79cfa2e7cb2db80568ba62ca0576741acfbe5e2baa0d9b3c424a7c84d 7df9d9088022bd1ce6814d6f8051eef27a650ee38e789b184da2691efd27139d
hash_to_ec 04dcb7644fdfc12d8e34d6e57d7769db939b4a149ed2b81aa51a74ee90babe19 6cff0ab2dd3b32ba1bd1a78e3661722f3f10003a01ce83e430970557decedb2c
hash_to_ec 222798c6841eeaa07e7b7e29686942d7c7f9afc38d09360c8e1f52f2b7debd12 133e3a04ec82aa9b8dbbec18cadbafff446d1270bf7c6f3f97ddd3906dae2468
hash_to_ec 4f7277c3ef247a0689b486ad965f969c433fc63e95d7310e789c4708418ccabc 7e0f2c984dd3cffb35458938c95fe92acf2e697aed060b0e3377c7a07e53c494
hash_to_ec 359b4d6709413243ae2c5409ea02714a9f8961bbbb64a91e81daf01e18c981bf eab69af2cb7f113ad6a27035c0399853d10bd0b99291fad37794d100f7530431
hash_to_ec 6cea3c6a9eb38f60329537170aa4db8dbb869af2040061e53b10c267daf6568c da9a97f4fa96bd05dade5e2704a6a633ba4dbe5080a1e831cda888e9d4f86615
hash_to_ec 3dddecb954ef0209bcf61fd5b46b6c94f2384ef281c48a20ffee74f90788172d af9899c31f944617af54712f93d1a2b4944e48867f480d0d1aec61f3b713e32d
hash_to_ec 9605247462f50bdf7ff57fe966abbefe8b6efa0b65b5116252f0ec723717013f fc8f10904d42a74e09310ccf63db31a90f1dab88b278f15e3364a2356810f7e9
hash_to_ec a005143c4d299933f866db41d0a0b8c67264f5d4ea840dd243cb10c3526bc077 928df1fe9404ffa9c1f4a1c8b2d43ab9b81c5615c8330d2dc2074ac66d4d5200
hash_to_ec f45ce88065c34a163f8e77b6fb583502ed0eb1f490f63f76065a9d97e214e3a9 41bd6784270af4154f2f24f118617e2d7f5b7771a409f08b0f2b7bbcb5e3d666
hash_to_ec 7b40ac30ed02b12ff592a5479c80cf5a7673abfdd4dd38810e40e63275bc2eed 6c6bf5961d83851c9728801093d9af04e5a693bc6cbad237b9ac4b0ed580a771
hash_to_ec 9f985005794d3052a63361413a9820d2ce903198d6d5195b3f20a68f146c6d5c 88bcac53ba5b1c5b44730a24b4cc2cd782298fc70dc9d777b577a2b33b256449
hash_to_ec 31b8e37d01fd5669de4ebf78889d749bc44ffe997186ace56f1fb3e60b8742d2 776366b44170efb130a5045597db5675c6c0b56f3def84863c6b6358aa8dcf40

View File

@@ -2,7 +2,10 @@
mod binaries {
pub(crate) use std::sync::Arc;
pub(crate) use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
pub(crate) use curve25519_dalek::{
scalar::Scalar,
edwards::{CompressedEdwardsY, EdwardsPoint},
};
pub(crate) use multiexp::BatchVerifier;
@@ -17,8 +20,6 @@ mod binaries {
rpc::{RpcError, Rpc, HttpRpc},
};
pub(crate) use monero_generators::decompress_point;
pub(crate) use tokio::task::JoinHandle;
pub(crate) async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
@@ -200,12 +201,13 @@ mod binaries {
};
let rpc_point = |point: &str| {
decompress_point(
CompressedEdwardsY(
hex::decode(point)
.expect("invalid hex for ring member")
.try_into()
.expect("invalid point len for ring member"),
)
.decompress()
.expect("invalid point for ring member")
};

View File

@@ -58,10 +58,10 @@ pub struct Block {
}
impl Block {
pub fn number(&self) -> Option<u64> {
pub fn number(&self) -> usize {
match self.miner_tx.prefix.inputs.first() {
Some(Input::Gen(number)) => Some(*number),
_ => None,
Some(Input::Gen(number)) => (*number).try_into().unwrap(),
_ => panic!("invalid block, miner TX didn't have a Input::Gen"),
}
}
@@ -114,16 +114,9 @@ impl Block {
}
pub fn read<R: Read>(r: &mut R) -> io::Result<Block> {
let header = BlockHeader::read(r)?;
let miner_tx = Transaction::read(r)?;
if !matches!(miner_tx.prefix.inputs.as_slice(), &[Input::Gen(_)]) {
Err(io::Error::other("Miner transaction has incorrect input type."))?;
}
Ok(Block {
header,
miner_tx,
header: BlockHeader::read(r)?,
miner_tx: Transaction::read(r)?,
txs: (0_usize .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
})
}
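As context for the number() change in this hunk, a minimal hedged sketch of how the height is recovered from the miner transaction (the return type differs between the two sides of the diff, so the sketch keeps the Option form; the helper name is illustrative):
fn block_height(block: &Block) -> Option<usize> {
  // The miner (coinbase) transaction commits to the block height via its single Input::Gen
  match block.miner_tx.prefix.inputs.first() {
    Some(Input::Gen(number)) => usize::try_from(*number).ok(),
    _ => None,
  }
}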

View File

@@ -16,7 +16,7 @@ use sha3::{Digest, Keccak256};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
pub use monero_generators::{H, decompress_point};
pub use monero_generators::H;
mod merkle;
@@ -46,10 +46,6 @@ pub mod wallet;
#[cfg(test)]
mod tests;
pub const DEFAULT_LOCK_WINDOW: usize = 10;
pub const COINBASE_LOCK_WINDOW: usize = 60;
pub const BLOCK_TIME: usize = 120;
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
#[allow(non_snake_case)]
pub(crate) fn INV_EIGHT() -> Scalar {

View File

@@ -9,7 +9,7 @@ use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as D
use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};
use multiexp::{BatchVerifier, multiexp};
use multiexp::BatchVerifier;
use crate::{Commitment, ringct::bulletproofs::core::*};
@@ -17,20 +17,7 @@ include!(concat!(env!("OUT_DIR"), "/generators.rs"));
static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
pub(crate) fn IP12() -> Scalar {
*IP12_CELL.get_or_init(|| ScalarVector(vec![Scalar::ONE; N]).inner_product(TWO_N()))
}
pub(crate) fn hadamard_fold(
l: &[EdwardsPoint],
r: &[EdwardsPoint],
a: Scalar,
b: Scalar,
) -> Vec<EdwardsPoint> {
let mut res = Vec::with_capacity(l.len() / 2);
for i in 0 .. l.len() {
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
}
res
*IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
}
#[derive(Clone, PartialEq, Eq, Debug)]
@@ -70,7 +57,7 @@ impl OriginalStruct {
let mut cache = hash_to_scalar(&y.to_bytes());
let z = cache;
let l0 = aL - z;
let l0 = &aL - z;
let l1 = sL;
let mut zero_twos = Vec::with_capacity(MN);
@@ -82,12 +69,12 @@ impl OriginalStruct {
}
let yMN = ScalarVector::powers(y, MN);
let r0 = ((aR + z) * &yMN) + &ScalarVector(zero_twos);
let r1 = yMN * &sR;
let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
let r1 = yMN * sR;
let (T1, T2, x, mut taux) = {
let t1 = l0.clone().inner_product(&r1) + r0.clone().inner_product(&l1);
let t2 = l1.clone().inner_product(&r1);
let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
let t2 = inner_product(&l1, &r1);
let mut tau1 = Scalar::random(&mut *rng);
let mut tau2 = Scalar::random(&mut *rng);
@@ -113,10 +100,10 @@ impl OriginalStruct {
taux += zpow[i + 2] * gamma;
}
let l = l0 + &(l1 * x);
let r = r0 + &(r1 * x);
let l = &l0 + &(l1 * x);
let r = &r0 + &(r1 * x);
let t = l.clone().inner_product(&r);
let t = inner_product(&l, &r);
let x_ip =
hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);
@@ -139,8 +126,8 @@ impl OriginalStruct {
let (aL, aR) = a.split();
let (bL, bR) = b.split();
let cL = aL.clone().inner_product(&bR);
let cR = aR.clone().inner_product(&bL);
let cL = inner_product(&aL, &bR);
let cR = inner_product(&aR, &bL);
let (G_L, G_R) = G_proof.split_at(aL.len());
let (H_L, H_R) = H_proof.split_at(aL.len());
@@ -153,8 +140,8 @@ impl OriginalStruct {
let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
let winv = w.invert().unwrap();
a = (aL * w) + &(aR * winv);
b = (bL * winv) + &(bR * w);
a = (aL * w) + (aR * winv);
b = (bL * winv) + (bR * w);
if a.len() != 1 {
G_proof = hadamard_fold(G_L, G_R, winv, w);

View File

@@ -112,7 +112,7 @@ impl AggregateRangeStatement {
let mut d = ScalarVector::new(mn);
for j in 1 ..= V.len() {
z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
d = d.add_vec(&Self::d_j(j, V.len()).mul(z_pow[j - 1]));
}
let mut ascending_y = ScalarVector(vec![y]);
@@ -124,8 +124,7 @@ impl AggregateRangeStatement {
let mut descending_y = ascending_y.clone();
descending_y.0.reverse();
let d_descending_y = d.clone() * &descending_y;
let d_descending_y_plus_z = d_descending_y + z;
let d_descending_y = d.mul_vec(&descending_y);
let y_mn_plus_one = descending_y[0] * y;
@@ -136,9 +135,9 @@ impl AggregateRangeStatement {
let neg_z = -z;
let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
for (i, d_y_z) in d_descending_y_plus_z.0.iter().enumerate() {
for (i, d_y_z) in d_descending_y.add(z).0.drain(..).enumerate() {
A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
A_terms.push((*d_y_z, generators.generator(GeneratorsList::HBold1, i)));
A_terms.push((d_y_z, generators.generator(GeneratorsList::HBold1, i)));
}
A_terms.push((y_mn_plus_one, commitment_accum));
A_terms.push((
@@ -146,14 +145,7 @@ impl AggregateRangeStatement {
Generators::g(),
));
(
y,
d_descending_y_plus_z,
y_mn_plus_one,
z,
ScalarVector(z_pow),
A + multiexp_vartime(&A_terms),
)
(y, d_descending_y, y_mn_plus_one, z, ScalarVector(z_pow), A + multiexp_vartime(&A_terms))
}
pub(crate) fn prove<R: RngCore + CryptoRng>(
@@ -199,7 +191,7 @@ impl AggregateRangeStatement {
a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
}
let a_r = a_l.clone() - Scalar::ONE;
let a_r = a_l.sub(Scalar::ONE);
let alpha = Scalar::random(&mut *rng);
@@ -217,11 +209,11 @@ impl AggregateRangeStatement {
// Multiply by INV_EIGHT per earlier commentary
A.0 *= crate::INV_EIGHT();
let (y, d_descending_y_plus_z, y_mn_plus_one, z, z_pow, A_hat) =
let (y, d_descending_y, y_mn_plus_one, z, z_pow, A_hat) =
Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);
let a_l = a_l - z;
let a_r = a_r + &d_descending_y_plus_z;
let a_l = a_l.sub(z);
let a_r = a_r.add_vec(&d_descending_y).add(z);
let mut alpha = alpha;
for j in 1 ..= witness.gammas.len() {
alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;

View File

@@ -3,7 +3,8 @@
use group::Group;
use dalek_ff_group::{Scalar, EdwardsPoint};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::ScalarVector;
mod scalar_vector;
pub(crate) use scalar_vector::{ScalarVector, weighted_inner_product};
mod point_vector;
pub(crate) use point_vector::PointVector;

View File

@@ -0,0 +1,114 @@
use core::{
borrow::Borrow,
ops::{Index, IndexMut},
};
use std_shims::vec::Vec;
use zeroize::Zeroize;
use group::ff::Field;
use dalek_ff_group::Scalar;
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
impl Index<usize> for ScalarVector {
type Output = Scalar;
fn index(&self, index: usize) -> &Scalar {
&self.0[index]
}
}
impl IndexMut<usize> for ScalarVector {
fn index_mut(&mut self, index: usize) -> &mut Scalar {
&mut self.0[index]
}
}
impl ScalarVector {
pub(crate) fn new(len: usize) -> Self {
ScalarVector(vec![Scalar::ZERO; len])
}
pub(crate) fn add(&self, scalar: impl Borrow<Scalar>) -> Self {
let mut res = self.clone();
for val in &mut res.0 {
*val += scalar.borrow();
}
res
}
pub(crate) fn sub(&self, scalar: impl Borrow<Scalar>) -> Self {
let mut res = self.clone();
for val in &mut res.0 {
*val -= scalar.borrow();
}
res
}
pub(crate) fn mul(&self, scalar: impl Borrow<Scalar>) -> Self {
let mut res = self.clone();
for val in &mut res.0 {
*val *= scalar.borrow();
}
res
}
pub(crate) fn add_vec(&self, vector: &Self) -> Self {
debug_assert_eq!(self.len(), vector.len());
let mut res = self.clone();
for (i, val) in res.0.iter_mut().enumerate() {
*val += vector.0[i];
}
res
}
pub(crate) fn mul_vec(&self, vector: &Self) -> Self {
debug_assert_eq!(self.len(), vector.len());
let mut res = self.clone();
for (i, val) in res.0.iter_mut().enumerate() {
*val *= vector.0[i];
}
res
}
pub(crate) fn inner_product(&self, vector: &Self) -> Scalar {
self.mul_vec(vector).sum()
}
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
debug_assert!(len != 0);
let mut res = Vec::with_capacity(len);
res.push(Scalar::ONE);
res.push(x);
for i in 2 .. len {
res.push(res[i - 1] * x);
}
res.truncate(len);
ScalarVector(res)
}
pub(crate) fn sum(mut self) -> Scalar {
self.0.drain(..).sum()
}
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn split(mut self) -> (Self, Self) {
debug_assert!(self.len() > 1);
let r = self.0.split_off(self.0.len() / 2);
debug_assert_eq!(self.len(), r.len());
(self, ScalarVector(r))
}
}
pub(crate) fn weighted_inner_product(
a: &ScalarVector,
b: &ScalarVector,
y: &ScalarVector,
) -> Scalar {
a.inner_product(&b.mul_vec(y))
}
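To make the crate-internal API above concrete, a small hedged worked example (values chosen purely for the arithmetic; these items are pub(crate), so this only compiles inside the crate):
use dalek_ff_group::Scalar;
fn scalar_vector_demo() {
  let a = ScalarVector(vec![Scalar::from(1u64), Scalar::from(2u64)]);
  let b = ScalarVector(vec![Scalar::from(3u64), Scalar::from(4u64)]);
  // inner_product: 1*3 + 2*4 = 11
  assert_eq!(a.inner_product(&b), Scalar::from(11u64));
  // powers(2, 2) = [1, 2]
  let y = ScalarVector::powers(Scalar::from(2u64), 2);
  // weighted_inner_product: 1*3*1 + 2*4*2 = 19
  assert_eq!(weighted_inner_product(&a, &b, &y), Scalar::from(19u64));
}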

View File

@@ -4,7 +4,7 @@ use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop};
use multiexp::{BatchVerifier, multiexp, multiexp_vartime};
use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
use group::{
ff::{Field, PrimeField},
GroupEncoding,
@@ -12,7 +12,8 @@ use group::{
use dalek_ff_group::{Scalar, EdwardsPoint};
use crate::ringct::bulletproofs::plus::{
ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, transcript::*,
ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, weighted_inner_product,
transcript::*,
};
// Figure 1
@@ -218,7 +219,7 @@ impl WipStatement {
.zip(g_bold.0.iter().copied())
.chain(witness.b.0.iter().copied().zip(h_bold.0.iter().copied()))
.collect::<Vec<_>>();
P_terms.push((witness.a.clone().weighted_inner_product(&witness.b, &y), g));
P_terms.push((weighted_inner_product(&witness.a, &witness.b, &y), g));
P_terms.push((witness.alpha, h));
debug_assert_eq!(multiexp(&P_terms), P);
P_terms.zeroize();
@@ -257,13 +258,14 @@ impl WipStatement {
let d_l = Scalar::random(&mut *rng);
let d_r = Scalar::random(&mut *rng);
let c_l = a1.clone().weighted_inner_product(&b2, &y);
let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);
let c_l = weighted_inner_product(&a1, &b2, &y);
let c_r = weighted_inner_product(&(a2.mul(y_n_hat)), &b1, &y);
// TODO: Calculate these with a batch inversion
let y_inv_n_hat = y_n_hat.invert().unwrap();
let mut L_terms = (a1.clone() * y_inv_n_hat)
let mut L_terms = a1
.mul(y_inv_n_hat)
.0
.drain(..)
.zip(g_bold2.0.iter().copied())
@@ -275,7 +277,8 @@ impl WipStatement {
L_vec.push(L);
L_terms.zeroize();
let mut R_terms = (a2.clone() * y_n_hat)
let mut R_terms = a2
.mul(y_n_hat)
.0
.drain(..)
.zip(g_bold1.0.iter().copied())
@@ -291,8 +294,8 @@ impl WipStatement {
(e, inv_e, e_square, inv_e_square, g_bold, h_bold) =
Self::next_G_H(&mut transcript, g_bold1, g_bold2, h_bold1, h_bold2, L, R, y_inv_n_hat);
a = (a1 * e) + &(a2 * (y_n_hat * inv_e));
b = (b1 * inv_e) + &(b2 * e);
a = a1.mul(e).add_vec(&a2.mul(y_n_hat * inv_e));
b = b1.mul(inv_e).add_vec(&b2.mul(e));
alpha += (d_l * e_square) + (d_r * inv_e_square);
debug_assert_eq!(g_bold.len(), a.len());

View File

@@ -1,17 +1,85 @@
use core::{
borrow::Borrow,
ops::{Index, IndexMut, Add, Sub, Mul},
};
use core::ops::{Add, Sub, Mul, Index};
use std_shims::vec::Vec;
use zeroize::{Zeroize, ZeroizeOnDrop};
use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};
use multiexp::multiexp;
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
($Op: ident, $op: ident, $f: expr) => {
#[allow(clippy::redundant_closure_call)]
impl $Op<Scalar> for ScalarVector {
type Output = ScalarVector;
fn $op(self, b: Scalar) -> ScalarVector {
ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
}
}
#[allow(clippy::redundant_closure_call)]
impl $Op<Scalar> for &ScalarVector {
type Output = ScalarVector;
fn $op(self, b: Scalar) -> ScalarVector {
ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
}
}
#[allow(clippy::redundant_closure_call)]
impl $Op<ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn $op(self, b: ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), b.len());
ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
}
}
#[allow(clippy::redundant_closure_call)]
impl $Op<&ScalarVector> for &ScalarVector {
type Output = ScalarVector;
fn $op(self, b: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), b.len());
ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
}
}
};
}
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);
impl ScalarVector {
pub(crate) fn new(len: usize) -> ScalarVector {
ScalarVector(vec![Scalar::ZERO; len])
}
pub(crate) fn powers(x: Scalar, len: usize) -> ScalarVector {
debug_assert!(len != 0);
let mut res = Vec::with_capacity(len);
res.push(Scalar::ONE);
for i in 1 .. len {
res.push(res[i - 1] * x);
}
ScalarVector(res)
}
pub(crate) fn sum(mut self) -> Scalar {
self.0.drain(..).sum()
}
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn split(self) -> (ScalarVector, ScalarVector) {
let (l, r) = self.0.split_at(self.0.len() / 2);
(ScalarVector(l.to_vec()), ScalarVector(r.to_vec()))
}
}
impl Index<usize> for ScalarVector {
type Output = Scalar;
@@ -19,120 +87,28 @@ impl Index<usize> for ScalarVector {
&self.0[index]
}
}
impl IndexMut<usize> for ScalarVector {
fn index_mut(&mut self, index: usize) -> &mut Scalar {
&mut self.0[index]
}
}
impl<S: Borrow<Scalar>> Add<S> for ScalarVector {
type Output = ScalarVector;
fn add(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s += scalar.borrow();
}
self
}
}
impl<S: Borrow<Scalar>> Sub<S> for ScalarVector {
type Output = ScalarVector;
fn sub(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s -= scalar.borrow();
}
self
}
}
impl<S: Borrow<Scalar>> Mul<S> for ScalarVector {
type Output = ScalarVector;
fn mul(mut self, scalar: S) -> ScalarVector {
for s in &mut self.0 {
*s *= scalar.borrow();
}
self
}
}
impl Add<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn add(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s += o;
}
self
}
}
impl Sub<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn sub(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s -= o;
}
self
}
}
impl Mul<&ScalarVector> for ScalarVector {
type Output = ScalarVector;
fn mul(mut self, other: &ScalarVector) -> ScalarVector {
debug_assert_eq!(self.len(), other.len());
for (s, o) in self.0.iter_mut().zip(other.0.iter()) {
*s *= o;
}
self
}
pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
(a * b).sum()
}
impl Mul<&[EdwardsPoint]> for &ScalarVector {
type Output = EdwardsPoint;
fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
debug_assert_eq!(self.len(), b.len());
let mut multiexp_args = self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>();
let res = multiexp(&multiexp_args);
multiexp_args.zeroize();
multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
}
}
pub(crate) fn hadamard_fold(
l: &[EdwardsPoint],
r: &[EdwardsPoint],
a: Scalar,
b: Scalar,
) -> Vec<EdwardsPoint> {
let mut res = Vec::with_capacity(l.len() / 2);
for i in 0 .. l.len() {
res.push(multiexp(&[(a, l[i]), (b, r[i])]));
}
res
}
}
impl ScalarVector {
pub(crate) fn new(len: usize) -> Self {
ScalarVector(vec![Scalar::ZERO; len])
}
pub(crate) fn powers(x: Scalar, len: usize) -> Self {
debug_assert!(len != 0);
let mut res = Vec::with_capacity(len);
res.push(Scalar::ONE);
res.push(x);
for i in 2 .. len {
res.push(res[i - 1] * x);
}
res.truncate(len);
ScalarVector(res)
}
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn sum(mut self) -> Scalar {
self.0.drain(..).sum()
}
pub(crate) fn inner_product(self, vector: &Self) -> Scalar {
(self * vector).sum()
}
pub(crate) fn weighted_inner_product(self, vector: &Self, y: &Self) -> Scalar {
(self * vector * y).sum()
}
pub(crate) fn split(mut self) -> (Self, Self) {
debug_assert!(self.len() > 1);
let r = self.0.split_off(self.0.len() / 2);
debug_assert_eq!(self.len(), r.len());
(self, ScalarVector(r))
}
}
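A hedged sketch of the operator-based form in this file (one side of the hunk exposes inner_product as a free function, which the sketch assumes): elementwise Add/Sub/Mul, with &ScalarVector * &[EdwardsPoint] collapsing into a single multiexp. Group is assumed in scope for generator().
use group::Group;
fn scalar_vector_ops_demo() {
  let a = ScalarVector(vec![Scalar::from(2u64), Scalar::from(3u64)]);
  let b = ScalarVector(vec![Scalar::from(5u64), Scalar::from(7u64)]);
  // elementwise product, then sum: 2*5 + 3*7 = 31
  assert_eq!(inner_product(&a, &b), Scalar::from(31u64));
  // 2*G + 3*G = 5*G, evaluated as one multiexp
  let points = vec![EdwardsPoint::generator(), EdwardsPoint::generator()];
  assert_eq!(&a * points.as_slice(), EdwardsPoint::generator() * Scalar::from(5u64));
}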

View File

@@ -9,7 +9,7 @@ use std_shims::{
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, ConditionallySelectable};
use subtle::{ConstantTimeEq, Choice, CtOption};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_TABLE,
@@ -169,8 +169,13 @@ fn core(
}
// Perform the core loop
let mut c1 = c;
let mut c1 = CtOption::new(Scalar::ZERO, Choice::from(0));
for i in (start .. end).map(|i| i % n) {
// This will only execute once and shouldn't need to be constant time. Making it constant
// time, however, removes the risk of branch prediction creating timing differences
// depending on the ring index.
c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));
let c_p = mu_P * c;
let c_c = mu_C * c;
@@ -183,15 +188,10 @@ fn core(
to_hash.extend(L.compress().to_bytes());
to_hash.extend(R.compress().to_bytes());
c = hash_to_scalar(&to_hash);
// This will only execute once and shouldn't need to be constant time. Making it constant
// time, however, removes the risk of branch prediction creating timing differences
// depending on the ring index.
c1.conditional_assign(&c, i.ct_eq(&(n - 1)));
}
// The first tuple is needed to continue signing; the latter is the c to be tested/worked with
((D, c * mu_P, c * mu_C), c1)
((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}
/// CLSAG signature, as used in Monero.
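For readers unfamiliar with subtle, a hedged sketch of the latch pattern the rewritten loop relies on: CtOption::or_else keeps the first Some it produced, so c1 captures c exactly when the iteration reaches ring index 0, and unwrap_or(c) is the fallback if it never does. The demo below uses u64 purely for illustration.
use subtle::{Choice, ConstantTimeEq, CtOption};
fn capture_first(values: &[u64]) -> u64 {
  let mut captured = CtOption::new(0, Choice::from(0));
  for (i, value) in values.iter().enumerate() {
    // A no-op once captured.is_some(); here it latches the value at index 0
    captured = captured.or_else(|| CtOption::new(*value, i.ct_eq(&0)));
  }
  captured.unwrap_or(0)
}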

View File

@@ -199,7 +199,6 @@ impl Algorithm<Ed25519> for ClsagMultisig {
l: Participant,
addendum: ClsagAddendum,
) -> Result<(), FrostError> {
// TODO: This check is faulty if two shares are additive inverses of each other
if self.image.is_identity().into() {
self.transcript.domain_separate(b"CLSAG");
self.input().transcript(&mut self.transcript);

View File

@@ -33,10 +33,7 @@ pub struct RingMatrix {
impl RingMatrix {
pub fn new(matrix: Vec<Vec<EdwardsPoint>>) -> Result<Self, MlsagError> {
// Monero requires that there is more than one ring member for MLSAG signatures:
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// src/ringct/rctSigs.cpp#L462
if matrix.len() < 2 {
if matrix.is_empty() {
Err(MlsagError::InvalidRing)?;
}
for member in &matrix {

View File

@@ -257,8 +257,7 @@ impl RctPrunable {
pub fn read<R: Read>(
rct_type: RctType,
ring_length: usize,
inputs: usize,
decoys: &[usize],
outputs: usize,
r: &mut R,
) -> io::Result<RctPrunable> {
@@ -269,7 +268,7 @@ impl RctPrunable {
// src/ringct/rctSigs.cpp#L609
// And then for RctNull, that's only allowed for miner TXs which require one input of
// Input::Gen
if inputs == 0 {
if decoys.is_empty() {
Err(io::Error::other("transaction had no inputs"))?;
}
@@ -277,11 +276,11 @@ impl RctPrunable {
RctType::Null => RctPrunable::Null,
RctType::MlsagAggregate => RctPrunable::AggregateMlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
mlsag: Mlsag::read(ring_length, inputs + 1, r)?,
mlsag: Mlsag::read(decoys[0], decoys.len() + 1, r)?,
},
RctType::MlsagIndividual => RctPrunable::MlsagBorromean {
borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
mlsags: (0 .. inputs).map(|_| Mlsag::read(ring_length, 2, r)).collect::<Result<_, _>>()?,
mlsags: decoys.iter().map(|d| Mlsag::read(*d, 2, r)).collect::<Result<_, _>>()?,
},
RctType::Bulletproofs | RctType::BulletproofsCompactAmount => {
RctPrunable::MlsagBulletproofs {
@@ -296,10 +295,8 @@ impl RctPrunable {
}
Bulletproofs::read(r)?
},
mlsags: (0 .. inputs)
.map(|_| Mlsag::read(ring_length, 2, r))
.collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
mlsags: decoys.iter().map(|d| Mlsag::read(*d, 2, r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
}
}
RctType::Clsag | RctType::BulletproofsPlus => RctPrunable::Clsag {
@@ -311,8 +308,8 @@ impl RctPrunable {
r,
)?
},
clsags: (0 .. inputs).map(|_| Clsag::read(ring_length, r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, inputs, r)?,
clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
},
})
}
@@ -385,16 +382,8 @@ impl RctSignatures {
serialized
}
pub fn read<R: Read>(
ring_length: usize,
inputs: usize,
outputs: usize,
r: &mut R,
) -> io::Result<RctSignatures> {
let base = RctBase::read(inputs, outputs, r)?;
Ok(RctSignatures {
base: base.0,
prunable: RctPrunable::read(base.1, ring_length, inputs, outputs, r)?,
})
pub fn read<R: Read>(decoys: &[usize], outputs: usize, r: &mut R) -> io::Result<RctSignatures> {
let base = RctBase::read(decoys.len(), outputs, r)?;
Ok(RctSignatures { base: base.0, prunable: RctPrunable::read(base.1, decoys, outputs, r)? })
}
}

View File

@@ -9,9 +9,7 @@ use std_shims::{
use async_trait::async_trait;
use curve25519_dalek::edwards::EdwardsPoint;
use monero_generators::decompress_point;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};
@@ -54,15 +52,6 @@ struct TransactionsResponse {
txs: Vec<TransactionResponse>,
}
#[derive(Deserialize, Debug)]
pub struct OutputResponse {
pub height: usize,
pub unlocked: bool,
key: String,
mask: String,
txid: String,
}
#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum RpcError {
@@ -97,9 +86,10 @@ fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
}
fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
decompress_point(
CompressedEdwardsY(
rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
)
.decompress()
.ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
}
@@ -543,15 +533,29 @@ impl<R: RpcConnection> Rpc<R> {
Ok(distributions.distributions.swap_remove(0).distribution)
}
/// Get the specified outputs from the RingCT (zero-amount) pool
pub async fn get_outs(&self, indexes: &[u64]) -> Result<Vec<OutputResponse>, RpcError> {
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
/// timelock has been satisfied.
///
/// The timelock being satisfied is distinct from being free of the 10-block lock applied to all
/// Monero transactions.
pub async fn get_unlocked_outputs(
&self,
indexes: &[u64],
height: usize,
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
#[derive(Deserialize, Debug)]
struct OutsResponse {
status: String,
outs: Vec<OutputResponse>,
struct Out {
key: String,
mask: String,
txid: String,
}
let res: OutsResponse = self
#[derive(Deserialize, Debug)]
struct Outs {
outs: Vec<Out>,
}
let outs: Outs = self
.rpc_call(
"get_outs",
Some(json!({
@@ -564,39 +568,15 @@ impl<R: RpcConnection> Rpc<R> {
)
.await?;
if res.status != "OK" {
Err(RpcError::InvalidNode("bad response to get_outs".to_string()))?;
}
Ok(res.outs)
}
/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
/// timelock has been satisfied.
///
/// The timelock being satisfied is distinct from being free of the 10-block lock applied to all
/// Monero transactions.
pub async fn get_unlocked_outputs(
&self,
indexes: &[u64],
height: usize,
fingerprintable_canonical: bool,
) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
let outs: Vec<OutputResponse> = self.get_outs(indexes).await?;
// Only need to fetch txs to do canonical check on timelock
let txs = if fingerprintable_canonical {
self
let txs = self
.get_transactions(
&outs.iter().map(|out| hash_hex(&out.txid)).collect::<Result<Vec<_>, _>>()?,
&outs.outs.iter().map(|out| hash_hex(&out.txid)).collect::<Result<Vec<_>, _>>()?,
)
.await?
} else {
Vec::new()
};
.await?;
// TODO: https://github.com/serai-dex/serai/issues/104
outs
.outs
.iter()
.enumerate()
.map(|(i, out)| {
@@ -605,20 +585,18 @@ impl<R: RpcConnection> Rpc<R> {
// Only valid keys can be used in CLSAG proofs, hence the need for re-selection, yet
// invalid keys may honestly exist on the blockchain
// Only a recent hard fork checked output keys were valid points
let Some(key) = decompress_point(
let Some(key) = CompressedEdwardsY(
rpc_hex(&out.key)?
.try_into()
.map_err(|_| RpcError::InvalidNode("non-32-byte point".to_string()))?,
) else {
)
.decompress() else {
return Ok(None);
};
Ok(Some([key, rpc_point(&out.mask)?]).filter(|_| {
if fingerprintable_canonical {
Timelock::Block(height) >= txs[i].prefix.timelock
} else {
out.unlocked
}
}))
Ok(
Some([key, rpc_point(&out.mask)?])
.filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
)
})
.collect()
}
@@ -735,14 +713,13 @@ impl<R: RpcConnection> Rpc<R> {
&self,
address: &str,
block_count: usize,
) -> Result<(Vec<[u8; 32]>, usize), RpcError> {
) -> Result<Vec<[u8; 32]>, RpcError> {
#[derive(Debug, Deserialize)]
struct BlocksResponse {
blocks: Vec<String>,
height: usize,
}
let res = self
let block_strs = self
.json_rpc_call::<BlocksResponse>(
"generateblocks",
Some(json!({
@@ -750,12 +727,13 @@ impl<R: RpcConnection> Rpc<R> {
"amount_of_blocks": block_count
})),
)
.await?;
.await?
.blocks;
let mut blocks = Vec::with_capacity(res.blocks.len());
for block in res.blocks {
let mut blocks = Vec::with_capacity(block_strs.len());
for block in block_strs {
blocks.push(hash_hex(&block)?);
}
Ok((blocks, res.height))
Ok(blocks)
}
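For orientation, a minimal hedged usage sketch of the unlocked-output call documented in this file, written as if inside an async fn returning Result<_, RpcError>. It assumes the variant taking only indexes and a height, an already-constructed rpc, and illustrative indexes; on the other side of this diff the call also takes a fingerprintable_canonical flag.
let indexes = [1_000_000u64, 1_000_001];
let outs = rpc.get_unlocked_outputs(&indexes, height).await?;
for (index, out) in indexes.iter().zip(&outs) {
  match out {
    // [key, commitment], present only if the output's timelock is satisfied
    Some([key, _commitment]) => {
      println!("output {index} unlocked: {}", hex::encode(key.compress().to_bytes()))
    }
    None => println!("output {index} locked or invalid"),
  }
}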
}

View File

@@ -4,28 +4,19 @@ use std_shims::{
io::{self, Read, Write},
};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use monero_generators::decompress_point;
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
mod sealed {
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
const BITS: usize;
}
impl VarInt for u8 {
const BITS: usize = 8;
}
impl VarInt for u32 {
const BITS: usize = 32;
}
impl VarInt for u64 {
const BITS: usize = 64;
}
impl VarInt for usize {
const BITS: usize = core::mem::size_of::<usize>() * 8;
}
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {}
impl VarInt for u8 {}
impl VarInt for u32 {}
impl VarInt for u64 {}
impl VarInt for usize {}
}
// This will panic if the VarInt exceeds u64::MAX
@@ -111,7 +102,7 @@ pub(crate) fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U
if (bits != 0) && (b == 0) {
Err(io::Error::other("non-canonical varint"))?;
}
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
Err(io::Error::other("varint overflow"))?;
}
@@ -135,7 +126,11 @@ pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
let bytes = read_bytes(r)?;
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
.ok_or_else(|| io::Error::other("invalid point"))
}
pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
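As a reference point for the read path above, a hedged sketch of the wire format it parses: Monero varints are little-endian base-128, seven data bits per byte, with the continuation bit (0x80) set on every byte except the last.
fn encode_varint(mut n: u64) -> Vec<u8> {
  let mut out = Vec::new();
  loop {
    let mut byte = u8::try_from(n & 0x7f).unwrap();
    n >>= 7;
    if n != 0 {
      byte |= 0b1000_0000; // the VARINT_CONTINUATION_MASK above
    }
    out.push(byte);
    if n == 0 {
      return out;
    }
  }
}
// encode_varint(300) == [0xac, 0x02]; read_varint reverses this, rejecting non-canonical
// encodings (trailing zero bytes) and values which overflow the requested integer type.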

View File

@@ -2,9 +2,7 @@ use hex_literal::hex;
use rand_core::{RngCore, OsRng};
use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;
use monero_generators::decompress_point;
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, edwards::CompressedEdwardsY};
use crate::{
random_scalar,
@@ -144,8 +142,14 @@ fn featured_vectors() {
}
_ => panic!("Unknown network"),
};
let spend = decompress_point(hex::decode(vector.spend).unwrap().try_into().unwrap()).unwrap();
let view = decompress_point(hex::decode(vector.view).unwrap().try_into().unwrap()).unwrap();
let spend = CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap())
.unwrap()
.decompress()
.unwrap();
let view = CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap())
.unwrap()
.decompress()
.unwrap();
let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
assert_eq!(addr.spend, spend);

View File

@@ -1,8 +1,7 @@
use hex_literal::hex;
use rand_core::OsRng;
use curve25519_dalek::scalar::Scalar;
use monero_generators::decompress_point;
use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;
use crate::{
@@ -15,7 +14,7 @@ mod plus;
#[test]
fn bulletproofs_vector() {
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
let point = |point| decompress_point(point).unwrap();
let point = |point| CompressedEdwardsY(point).decompress().unwrap();
// Generated from Monero
assert!(Bulletproofs::Original(OriginalStruct {

View File

@@ -9,6 +9,7 @@ use dalek_ff_group::{Scalar, EdwardsPoint};
use crate::ringct::bulletproofs::plus::{
ScalarVector, PointVector, GeneratorsList, Generators,
weighted_inner_product::{WipStatement, WipWitness},
weighted_inner_product,
};
#[test]
@@ -67,7 +68,7 @@ fn test_weighted_inner_product() {
#[allow(non_snake_case)]
let P = g_bold.multiexp(&a) +
h_bold.multiexp(&b) +
(g * a.clone().weighted_inner_product(&b, &y_vec)) +
(g * weighted_inner_product(&a, &b, &y_vec)) +
(h * alpha);
let statement = WipStatement::new(generators, P, y);

View File

@@ -57,7 +57,7 @@ fn clsag() {
}
let image = generate_key_image(&secrets.0);
let (mut clsag, pseudo_out) = Clsag::sign(
let (clsag, pseudo_out) = Clsag::sign(
&mut OsRng,
vec![(
secrets.0,
@@ -76,12 +76,7 @@ fn clsag() {
msg,
)
.swap_remove(0);
clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
// make sure verification fails if we throw a random `c1` at it.
clsag.c1 = random_scalar(&mut OsRng);
assert!(clsag.verify(&ring, &image, &pseudo_out, &msg).is_err());
}
}

View File

@@ -1,158 +0,0 @@
use crate::{
wallet::{ExtraField, Extra, extra::MAX_TX_EXTRA_PADDING_COUNT},
serialize::write_varint,
};
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
// Borrowed tests from
// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
// tests/unit_tests/test_tx_utils.cpp
const PUB_KEY_BYTES: [u8; 33] = [
1, 30, 208, 98, 162, 133, 64, 85, 83, 112, 91, 188, 89, 211, 24, 131, 39, 154, 22, 228, 80, 63,
198, 141, 173, 111, 244, 183, 4, 149, 186, 140, 230,
];
fn pub_key() -> EdwardsPoint {
CompressedEdwardsY(PUB_KEY_BYTES[1 .. PUB_KEY_BYTES.len()].try_into().expect("invalid pub key"))
.decompress()
.unwrap()
}
fn test_write_buf(extra: &Extra, buf: &[u8]) {
let mut w: Vec<u8> = vec![];
Extra::write(extra, &mut w).unwrap();
assert_eq!(buf, w);
}
#[test]
fn empty_extra() {
let buf: Vec<u8> = vec![];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_size_1() {
let buf: Vec<u8> = vec![0];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(1)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_size_2() {
let buf: Vec<u8> = vec![0, 0];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(2)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_max_size() {
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Padding(MAX_TX_EXTRA_PADDING_COUNT)]);
test_write_buf(&extra, &buf);
}
#[test]
fn padding_only_exceed_max_size() {
let buf: Vec<u8> = vec![0; MAX_TX_EXTRA_PADDING_COUNT + 1];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
}
#[test]
fn invalid_padding_only() {
let buf: Vec<u8> = vec![0, 42];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
}
#[test]
fn pub_key_only() {
let buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_nonce_only() {
let buf: Vec<u8> = vec![2, 1, 42];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::Nonce(vec![42])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_nonce_only_wrong_size() {
let mut buf: Vec<u8> = vec![0; 20];
buf[0] = 2;
buf[1] = 255;
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
}
#[test]
fn pub_key_and_padding() {
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
buf.extend([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]);
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key()), ExtraField::Padding(76)]);
test_write_buf(&extra, &buf);
}
#[test]
fn pub_key_and_invalid_padding() {
let mut buf: Vec<u8> = PUB_KEY_BYTES.to_vec();
buf.extend([0, 1]);
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::PublicKey(pub_key())]);
}
#[test]
fn extra_mysterious_minergate_only() {
let buf: Vec<u8> = vec![222, 1, 42];
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![42])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_mysterious_minergate_only_large() {
let mut buf: Vec<u8> = vec![222];
write_varint(&512u64, &mut buf).unwrap();
buf.extend_from_slice(&vec![0; 512]);
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(extra.0, vec![ExtraField::MysteriousMinergate(vec![0; 512])]);
test_write_buf(&extra, &buf);
}
#[test]
fn extra_mysterious_minergate_only_wrong_size() {
let mut buf: Vec<u8> = vec![0; 20];
buf[0] = 222;
buf[1] = 255;
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert!(extra.0.is_empty());
}
#[test]
fn extra_mysterious_minergate_and_pub_key() {
let mut buf: Vec<u8> = vec![222, 1, 42];
buf.extend(PUB_KEY_BYTES.to_vec());
let extra = Extra::read::<&[u8]>(&mut buf.as_ref()).unwrap();
assert_eq!(
extra.0,
vec![ExtraField::MysteriousMinergate(vec![42]), ExtraField::PublicKey(pub_key())]
);
test_write_buf(&extra, &buf);
}

View File

@@ -3,4 +3,3 @@ mod clsag;
mod bulletproofs;
mod address;
mod seed;
mod extra;

View File

@@ -7,7 +7,7 @@ use curve25519_dalek::scalar::Scalar;
use crate::{
hash,
wallet::seed::{
Seed, SeedType, SeedError,
Seed, SeedType,
classic::{self, trim_by_lang},
polyseed,
},
@@ -137,53 +137,6 @@ fn test_classic_seed() {
spend: "647f4765b66b636ff07170ab6280a9a6804dfbaf19db2ad37d23be024a18730b".into(),
view: "045da65316a906a8c30046053119c18020b07a7a3a6ef5c01ab2a8755416bd02".into(),
},
// The following seeds require the language specification in order to calculate
// a single valid checksum
Vector {
language: classic::Language::Spanish,
seed: "pluma laico atraer pintor peor cerca balde buscar \
lancha batir nulo reloj resto gemelo nevera poder columna gol \
oveja latir amplio bolero feliz fuerza nevera"
.into(),
spend: "30303983fc8d215dd020cc6b8223793318d55c466a86e4390954f373fdc7200a".into(),
view: "97c649143f3c147ba59aa5506cc09c7992c5c219bb26964442142bf97980800e".into(),
},
Vector {
language: classic::Language::Spanish,
seed: "pluma pluma pluma pluma pluma pluma pluma pluma \
pluma pluma pluma pluma pluma pluma pluma pluma \
pluma pluma pluma pluma pluma pluma pluma pluma pluma"
.into(),
spend: "b4050000b4050000b4050000b4050000b4050000b4050000b4050000b4050000".into(),
view: "d73534f7912b395eb70ef911791a2814eb6df7ce56528eaaa83ff2b72d9f5e0f".into(),
},
Vector {
language: classic::Language::English,
seed: "plus plus plus plus plus plus plus plus \
plus plus plus plus plus plus plus plus \
plus plus plus plus plus plus plus plus plus"
.into(),
spend: "3b0400003b0400003b0400003b0400003b0400003b0400003b0400003b040000".into(),
view: "43a8a7715eed11eff145a2024ddcc39740255156da7bbd736ee66a0838053a02".into(),
},
Vector {
language: classic::Language::Spanish,
seed: "audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio audio"
.into(),
spend: "ba000000ba000000ba000000ba000000ba000000ba000000ba000000ba000000".into(),
view: "1437256da2c85d029b293d8c6b1d625d9374969301869b12f37186e3f906c708".into(),
},
Vector {
language: classic::Language::English,
seed: "audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio \
audio audio audio audio audio audio audio audio audio"
.into(),
spend: "7900000079000000790000007900000079000000790000007900000079000000".into(),
view: "20bec797ab96780ae6a045dd816676ca7ed1d7c6773f7022d03ad234b581d600".into(),
},
];
for vector in vectors {
@@ -197,15 +150,15 @@ fn test_classic_seed() {
// Test against Monero
{
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
let seed =
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(vector.seed.clone()))
.unwrap();
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
let trim = trim_seed(&vector.seed);
assert_eq!(
seed,
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(trim)).unwrap()
println!(
"{}. seed: {}, entropy: {:?}, trim: {trim}",
line!(),
*seed.to_string(),
*seed.entropy()
);
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim)).unwrap());
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
// For classical seeds, Monero directly uses the entropy as a spend key
@@ -231,20 +184,19 @@ fn test_classic_seed() {
// Test against ourselves
{
let seed = Seed::new(&mut OsRng, SeedType::Classic(vector.language));
println!("{}. seed: {}", line!(), *seed.to_string());
let trim = trim_seed(&seed.to_string());
assert_eq!(
seed,
Seed::from_string(SeedType::Classic(vector.language), Zeroizing::new(trim)).unwrap()
println!(
"{}. seed: {}, entropy: {:?}, trim: {trim}",
line!(),
*seed.to_string(),
*seed.entropy()
);
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim)).unwrap());
assert_eq!(
seed,
Seed::from_entropy(SeedType::Classic(vector.language), seed.entropy(), None).unwrap()
);
assert_eq!(
seed,
Seed::from_string(SeedType::Classic(vector.language), seed.to_string()).unwrap()
);
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
}
}
}
@@ -357,18 +309,6 @@ fn test_polyseed() {
has_prefix: false,
has_accent: false,
},
// The following seed requires the language specification in order to calculate
// a single valid checksum
Vector {
language: polyseed::Language::Spanish,
seed: "impo sort usua cabi venu nobl oliv clim \
cont barr marc auto prod vaca torn fati"
.into(),
entropy: "dbfce25fe09b68a340e01c62417eeef43ad51800000000000000000000000000".into(),
birthday: 1701511650,
has_prefix: true,
has_accent: true,
},
];
for vector in vectors {
@@ -410,32 +350,31 @@ fn test_polyseed() {
};
// String -> Seed
println!("{}. language: {:?}, seed: {}", line!(), vector.language, vector.seed.clone());
let seed =
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(vector.seed.clone()))
.unwrap();
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();
let trim = trim_seed(&vector.seed);
let add_whitespace = add_whitespace(vector.seed.clone());
let seed_without_accents = seed_without_accents(&vector.seed);
println!(
"{}. seed: {}, entropy: {:?}, trim: {}, add_whitespace: {}, seed_without_accents: {}",
line!(),
*seed.to_string(),
*seed.entropy(),
trim,
add_whitespace,
seed_without_accents,
);
// Make sure a version with added whitespace still works
let whitespaced_seed =
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(add_whitespace))
.unwrap();
let whitespaced_seed = Seed::from_string(Zeroizing::new(add_whitespace)).unwrap();
assert_eq!(seed, whitespaced_seed);
// Check trimmed versions works
if vector.has_prefix {
let trimmed_seed =
Seed::from_string(SeedType::Polyseed(vector.language), Zeroizing::new(trim)).unwrap();
let trimmed_seed = Seed::from_string(Zeroizing::new(trim)).unwrap();
assert_eq!(seed, trimmed_seed);
}
// Check versions without accents work
if vector.has_accent {
let seed_without_accents = Seed::from_string(
SeedType::Polyseed(vector.language),
Zeroizing::new(seed_without_accents),
)
.unwrap();
let seed_without_accents = Seed::from_string(Zeroizing::new(seed_without_accents)).unwrap();
assert_eq!(seed, seed_without_accents);
}
@@ -452,11 +391,8 @@ fn test_polyseed() {
// Check against ourselves
{
let seed = Seed::new(&mut OsRng, SeedType::Polyseed(vector.language));
println!("{}. seed: {}", line!(), *seed.to_string());
assert_eq!(
seed,
Seed::from_string(SeedType::Polyseed(vector.language), seed.to_string()).unwrap()
);
println!("{}. seed: {}, key: {:?}", line!(), *seed.to_string(), *seed.key());
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
assert_eq!(
seed,
Seed::from_entropy(
@@ -469,14 +405,3 @@ fn test_polyseed() {
}
}
}
#[test]
fn test_invalid_polyseed() {
// This seed includes unsupported features bits and should error on decode
let seed = "include domain claim resemble urban hire lunch bird \
crucial fire best wife ring warm ignore model"
.into();
let res =
Seed::from_string(SeedType::Polyseed(polyseed::Language::English), Zeroizing::new(seed));
assert_eq!(res, Err(SeedError::UnsupportedFeatures));
}

View File

@@ -161,9 +161,7 @@ impl Timelock {
impl PartialOrd for Timelock {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match (self, other) {
(Timelock::None, Timelock::None) => Some(Ordering::Equal),
(Timelock::None, _) => Some(Ordering::Less),
(_, Timelock::None) => Some(Ordering::Greater),
(Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
(Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
_ => None,
@@ -333,11 +331,14 @@ impl Transaction {
}
} else if prefix.version == 2 {
rct_signatures = RctSignatures::read(
prefix.inputs.first().map_or(0, |input| match input {
&prefix
.inputs
.iter()
.map(|input| match input {
Input::Gen(_) => 0,
Input::ToKey { key_offsets, .. } => key_offsets.len(),
}),
prefix.inputs.len(),
})
.collect::<Vec<_>>(),
prefix.outputs.len(),
r,
)?;
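To make the ordering semantics concrete, a hedged sketch of the unlocked check this ordering supports (mirroring the Timelock::Block(height) >= timelock comparison in the RPC code earlier in this diff; Block and Time locks are incomparable, so >= is false across kinds, and the treatment of Timelock::None is exactly what the first hunk of this file changes):
fn block_lock_satisfied(lock: Timelock, height: usize) -> bool {
  // true for Timelock::Block(h) with h <= height, false for timestamp locks
  Timelock::Block(height) >= lock
}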

View File

@@ -3,9 +3,7 @@ use std_shims::string::{String, ToString};
use zeroize::Zeroize;
use curve25519_dalek::edwards::EdwardsPoint;
use monero_generators::decompress_point;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use base58_monero::base58::{encode_check, decode_check};
@@ -242,10 +240,12 @@ impl<B: AddressBytes> Address<B> {
}
let mut meta = AddressMeta::from_byte(raw[0])?;
let spend =
decompress_point(raw[1 .. 33].try_into().unwrap()).ok_or(AddressError::InvalidKey)?;
let view =
decompress_point(raw[33 .. 65].try_into().unwrap()).ok_or(AddressError::InvalidKey)?;
let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
.decompress()
.ok_or(AddressError::InvalidKey)?;
let mut read = 65;
if matches!(meta.kind, AddressType::Featured { .. }) {

View File

@@ -21,13 +21,15 @@ use crate::{
serialize::varint_len,
wallet::SpendableOutput,
rpc::{RpcError, RpcConnection, Rpc},
DEFAULT_LOCK_WINDOW, COINBASE_LOCK_WINDOW, BLOCK_TIME,
};
const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
const RECENT_WINDOW: usize = 15;
const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
#[allow(clippy::cast_precision_loss)]
const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;
// TODO: Resolve safety of this in case a reorg occurs/the network changes
// TODO: Update this when scanning a block, as possible
@@ -50,10 +52,8 @@ async fn select_n<'a, R: RngCore + CryptoRng, RPC: RpcConnection>(
real: &[u64],
used: &mut HashSet<u64>,
count: usize,
fingerprintable_canonical: bool,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
// TODO: consider removing this extra RPC and expecting the caller to handle it
if fingerprintable_canonical && height > rpc.get_height().await? {
if height >= rpc.get_height().await? {
// TODO: Don't use InternalError for the caller's failure
Err(RpcError::InternalError("decoys being requested from too young blocks"))?;
}
@@ -64,8 +64,6 @@ async fn select_n<'a, R: RngCore + CryptoRng, RPC: RpcConnection>(
// Retries on failure. Retries are obvious as decoys, yet should be minimal
while confirmed.len() != count {
let remaining = count - confirmed.len();
// TODO: over-request candidates in case some are locked to avoid needing
// round trips to the daemon (and revealing obvious decoys to the daemon)
let mut candidates = Vec::with_capacity(remaining);
while candidates.len() != remaining {
#[cfg(test)]
@@ -119,14 +117,7 @@ async fn select_n<'a, R: RngCore + CryptoRng, RPC: RpcConnection>(
}
}
// TODO: make sure that the real output is included in the response, and
// that mask and key are equal to expected
for (i, output) in rpc
.get_unlocked_outputs(&candidates, height, fingerprintable_canonical)
.await?
.iter_mut()
.enumerate()
{
for (i, output) in rpc.get_unlocked_outputs(&candidates, height).await?.iter_mut().enumerate() {
// Don't include the real spend as a decoy, despite requesting it
if real_indexes.contains(&i) {
continue;
@@ -150,14 +141,32 @@ fn offset(ring: &[u64]) -> Vec<u64> {
res
}
async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
pub(crate) i: u8,
pub(crate) offsets: Vec<u64>,
pub(crate) ring: Vec<[EdwardsPoint; 2]>,
}
#[allow(clippy::len_without_is_empty)]
impl Decoys {
pub fn fee_weight(offsets: &[u64]) -> usize {
varint_len(offsets.len()) + offsets.iter().map(|offset| varint_len(*offset)).sum::<usize>()
}
pub fn len(&self) -> usize {
self.offsets.len()
}
/// Select decoys using the same distribution as Monero.
pub async fn select<R: RngCore + CryptoRng, RPC: RpcConnection>(
rng: &mut R,
rpc: &Rpc<RPC>,
ring_len: usize,
height: usize,
inputs: &[SpendableOutput],
fingerprintable_canonical: bool,
) -> Result<Vec<Decoys>, RpcError> {
) -> Result<Vec<Decoys>, RpcError> {
#[cfg(feature = "cache-distribution")]
#[cfg(not(feature = "std"))]
let mut distribution = DISTRIBUTION().lock();
@@ -178,25 +187,19 @@ async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
}
if distribution.len() < height {
// TODO: verify distribution elems are strictly increasing
let extension =
rpc.get_output_distribution(distribution.len(), height.saturating_sub(1)).await?;
if distribution.len() <= height {
let extension = rpc.get_output_distribution(distribution.len(), height).await?;
distribution.extend(extension);
}
// If asked to use an older height than previously asked, truncate to ensure accuracy
// Should never happen, yet risks desyncing if it did
distribution.truncate(height);
if distribution.len() < DEFAULT_LOCK_WINDOW {
Err(RpcError::InternalError("not enough decoy candidates"))?;
}
distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height
let high = distribution[distribution.len() - 1];
#[allow(clippy::cast_precision_loss)]
let per_second = {
let blocks = distribution.len().min(BLOCKS_PER_YEAR);
let initial = distribution[distribution.len().saturating_sub(blocks + 1)];
let outputs = distribution[distribution.len() - 1].saturating_sub(initial);
let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
(outputs as f64) / ((blocks * BLOCK_TIME) as f64)
};
@@ -206,11 +209,8 @@ async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
}
// TODO: Create a TX with less than the target amount, as allowed by the protocol
let high = distribution[distribution.len() - DEFAULT_LOCK_WINDOW];
if high.saturating_sub(COINBASE_LOCK_WINDOW as u64) <
u64::try_from(inputs.len() * ring_len).unwrap()
{
Err(RpcError::InternalError("not enough coinbase candidates"))?;
if (high - MATURITY) < u64::try_from(inputs.len() * ring_len).unwrap() {
Err(RpcError::InternalError("not enough decoy candidates"))?;
}
// Select all decoys for this transaction, assuming we generate a sane transaction
@@ -226,7 +226,6 @@ async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
&real,
&mut used,
inputs.len() * decoy_count,
fingerprintable_canonical,
)
.await?;
real.zeroize();
@@ -276,7 +275,6 @@ async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
&[],
&mut used,
ring_len - ring.len(),
fingerprintable_canonical,
)
.await?,
);
@@ -296,61 +294,5 @@ async fn select_decoys<R: RngCore + CryptoRng, RPC: RpcConnection>(
}
Ok(res)
}
/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
pub(crate) i: u8,
pub(crate) offsets: Vec<u64>,
pub(crate) ring: Vec<[EdwardsPoint; 2]>,
}
#[allow(clippy::len_without_is_empty)]
impl Decoys {
pub fn fee_weight(offsets: &[u64]) -> usize {
varint_len(offsets.len()) + offsets.iter().map(|offset| varint_len(*offset)).sum::<usize>()
}
pub fn len(&self) -> usize {
self.offsets.len()
}
pub fn indexes(&self) -> Vec<u64> {
let mut res = vec![self.offsets[0]; self.len()];
for m in 1 .. res.len() {
res[m] = res[m - 1] + self.offsets[m];
}
res
}
/// Select decoys using the same distribution as Monero. Relies on the monerod RPC
/// response for an output's unlocked status, minimizing trips to the daemon.
pub async fn select<R: RngCore + CryptoRng, RPC: RpcConnection>(
rng: &mut R,
rpc: &Rpc<RPC>,
ring_len: usize,
height: usize,
inputs: &[SpendableOutput],
) -> Result<Vec<Decoys>, RpcError> {
select_decoys(rng, rpc, ring_len, height, inputs, false).await
}
/// If no reorg has occurred and the RPC is honest, any caller who passes the same height to this
/// function will use the same distribution to select decoys. It is fingerprintable
/// because a caller using this will not be able to select decoys that are timelocked
/// with a timestamp. Any transaction which includes timestamp timelocked decoys in its
/// rings could not be constructed using this function.
///
/// TODO: upstream change to monerod get_outs RPC to accept a height param for checking
/// output's unlocked status and remove all usage of fingerprintable_canonical
pub async fn fingerprintable_canonical_select<R: RngCore + CryptoRng, RPC: RpcConnection>(
rng: &mut R,
rpc: &Rpc<RPC>,
ring_len: usize,
height: usize,
inputs: &[SpendableOutput],
) -> Result<Vec<Decoys>, RpcError> {
select_decoys(rng, rpc, ring_len, height, inputs, true).await
}
}
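As a usage note: both forms of selection shown above share the same five-argument signature and differ only in how an output's unlocked status is checked. A minimal sketch of pairing real spends with their decoys for the transaction builder, assuming the `Rpc`, `SpendableOutput`, and `Decoys` items as they appear in this diff (import paths, the height semantics, and the surrounding wallet state are assumptions):

use rand_core::OsRng;
use monero_serai::{
  rpc::{RpcError, RpcConnection, Rpc},
  wallet::{SpendableOutput, Decoys},
};

// Select a ring's worth of decoys for each spendable output, then pair each real
// output with its decoys in the shape the transaction builder's add_inputs expects.
async fn decoys_for<RPC: RpcConnection>(
  rpc: &Rpc<RPC>,
  ring_len: usize,
  height: usize,
  outputs: &[SpendableOutput],
) -> Result<Vec<(SpendableOutput, Decoys)>, RpcError> {
  // The height fixes the output distribution used for selection
  let decoys = Decoys::select(&mut OsRng, rpc, ring_len, height, outputs).await?;
  Ok(outputs.iter().cloned().zip(decoys).collect())
}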

View File

@@ -1,7 +1,7 @@
use core::ops::BitXor;
use std_shims::{
vec::Vec,
io::{self, Read, BufRead, Write},
io::{self, Read, Write},
};
use zeroize::Zeroize;
@@ -13,7 +13,6 @@ use crate::serialize::{
write_point, write_vec,
};
pub const MAX_TX_EXTRA_PADDING_COUNT: usize = 255;
pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;
pub const PAYMENT_ID_MARKER: u8 = 0;
@@ -71,23 +70,15 @@ impl PaymentId {
// Doesn't bother with padding or MinerGate
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub enum ExtraField {
Padding(usize),
PublicKey(EdwardsPoint),
Nonce(Vec<u8>),
MergeMining(usize, [u8; 32]),
PublicKeys(Vec<EdwardsPoint>),
MysteriousMinergate(Vec<u8>),
}
impl ExtraField {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
match self {
ExtraField::Padding(size) => {
w.write_all(&[0])?;
for _ in 1 .. *size {
write_byte(&0u8, w)?;
}
}
ExtraField::PublicKey(key) => {
w.write_all(&[1])?;
w.write_all(&key.compress().to_bytes())?;
@@ -105,39 +96,12 @@ impl ExtraField {
w.write_all(&[4])?;
write_vec(write_point, keys, w)?;
}
ExtraField::MysteriousMinergate(data) => {
w.write_all(&[0xDE])?;
write_vec(write_byte, data, w)?;
}
}
Ok(())
}
pub fn read<R: BufRead>(r: &mut R) -> io::Result<ExtraField> {
pub fn read<R: Read>(r: &mut R) -> io::Result<ExtraField> {
Ok(match read_byte(r)? {
0 => ExtraField::Padding({
// Read until either non-zero, max padding count, or end of buffer
let mut size: usize = 1;
loop {
let buf = r.fill_buf()?;
let mut n_consume = 0;
for v in buf {
if *v != 0u8 {
Err(io::Error::other("non-zero value after padding"))?
}
n_consume += 1;
size += 1;
if size > MAX_TX_EXTRA_PADDING_COUNT {
Err(io::Error::other("padding exceeded max count"))?
}
}
if n_consume == 0 {
break;
}
r.consume(n_consume);
}
size
}),
1 => ExtraField::PublicKey(read_point(r)?),
2 => ExtraField::Nonce({
let nonce = read_vec(read_byte, r)?;
@@ -148,21 +112,20 @@ impl ExtraField {
}),
3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?),
4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
0xDE => ExtraField::MysteriousMinergate(read_vec(read_byte, r)?),
_ => Err(io::Error::other("unknown extra field"))?,
})
}
}
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Extra(pub(crate) Vec<ExtraField>);
pub struct Extra(Vec<ExtraField>);
impl Extra {
pub fn keys(&self) -> Option<(Vec<EdwardsPoint>, Option<Vec<EdwardsPoint>>)> {
let mut keys = vec![];
pub fn keys(&self) -> Option<(EdwardsPoint, Option<Vec<EdwardsPoint>>)> {
let mut key = None;
let mut additional = None;
for field in &self.0 {
match field.clone() {
ExtraField::PublicKey(this_key) => keys.push(this_key),
ExtraField::PublicKey(this_key) => key = key.or(Some(this_key)),
ExtraField::PublicKeys(these_additional) => {
additional = additional.or(Some(these_additional))
}
@@ -170,11 +133,7 @@ impl Extra {
}
}
// Don't return any keys if this was non-standard and didn't include the primary key
if keys.is_empty() {
None
} else {
Some((keys, additional))
}
key.map(|key| (key, additional))
}
pub fn payment_id(&self) -> Option<PaymentId> {
@@ -241,7 +200,7 @@ impl Extra {
buf
}
pub fn read<R: BufRead>(r: &mut R) -> io::Result<Extra> {
pub fn read<R: Read>(r: &mut R) -> io::Result<Extra> {
let mut res = Extra(vec![]);
let mut field;
while {
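For context on the fields above, a rough sketch of how a consumer reads `Extra` back out of a transaction, assuming the right-hand side of these hunks where `keys()` yields a single primary key plus optional additional keys (the `hex` crate and the exact import paths are assumptions):

use monero_serai::{transaction::Transaction, wallet::extra::{Extra, PaymentId}};

// Parse tx.prefix.extra and report the transaction keys and any encrypted payment ID
fn describe_extra(tx: &Transaction) {
  let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()) else {
    // An unparseable extra simply means there's nothing here for a scanner to use
    return;
  };
  if let Some((key, additional)) = extra.keys() {
    println!("tx key: {}", hex::encode(key.compress().to_bytes()));
    println!("additional keys: {}", additional.map_or(0, |keys| keys.len()));
  }
  if let Some(PaymentId::Encrypted(id)) = extra.payment_id() {
    println!("encrypted payment ID: {}", hex::encode(id));
  }
}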

View File

@@ -9,8 +9,6 @@ use zeroize::{Zeroize, ZeroizeOnDrop};
use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};
use monero_generators::decompress_point;
use crate::{
Commitment,
serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
@@ -101,18 +99,10 @@ pub struct Metadata {
/// The subaddress this output was sent to.
pub subaddress: Option<SubaddressIndex>,
/// The payment ID included with this output.
/// There are 2 circumstances in which the reference wallet2 ignores the payment ID
/// but the payment ID will be returned here anyway:
///
/// 1) If the payment ID is tied to an output received by a subaddress account
/// that spent Monero in the transaction (the received output is considered
/// "change" and is not considered a "payment" in this case). If there are multiple
/// spending subaddress accounts in a transaction, the highest index spent key image
/// is used to determine the spending subaddress account.
///
/// 2) If the payment ID is the unencrypted variant and the block's hf version is
/// v12 or higher (https://github.com/serai-dex/serai/issues/512)
pub payment_id: Option<PaymentId>,
/// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't included.
// Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should
// have this, making it simplest for it to be as-is.
pub payment_id: [u8; 8],
/// Arbitrary data encoded in TX extra.
pub arbitrary_data: Vec<Vec<u8>>,
}
@@ -122,7 +112,7 @@ impl core::fmt::Debug for Metadata {
fmt
.debug_struct("Metadata")
.field("subaddress", &self.subaddress)
.field("payment_id", &self.payment_id)
.field("payment_id", &hex::encode(self.payment_id))
.field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::<Vec<_>>())
.finish()
}
@@ -137,13 +127,7 @@ impl Metadata {
} else {
w.write_all(&[0])?;
}
if let Some(payment_id) = self.payment_id {
w.write_all(&[1])?;
payment_id.write(w)?;
} else {
w.write_all(&[0])?;
}
w.write_all(&self.payment_id)?;
w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
for part in &self.arbitrary_data {
@@ -171,7 +155,7 @@ impl Metadata {
Ok(Metadata {
subaddress,
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
payment_id: read_bytes(r)?,
arbitrary_data: {
let mut data = vec![];
for _ in 0 .. read_u32(r)? {
@@ -350,7 +334,7 @@ impl Scanner {
return Timelocked(tx.prefix.timelock, vec![]);
};
let Some((tx_keys, additional)) = extra.keys() else {
let Some((tx_key, additional)) = extra.keys() else {
return Timelocked(tx.prefix.timelock, vec![]);
};
@@ -365,15 +349,13 @@ impl Scanner {
}
}
let output_key = decompress_point(output.key.to_bytes());
let output_key = output.key.decompress();
if output_key.is_none() {
continue;
}
let output_key = output_key.unwrap();
let additional = additional.as_ref().map(|additional| additional.get(o));
for key in tx_keys.iter().map(|key| Some(Some(key))).chain(core::iter::once(additional)) {
for key in [Some(Some(&tx_key)), additional.as_ref().map(|additional| additional.get(o))] {
let key = match key {
Some(Some(key)) => key,
Some(None) => {
@@ -381,6 +363,7 @@ impl Scanner {
// https://github.com/monero-project/monero/
// blob/04a1e2875d6e35e27bb21497988a6c822d319c28/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L1062
// TODO: Should this return? Where does Monero set the trap handler for this exception?
continue;
}
None => {
@@ -393,7 +376,12 @@ impl Scanner {
o,
);
let payment_id = payment_id.map(|id| id ^ payment_id_xor);
let payment_id =
if let Some(PaymentId::Encrypted(id)) = payment_id.map(|id| id ^ payment_id_xor) {
id
} else {
payment_id_xor
};
if let Some(actual_view_tag) = output.view_tag {
if actual_view_tag != view_tag {
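A sketch of what a caller sees on the receiving side after the Metadata change above, assuming the right-hand side of these hunks (where `payment_id` is a raw `[u8; 8]`) and a `Scanner` already configured for the wallet's view pair; the variable names are hypothetical:

use monero_serai::{transaction::Transaction, wallet::Scanner};

// Scan a transaction and print the amount, payment ID, and subaddress of each output
fn inspect(scanner: &mut Scanner, tx: &Transaction) {
  for output in scanner.scan_transaction(tx).not_locked() {
    println!("amount: {}", output.commitment().amount);
    // The payment ID is always 8 bytes here; it's only meaningful if the sender
    // actually addressed us with one, and decrypts to gibberish otherwise
    println!("payment ID: {}", hex::encode(output.metadata.payment_id));
    if let Some(subaddress) = output.metadata.subaddress {
      println!("received by subaddress {:?}", subaddress);
    }
  }
}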

View File

@@ -16,7 +16,7 @@ use crate::{random_scalar, wallet::seed::SeedError};
pub(crate) const CLASSIC_SEED_LENGTH: usize = 24;
pub(crate) const CLASSIC_SEED_LENGTH_WITH_CHECKSUM: usize = 25;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash, Zeroize)]
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Language {
Chinese,
English,
@@ -184,58 +184,63 @@ fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
}
*res += word;
}
ClassicSeed(lang, res)
ClassicSeed(res)
}
// Convert a seed to bytes
pub(crate) fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8; 32]>, SeedError> {
pub(crate) fn seed_to_bytes(words: &str) -> Result<(Language, Zeroizing<[u8; 32]>), SeedError> {
// get seed words
let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
if (words.len() != CLASSIC_SEED_LENGTH) && (words.len() != CLASSIC_SEED_LENGTH_WITH_CHECKSUM) {
panic!("invalid seed passed to seed_to_bytes");
}
let has_checksum = words.len() == CLASSIC_SEED_LENGTH_WITH_CHECKSUM;
if has_checksum && lang == Language::EnglishOld {
Err(SeedError::EnglishOldWithChecksum)?;
}
// Validate words are in the language word list
let lang_word_list: &WordList = &LANGUAGES()[&lang];
let matched_indices = (|| {
// find the language
let (matched_indices, lang_name, lang) = (|| {
let has_checksum = words.len() == CLASSIC_SEED_LENGTH_WITH_CHECKSUM;
let mut matched_indices = Zeroizing::new(vec![]);
// Iterate through all the languages
'language: for (lang_name, lang) in LANGUAGES() {
matched_indices.zeroize();
matched_indices.clear();
// Iterate through all the words and see if they're all present
for word in &words {
let trimmed = trim(word, lang_word_list.unique_prefix_length);
let trimmed = trim(word, lang.unique_prefix_length);
let word = if has_checksum { &trimmed } else { word };
if let Some(index) = if has_checksum {
lang_word_list.trimmed_word_map.get(word.deref())
lang.trimmed_word_map.get(word.deref())
} else {
lang_word_list.word_map.get(&word.as_str())
lang.word_map.get(&word.as_str())
} {
matched_indices.push(*index);
} else {
Err(SeedError::InvalidSeed)?;
continue 'language;
}
}
if has_checksum {
if lang_name == &Language::EnglishOld {
Err(SeedError::EnglishOldWithChecksum)?;
}
// exclude the last word when calculating a checksum.
let last_word = words.last().unwrap().clone();
let checksum = words[checksum_index(&words[.. words.len() - 1], lang_word_list)].clone();
let checksum = words[checksum_index(&words[.. words.len() - 1], lang)].clone();
// check the trimmed checksum and trimmed last word line up
if trim(&checksum, lang_word_list.unique_prefix_length) !=
trim(&last_word, lang_word_list.unique_prefix_length)
if trim(&checksum, lang.unique_prefix_length) != trim(&last_word, lang.unique_prefix_length)
{
Err(SeedError::InvalidChecksum)?;
}
}
Ok(matched_indices)
return Ok((matched_indices, lang_name, lang));
}
Err(SeedError::UnknownLanguage)?
})()?;
// convert to bytes
@@ -249,17 +254,16 @@ pub(crate) fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8
indices[3] = matched_indices[i3 + 2];
let inner = |i| {
let mut base = (lang_word_list.word_list.len() - indices[i] + indices[i + 1]) %
lang_word_list.word_list.len();
let mut base = (lang.word_list.len() - indices[i] + indices[i + 1]) % lang.word_list.len();
// Shift the index over
for _ in 0 .. i {
base *= lang_word_list.word_list.len();
base *= lang.word_list.len();
}
base
};
// set the last index
indices[0] = indices[1] + inner(1) + inner(2);
if (indices[0] % lang_word_list.word_list.len()) != indices[1] {
if (indices[0] % lang.word_list.len()) != indices[1] {
Err(SeedError::InvalidSeed)?;
}
@@ -269,19 +273,19 @@ pub(crate) fn seed_to_bytes(lang: Language, words: &str) -> Result<Zeroizing<[u8
bytes.zeroize();
}
Ok(res)
Ok((*lang_name, res))
}
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct ClassicSeed(Language, Zeroizing<String>);
pub struct ClassicSeed(Zeroizing<String>);
impl ClassicSeed {
pub(crate) fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> ClassicSeed {
key_to_seed(lang, Zeroizing::new(random_scalar(rng)))
}
#[allow(clippy::needless_pass_by_value)]
pub fn from_string(lang: Language, words: Zeroizing<String>) -> Result<ClassicSeed, SeedError> {
let entropy = seed_to_bytes(lang, &words)?;
pub fn from_string(words: Zeroizing<String>) -> Result<ClassicSeed, SeedError> {
let (lang, entropy) = seed_to_bytes(&words)?;
// Make sure this is a valid scalar
let scalar = Scalar::from_canonical_bytes(*entropy);
@@ -302,10 +306,10 @@ impl ClassicSeed {
}
pub(crate) fn to_string(&self) -> Zeroizing<String> {
self.1.clone()
self.0.clone()
}
pub(crate) fn entropy(&self) -> Zeroizing<[u8; 32]> {
seed_to_bytes(self.0, &self.1).unwrap()
seed_to_bytes(&self.0).unwrap().1
}
}
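The triplet arithmetic in `seed_to_bytes` is easier to follow in isolation. A standalone sketch of decoding one 4-byte chunk from three word indices, mirroring the `inner` closure above (n is the word list length, 1626 for the classic word lists; the helper is illustrative, not part of the crate):

// Decode one chunk from word indices (w1, w2, w3) against a word list of length n.
// Returns None when the redundancy check (val % n == w1) fails, as the code above
// does via SeedError::InvalidSeed.
fn chunk_from_words(w1: usize, w2: usize, w3: usize, n: usize) -> Option<u32> {
  let val = w1 + (n * ((n - w1 + w2) % n)) + (n * n * ((n - w2 + w3) % n));
  if (val % n) != w1 {
    return None;
  }
  u32::try_from(val).ok()
}

// With a toy word list of length 7, words (3, 5, 2) decode to 3 + 7*2 + 49*4 = 213,
// and 213 % 7 == 3 matches the first word, so the chunk is accepted.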

View File

@@ -61,23 +61,13 @@ impl Seed {
}
/// Parse a seed from a `String`.
pub fn from_string(seed_type: SeedType, words: Zeroizing<String>) -> Result<Seed, SeedError> {
let word_count = words.split_whitespace().count();
match seed_type {
SeedType::Classic(lang) => {
if word_count != CLASSIC_SEED_LENGTH && word_count != CLASSIC_SEED_LENGTH_WITH_CHECKSUM {
Err(SeedError::InvalidSeedLength)?
} else {
ClassicSeed::from_string(lang, words).map(Seed::Classic)
}
}
SeedType::Polyseed(lang) => {
if word_count != POLYSEED_LENGTH {
Err(SeedError::InvalidSeedLength)?
} else {
Polyseed::from_string(lang, words).map(Seed::Polyseed)
}
pub fn from_string(words: Zeroizing<String>) -> Result<Seed, SeedError> {
match words.split_whitespace().count() {
CLASSIC_SEED_LENGTH | CLASSIC_SEED_LENGTH_WITH_CHECKSUM => {
ClassicSeed::from_string(words).map(Seed::Classic)
}
POLYSEED_LENGTH => Polyseed::from_string(words).map(Seed::Polyseed),
_ => Err(SeedError::InvalidSeedLength)?,
}
}
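A small sketch of the reworked entry point, assuming the crate exposes these items under `wallet::seed` as elsewhere in this diff; the mnemonic string is a placeholder, not a real seed:

use zeroize::Zeroizing;
use monero_serai::wallet::seed::{Seed, SeedError};

// The word count alone now picks the seed type, and the language is detected from the
// words themselves rather than passed in.
fn parse(mnemonic: &str) -> Result<Seed, SeedError> {
  let seed = Seed::from_string(Zeroizing::new(mnemonic.to_string()))?;
  // 24/25 words parse as Classic, a polyseed-length mnemonic parses as Polyseed,
  // and any other length is SeedError::InvalidSeedLength
  if matches!(&seed, Seed::Classic(_)) {
    println!("classic seed");
  }
  Ok(seed)
}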

View File

@@ -217,31 +217,6 @@ impl Polyseed {
poly
}
fn from_internal(
language: Language,
masked_features: u8,
encoded_birthday: u16,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, SeedError> {
if !polyseed_features_supported(masked_features) {
Err(SeedError::UnsupportedFeatures)?;
}
if !valid_entropy(&entropy) {
Err(SeedError::InvalidEntropy)?;
}
let mut res = Polyseed {
language,
birthday: encoded_birthday,
features: masked_features,
entropy,
checksum: 0,
};
res.checksum = poly_eval(&res.to_poly());
Ok(res)
}
/// Create a new `Polyseed` with specific internals.
///
/// `birthday` is defined in seconds since the Unix epoch.
@@ -251,7 +226,20 @@ impl Polyseed {
birthday: u64,
entropy: Zeroizing<[u8; 32]>,
) -> Result<Polyseed, SeedError> {
Self::from_internal(language, user_features(features), birthday_encode(birthday), entropy)
let features = user_features(features);
if !polyseed_features_supported(features) {
Err(SeedError::UnsupportedFeatures)?;
}
let birthday = birthday_encode(birthday);
if !valid_entropy(&entropy) {
Err(SeedError::InvalidEntropy)?;
}
let mut res = Polyseed { language, birthday, features, entropy, checksum: 0 };
res.checksum = poly_eval(&res.to_poly());
Ok(res)
}
/// Create a new `Polyseed`.
@@ -275,12 +263,11 @@ impl Polyseed {
/// Create a new `Polyseed` from a String.
#[allow(clippy::needless_pass_by_value)]
pub fn from_string(lang: Language, seed: Zeroizing<String>) -> Result<Polyseed, SeedError> {
pub fn from_string(seed: Zeroizing<String>) -> Result<Polyseed, SeedError> {
// Decode the seed into its polynomial coefficients
let mut poly = [0; POLYSEED_LENGTH];
// Validate words are in the lang word list
let lang_word_list: &WordList = &LANGUAGES()[&lang];
let lang = (|| {
'language: for (name, lang) in LANGUAGES() {
for (i, word) in seed.split_whitespace().enumerate() {
// Find the word's index
fn check_if_matches<S: AsRef<str>, I: Iterator<Item = S>>(
@@ -315,23 +302,29 @@ impl Polyseed {
}
}
let Some(coeff) = (if lang_word_list.has_accent {
let Some(coeff) = (if lang.has_accent {
let ascii = |word: &str| word.chars().filter(char::is_ascii).collect::<String>();
check_if_matches(
lang_word_list.has_prefix,
lang_word_list.words.iter().map(|lang_word| ascii(lang_word)),
lang.has_prefix,
lang.words.iter().map(|lang_word| ascii(lang_word)),
&ascii(word),
)
} else {
check_if_matches(lang_word_list.has_prefix, lang_word_list.words.iter(), word)
check_if_matches(lang.has_prefix, lang.words.iter(), word)
}) else {
Err(SeedError::InvalidSeed)?
continue 'language;
};
// WordList asserts the word list length is less than u16::MAX
poly[i] = u16::try_from(coeff).expect("coeff exceeded u16");
}
return Ok(*name);
}
Err(SeedError::UnknownLanguage)
})()?;
// xor out the coin
poly[POLY_NUM_CHECK_DIGITS] ^= COIN;
@@ -382,7 +375,7 @@ impl Polyseed {
let features =
u8::try_from(extra >> DATE_BITS).expect("couldn't convert extra >> DATE_BITS to u8");
let res = Self::from_internal(lang, features, birthday, entropy);
let res = Polyseed::from(lang, features, birthday_decode(birthday), entropy);
if let Ok(res) = res.as_ref() {
debug_assert_eq!(res.checksum, checksum);
}
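The `from_string` change above drops the explicit language argument in favor of scanning every word list with a labeled loop. A minimal standalone sketch of that detection pattern over generic word lists (this is not the crate's `LANGUAGES()` map, only an illustration of the control flow):

use std::collections::HashMap;

// Try each candidate word list; the first one containing every word wins.
fn detect_language<'a>(
  words: &[&str],
  word_lists: &'a HashMap<&'a str, Vec<&'a str>>,
) -> Option<(&'a str, Vec<usize>)> {
  'language: for (name, list) in word_lists {
    let mut indices = Vec::with_capacity(words.len());
    for word in words {
      match list.iter().position(|candidate| candidate == word) {
        Some(index) => indices.push(index),
        // A single unknown word rules this language out, so move on to the next one
        None => continue 'language,
      }
    }
    return Some((*name, indices));
  }
  None
}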

View File

@@ -69,23 +69,16 @@ impl SendOutput {
#[allow(non_snake_case)]
fn internal(
unique: [u8; 32],
output: (usize, (MoneroAddress, u64), bool),
output: (usize, (MoneroAddress, u64)),
ecdh: EdwardsPoint,
R: EdwardsPoint,
) -> (SendOutput, Option<[u8; 8]>) {
let o = output.0;
let need_dummy_payment_id = output.2;
let output = output.1;
let (view_tag, shared_key, payment_id_xor) =
shared_key(Some(unique).filter(|_| output.0.is_guaranteed()), ecdh, o);
let payment_id = output
.0
.payment_id()
.or(if need_dummy_payment_id { Some([0u8; 8]) } else { None })
.map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes());
(
SendOutput {
R,
@@ -94,14 +87,17 @@ impl SendOutput {
commitment: Commitment::new(commitment_mask(shared_key), output.1),
amount: amount_encryption(output.1, shared_key),
},
payment_id,
output
.0
.payment_id()
.map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()),
)
}
fn new(
r: &Zeroizing<Scalar>,
unique: [u8; 32],
output: (usize, (MoneroAddress, u64), bool),
output: (usize, (MoneroAddress, u64)),
) -> (SendOutput, Option<[u8; 8]>) {
let address = output.1 .0;
SendOutput::internal(
@@ -119,7 +115,7 @@ impl SendOutput {
fn change(
ecdh: EdwardsPoint,
unique: [u8; 32],
output: (usize, (MoneroAddress, u64), bool),
output: (usize, (MoneroAddress, u64)),
) -> (SendOutput, Option<[u8; 8]>) {
SendOutput::internal(unique, output, ecdh, ED25519_BASEPOINT_POINT)
}
@@ -274,22 +270,20 @@ impl Fee {
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[allow(non_camel_case_types)]
pub enum FeePriority {
Unimportant,
Normal,
Elevated,
Priority,
Lowest,
Low,
Medium,
High,
Custom { priority: u32 },
}
/// https://github.com/monero-project/monero/blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/
/// src/simplewallet/simplewallet.cpp#L161
impl FeePriority {
pub(crate) fn fee_priority(&self) -> u32 {
match self {
FeePriority::Unimportant => 1,
FeePriority::Normal => 2,
FeePriority::Elevated => 3,
FeePriority::Priority => 4,
FeePriority::Lowest => 0,
FeePriority::Low => 1,
FeePriority::Medium => 2,
FeePriority::High => 3,
FeePriority::Custom { priority, .. } => *priority,
}
}
@@ -297,8 +291,8 @@ impl FeePriority {
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum InternalPayment {
Payment((MoneroAddress, u64), bool),
Change((MoneroAddress, u64), Option<Zeroizing<Scalar>>),
Payment((MoneroAddress, u64)),
Change((MoneroAddress, Option<Zeroizing<Scalar>>), u64),
}
/// The eventual output of a SignableTransaction.
@@ -358,14 +352,6 @@ impl Change {
/// Create a fingerprintable change output specification which will harm privacy. Only use this
/// if you know what you're doing.
///
/// If the change address is None, there are 2 potential fingerprints:
///
/// 1) The change in the tx is shunted to the fee (fingerprintable fee).
///
/// 2) If there are 2 outputs in the tx, there would be no payment ID, unlike the 2-output txs
/// the reference wallet creates, since monero-serai doesn't know which output to tie the
/// dummy payment ID to.
pub fn fingerprintable(address: Option<MoneroAddress>) -> Change {
Change { address, view: None }
}
@@ -376,9 +362,9 @@ fn need_additional(payments: &[InternalPayment]) -> (bool, bool) {
let subaddresses = payments
.iter()
.filter(|payment| match *payment {
InternalPayment::Payment(payment, _) => payment.0.is_subaddress(),
InternalPayment::Change(change, change_view) => {
if change_view.is_some() {
InternalPayment::Payment(payment) => payment.0.is_subaddress(),
InternalPayment::Change(change, _) => {
if change.1.is_some() {
has_change_view = true;
// It should not be possible to construct a change specification to a subaddress with a
// view key
@@ -405,7 +391,7 @@ fn sanity_check_change_payment_quantity(payments: &[InternalPayment], has_change
payments
.iter()
.filter(|payment| match *payment {
InternalPayment::Payment(_, _) => false,
InternalPayment::Payment(_) => false,
InternalPayment::Change(_, _) => true,
})
.count(),
@@ -477,13 +463,6 @@ impl SignableTransaction {
Err(TransactionError::NoChange)?;
}
// All 2 output txs created by the reference wallet have payment IDs to avoid
// fingerprinting integrated addresses. Note: we won't create a dummy payment
// ID if we create a 0-change 2-output tx since we don't know which output should
// receive the payment ID and such a tx is fingerprintable to monero-serai anyway
let need_dummy_payment_id = !has_payment_id && payments.len() == 1;
has_payment_id |= need_dummy_payment_id;
// Get the outgoing amount ignoring fees
let out_amount = payments.iter().map(|payment| payment.1).sum::<u64>();
@@ -493,21 +472,19 @@ impl SignableTransaction {
}
// Collect payments in a container that includes a change output if a change address is provided
let mut payments = payments
.into_iter()
.map(|payment| InternalPayment::Payment(payment, need_dummy_payment_id))
.collect::<Vec<_>>();
debug_assert!(!need_dummy_payment_id || (payments.len() == 1 && change.address.is_some()));
let mut payments = payments.into_iter().map(InternalPayment::Payment).collect::<Vec<_>>();
if let Some(change_address) = change.address.as_ref() {
// Push a 0 amount change output that we'll use to do fee calculations.
// We'll modify the change amount after calculating the fee
payments.push(InternalPayment::Change((*change_address, 0), change.view.clone()));
payments.push(InternalPayment::Change((*change_address, change.view.clone()), 0));
}
// Determine if we'll need additional pub keys in tx extra
let (_, additional) = need_additional(&payments);
// Add a dummy payment ID if there are only 2 outputs
has_payment_id |= outputs == 2;
// Calculate the extra length
let extra = Extra::fee_weight(outputs, additional, has_payment_id, data.as_ref());
@@ -547,8 +524,8 @@ impl SignableTransaction {
let change_payment = payments.last_mut().unwrap();
debug_assert!(matches!(change_payment, InternalPayment::Change(_, _)));
*change_payment = InternalPayment::Change(
(*change_address, in_amount - out_amount - fee),
change.view.clone(),
(*change_address, change.view.clone()),
in_amount - out_amount - fee,
);
}
@@ -561,8 +538,8 @@ impl SignableTransaction {
payments
.iter()
.map(|payment| match *payment {
InternalPayment::Payment(payment, _) => payment.1,
InternalPayment::Change(change, _) => change.1,
InternalPayment::Payment(payment) => payment.1,
InternalPayment::Change(_, amount) => amount,
})
.sum::<u64>() +
fee,
@@ -629,7 +606,7 @@ impl SignableTransaction {
if modified_change_ecdh {
for payment in &*payments {
match payment {
InternalPayment::Payment(payment, _) => {
InternalPayment::Payment(payment) => {
// This should be the only payment and it should be a subaddress
debug_assert!(payment.0.is_subaddress());
tx_public_key = tx_key.deref() * payment.0.spend;
@@ -648,8 +625,8 @@ impl SignableTransaction {
// Downcast the change output to a payment output if it doesn't require special handling
// regarding it's view key
payment = if !modified_change_ecdh {
if let InternalPayment::Change(change, _) = &payment {
InternalPayment::Payment(*change, false)
if let InternalPayment::Change(change, amount) = &payment {
InternalPayment::Payment((change.0, *amount))
} else {
payment
}
@@ -658,14 +635,13 @@ impl SignableTransaction {
};
let (output, payment_id) = match payment {
InternalPayment::Payment(payment, need_dummy_payment_id) => {
InternalPayment::Payment(payment) => {
// If this is a subaddress, generate a dedicated r. Else, reuse the TX key
let dedicated = Zeroizing::new(random_scalar(&mut rng));
let use_dedicated = additional && payment.0.is_subaddress();
let r = if use_dedicated { &dedicated } else { &tx_key };
let (mut output, payment_id) =
SendOutput::new(r, uniqueness, (o, payment, need_dummy_payment_id));
let (mut output, payment_id) = SendOutput::new(r, uniqueness, (o, payment));
if modified_change_ecdh {
debug_assert_eq!(tx_public_key, output.R);
}
@@ -679,11 +655,11 @@ impl SignableTransaction {
}
(output, payment_id)
}
InternalPayment::Change(change, change_view) => {
InternalPayment::Change(change, amount) => {
// Instead of rA, use Ra, where R is r * subaddress_spend_key
// change.view must be Some; if it were None, this payment would've been downcast
let ecdh = tx_public_key * change_view.unwrap().deref();
SendOutput::change(ecdh, uniqueness, (o, change, false))
let ecdh = tx_public_key * change.1.unwrap().deref();
SendOutput::change(ecdh, uniqueness, (o, (change.0, amount)))
}
};
@@ -691,6 +667,16 @@ impl SignableTransaction {
id = id.or(payment_id);
}
// Include a random payment ID if we don't actually have one
// It prevents transactions from leaking whether they're sending to integrated addresses or not
// Only do this if we only have two outputs though, as Monero won't add a dummy if there's
// more than two outputs
if outputs.len() <= 2 {
let mut rand = [0; 8];
rng.fill_bytes(&mut rand);
id = id.or(Some(rand));
}
(tx_public_key, additional_keys, outputs, id)
}
@@ -963,26 +949,21 @@ impl Eventuality {
fn write_payment<W: io::Write>(payment: &InternalPayment, w: &mut W) -> io::Result<()> {
match payment {
InternalPayment::Payment(payment, need_dummy_payment_id) => {
InternalPayment::Payment(payment) => {
w.write_all(&[0])?;
write_vec(write_byte, payment.0.to_string().as_bytes(), w)?;
w.write_all(&payment.1.to_le_bytes())?;
if *need_dummy_payment_id {
w.write_all(&[1])
} else {
w.write_all(&[0])
w.write_all(&payment.1.to_le_bytes())
}
}
InternalPayment::Change(change, change_view) => {
InternalPayment::Change(change, amount) => {
w.write_all(&[1])?;
write_vec(write_byte, change.0.to_string().as_bytes(), w)?;
w.write_all(&change.1.to_le_bytes())?;
if let Some(view) = change_view.as_ref() {
if let Some(view) = change.1.as_ref() {
w.write_all(&[1])?;
write_scalar(view, w)
write_scalar(view, w)?;
} else {
w.write_all(&[0])
w.write_all(&[0])?;
}
w.write_all(&amount.to_le_bytes())
}
}
}
@@ -1007,22 +988,18 @@ impl Eventuality {
fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
Ok(match read_byte(r)? {
0 => InternalPayment::Payment(
(read_address(r)?, read_u64(r)?),
match read_byte(r)? {
0 => false,
1 => true,
_ => Err(io::Error::other("invalid need additional"))?,
},
),
0 => InternalPayment::Payment((read_address(r)?, read_u64(r)?)),
1 => InternalPayment::Change(
(read_address(r)?, read_u64(r)?),
(
read_address(r)?,
match read_byte(r)? {
0 => None,
1 => Some(Zeroizing::new(read_scalar(r)?)),
_ => Err(io::Error::other("invalid change view"))?,
_ => Err(io::Error::other("invalid change payment"))?,
},
),
read_u64(r)?,
),
_ => Err(io::Error::other("invalid payment"))?,
})
}
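The payment ID handling above reduces to a simple 8-byte XOR; the behavioral change is that, with at most two outputs and no real ID, the new code fills the ID with random bytes. A self-contained sketch of the masking arithmetic (the function name is illustrative, not part of the crate):

// The ID written to extra is the plaintext ID XORed with the per-output keystream
// (payment_id_xor); decryption is the same operation run again.
fn mask_payment_id(id: [u8; 8], payment_id_xor: [u8; 8]) -> [u8; 8] {
  (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()
}

fn main() {
  let keystream = [0x11; 8];
  let id = *b"12345678";
  let masked = mask_payment_id(id, keystream);
  // XOR is an involution, so masking again with the same keystream recovers the ID
  assert_eq!(mask_payment_id(masked, keystream), id);
}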

View File

@@ -120,20 +120,16 @@ impl SignableTransaction {
for payment in &self.payments {
match payment {
InternalPayment::Payment(payment, need_dummy_payment_id) => {
InternalPayment::Payment(payment) => {
transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
transcript.append_message(
b"need_dummy_payment_id",
[if *need_dummy_payment_id { 1u8 } else { 0u8 }],
);
}
InternalPayment::Change(change, change_view) => {
InternalPayment::Change(change, amount) => {
transcript.append_message(b"change_address", change.0.to_string().as_bytes());
transcript.append_message(b"change_amount", change.1.to_le_bytes());
if let Some(view) = change_view.as_ref() {
if let Some(view) = change.1.as_ref() {
transcript.append_message(b"change_view_key", Zeroizing::new(view.to_bytes()));
}
transcript.append_message(b"change_amount", amount.to_le_bytes());
}
}
}
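The transcript above exists so every signer derives identical randomness from identical payment data: each field is appended under its own label before any challenge is drawn. A rough sketch of the pattern, assuming the `Transcript`/`RecommendedTranscript` API serai's crates use elsewhere (the crate alias and function name here are assumptions):

use transcript::{Transcript, RecommendedTranscript};

// Commit a payment's address and amount to the transcript under distinct labels,
// mirroring the append_message calls above.
fn transcript_payment(transcript: &mut RecommendedTranscript, address: &str, amount: u64) {
  transcript.append_message(b"payment_address", address.as_bytes());
  transcript.append_message(b"payment_amount", amount.to_le_bytes());
}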

View File

@@ -1,162 +0,0 @@
use monero_serai::{
transaction::Transaction,
wallet::SpendableOutput,
rpc::{Rpc, OutputResponse},
Protocol, DEFAULT_LOCK_WINDOW,
};
mod runner;
test!(
select_latest_output_as_decoy_canonical,
(
// First make an initial tx0
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc: Rpc<_>, tx: Transaction, mut scanner: Scanner, ()| async move {
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 2000000000000);
SpendableOutput::from(&rpc, output).await.unwrap()
},
),
(
// Then make a second tx1
|protocol: Protocol, rpc: Rpc<_>, mut builder: Builder, addr, state: _| async move {
let output_tx0: SpendableOutput = state;
let decoys = Decoys::fingerprintable_canonical_select(
&mut OsRng,
&rpc,
protocol.ring_len(),
rpc.get_height().await.unwrap(),
&[output_tx0.clone()],
)
.await
.unwrap();
let inputs = [output_tx0.clone()].into_iter().zip(decoys).collect::<Vec<_>>();
builder.add_inputs(&inputs);
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), (protocol, output_tx0))
},
// Then make sure DSA selects freshly unlocked output from tx1 as a decoy
|rpc: Rpc<_>, tx: Transaction, mut scanner: Scanner, state: (_, _)| async move {
use rand_core::OsRng;
let height = rpc.get_height().await.unwrap();
let output_tx1 =
SpendableOutput::from(&rpc, scanner.scan_transaction(&tx).not_locked().swap_remove(0))
.await
.unwrap();
// Make sure output from tx1 is in the block in which it unlocks
let out_tx1: OutputResponse =
rpc.get_outs(&[output_tx1.global_index]).await.unwrap().swap_remove(0);
assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW);
assert!(out_tx1.unlocked);
// Select decoys using spendable output from tx0 as the real, and make sure DSA selects
// the freshly unlocked output from tx1 as a decoy
let (protocol, output_tx0): (Protocol, SpendableOutput) = state;
let mut selected_fresh_decoy = false;
let mut attempts = 1000;
while !selected_fresh_decoy && attempts > 0 {
let decoys = Decoys::fingerprintable_canonical_select(
&mut OsRng, // TODO: use a seeded RNG to consistently select the latest output
&rpc,
protocol.ring_len(),
height,
&[output_tx0.clone()],
)
.await
.unwrap();
selected_fresh_decoy = decoys[0].indexes().contains(&output_tx1.global_index);
attempts -= 1;
}
assert!(selected_fresh_decoy);
assert_eq!(height, rpc.get_height().await.unwrap());
},
),
);
test!(
select_latest_output_as_decoy,
(
// First make an initial tx0
|_, mut builder: Builder, addr| async move {
builder.add_payment(addr, 2000000000000);
(builder.build().unwrap(), ())
},
|rpc: Rpc<_>, tx: Transaction, mut scanner: Scanner, ()| async move {
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 2000000000000);
SpendableOutput::from(&rpc, output).await.unwrap()
},
),
(
// Then make a second tx1
|protocol: Protocol, rpc: Rpc<_>, mut builder: Builder, addr, state: _| async move {
let output_tx0: SpendableOutput = state;
let decoys = Decoys::select(
&mut OsRng,
&rpc,
protocol.ring_len(),
rpc.get_height().await.unwrap(),
&[output_tx0.clone()],
)
.await
.unwrap();
let inputs = [output_tx0.clone()].into_iter().zip(decoys).collect::<Vec<_>>();
builder.add_inputs(&inputs);
builder.add_payment(addr, 1000000000000);
(builder.build().unwrap(), (protocol, output_tx0))
},
// Then make sure DSA selects freshly unlocked output from tx1 as a decoy
|rpc: Rpc<_>, tx: Transaction, mut scanner: Scanner, state: (_, _)| async move {
use rand_core::OsRng;
let height = rpc.get_height().await.unwrap();
let output_tx1 =
SpendableOutput::from(&rpc, scanner.scan_transaction(&tx).not_locked().swap_remove(0))
.await
.unwrap();
// Make sure output from tx1 is in the block in which it unlocks
let out_tx1: OutputResponse =
rpc.get_outs(&[output_tx1.global_index]).await.unwrap().swap_remove(0);
assert_eq!(out_tx1.height, height - DEFAULT_LOCK_WINDOW);
assert!(out_tx1.unlocked);
// Select decoys using spendable output from tx0 as the real, and make sure DSA selects
// the freshly unlocked output from tx1 as a decoy
let (protocol, output_tx0): (Protocol, SpendableOutput) = state;
let mut selected_fresh_decoy = false;
let mut attempts = 1000;
while !selected_fresh_decoy && attempts > 0 {
let decoys = Decoys::select(
&mut OsRng, // TODO: use a seeded RNG to consistently select the latest output
&rpc,
protocol.ring_len(),
height,
&[output_tx0.clone()],
)
.await
.unwrap();
selected_fresh_decoy = decoys[0].indexes().contains(&output_tx1.global_index);
attempts -= 1;
}
assert!(selected_fresh_decoy);
assert_eq!(height, rpc.get_height().await.unwrap());
},
),
);

View File

@@ -17,7 +17,6 @@ use monero_serai::{
SpendableOutput, Fee,
},
transaction::Transaction,
DEFAULT_LOCK_WINDOW,
};
pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
@@ -37,6 +36,7 @@ pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
// TODO: Support transactions already on-chain
// TODO: Don't have a side effect of mining more blocks than needed under race conditions
// TODO: mine as much as needed instead of default 10 blocks
pub async fn mine_until_unlocked(rpc: &Rpc<HttpRpc>, addr: &str, tx_hash: [u8; 32]) {
// mine until tx is in a block
let mut height = rpc.get_height().await.unwrap();
@@ -46,23 +46,15 @@ pub async fn mine_until_unlocked(rpc: &Rpc<HttpRpc>, addr: &str, tx_hash: [u8; 3
found = match block.txs.iter().find(|&&x| x == tx_hash) {
Some(_) => true,
None => {
height = rpc.generate_blocks(addr, 1).await.unwrap().1 + 1;
rpc.generate_blocks(addr, 1).await.unwrap();
height += 1;
false
}
}
}
// Mine until tx's outputs are unlocked
let o_indexes: Vec<u64> = rpc.get_o_indexes(tx_hash).await.unwrap();
while rpc
.get_outs(&o_indexes)
.await
.unwrap()
.into_iter()
.all(|o| (!(o.unlocked && height >= (o.height + DEFAULT_LOCK_WINDOW))))
{
height = rpc.generate_blocks(addr, 1).await.unwrap().1 + 1;
}
// mine 9 more blocks to unlock the tx
rpc.generate_blocks(addr, 9).await.unwrap();
}
// Mines 60 blocks and returns an unlocked miner TX output.
@@ -94,7 +86,7 @@ pub fn check_weight_and_fee(tx: &Transaction, fee_rate: Fee) {
}
pub async fn rpc() -> Rpc<HttpRpc> {
let rpc = HttpRpc::new("http://serai:seraidex@127.0.0.1:18081".to_string()).await.unwrap();
let rpc = HttpRpc::new("http://127.0.0.1:18081".to_string()).await.unwrap();
// Only run once
if rpc.get_height().await.unwrap() != 1 {
@@ -220,7 +212,7 @@ macro_rules! test {
let builder = SignableTransactionBuilder::new(
protocol,
rpc.get_fee(protocol, FeePriority::Unimportant).await.unwrap(),
rpc.get_fee(protocol, FeePriority::Low).await.unwrap(),
Change::new(
&ViewPair::new(
&random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE,
@@ -268,12 +260,12 @@ macro_rules! test {
let temp = Box::new({
let mut builder = builder.clone();
let decoys = Decoys::fingerprintable_canonical_select(
let decoys = Decoys::select(
&mut OsRng,
&rpc,
protocol.ring_len(),
rpc.get_height().await.unwrap(),
&[miner_tx.clone()],
rpc.get_height().await.unwrap() - 1,
&[miner_tx.clone()]
)
.await
.unwrap();

View File

@@ -1,9 +1,6 @@
use rand::RngCore;
use monero_serai::{
transaction::Transaction,
wallet::{address::SubaddressIndex, extra::PaymentId},
};
use monero_serai::{transaction::Transaction, wallet::address::SubaddressIndex};
mod runner;
@@ -19,8 +16,6 @@ test!(
|_, tx: Transaction, _, mut state: Scanner| async move {
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
let dummy_payment_id = PaymentId::Encrypted([0u8; 8]);
assert_eq!(output.metadata.payment_id, Some(dummy_payment_id));
},
),
);
@@ -62,7 +57,7 @@ test!(
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
@@ -145,7 +140,7 @@ test!(
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
@@ -179,7 +174,7 @@ test!(
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.metadata.payment_id, state.1);
assert_eq!(output.metadata.subaddress, Some(state.2));
},
),
@@ -264,7 +259,7 @@ test!(
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.metadata.payment_id, state.1);
},
),
);
@@ -298,7 +293,7 @@ test!(
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
assert_eq!(output.commitment().amount, 5);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(state.1)));
assert_eq!(output.metadata.payment_id, state.1);
assert_eq!(output.metadata.subaddress, Some(state.2));
},
),

View File

@@ -24,11 +24,11 @@ async fn add_inputs(
spendable_outputs.push(SpendableOutput::from(rpc, output).await.unwrap());
}
let decoys = Decoys::fingerprintable_canonical_select(
let decoys = Decoys::select(
&mut OsRng,
rpc,
protocol.ring_len(),
rpc.get_height().await.unwrap(),
rpc.get_height().await.unwrap() - 1,
&spendable_outputs,
)
.await
@@ -110,7 +110,7 @@ test!(
let mut builder = SignableTransactionBuilder::new(
protocol,
rpc.get_fee(protocol, FeePriority::Unimportant).await.unwrap(),
rpc.get_fee(protocol, FeePriority::Low).await.unwrap(),
Change::new(&change_view, false),
);
add_inputs(protocol, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await;
@@ -294,7 +294,7 @@ test!(
let mut builder = SignableTransactionBuilder::new(
protocol,
rpc.get_fee(protocol, FeePriority::Unimportant).await.unwrap(),
rpc.get_fee(protocol, FeePriority::Low).await.unwrap(),
Change::fingerprintable(None),
);
add_inputs(protocol, &rpc, vec![outputs.first().unwrap().clone()], &mut builder).await;

View File

@@ -10,7 +10,7 @@ use monero_serai::{
rpc::{EmptyResponse, HttpRpc, Rpc},
wallet::{
address::{Network, AddressSpec, SubaddressIndex, MoneroAddress},
extra::{MAX_TX_EXTRA_NONCE_SIZE, Extra, PaymentId},
extra::{MAX_TX_EXTRA_NONCE_SIZE, Extra},
Scanner,
},
};
@@ -35,7 +35,7 @@ async fn make_integrated_address(rpc: &Rpc<HttpRpc>, payment_id: [u8; 8]) -> Str
}
async fn initialize_rpcs() -> (Rpc<HttpRpc>, Rpc<HttpRpc>, String) {
let wallet_rpc = HttpRpc::new("http://127.0.0.1:18082".to_string()).await.unwrap();
let wallet_rpc = HttpRpc::new("http://127.0.0.1:6061".to_string()).await.unwrap();
let daemon_rpc = runner::rpc().await;
#[derive(Debug, Deserialize)]
@@ -88,9 +88,11 @@ async fn from_wallet_rpc_to_self(spec: AddressSpec) {
.unwrap();
let tx_hash = hex::decode(tx.tx_hash).unwrap().try_into().unwrap();
// TODO: Needs https://github.com/monero-project/monero/pull/9260
// TODO: Needs https://github.com/monero-project/monero/pull/8882
// let fee_rate = daemon_rpc
// .get_fee(daemon_rpc.get_protocol().await.unwrap(), FeePriority::Unimportant)
// // Uses `FeePriority::Low` instead of `FeePriority::Lowest` because wallet RPC
// // adjusts `monero_rpc::TransferPriority::Default` up by 1
// .get_fee(daemon_rpc.get_protocol().await.unwrap(), FeePriority::Low)
// .await
// .unwrap();
@@ -107,21 +109,17 @@ async fn from_wallet_rpc_to_self(spec: AddressSpec) {
let tx = daemon_rpc.get_transaction(tx_hash).await.unwrap();
let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
// TODO: Needs https://github.com/monero-project/monero/pull/9260
// TODO: Needs https://github.com/monero-project/monero/pull/8882
// runner::check_weight_and_fee(&tx, fee_rate);
match spec {
AddressSpec::Subaddress(index) => {
assert_eq!(output.metadata.subaddress, Some(index));
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted([0u8; 8])));
}
AddressSpec::Subaddress(index) => assert_eq!(output.metadata.subaddress, Some(index)),
AddressSpec::Integrated(payment_id) => {
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted(payment_id)));
assert_eq!(output.metadata.payment_id, payment_id);
assert_eq!(output.metadata.subaddress, None);
}
AddressSpec::Standard | AddressSpec::Featured { .. } => {
assert_eq!(output.metadata.subaddress, None);
assert_eq!(output.metadata.payment_id, Some(PaymentId::Encrypted([0u8; 8])));
assert_eq!(output.metadata.subaddress, None)
}
}
assert_eq!(output.commitment().amount, 1000000000000);
@@ -159,7 +157,6 @@ struct Transfer {
#[derive(Debug, Deserialize)]
struct TransfersResponse {
transfer: Transfer,
transfers: Vec<Transfer>,
}
test!(
@@ -183,7 +180,6 @@ test!(
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: 0, minor: 0 });
assert_eq!(transfer.transfer.amount, 1000000);
assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8]));
},
),
);
@@ -221,7 +217,6 @@ test!(
.unwrap();
assert_eq!(transfer.transfer.subaddr_index, Index { major: data.1, minor: 0 });
assert_eq!(transfer.transfer.amount, 1000000);
assert_eq!(transfer.transfer.payment_id, hex::encode([0u8; 8]));
// Make sure only one R was included in TX extra
assert!(Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref())
@@ -234,62 +229,6 @@ test!(
),
);
test!(
send_to_wallet_rpc_subaddresses,
(
|_, mut builder: Builder, _| async move {
// initialize rpc
let (wallet_rpc, daemon_rpc, _) = initialize_rpcs().await;
// make the subaddress
#[derive(Debug, Deserialize)]
struct AddressesResponse {
addresses: Vec<String>,
address_index: u32,
}
let addrs: AddressesResponse = wallet_rpc
.json_rpc_call("create_address", Some(json!({ "account_index": 0, "count": 2 })))
.await
.unwrap();
assert!(addrs.address_index != 0);
assert!(addrs.addresses.len() == 2);
builder.add_payments(&[
(MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[0]).unwrap(), 1000000),
(MoneroAddress::from_str(Network::Mainnet, &addrs.addresses[1]).unwrap(), 2000000),
]);
(builder.build().unwrap(), (wallet_rpc, daemon_rpc, addrs.address_index))
},
|_, tx: Transaction, _, data: (Rpc<HttpRpc>, Rpc<HttpRpc>, u32)| async move {
// confirm receipt
let _: EmptyResponse = data.0.json_rpc_call("refresh", None).await.unwrap();
let transfer: TransfersResponse = data
.0
.json_rpc_call(
"get_transfer_by_txid",
Some(json!({ "txid": hex::encode(tx.hash()), "account_index": 0 })),
)
.await
.unwrap();
assert_eq!(transfer.transfers.len(), 2);
for t in transfer.transfers {
match t.amount {
1000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 }),
2000000 => assert_eq!(t.subaddr_index, Index { major: 0, minor: data.2 + 1 }),
_ => unreachable!(),
}
}
// Make sure 3 additional pub keys are included in TX extra
let keys =
Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()).unwrap().keys().unwrap().1.unwrap();
assert_eq!(keys.len(), 3);
},
),
);
test!(
send_to_wallet_rpc_integrated,
(

View File

@@ -18,7 +18,7 @@ workspace = true
[dependencies]
parity-db = { version = "0.4", default-features = false, optional = true }
rocksdb = { version = "0.21", default-features = false, features = ["zstd"], optional = true }
rocksdb = { version = "0.21", default-features = false, features = ["lz4"], optional = true }
[features]
parity-db = ["dep:parity-db"]

View File

@@ -42,9 +42,9 @@ macro_rules! create_db {
}) => {
$(
#[derive(Clone, Debug)]
pub(crate) struct $field_name;
pub struct $field_name;
impl $field_name {
pub(crate) fn key($($arg: $arg_type),*) -> Vec<u8> {
pub fn key($($arg: $arg_type),*) -> Vec<u8> {
use scale::Encode;
$crate::serai_db_key(
stringify!($db_name).as_bytes(),
@@ -52,17 +52,17 @@ macro_rules! create_db {
($($arg),*).encode()
)
}
pub(crate) fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &$field_type) {
pub fn set(txn: &mut impl DbTxn $(, $arg: $arg_type)*, data: &$field_type) {
let key = $field_name::key($($arg),*);
txn.put(&key, borsh::to_vec(data).unwrap());
}
pub(crate) fn get(getter: &impl Get, $($arg: $arg_type),*) -> Option<$field_type> {
pub fn get(getter: &impl Get, $($arg: $arg_type),*) -> Option<$field_type> {
getter.get($field_name::key($($arg),*)).map(|data| {
borsh::from_slice(data.as_ref()).unwrap()
})
}
#[allow(dead_code)]
pub(crate) fn del(txn: &mut impl DbTxn $(, $arg: $arg_type)*) {
pub fn del(txn: &mut impl DbTxn $(, $arg: $arg_type)*) {
txn.del(&$field_name::key($($arg),*))
}
}
@@ -83,7 +83,7 @@ macro_rules! db_channel {
}
impl $field_name {
pub(crate) fn send(txn: &mut impl DbTxn $(, $arg: $arg_type)*, value: &$field_type) {
pub fn send(txn: &mut impl DbTxn $(, $arg: $arg_type)*, value: &$field_type) {
// Use index 0 to store the amount of messages
let messages_sent_key = $field_name::key($($arg),*, 0);
let messages_sent = txn.get(&messages_sent_key).map(|counter| {
@@ -98,7 +98,7 @@ macro_rules! db_channel {
$field_name::set(txn, $($arg),*, index_to_use, value);
}
pub(crate) fn try_recv(txn: &mut impl DbTxn $(, $arg: $arg_type)*) -> Option<$field_type> {
pub fn try_recv(txn: &mut impl DbTxn $(, $arg: $arg_type)*) -> Option<$field_type> {
let messages_recvd_key = $field_name::key($($arg),*, 1);
let messages_recvd = txn.get(&messages_recvd_key).map(|counter| {
u32::from_le_bytes(counter.try_into().unwrap())
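For reference, a usage sketch of what these macros generate, assuming the invocation syntax implied by the macro arms above and that both macros are exported from the crate root; the database, field, and argument names here are made up:

use serai_db::{Get, DbTxn, Db, create_db, db_channel};

create_db!(
  ExampleDb {
    BlockHash: (number: u64) -> [u8; 32],
  }
);

db_channel!(
  ExampleChannel {
    PendingEvents: (key: u64) -> Vec<u8>,
  }
);

fn demo(db: &mut impl Db) {
  let mut txn = db.txn();
  // create_db! generated set/get/del, keyed on the declared arguments
  BlockHash::set(&mut txn, 100, &[0u8; 32]);
  assert_eq!(BlockHash::get(&txn, 100), Some([0u8; 32]));
  // db_channel! generated a FIFO send/try_recv pair on top of the same storage,
  // tracking sent and received counters at reserved indexes
  PendingEvents::send(&mut txn, 100, &vec![1, 2, 3]);
  assert_eq!(PendingEvents::try_recv(&mut txn, 100), Some(vec![1, 2, 3]));
  txn.commit();
}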

View File

@@ -1,65 +1,42 @@
use std::sync::Arc;
use rocksdb::{
DBCompressionType, ThreadMode, SingleThreaded, LogLevel, WriteOptions,
Transaction as RocksTransaction, Options, OptimisticTransactionDB,
};
use rocksdb::{DBCompressionType, ThreadMode, SingleThreaded, Options, Transaction, TransactionDB};
use crate::*;
pub struct Transaction<'a, T: ThreadMode>(
RocksTransaction<'a, OptimisticTransactionDB<T>>,
&'a OptimisticTransactionDB<T>,
);
impl<T: ThreadMode> Get for Transaction<'_, T> {
impl<T: ThreadMode> Get for Transaction<'_, TransactionDB<T>> {
fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
self.0.get(key).expect("couldn't read from RocksDB via transaction")
self.get(key).expect("couldn't read from RocksDB via transaction")
}
}
impl<T: ThreadMode> DbTxn for Transaction<'_, T> {
impl<T: ThreadMode> DbTxn for Transaction<'_, TransactionDB<T>> {
fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
self.0.put(key, value).expect("couldn't write to RocksDB via transaction")
Transaction::put(self, key, value).expect("couldn't write to RocksDB via transaction")
}
fn del(&mut self, key: impl AsRef<[u8]>) {
self.0.delete(key).expect("couldn't delete from RocksDB via transaction")
self.delete(key).expect("couldn't delete from RocksDB via transaction")
}
fn commit(self) {
self.0.commit().expect("couldn't commit to RocksDB via transaction");
self.1.flush_wal(true).expect("couldn't flush RocksDB WAL");
self.1.flush().expect("couldn't flush RocksDB");
Transaction::commit(self).expect("couldn't commit to RocksDB via transaction")
}
}
impl<T: ThreadMode> Get for Arc<OptimisticTransactionDB<T>> {
impl<T: ThreadMode> Get for Arc<TransactionDB<T>> {
fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
OptimisticTransactionDB::get(self, key).expect("couldn't read from RocksDB")
TransactionDB::get(self, key).expect("couldn't read from RocksDB")
}
}
impl<T: Send + ThreadMode + 'static> Db for Arc<OptimisticTransactionDB<T>> {
type Transaction<'a> = Transaction<'a, T>;
impl<T: ThreadMode + 'static> Db for Arc<TransactionDB<T>> {
type Transaction<'a> = Transaction<'a, TransactionDB<T>>;
fn txn(&mut self) -> Self::Transaction<'_> {
let mut opts = WriteOptions::default();
opts.set_sync(true);
Transaction(self.transaction_opt(&opts, &Default::default()), &**self)
self.transaction()
}
}
pub type RocksDB = Arc<OptimisticTransactionDB<SingleThreaded>>;
pub type RocksDB = Arc<TransactionDB<SingleThreaded>>;
pub fn new_rocksdb(path: &str) -> RocksDB {
let mut options = Options::default();
options.create_if_missing(true);
options.set_compression_type(DBCompressionType::Zstd);
options.set_wal_compression_type(DBCompressionType::Zstd);
// 10 MB
options.set_max_total_wal_size(10 * 1024 * 1024);
options.set_wal_size_limit_mb(10);
options.set_log_level(LogLevel::Warn);
// 1 MB
options.set_max_log_file_size(1024 * 1024);
options.set_recycle_log_file_num(1);
Arc::new(OptimisticTransactionDB::open(&options, path).unwrap())
options.set_compression_type(DBCompressionType::Lz4);
Arc::new(TransactionDB::open(&options, &Default::default(), path).unwrap())
}
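A minimal sketch of the Db/DbTxn flow these impls provide, assuming `new_rocksdb` is re-exported at the crate root behind the `rocksdb` feature; the path is a placeholder:

use serai_db::{Get, DbTxn, Db, new_rocksdb};

fn demo() {
  let mut db = new_rocksdb("/tmp/serai-db-example");
  let mut txn = db.txn();
  txn.put(b"key", b"value");
  // Reads through the transaction observe its own uncommitted writes
  assert_eq!(txn.get(b"key"), Some(b"value".to_vec()));
  txn.commit();
  // After commit, reads through the Db handle observe the data
  assert_eq!(db.get(b"key"), Some(b"value".to_vec()));
}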

View File

@@ -3,7 +3,6 @@
// Obtain a variable from the Serai environment/secret store.
pub fn var(variable: &str) -> Option<String> {
// TODO: Move this to a proper secret store
// TODO: Unset this variable
// TODO: Move this to Kubernetes
std::env::var(variable).ok()
}

View File

@@ -17,13 +17,11 @@ rustdoc-args = ["--cfg", "docsrs"]
workspace = true
[dependencies]
tower-service = { version = "0.3", default-features = false }
hyper = { version = "1", default-features = false, features = ["http1", "client"] }
hyper-util = { version = "0.1", default-features = false, features = ["http1", "client-legacy", "tokio"] }
http-body-util = { version = "0.1", default-features = false }
# Deprecated here means to enable deprecated warnings, not to restore deprecated APIs
hyper = { version = "0.14", default-features = false, features = ["http1", "tcp", "client", "runtime", "backports", "deprecated"] }
tokio = { version = "1", default-features = false }
hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "ring", "rustls-native-certs", "native-tokio"], optional = true }
hyper-rustls = { version = "0.24", default-features = false, features = ["http1", "native-tokio"], optional = true }
zeroize = { version = "1", optional = true }
base64ct = { version = "1", features = ["alloc"], optional = true }

View File

@@ -5,13 +5,14 @@ use std::sync::Arc;
use tokio::sync::Mutex;
use tower_service::Service as TowerService;
#[cfg(feature = "tls")]
use hyper_rustls::{HttpsConnectorBuilder, HttpsConnector};
use hyper::{Uri, header::HeaderValue, body::Bytes, client::conn::http1::SendRequest};
use hyper_util::{
rt::tokio::TokioExecutor,
client::legacy::{Client as HyperClient, connect::HttpConnector},
use hyper::{
Uri,
header::HeaderValue,
body::Body,
service::Service,
client::{HttpConnector, conn::http1::SendRequest},
};
pub use hyper;
@@ -28,7 +29,6 @@ pub enum Error {
InconsistentHost,
ConnectionError(Box<dyn Send + Sync + std::error::Error>),
Hyper(hyper::Error),
HyperUtil(hyper_util::client::legacy::Error),
}
#[cfg(not(feature = "tls"))]
@@ -38,12 +38,8 @@ type Connector = HttpsConnector<HttpConnector>;
#[derive(Clone, Debug)]
enum Connection {
ConnectionPool(HyperClient<Connector, Full<Bytes>>),
Connection {
connector: Connector,
host: Uri,
connection: Arc<Mutex<Option<SendRequest<Full<Bytes>>>>>,
},
ConnectionPool(hyper::Client<Connector>),
Connection { connector: Connector, host: Uri, connection: Arc<Mutex<Option<SendRequest<Body>>>> },
}
#[derive(Clone, Debug)]
@@ -53,23 +49,17 @@ pub struct Client {
impl Client {
fn connector() -> Connector {
let mut res = HttpConnector::new();
res.set_keepalive(Some(core::time::Duration::from_secs(60)));
#[cfg(feature = "tls")]
let res = HttpsConnectorBuilder::new()
.with_native_roots()
.expect("couldn't fetch system's SSL roots")
.https_or_http()
.enable_http1()
.wrap_connector(res);
let res =
HttpsConnectorBuilder::new().with_native_roots().https_or_http().enable_http1().build();
#[cfg(not(feature = "tls"))]
let res = HttpConnector::new();
res
}
pub fn with_connection_pool() -> Client {
Client {
connection: Connection::ConnectionPool(
HyperClient::builder(TokioExecutor::new()).build(Self::connector()),
),
connection: Connection::ConnectionPool(hyper::Client::builder().build(Self::connector())),
}
}
@@ -122,9 +112,7 @@ impl Client {
}
let response = match &self.connection {
Connection::ConnectionPool(client) => {
client.request(request).await.map_err(Error::HyperUtil)?
}
Connection::ConnectionPool(client) => client.request(request).await.map_err(Error::Hyper)?,
Connection::Connection { connector, host, connection } => {
let mut connection_lock = connection.lock().await;

View File

@@ -1,13 +1,12 @@
use hyper::body::Bytes;
use hyper::body::Body;
#[cfg(feature = "basic-auth")]
use hyper::header::HeaderValue;
pub use http_body_util::Full;
#[cfg(feature = "basic-auth")]
use crate::Error;
#[derive(Debug)]
pub struct Request(pub(crate) hyper::Request<Full<Bytes>>);
pub struct Request(pub(crate) hyper::Request<Body>);
impl Request {
#[cfg(feature = "basic-auth")]
fn username_password_from_uri(&self) -> Result<(String, String), Error> {
@@ -60,8 +59,8 @@ impl Request {
let _ = self.basic_auth_from_uri();
}
}
impl From<hyper::Request<Full<Bytes>>> for Request {
fn from(request: hyper::Request<Full<Bytes>>) -> Request {
impl From<hyper::Request<Body>> for Request {
fn from(request: hyper::Request<Body>) -> Request {
Request(request)
}
}

View File

@@ -1,16 +1,14 @@
use hyper::{
StatusCode,
header::{HeaderValue, HeaderMap},
body::{Buf, Incoming},
body::{Buf, Body},
};
use http_body_util::BodyExt;
use crate::{Client, Error};
// Borrows the client so its async task lives as long as this response exists.
#[allow(dead_code)]
#[derive(Debug)]
pub struct Response<'a>(pub(crate) hyper::Response<Incoming>, pub(crate) &'a Client);
pub struct Response<'a>(pub(crate) hyper::Response<Body>, pub(crate) &'a Client);
impl<'a> Response<'a> {
pub fn status(&self) -> StatusCode {
self.0.status()
@@ -19,6 +17,6 @@ impl<'a> Response<'a> {
self.0.headers()
}
pub async fn body(self) -> Result<impl std::io::Read, Error> {
Ok(self.0.into_body().collect().await.map_err(Error::Hyper)?.aggregate().reader())
hyper::body::aggregate(self.0.into_body()).await.map(Buf::reader).map_err(Error::Hyper)
}
}
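To see how these pieces fit together with the hyper 0.14 `Body` type used on the new side of these hunks, a rough sketch of a full request/response cycle; the crate alias `simple_request`, `Client::request` as the public entry point, the error formatting, and the URL are all assumptions:

use std::io::Read;
use simple_request::{hyper::{Request as HyperRequest, Body}, Client, Request};

// Build a request with an explicit hyper Body, send it over a pooled connection, and
// read the aggregated response body through the impl Read returned above.
async fn fetch(url: &str) -> Result<String, String> {
  let client = Client::with_connection_pool();
  let request: Request =
    HyperRequest::get(url).body(Body::empty()).map_err(|e| e.to_string())?.into();
  let response = client.request(request).await.map_err(|e| format!("{e:?}"))?;
  let mut body = String::new();
  response
    .body()
    .await
    .map_err(|e| format!("{e:?}"))?
    .read_to_string(&mut body)
    .map_err(|e| e.to_string())?;
  Ok(body)
}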

View File

@@ -64,20 +64,6 @@ mod shims {
}
}
pub trait BufRead: Read {
fn fill_buf(&mut self) -> Result<&[u8]>;
fn consume(&mut self, amt: usize);
}
impl BufRead for &[u8] {
fn fill_buf(&mut self) -> Result<&[u8]> {
Ok(*self)
}
fn consume(&mut self, amt: usize) {
*self = &self[amt ..];
}
}
pub trait Write {
fn write(&mut self, buf: &[u8]) -> Result<usize>;
fn write_all(&mut self, buf: &[u8]) -> Result<()> {

View File

@@ -32,7 +32,6 @@ frost-schnorrkel = { path = "../crypto/schnorrkel" }
scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std", "derive"] }
zalloc = { path = "../common/zalloc" }
serai-db = { path = "../common/db" }
serai-env = { path = "../common/env" }
@@ -40,7 +39,7 @@ processor-messages = { package = "serai-processor-messages", path = "../processo
message-queue = { package = "serai-message-queue", path = "../message-queue" }
tributary = { package = "tributary-chain", path = "./tributary" }
sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
sp-application-crypto = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "experimental", default-features = false, features = ["std"] }
serai-client = { path = "../substrate/client", default-features = false, features = ["serai", "borsh"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
@@ -49,16 +48,15 @@ borsh = { version = "1", default-features = false, features = ["std", "derive",
log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
futures-util = { version = "0.3", default-features = false, features = ["std"] }
tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "sync", "time", "macros"] }
libp2p = { version = "0.52", default-features = false, features = ["tokio", "tcp", "noise", "yamux", "request-response", "gossipsub", "macros"] }
libp2p = { version = "0.52", default-features = false, features = ["tokio", "tcp", "noise", "yamux", "gossipsub", "mdns", "macros"] }
[dev-dependencies]
futures-util = { version = "0.3", default-features = false, features = ["std"] }
tributary = { package = "tributary-chain", path = "./tributary", features = ["tests"] }
sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
sp-runtime = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] }
sp-application-crypto = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "experimental", default-features = false, features = ["std"] }
sp-runtime = { git = "https://github.com/serai-dex/polkadot-sdk", branch = "experimental", default-features = false, features = ["std"] }
[features]
longer-reattempts = []
parity-db = ["serai-db/parity-db"]
rocksdb = ["serai-db/rocksdb"]

View File

@@ -22,7 +22,7 @@ use serai_db::{Get, DbTxn, Db, create_db};
use processor_messages::coordinator::cosign_block_msg;
use crate::{
p2p::{CosignedBlock, GossipMessageKind, P2p},
p2p::{CosignedBlock, P2pMessageKind, P2p},
substrate::LatestCosignedBlock,
};
@@ -323,7 +323,7 @@ impl<D: Db> CosignEvaluator<D> {
for cosign in cosigns {
let mut buf = vec![];
cosign.serialize(&mut buf).unwrap();
P2p::broadcast(&p2p, GossipMessageKind::CosignedBlock, buf).await;
P2p::broadcast(&p2p, P2pMessageKind::CosignedBlock, buf).await;
}
sleep(Duration::from_secs(60)).await;
}

View File

@@ -9,10 +9,7 @@ use zeroize::{Zeroize, Zeroizing};
use rand_core::OsRng;
use ciphersuite::{
group::{
ff::{Field, PrimeField},
GroupEncoding,
},
group::ff::{Field, PrimeField},
Ciphersuite, Ristretto,
};
use schnorr::SchnorrSignature;
@@ -66,10 +63,6 @@ use cosign_evaluator::CosignEvaluator;
#[cfg(test)]
pub mod tests;
#[global_allocator]
static ALLOCATOR: zalloc::ZeroizingAlloc<std::alloc::System> =
zalloc::ZeroizingAlloc(std::alloc::System);
#[derive(Clone)]
pub struct ActiveTributary<D: Db, P: P2p> {
pub spec: TributarySpec,
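For context on the zalloc::ZeroizingAlloc global allocator referenced in this hunk, a minimal sketch of what such a wrapper typically does (an assumption about the technique, not the zalloc crate's actual code): forward allocation to the inner allocator and zero memory on free so key material doesn't linger on the heap.

use core::alloc::{GlobalAlloc, Layout};

pub struct ZeroizingAllocSketch<A>(pub A);

unsafe impl<A: GlobalAlloc> GlobalAlloc for ZeroizingAllocSketch<A> {
  unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
    self.0.alloc(layout)
  }
  unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
    // Zero the allocation before returning it to the underlying allocator.
    core::ptr::write_bytes(ptr, 0, layout.size());
    self.0.dealloc(ptr, layout);
  }
}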
@@ -243,9 +236,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
coordinator::ProcessorMessage::InvalidParticipant { id, .. } |
coordinator::ProcessorMessage::CosignPreprocess { id, .. } |
coordinator::ProcessorMessage::BatchPreprocess { id, .. } |
coordinator::ProcessorMessage::SlashReportPreprocess { id, .. } |
coordinator::ProcessorMessage::SubstrateShare { id, .. } => Some(id.session),
// This causes an action on our P2P net yet not on any Tributary
coordinator::ProcessorMessage::CosignedBlock { block_number, block, signature } => {
let cosigned_block = CosignedBlock {
network,
@@ -260,58 +251,9 @@ async fn handle_processor_message<D: Db, P: P2p>(
cosign_channel.send(cosigned_block).unwrap();
let mut buf = vec![];
cosigned_block.serialize(&mut buf).unwrap();
P2p::broadcast(p2p, GossipMessageKind::CosignedBlock, buf).await;
P2p::broadcast(p2p, P2pMessageKind::CosignedBlock, buf).await;
None
}
// This causes an action on Substrate yet not on any Tributary
coordinator::ProcessorMessage::SignedSlashReport { session, signature } => {
let set = ValidatorSet { network, session: *session };
let signature: &[u8] = signature.as_ref();
let signature = serai_client::Signature(signature.try_into().unwrap());
let slashes = crate::tributary::SlashReport::get(&txn, set)
.expect("signed slash report despite not having slash report locally");
let slashes_pubs =
slashes.iter().map(|(address, points)| (Public(*address), *points)).collect::<Vec<_>>();
let tx = serai_client::SeraiValidatorSets::report_slashes(
network,
slashes
.into_iter()
.map(|(address, points)| (serai_client::SeraiAddress(address), points))
.collect::<Vec<_>>()
.try_into()
.unwrap(),
signature.clone(),
);
loop {
if serai.publish(&tx).await.is_ok() {
break None;
}
// Check if the slashes shouldn't still be reported. If not, break.
let Ok(serai) = serai.as_of_latest_finalized_block().await else {
tokio::time::sleep(core::time::Duration::from_secs(5)).await;
continue;
};
let Ok(key) = serai.validator_sets().key_pending_slash_report(network).await else {
tokio::time::sleep(core::time::Duration::from_secs(5)).await;
continue;
};
let Some(key) = key else {
break None;
};
// If this is the key for this slash report, then this will verify
use sp_application_crypto::RuntimePublic;
if !key.verify(
&serai_client::validator_sets::primitives::report_slashes_message(&set, &slashes_pubs),
&signature,
) {
break None;
}
}
}
},
// These don't return a relevant Tributary as there's no Tributary with action expected
ProcessorMessage::Substrate(inner_msg) => match inner_msg {
@@ -429,22 +371,14 @@ async fn handle_processor_message<D: Db, P: P2p>(
}]
}
key_gen::ProcessorMessage::InvalidCommitments { id, faulty } => {
// This doesn't have guaranteed timing
//
// While the party *should* be fatally slashed and not included in future attempts,
// they'll actually be fatally slashed (assuming liveness before the Tributary retires)
// and not included in future attempts *which begin after the latency window completes*
let participant = spec
.reverse_lookup_i(
&crate::tributary::removed_as_of_dkg_attempt(&txn, spec.genesis(), id.attempt)
.expect("participating in DKG attempt yet we didn't save who was removed"),
faulty,
)
.unwrap();
vec![Transaction::RemoveParticipantDueToDkg {
participant,
signed: Transaction::empty_signed(),
}]
// This doesn't need the ID since it's a Provided transaction which everyone will provide
// With this provision comes explicit ordering (with regards to other
// RemoveParticipantDueToDkg transactions) and group consensus
// Accordingly, this can't be replayed
// It could be included on-chain early/late with regards to the chain's active attempt,
// which attempt scheduling is written to make a non-issue by auto re-attempting once a
// fatal slash occurs, regardless of timing
vec![Transaction::RemoveParticipantDueToDkg { attempt: id.attempt, participant: faulty }]
}
key_gen::ProcessorMessage::Shares { id, mut shares } => {
// Create a MuSig-based machine to inform Substrate of this key generation
@@ -454,7 +388,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
.expect("participating in a DKG attempt yet we didn't track who was removed yet?");
let our_i = spec
.i(&removed, pub_key)
.expect("processor message to DKG for an attempt we aren't a validator in");
.expect("processor message to DKG for a session we aren't a validator in");
// `tx_shares` needs to be done here as while it can be serialized from the HashMap
// without further context, it can't be deserialized without context
@@ -483,13 +417,30 @@ async fn handle_processor_message<D: Db, P: P2p>(
}]
}
key_gen::ProcessorMessage::InvalidShare { id, accuser, faulty, blame } => {
vec![Transaction::InvalidDkgShare {
// Check if the MuSig signature had any errors as if so, we need to provide
// RemoveParticipantDueToDkg
// As for the safety of calling error_generating_key_pair, the processor is presumed
// to only send InvalidShare or GeneratedKeyPair for a given attempt
let mut txs = if let Some(faulty) =
crate::tributary::error_generating_key_pair(&mut txn, key, spec, id.attempt)
{
vec![Transaction::RemoveParticipantDueToDkg {
attempt: id.attempt,
participant: faulty,
}]
} else {
vec![]
};
txs.push(Transaction::InvalidDkgShare {
attempt: id.attempt,
accuser,
faulty,
blame,
signed: Transaction::empty_signed(),
}]
});
txs
}
key_gen::ProcessorMessage::GeneratedKeyPair { id, substrate_key, network_key } => {
// TODO2: Check the KeyGenId fields
@@ -503,7 +454,6 @@ async fn handle_processor_message<D: Db, P: P2p>(
id.attempt,
);
// TODO: Move this into generated_key_pair?
match share {
Ok(share) => {
vec![Transaction::DkgConfirmed {
@@ -513,32 +463,14 @@ async fn handle_processor_message<D: Db, P: P2p>(
}]
}
Err(p) => {
let participant = spec
.reverse_lookup_i(
&crate::tributary::removed_as_of_dkg_attempt(&txn, spec.genesis(), id.attempt)
.expect("participating in DKG attempt yet we didn't save who was removed"),
p,
)
.unwrap();
vec![Transaction::RemoveParticipantDueToDkg {
participant,
signed: Transaction::empty_signed(),
}]
vec![Transaction::RemoveParticipantDueToDkg { attempt: id.attempt, participant: p }]
}
}
}
// This is a response to the ordered VerifyBlame, which is why this satisfies the provided
// transaction's needs to be perfectly ordered
key_gen::ProcessorMessage::Blame { id, participant } => {
let participant = spec
.reverse_lookup_i(
&crate::tributary::removed_as_of_dkg_attempt(&txn, spec.genesis(), id.attempt)
.expect("participating in DKG attempt yet we didn't save who was removed"),
participant,
)
.unwrap();
vec![Transaction::RemoveParticipantDueToDkg {
participant,
signed: Transaction::empty_signed(),
}]
vec![Transaction::RemoveParticipantDueToDkg { attempt: id.attempt, participant }]
}
},
ProcessorMessage::Sign(msg) => match msg {
@@ -604,8 +536,7 @@ async fn handle_processor_message<D: Db, P: P2p>(
// slash) and censor transactions (yet don't explicitly ban)
vec![]
}
coordinator::ProcessorMessage::CosignPreprocess { id, preprocesses } |
coordinator::ProcessorMessage::SlashReportPreprocess { id, preprocesses } => {
coordinator::ProcessorMessage::CosignPreprocess { id, preprocesses } => {
vec![Transaction::SubstrateSign(SignData {
plan: id.id,
attempt: id.attempt,
@@ -720,8 +651,6 @@ async fn handle_processor_message<D: Db, P: P2p>(
}
#[allow(clippy::match_same_arms)] // Allowed to preserve layout
coordinator::ProcessorMessage::CosignedBlock { .. } => unreachable!(),
#[allow(clippy::match_same_arms)]
coordinator::ProcessorMessage::SignedSlashReport { .. } => unreachable!(),
},
ProcessorMessage::Substrate(inner_msg) => match inner_msg {
processor_messages::substrate::ProcessorMessage::Batch { .. } |
@@ -836,8 +765,8 @@ async fn handle_cosigns_and_batch_publication<D: Db, P: P2p>(
) {
let mut tributaries = HashMap::new();
'outer: loop {
// TODO: Create a better async flow for this
tokio::time::sleep(core::time::Duration::from_millis(100)).await;
// TODO: Create a better async flow for this, as this does still hammer this task
tokio::task::yield_now().await;
match tributary_event.try_recv() {
Ok(event) => match event {
@@ -1012,16 +941,16 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
key: Zeroizing<<Ristretto as Ciphersuite>::F>,
p2p: P,
processors: Pro,
serai: Arc<Serai>,
serai: Serai,
) {
let serai = Arc::new(serai);
let (new_tributary_spec_send, mut new_tributary_spec_recv) = mpsc::unbounded_channel();
// Reload active tributaries from the database
for spec in ActiveTributaryDb::active_tributaries(&raw_db).1 {
new_tributary_spec_send.send(spec).unwrap();
}
let (perform_slash_report_send, mut perform_slash_report_recv) = mpsc::unbounded_channel();
let (tributary_retired_send, mut tributary_retired_recv) = mpsc::unbounded_channel();
// Handle new Substrate blocks
@@ -1031,7 +960,6 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
processors.clone(),
serai.clone(),
new_tributary_spec_send,
perform_slash_report_send,
tributary_retired_send,
));
@@ -1086,12 +1014,10 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
let raw_db = raw_db.clone();
let key = key.clone();
let specs = Arc::new(RwLock::new(HashMap::new()));
let tributaries = Arc::new(RwLock::new(HashMap::new()));
// Spawn a task to maintain a local view of the tributaries for whenever recognized_id is
// called
tokio::spawn({
let specs = specs.clone();
let tributaries = tributaries.clone();
let mut set_to_genesis = HashMap::new();
async move {
@@ -1100,11 +1026,9 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
Ok(TributaryEvent::NewTributary(tributary)) => {
set_to_genesis.insert(tributary.spec.set(), tributary.spec.genesis());
tributaries.write().await.insert(tributary.spec.genesis(), tributary.tributary);
specs.write().await.insert(tributary.spec.set(), tributary.spec);
}
Ok(TributaryEvent::TributaryRetired(set)) => {
if let Some(genesis) = set_to_genesis.remove(&set) {
specs.write().await.remove(&set);
tributaries.write().await.remove(&genesis);
}
}
@@ -1117,84 +1041,6 @@ pub async fn run<D: Db, Pro: Processors, P: P2p>(
}
});
// Also spawn a task to handle slash reports, as this needs such a view of tributaries
tokio::spawn({
let mut raw_db = raw_db.clone();
let key = key.clone();
let tributaries = tributaries.clone();
async move {
'task_loop: loop {
match perform_slash_report_recv.recv().await {
Some(set) => {
let (genesis, validators) = loop {
let specs = specs.read().await;
let Some(spec) = specs.get(&set) else {
// If we don't have this Tributary because it's retired, break and move on
if RetiredTributaryDb::get(&raw_db, set).is_some() {
continue 'task_loop;
}
// This may happen if the task above is simply slow
log::warn!("tributary we don't have yet is supposed to perform a slash report");
continue;
};
break (spec.genesis(), spec.validators());
};
let mut slashes = vec![];
for (validator, _) in validators {
if validator == (<Ristretto as Ciphersuite>::generator() * key.deref()) {
continue;
}
let validator = validator.to_bytes();
let fatally = tributary::FatallySlashed::get(&raw_db, genesis, validator).is_some();
// TODO: Properly type this
let points = if fatally {
u32::MAX
} else {
tributary::SlashPoints::get(&raw_db, genesis, validator).unwrap_or(0)
};
slashes.push(points);
}
let mut tx = Transaction::SlashReport(slashes, Transaction::empty_signed());
tx.sign(&mut OsRng, genesis, &key);
let mut first = true;
loop {
if !first {
sleep(Duration::from_millis(100)).await;
}
first = false;
let tributaries = tributaries.read().await;
let Some(tributary) = tributaries.get(&genesis) else {
// If we don't have this Tributary because it's retired, break and move on
if RetiredTributaryDb::get(&raw_db, set).is_some() {
break;
}
// This may happen if the task above is simply slow
log::warn!("tributary we don't have yet is supposed to perform a slash report");
continue;
};
// This is safe to perform multiple times and solely needs atomicity with regards
// to itself
// TODO: Should this not take a txn accordingly? It's best practice to take a txn,
// yet taking a txn fails to declare its achieved independence
let mut txn = raw_db.txn();
tributary::publish_signed_transaction(&mut txn, tributary, tx).await;
txn.commit();
break;
}
}
None => panic!("perform slash report sender closed"),
}
}
}
});
move |set: ValidatorSet, genesis, id_type, id: Vec<u8>| {
log::debug!("recognized ID {:?} {}", id_type, hex::encode(&id));
let mut raw_db = raw_db.clone();
@@ -1356,10 +1202,11 @@ async fn main() {
key_bytes.zeroize();
key
};
let p2p = LibP2p::new();
let processors = Arc::new(MessageQueue::from_env(Service::Coordinator));
let serai = (async {
let serai = || async {
loop {
let Ok(serai) = Serai::new(format!(
"http://{}:9944",
@@ -1372,10 +1219,8 @@ async fn main() {
continue;
};
log::info!("made initial connection to Serai node");
return Arc::new(serai);
return serai;
}
})
.await;
let p2p = LibP2p::new(serai.clone());
run(db, key, p2p, processors, serai).await
};
run(db, key, p2p, processors, serai().await).await
}

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff