31 Commits

Author SHA1 Message Date
Luke Parker
2ba6d77ee7 Reduce target peers a bit 2024-04-23 12:59:34 -04:00
Luke Parker
67a0ff825b Correct recv to try_recv when exhausting channel 2024-04-23 12:41:10 -04:00
Luke Parker
6518379981 Correct selection of to-try peers to prevent infinite loops when to-try < target 2024-04-23 12:41:00 -04:00
Luke Parker
0c6ab50e35 Use a constant for the target amount of peer 2024-04-23 12:40:50 -04:00
Luke Parker
f73ce37e18 Use a HashSet for which networks to try peer finding for
Prevents a flood of retries from individually failed attempts within a batch of
peer connection attempts.
2024-04-23 12:40:41 -04:00
Luke Parker
973dcf065e Correct port forward in orchestration 2024-04-23 09:47:11 -04:00
Luke Parker
8f5aaa8492 New coordinator port, genesis 2024-04-23 09:32:44 -04:00
Luke Parker
93ba8d840a Remove cbor 2024-04-23 09:31:33 -04:00
Luke Parker
485e454680 Inline broadcast_raw now that it doesn't have multiple callers 2024-04-23 09:31:17 -04:00
Luke Parker
c3b6abf020 Properly diversify ReqResMessageKind/GossipMessageKind 2024-04-23 09:31:09 -04:00
Luke Parker
f3ccf1cab0 Move keep alive, heartbeat, block to request/response 2024-04-23 09:30:58 -04:00
Luke Parker
0deee0ec6b Line for prior commit 2024-04-21 08:55:50 -04:00
Luke Parker
6b428948d4 Comment the insanely aggressive timeout future trace log 2024-04-21 08:55:32 -04:00
Luke Parker
6986257d4f Add missing continue to prevent dialing a node we're connected to 2024-04-21 08:37:06 -04:00
Luke Parker
a3c37cba21 Replace expect with debug log 2024-04-21 08:03:01 -04:00
Luke Parker
b5f2ff1397 Correct boolean NOT on is_fresh_dial 2024-04-21 07:30:18 -04:00
Luke Parker
c84931c6ae Retry if initial dials fail, not just upon disconnect 2024-04-21 07:26:29 -04:00
Luke Parker
63abf2d022 Restart coordinator peer finding upon disconnections 2024-04-21 07:03:03 -04:00
Luke Parker
a62d2d05ad Correct log which didn't work as intended 2024-04-20 19:55:17 -04:00
Luke Parker
967cc16748 Correct log targets in tendermint-machine 2024-04-20 19:55:06 -04:00
Luke Parker
ab4b8cc2d5 Better logs in tendermint-machine 2024-04-20 18:13:57 -04:00
Luke Parker
387ccbad3a Extend time in sync test 2024-04-18 16:39:16 -04:00
Luke Parker
26cdfdd824 fmt 2024-04-18 16:39:03 -04:00
Luke Parker
68e77384ac Don't broadcast added blocks
Online validators should inherently have them. Offline validators will receive
from the sync protocol.

This does somewhat eliminate the class of nodes who would follow the blockchain
(without validating it), yet that's fine for the performance benefit.
2024-04-18 16:38:52 -04:00
Luke Parker
68da88c1f3 Only reply to heartbeats after a certain distance 2024-04-18 16:38:43 -04:00
Luke Parker
2b481ab71e Ensure we don't reply to stale heartbeats 2024-04-18 16:38:21 -04:00
Luke Parker
05e6d81948 Only have some nodes respond to latent heartbeats
Also only respond if they're more than 2 blocks behind to minimize redundant
sending of blocks.
2024-04-18 16:38:16 -04:00
Luke Parker
e426cd00bd Correct protocol ID -> ID 2024-04-11 23:11:23 -04:00
Luke Parker
09e3881b7d Add worksmarter at the last minute 2024-04-11 16:08:14 -04:00
Luke Parker
10124ac4a8 Add Testnet 2 Config
Starts Tuesday, April 16th, with confirmed keys/boot nodes.
2024-04-11 15:49:32 -04:00
Luke Parker
1987983f88 Add bootnode code prior used in testnet-internal
Also performs the devnet/testnet differentation done since the testnet branch.
2024-04-11 14:37:09 -04:00
390 changed files with 12924 additions and 21946 deletions

View File

@@ -5,7 +5,7 @@ inputs:
version:
description: "Version to download and run"
required: false
default: "27.0"
default: 24.0.1
runs:
using: "composite"

View File

@@ -10,7 +10,7 @@ inputs:
bitcoin-version:
description: "Bitcoin version to download and run as a regtest node"
required: false
default: "27.0"
default: 24.0.1
runs:
using: "composite"
@@ -19,9 +19,9 @@ runs:
uses: ./.github/actions/build-dependencies
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
with:
version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9
version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
cache: false
- name: Run a Monero Regtest Node

View File

@@ -1 +1 @@
nightly-2024-07-01
nightly-2024-02-07

View File

@@ -30,22 +30,6 @@ jobs:
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
-p bitcoin-serai \
-p alloy-simple-request-transport \
-p ethereum-serai \
-p serai-ethereum-relayer \
-p monero-io \
-p monero-generators \
-p monero-primitives \
-p monero-mlsag \
-p monero-clsag \
-p monero-borromean \
-p monero-bulletproofs \
-p monero-serai \
-p monero-rpc \
-p monero-simple-request-rpc \
-p monero-address \
-p monero-wallet \
-p monero-seed \
-p polyseed \
-p monero-wallet-util \
-p monero-serai-verify-chain
-p monero-serai

View File

@@ -28,5 +28,4 @@ jobs:
-p std-shims \
-p zalloc \
-p serai-db \
-p serai-env \
-p simple-request
-p serai-env

View File

@@ -37,4 +37,4 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Run coordinator Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -19,4 +19,4 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Run Full Stack Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -33,4 +33,4 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Run message-queue Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -26,22 +26,7 @@ jobs:
uses: ./.github/actions/test-dependencies
- name: Run Unit Tests Without Features
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
# Doesn't run unit tests with features as the tests workflow will
@@ -61,17 +46,11 @@ jobs:
monero-version: ${{ matrix.version }}
- name: Run Integration Tests Without Features
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'
# Runs with the binaries feature so the binaries build
# https://github.com/rust-lang/cargo/issues/8396
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
- name: Run Integration Tests
# Don't run if the the tests workflow also will
if: ${{ matrix.version != 'v0.18.2.0' }}
run: |
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'

View File

@@ -32,4 +32,4 @@ jobs:
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
- name: Verify no-std builds
run: CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf -p serai-no-std-tests
run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf

View File

@@ -37,4 +37,4 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Run processor Docker tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -33,4 +33,4 @@ jobs:
uses: ./.github/actions/build-dependencies
- name: Run Reproducible Runtime tests
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test

View File

@@ -43,7 +43,6 @@ jobs:
-p tendermint-machine \
-p tributary-chain \
-p serai-coordinator \
-p serai-orchestrator \
-p serai-docker-tests
test-substrate:
@@ -65,9 +64,7 @@ jobs:
-p serai-validator-sets-pallet \
-p serai-in-instructions-primitives \
-p serai-in-instructions-pallet \
-p serai-signals-primitives \
-p serai-signals-pallet \
-p serai-abi \
-p serai-runtime \
-p serai-node

2623
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,8 +2,6 @@
resolver = "2"
members = [
# Version patches
"patches/parking_lot_core",
"patches/parking_lot",
"patches/zstd",
"patches/rocksdb",
"patches/proc-macro-crate",
@@ -38,27 +36,9 @@ members = [
"crypto/schnorrkel",
"coins/bitcoin",
"coins/ethereum/alloy-simple-request-transport",
"coins/ethereum",
"coins/ethereum/relayer",
"coins/monero/io",
"coins/monero/generators",
"coins/monero/primitives",
"coins/monero/ringct/mlsag",
"coins/monero/ringct/clsag",
"coins/monero/ringct/borromean",
"coins/monero/ringct/bulletproofs",
"coins/monero",
"coins/monero/rpc",
"coins/monero/rpc/simple-request",
"coins/monero/wallet/address",
"coins/monero/wallet",
"coins/monero/wallet/seed",
"coins/monero/wallet/polyseed",
"coins/monero/wallet/util",
"coins/monero/verify-chain",
"message-queue",
@@ -74,14 +54,12 @@ members = [
"substrate/coins/primitives",
"substrate/coins/pallet",
"substrate/dex/pallet",
"substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",
"substrate/validator-sets/primitives",
"substrate/validator-sets/pallet",
"substrate/in-instructions/primitives",
"substrate/in-instructions/pallet",
"substrate/signals/primitives",
"substrate/signals/pallet",
@@ -131,10 +109,8 @@ panic = "unwind"
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
# Needed due to dockertest's usage of `Rc`s when we need `Arc`s
dockertest = { git = "https://github.com/orcalabs/dockertest-rs", rev = "4dd6ae24738aa6dc5c89444cc822ea4745517493" }
dockertest = { git = "https://github.com/kayabaNerve/dockertest-rs", branch = "arc" }
parking_lot_core = { path = "patches/parking_lot_core" }
parking_lot = { path = "patches/parking_lot" }
# wasmtime pulls in an old version for this
zstd = { path = "patches/zstd" }
# Needed for WAL compression

View File

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021"
rust-version = "1.79"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
@@ -23,7 +23,7 @@ thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false }
rand_core = { version = "0.6", default-features = false }
bitcoin = { version = "0.32", default-features = false }
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
@@ -36,7 +36,7 @@ serde_json = { version = "1", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
[dev-dependencies]
secp256k1 = { version = "0.29", default-features = false, features = ["std"] }
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }

View File

@@ -195,13 +195,13 @@ impl Rpc {
// If this was already successfully published, consider this having succeeded
if let RpcError::RequestError(Error { code, .. }) = e {
if code == RPC_VERIFY_ALREADY_IN_CHAIN {
return Ok(tx.compute_txid());
return Ok(tx.txid());
}
}
Err(e)?
}
};
if txid != tx.compute_txid() {
if txid != tx.txid() {
Err(RpcError::InvalidResponse("returned TX ID inequals calculated TX ID"))?;
}
Ok(txid)
@@ -215,7 +215,7 @@ impl Rpc {
let tx: Transaction = encode::deserialize(&bytes)
.map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;
let mut tx_hash = *tx.compute_txid().as_raw_hash().as_byte_array();
let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
tx_hash.reverse();
if hash != &tx_hash {
Err(RpcError::InvalidResponse("node replied with a different transaction"))?;

View File

@@ -39,7 +39,7 @@ fn test_algorithm() {
.verify_schnorr(
&Signature::from_slice(&sig)
.expect("couldn't convert produced signature to secp256k1::Signature"),
&Message::from_digest_slice(Hash::hash(MESSAGE).as_ref()).unwrap(),
&Message::from(Hash::hash(MESSAGE)),
&x_only(&keys[&Participant::new(1).unwrap()].group_key()),
)
.unwrap()

View File

@@ -4,7 +4,7 @@ use std_shims::{
io::{self, Write},
};
#[cfg(feature = "std")]
use std::io::{Read, BufReader};
use std_shims::io::Read;
use k256::{
elliptic_curve::sec1::{Tag, ToEncodedPoint},
@@ -18,8 +18,8 @@ use frost::{
};
use bitcoin::{
consensus::encode::serialize, key::TweakedPublicKey, OutPoint, ScriptBuf, TxOut, Transaction,
Block,
consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
TxOut, Transaction, Block,
};
#[cfg(feature = "std")]
use bitcoin::consensus::encode::Decodable;
@@ -46,12 +46,12 @@ pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
/// Return the Taproot address payload for a public key.
///
/// If the key is odd, this will return None.
pub fn p2tr_script_buf(key: ProjectivePoint) -> Option<ScriptBuf> {
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
return None;
}
Some(ScriptBuf::new_p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
}
/// A spendable output.
@@ -89,17 +89,11 @@ impl ReceivedOutput {
/// Read a ReceivedOutput from a generic satisfying Read.
#[cfg(feature = "std")]
pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
let offset = Secp256k1::read_F(r)?;
let output;
let outpoint;
{
let mut buf_r = BufReader::with_capacity(0, r);
output =
TxOut::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid TxOut"))?;
outpoint =
OutPoint::consensus_decode(&mut buf_r).map_err(|_| io::Error::other("invalid OutPoint"))?;
}
Ok(ReceivedOutput { offset, output, outpoint })
Ok(ReceivedOutput {
offset: Secp256k1::read_F(r)?,
output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
})
}
/// Write a ReceivedOutput to a generic satisfying Write.
@@ -130,7 +124,7 @@ impl Scanner {
/// Returns None if this key can't be scanned for.
pub fn new(key: ProjectivePoint) -> Option<Scanner> {
let mut scripts = HashMap::new();
scripts.insert(p2tr_script_buf(key)?, Scalar::ZERO);
scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
Some(Scanner { key, scripts })
}
@@ -147,8 +141,9 @@ impl Scanner {
// chance of being even
// That means this should terminate within a very small amount of iterations
loop {
match p2tr_script_buf(self.key + (ProjectivePoint::GENERATOR * offset)) {
Some(script) => {
match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
Some(address) => {
let script = address.script_pubkey();
if self.scripts.contains_key(&script) {
None?;
}
@@ -171,7 +166,7 @@ impl Scanner {
res.push(ReceivedOutput {
offset: *offset,
output: output.clone(),
outpoint: OutPoint::new(tx.compute_txid(), vout),
outpoint: OutPoint::new(tx.txid(), vout),
});
}
}

View File

@@ -18,12 +18,12 @@ use bitcoin::{
absolute::LockTime,
script::{PushBytesBuf, ScriptBuf},
transaction::{Version, Transaction},
OutPoint, Sequence, Witness, TxIn, Amount, TxOut,
OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
};
use crate::{
crypto::Schnorr,
wallet::{ReceivedOutput, p2tr_script_buf},
wallet::{ReceivedOutput, address_payload},
};
#[rustfmt::skip]
@@ -61,11 +61,7 @@ pub struct SignableTransaction {
}
impl SignableTransaction {
fn calculate_weight(
inputs: usize,
payments: &[(ScriptBuf, u64)],
change: Option<&ScriptBuf>,
) -> u64 {
fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
// Expand this a full transaction in order to use the bitcoin library's weight function
let mut tx = Transaction {
version: Version(2),
@@ -90,14 +86,14 @@ impl SignableTransaction {
// The script pub key is not of a fixed size and does have to be used here
.map(|payment| TxOut {
value: Amount::from_sat(payment.1),
script_pubkey: payment.0.clone(),
script_pubkey: payment.0.script_pubkey(),
})
.collect(),
};
if let Some(change) = change {
// Use a 0 value since we're currently unsure what the change amount will be, and since
// the value is fixed size (so any value could be used here)
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.clone() });
tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
}
u64::from(tx.weight())
}
@@ -125,8 +121,8 @@ impl SignableTransaction {
/// If data is specified, an OP_RETURN output will be added with it.
pub fn new(
mut inputs: Vec<ReceivedOutput>,
payments: &[(ScriptBuf, u64)],
change: Option<ScriptBuf>,
payments: &[(Address, u64)],
change: Option<&Address>,
data: Option<Vec<u8>>,
fee_per_weight: u64,
) -> Result<SignableTransaction, TransactionError> {
@@ -163,7 +159,10 @@ impl SignableTransaction {
let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
let mut tx_outs = payments
.iter()
.map(|payment| TxOut { value: Amount::from_sat(payment.1), script_pubkey: payment.0.clone() })
.map(|payment| TxOut {
value: Amount::from_sat(payment.1),
script_pubkey: payment.0.script_pubkey(),
})
.collect::<Vec<_>>();
// Add the OP_RETURN output
@@ -214,11 +213,12 @@ impl SignableTransaction {
// If there's a change address, check if there's change to give it
if let Some(change) = change {
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(&change));
let weight_with_change = Self::calculate_weight(tx_ins.len(), payments, Some(change));
let fee_with_change = fee_per_weight * weight_with_change;
if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
if value >= DUST {
tx_outs.push(TxOut { value: Amount::from_sat(value), script_pubkey: change });
tx_outs
.push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
weight = weight_with_change;
needed_fee = fee_with_change;
}
@@ -248,7 +248,7 @@ impl SignableTransaction {
/// Returns the TX ID of the transaction this will create.
pub fn txid(&self) -> [u8; 32] {
let mut res = self.tx.compute_txid().to_byte_array();
let mut res = self.tx.txid().to_byte_array();
res.reverse();
res
}
@@ -288,7 +288,7 @@ impl SignableTransaction {
transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());
let offset = keys.clone().offset(self.offsets[i]);
if p2tr_script_buf(offset.group_key())? != self.prevouts[i].script_pubkey {
if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
None?;
}
@@ -375,7 +375,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
msg: &[u8],
) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
if !msg.is_empty() {
panic!("message was passed to the TransactionSignMachine when it generates its own");
panic!("message was passed to the TransactionMachine when it generates its own");
}
let commitments = (0 .. self.sigs.len())

View File

@@ -22,10 +22,11 @@ use bitcoin_serai::{
hashes::Hash as HashTrait,
blockdata::opcodes::all::OP_RETURN,
script::{PushBytesBuf, Instruction, Instructions, Script},
address::NetworkChecked,
OutPoint, Amount, TxOut, Transaction, Network, Address,
},
wallet::{
tweak_keys, p2tr_script_buf, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
},
rpc::Rpc,
};
@@ -47,7 +48,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
"generatetoaddress",
serde_json::json!([
1,
Address::from_script(&p2tr_script_buf(key).unwrap(), Network::Regtest).unwrap()
Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
]),
)
.await
@@ -68,7 +69,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
assert_eq!(outputs, scanner.scan_transaction(&block.txdata[0]));
assert_eq!(outputs.len(), 1);
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].compute_txid(), 0));
assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());
assert_eq!(
@@ -192,7 +193,7 @@ async_sequential! {
assert_eq!(output.offset(), Scalar::ZERO);
let inputs = vec![output];
let addr = || p2tr_script_buf(key).unwrap();
let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
let payments = vec![(addr(), 1000)];
assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
@@ -205,7 +206,7 @@ async_sequential! {
// No change
assert!(SignableTransaction::new(inputs.clone(), &[(addr(), 1000)], None, None, FEE).is_ok());
// Consolidation TX
assert!(SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, FEE).is_ok());
assert!(SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, FEE).is_ok());
// Data
assert!(SignableTransaction::new(inputs.clone(), &[], None, Some(vec![]), FEE).is_ok());
// No outputs
@@ -228,7 +229,7 @@ async_sequential! {
);
assert_eq!(
SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
SignableTransaction::new(inputs.clone(), &[], Some(&addr()), None, 0),
Err(TransactionError::TooLowFee),
);
@@ -260,19 +261,20 @@ async_sequential! {
// Declare payments, change, fee
let payments = [
(p2tr_script_buf(key).unwrap(), 1005),
(p2tr_script_buf(offset_key).unwrap(), 1007)
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
(Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
];
let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
let change_addr = p2tr_script_buf(change_key).unwrap();
let change_addr =
Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());
// Create and sign the TX
let tx = SignableTransaction::new(
vec![output.clone(), offset_output.clone()],
&payments,
Some(change_addr.clone()),
Some(&change_addr),
None,
FEE
).unwrap();
@@ -285,7 +287,7 @@ async_sequential! {
// Ensure we can scan it
let outputs = scanner.scan_transaction(&tx);
for (o, output) in outputs.iter().enumerate() {
assert_eq!(output.outpoint(), &OutPoint::new(tx.compute_txid(), u32::try_from(o).unwrap()));
assert_eq!(output.outpoint(), &OutPoint::new(tx.txid(), u32::try_from(o).unwrap()));
assert_eq!(&ReceivedOutput::read::<&[u8]>(&mut output.serialize().as_ref()).unwrap(), output);
}
@@ -297,7 +299,7 @@ async_sequential! {
for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
assert_eq!(
output,
&TxOut { script_pubkey: payment.0.clone(), value: Amount::from_sat(payment.1) },
&TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
);
assert_eq!(scanned.value(), payment.1 );
}
@@ -312,13 +314,13 @@ async_sequential! {
input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
assert_eq!(
tx.output[2],
TxOut { script_pubkey: change_addr, value: Amount::from_sat(change_amount) },
TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
);
// This also tests send_raw_transaction and get_transaction, which the RPC test can't
// effectively test
rpc.send_raw_transaction(&tx).await.unwrap();
let mut hash = *tx.compute_txid().as_raw_hash().as_byte_array();
let mut hash = *tx.txid().as_raw_hash().as_byte_array();
hash.reverse();
assert_eq!(tx, rpc.get_transaction(&hash).await.unwrap());
assert_eq!(expected_id, hash);
@@ -342,7 +344,7 @@ async_sequential! {
&SignableTransaction::new(
vec![output],
&[],
Some(p2tr_script_buf(key).unwrap()),
Some(&Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
Some(data.clone()),
FEE
).unwrap()

View File

@@ -1,3 +1,7 @@
# Solidity build outputs
cache
artifacts
# Auto-generated ABI files
src/abi/schnorr.rs
src/abi/router.rs

View File

@@ -7,7 +7,7 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
rust-version = "1.79"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
@@ -18,32 +18,28 @@ workspace = true
[dependencies]
thiserror = { version = "1", default-features = false }
eyre = { version = "0.6", default-features = false }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["recommended"] }
sha3 = { version = "0.10", default-features = false, features = ["std"] }
group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] }
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["secp256k1"] }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }
alloy-core = { version = "0.7", default-features = false }
alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] }
alloy-consensus = { version = "0.1", default-features = false, features = ["k256"] }
alloy-network = { version = "0.1", default-features = false }
alloy-rpc-types-eth = { version = "0.1", default-features = false }
alloy-rpc-client = { version = "0.1", default-features = false }
alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false }
alloy-provider = { version = "0.1", default-features = false }
ethers-core = { version = "2", default-features = false }
ethers-providers = { version = "2", default-features = false }
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
alloy-node-bindings = { version = "0.1", default-features = false, optional = true }
[build-dependencies]
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }
[dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] }
rand_core = { version = "0.6", default-features = false, features = ["std"] }
hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["std"] }
serde_json = { version = "1", default-features = false, features = ["std"] }
sha2 = { version = "0.10", default-features = false, features = ["std"] }
tokio = { version = "1", features = ["macros"] }
alloy-node-bindings = { version = "0.1", default-features = false }
[features]
tests = ["alloy-node-bindings", "frost/tests"]

View File

@@ -3,12 +3,6 @@
This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.
While `monero-serai` and `bitcoin-serai` are general purpose libraries,
`ethereum-serai` is Serai specific. If any of the utilities are generally
desired, please fork and maintain your own copy to ensure the desired
functionality is preserved, or open an issue to request we make this library
general purpose.
### Dependencies
- solc

View File

@@ -1,29 +0,0 @@
[package]
name = "alloy-simple-request-transport"
version = "0.1.0"
description = "A transport for alloy based off simple-request"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/alloy-simple-request-transport"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
tower = "0.4"
serde_json = { version = "1", default-features = false }
simple-request = { path = "../../../common/request", default-features = false }
alloy-json-rpc = { version = "0.1", default-features = false }
alloy-transport = { version = "0.1", default-features = false }
[features]
default = ["tls"]
tls = ["simple-request/tls"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,4 +0,0 @@
# Alloy Simple Request Transport
A transport for alloy based on simple-request, a small HTTP client built around
hyper.

View File

@@ -1,60 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
use core::task;
use std::io;
use alloy_json_rpc::{RequestPacket, ResponsePacket};
use alloy_transport::{TransportError, TransportErrorKind, TransportFut};
use simple_request::{hyper, Request, Client};
use tower::Service;
#[derive(Clone, Debug)]
pub struct SimpleRequest {
client: Client,
url: String,
}
impl SimpleRequest {
pub fn new(url: String) -> Self {
Self { client: Client::with_connection_pool(), url }
}
}
impl Service<RequestPacket> for SimpleRequest {
type Response = ResponsePacket;
type Error = TransportError;
type Future = TransportFut<'static>;
#[inline]
fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> task::Poll<Result<(), Self::Error>> {
task::Poll::Ready(Ok(()))
}
#[inline]
fn call(&mut self, req: RequestPacket) -> Self::Future {
let inner = self.clone();
Box::pin(async move {
let packet = req.serialize().map_err(TransportError::SerError)?;
let request = Request::from(
hyper::Request::post(&inner.url)
.header("Content-Type", "application/json")
.body(serde_json::to_vec(&packet).map_err(TransportError::SerError)?.into())
.unwrap(),
);
let mut res = inner
.client
.request(request)
.await
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?
.body()
.await
.map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?;
serde_json::from_reader(&mut res).map_err(|e| TransportError::deser_err(e, ""))
})
}
}

View File

@@ -1,5 +1,7 @@
use std::process::Command;
use ethers_contract::Abigen;
fn main() {
println!("cargo:rerun-if-changed=contracts/*");
println!("cargo:rerun-if-changed=artifacts/*");
@@ -19,23 +21,22 @@ fn main() {
"--base-path", ".",
"-o", "./artifacts", "--overwrite",
"--bin", "--abi",
"--via-ir", "--optimize",
"./contracts/IERC20.sol",
"./contracts/Schnorr.sol",
"./contracts/Deployer.sol",
"./contracts/Sandbox.sol",
"./contracts/Router.sol",
"./src/tests/contracts/Schnorr.sol",
"./src/tests/contracts/ERC20.sol",
"--no-color",
"--optimize",
"./contracts/Schnorr.sol", "./contracts/Router.sol",
];
let solc = Command::new("solc").args(args).output().unwrap();
assert!(solc.status.success());
for line in String::from_utf8(solc.stderr).unwrap().lines() {
assert!(!line.starts_with("Error:"));
}
assert!(Command::new("solc").args(args).status().unwrap().success());
Abigen::new("Schnorr", "./artifacts/Schnorr.abi")
.unwrap()
.generate()
.unwrap()
.write_to_file("./src/abi/schnorr.rs")
.unwrap();
Abigen::new("Router", "./artifacts/Router.abi")
.unwrap()
.generate()
.unwrap()
.write_to_file("./src/abi/router.rs")
.unwrap();
}

View File

@@ -1,52 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
/*
The expected deployment process of the Router is as follows:
1) A transaction deploying Deployer is made. Then, a deterministic signature is
created such that an account with an unknown private key is the creator of
the contract. Anyone can fund this address, and once anyone does, the
transaction deploying Deployer can be published by anyone. No other
transaction may be made from that account.
2) Anyone deploys the Router through the Deployer. This uses a sequential nonce
such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible.
While such attacks would still be feasible if the Deployer's address was
controllable, the usage of a deterministic signature with a NUMS method
prevents that.
This doesn't have any denial-of-service risks and will resolve once anyone steps
forward as deployer. This does fail to guarantee an identical address across
every chain, though it enables letting anyone efficiently ask the Deployer for
the address (with the Deployer having an identical address on every chain).
Unfortunately, guaranteeing identical addresses aren't feasible. We'd need the
Deployer contract to use a consistent salt for the Router, yet the Router must
be deployed with a specific public key for Serai. Since Ethereum isn't able to
determine a valid public key (one the result of a Serai DKG) from a dishonest
public key, we have to allow multiple deployments with Serai being the one to
determine which to use.
The alternative would be to have a council publish the Serai key on-Ethereum,
with Serai verifying the published result. This would introduce a DoS risk in
the council not publishing the correct key/not publishing any key.
*/
contract Deployer {
event Deployment(bytes32 indexed init_code_hash, address created);
error DeploymentFailed();
function deploy(bytes memory init_code) external {
address created;
assembly {
created := create(0, add(init_code, 0x20), mload(init_code))
}
if (created == address(0)) {
revert DeploymentFailed();
}
// These may be emitted out of order upon re-entrancy
emit Deployment(keccak256(init_code), created);
}
}

View File

@@ -1,20 +0,0 @@
// SPDX-License-Identifier: CC0
pragma solidity ^0.8.0;
interface IERC20 {
event Transfer(address indexed from, address indexed to, uint256 value);
event Approval(address indexed owner, address indexed spender, uint256 value);
function name() external view returns (string memory);
function symbol() external view returns (string memory);
function decimals() external view returns (uint8);
function totalSupply() external view returns (uint256);
function balanceOf(address owner) external view returns (uint256);
function transfer(address to, uint256 value) external returns (bool);
function transferFrom(address from, address to, uint256 value) external returns (bool);
function approve(address spender, uint256 value) external returns (bool);
function allowance(address owner, address spender) external view returns (uint256);
}

View File

@@ -1,24 +1,27 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "./IERC20.sol";
import "./Schnorr.sol";
import "./Sandbox.sol";
contract Router {
// Nonce is incremented for each batch of transactions executed/key update
contract Router is Schnorr {
// Contract initializer
// TODO: Replace with a MuSig of the genesis validators
address public initializer;
// Nonce is incremented for each batch of transactions executed
uint256 public nonce;
// Current public key's x-coordinate
// This key must always have the parity defined within the Schnorr contract
// fixed parity for the public keys used in this contract
uint8 constant public KEY_PARITY = 27;
// current public key's x-coordinate
// note: this key must always use the fixed parity defined above
bytes32 public seraiKey;
struct OutInstruction {
address to;
Call[] calls;
uint256 value;
bytes data;
}
struct Signature {
@@ -26,197 +29,62 @@ contract Router {
bytes32 s;
}
event SeraiKeyUpdated(
uint256 indexed nonce,
bytes32 indexed key,
Signature signature
);
event InInstruction(
address indexed from,
address indexed coin,
uint256 amount,
bytes instruction
);
// success is a uint256 representing a bitfield of transaction successes
event Executed(
uint256 indexed nonce,
bytes32 indexed batch,
uint256 success,
Signature signature
);
event Executed(uint256 nonce, bytes32 batch, uint256 success);
// error types
error NotInitializer();
error AlreadyInitialized();
error InvalidKey();
error InvalidSignature();
error InvalidAmount();
error FailedTransfer();
error TooManyTransactions();
modifier _updateSeraiKeyAtEndOfFn(
uint256 _nonce,
bytes32 key,
Signature memory sig
) {
if (
(key == bytes32(0)) ||
((bytes32(uint256(key) % Schnorr.Q)) != key)
) {
revert InvalidKey();
}
_;
seraiKey = key;
emit SeraiKeyUpdated(_nonce, key, sig);
constructor() {
initializer = msg.sender;
}
constructor(bytes32 _seraiKey) _updateSeraiKeyAtEndOfFn(
0,
_seraiKey,
Signature({ c: bytes32(0), s: bytes32(0) })
) {
nonce = 1;
// initSeraiKey can be called by the contract initializer to set the first
// public key, only if the public key has yet to be set.
function initSeraiKey(bytes32 _seraiKey) external {
if (msg.sender != initializer) revert NotInitializer();
if (seraiKey != 0) revert AlreadyInitialized();
if (_seraiKey == bytes32(0)) revert InvalidKey();
seraiKey = _seraiKey;
}
// updateSeraiKey validates the given Schnorr signature against the current
// public key, and if successful, updates the contract's public key to the
// given one.
// updateSeraiKey validates the given Schnorr signature against the current public key,
// and if successful, updates the contract's public key to the given one.
function updateSeraiKey(
bytes32 _seraiKey,
Signature calldata sig
) external _updateSeraiKeyAtEndOfFn(nonce, _seraiKey, sig) {
bytes memory message =
abi.encodePacked("updateSeraiKey", block.chainid, nonce, _seraiKey);
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
Signature memory sig
) public {
if (_seraiKey == bytes32(0)) revert InvalidKey();
bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey));
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
seraiKey = _seraiKey;
}
function inInstruction(
address coin,
uint256 amount,
bytes memory instruction
) external payable {
if (coin == address(0)) {
if (amount != msg.value) {
revert InvalidAmount();
}
} else {
(bool success, bytes memory res) =
address(coin).call(
abi.encodeWithSelector(
IERC20.transferFrom.selector,
msg.sender,
address(this),
amount
)
);
// Require there was nothing returned, which is done by some non-standard
// tokens, or that the ERC20 contract did in fact return true
bool nonStandardResOrTrue =
(res.length == 0) || abi.decode(res, (bool));
if (!(success && nonStandardResOrTrue)) {
revert FailedTransfer();
}
}
/*
Due to fee-on-transfer tokens, emitting the amount directly is frowned upon.
The amount instructed to transfer may not actually be the amount
transferred.
If we add nonReentrant to every single function which can effect the
balance, we can check the amount exactly matches. This prevents transfers of
less value than expected occurring, at least, not without an additional
transfer to top up the difference (which isn't routed through this contract
and accordingly isn't trying to artificially create events).
If we don't add nonReentrant, a transfer can be started, and then a new
transfer for the difference can follow it up (again and again until a
rounding error is reached). This contract would believe all transfers were
done in full, despite each only being done in part (except for the last
one).
Given fee-on-transfer tokens aren't intended to be supported, the only
token planned to be supported is Dai and it doesn't have any fee-on-transfer
logic, fee-on-transfer tokens aren't even able to be supported at this time,
we simply classify this entire class of tokens as non-standard
implementations which induce undefined behavior. It is the Serai network's
role not to add support for any non-standard implementations.
*/
emit InInstruction(msg.sender, coin, amount, instruction);
}
// execute accepts a list of transactions to execute as well as a signature.
// execute accepts a list of transactions to execute as well as a Schnorr signature.
// if signature verification passes, the given transactions are executed.
// if signature verification fails, this function will revert.
function execute(
OutInstruction[] calldata transactions,
Signature calldata sig
) external {
if (transactions.length > 256) {
revert TooManyTransactions();
}
Signature memory sig
) public {
if (transactions.length > 256) revert TooManyTransactions();
bytes memory message =
abi.encode("execute", block.chainid, nonce, transactions);
uint256 executed_with_nonce = nonce;
bytes32 message = keccak256(abi.encode("execute", nonce, transactions));
// This prevents re-entrancy from causing double spends yet does allow
// out-of-order execution via re-entrancy
nonce++;
if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) {
revert InvalidSignature();
}
if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature();
uint256 successes;
for (uint256 i = 0; i < transactions.length; i++) {
bool success;
// If there are no calls, send to `to` the value
if (transactions[i].calls.length == 0) {
(success, ) = transactions[i].to.call{
value: transactions[i].value,
gas: 5_000
}("");
} else {
// If there are calls, ignore `to`. Deploy a new Sandbox and proxy the
// calls through that
//
// We could use a single sandbox in order to reduce gas costs, yet that
// risks one person creating an approval that's hooked before another
// user's intended action executes, in order to drain their coins
//
// While technically, that would be a flaw in the sandboxed flow, this
// is robust and prevents such flaws from being possible
//
// We also don't want people to set state via the Sandbox and expect it
// future available when anyone else could set a distinct value
Sandbox sandbox = new Sandbox();
(success, ) = address(sandbox).call{
value: transactions[i].value,
// TODO: Have the Call specify the gas up front
gas: 350_000
}(
abi.encodeWithSelector(
Sandbox.sandbox.selector,
transactions[i].calls
)
);
}
for(uint256 i = 0; i < transactions.length; i++) {
(bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data);
assembly {
successes := or(successes, shl(i, success))
}
}
emit Executed(
executed_with_nonce,
keccak256(message),
successes,
sig
);
emit Executed(nonce, message, successes);
}
}

View File

@@ -1,48 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.24;
struct Call {
address to;
uint256 value;
bytes data;
}
// A minimal sandbox focused on gas efficiency.
//
// The first call is executed if any of the calls fail, making it a fallback.
// All other calls are executed sequentially.
contract Sandbox {
error AlreadyCalled();
error CallsFailed();
function sandbox(Call[] calldata calls) external payable {
// Prevent re-entrancy due to this executing arbitrary calls from anyone
// and anywhere
bool called;
assembly { called := tload(0) }
if (called) {
revert AlreadyCalled();
}
assembly { tstore(0, 1) }
// Execute the calls, starting from 1
for (uint256 i = 1; i < calls.length; i++) {
(bool success, ) =
calls[i].to.call{ value: calls[i].value }(calls[i].data);
// If this call failed, execute the fallback (call 0)
if (!success) {
(success, ) =
calls[0].to.call{ value: address(this).balance }(calls[0].data);
// If this call also failed, revert entirely
if (!success) {
revert CallsFailed();
}
return;
}
}
// We don't clear the re-entrancy guard as this contract should never be
// called again, so there's no reason to spend the effort
}
}

View File

@@ -2,43 +2,38 @@
pragma solidity ^0.8.0;
// see https://github.com/noot/schnorr-verify for implementation details
library Schnorr {
contract Schnorr {
// secp256k1 group order
uint256 constant public Q =
0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;
// Fixed parity for the public keys used in this contract
// This avoids spending a word passing the parity in a similar style to
// Bitcoin's Taproot
uint8 constant public KEY_PARITY = 27;
error InvalidSOrA();
error MalformedSignature();
error InvalidSignature();
// px := public key x-coord, where the public key has a parity of KEY_PARITY
// parity := public key y-coord parity (27 or 28)
// px := public key x-coord
// message := 32-byte hash of the message
// c := schnorr signature challenge
// s := schnorr signature
function verify(
uint8 parity,
bytes32 px,
bytes memory message,
bytes32 message,
bytes32 c,
bytes32 s
) internal pure returns (bool) {
// ecrecover = (m, v, r, s) -> key
// We instead pass the following to obtain the nonce (not the key)
// Then we hash it and verify it matches the challenge
) public view returns (bool) {
// ecrecover = (m, v, r, s);
bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q));
// For safety, we want each input to ecrecover to be 0 (sa, px, ca)
// The ecreover precomple checks `r` and `s` (`px` and `ca`) are non-zero
// That leaves us to check `sa` are non-zero
if (sa == 0) revert InvalidSOrA();
address R = ecrecover(sa, KEY_PARITY, px, ca);
if (R == address(0)) revert MalformedSignature();
// Check the signature is correct by rebuilding the challenge
return c == keccak256(abi.encodePacked(R, px, message));
// the ecrecover precompile implementation checks that the `r` and `s`
// inputs are non-zero (in this case, `px` and `ca`), thus we don't need to
// check if they're zero.
address R = ecrecover(sa, parity, px, ca);
if (R == address(0)) revert InvalidSignature();
return c == keccak256(
abi.encodePacked(R, uint8(parity), px, block.chainid, message)
);
}
}

View File

@@ -1,30 +0,0 @@
[package]
name = "serai-ethereum-relayer"
version = "0.1.0"
description = "A relayer for Serai's Ethereum transactions"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/relayer"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
publish = false
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
log = { version = "0.4", default-features = false, features = ["std"] }
env_logger = { version = "0.10", default-features = false, features = ["humantime"] }
tokio = { version = "1", default-features = false, features = ["rt", "time", "io-util", "net", "macros"] }
serai-env = { path = "../../../common/env" }
serai-db = { path = "../../../common/db" }
[features]
parity-db = ["serai-db/parity-db"]
rocksdb = ["serai-db/rocksdb"]

View File

@@ -1,15 +0,0 @@
AGPL-3.0-only license
Copyright (c) 2023-2024 Luke Parker
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License Version 3 as
published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

View File

@@ -1,4 +0,0 @@
# Ethereum Transaction Relayer
This server collects Ethereum router commands to be published, offering an RPC
to fetch them.

View File

@@ -1,100 +0,0 @@
pub(crate) use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
net::TcpListener,
};
use serai_db::{Get, DbTxn, Db as DbTrait};
#[tokio::main(flavor = "current_thread")]
async fn main() {
// Override the panic handler with one which will panic if any tokio task panics
{
let existing = std::panic::take_hook();
std::panic::set_hook(Box::new(move |panic| {
existing(panic);
const MSG: &str = "exiting the process due to a task panicking";
println!("{MSG}");
log::error!("{MSG}");
std::process::exit(1);
}));
}
if std::env::var("RUST_LOG").is_err() {
std::env::set_var("RUST_LOG", serai_env::var("RUST_LOG").unwrap_or_else(|| "info".to_string()));
}
env_logger::init();
log::info!("Starting Ethereum relayer server...");
// Open the DB
#[allow(unused_variables, unreachable_code)]
let db = {
#[cfg(all(feature = "parity-db", feature = "rocksdb"))]
panic!("built with parity-db and rocksdb");
#[cfg(all(feature = "parity-db", not(feature = "rocksdb")))]
let db =
serai_db::new_parity_db(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
#[cfg(feature = "rocksdb")]
let db =
serai_db::new_rocksdb(&serai_env::var("DB_PATH").expect("path to DB wasn't specified"));
db
};
// Start command recipience server
// This should not be publicly exposed
// TODO: Add auth
tokio::spawn({
let db = db.clone();
async move {
// 5132 ^ ((b'E' << 8) | b'R')
let server = TcpListener::bind("0.0.0.0:20830").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let mut db = db.clone();
loop {
let Ok(msg_len) = socket.read_u32_le().await else { break };
let mut buf = vec![0; usize::try_from(msg_len).unwrap()];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
if buf.len() < 5 {
break;
}
let nonce = u32::from_le_bytes(buf[.. 4].try_into().unwrap());
let mut txn = db.txn();
txn.put(nonce.to_le_bytes(), &buf[4 ..]);
txn.commit();
let Ok(()) = socket.write_all(&[1]).await else { break };
log::info!("received signed command #{nonce}");
}
});
}
}
});
// Start command fetch server
// 5132 ^ ((b'E' << 8) | b'R') + 1
let server = TcpListener::bind("0.0.0.0:20831").await.unwrap();
loop {
let (mut socket, _) = server.accept().await.unwrap();
let db = db.clone();
tokio::spawn(async move {
let db = db.clone();
loop {
// Nonce to get the router comamnd for
let mut buf = vec![0; 4];
let Ok(_) = socket.read_exact(&mut buf).await else { break };
let command = db.get(&buf[.. 4]).unwrap_or(vec![]);
let Ok(()) = socket.write_all(&u32::try_from(command.len()).unwrap().to_le_bytes()).await
else {
break;
};
let Ok(()) = socket.write_all(&command).await else { break };
}
});
}
}

View File

@@ -1,37 +1,6 @@
use alloy_sol_types::sol;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod erc20_container {
use super::*;
sol!("contracts/IERC20.sol");
}
pub use erc20_container::IERC20 as erc20;
pub(crate) mod schnorr;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod deployer_container {
use super::*;
sol!("contracts/Deployer.sol");
}
pub use deployer_container::Deployer as deployer;
#[rustfmt::skip]
#[allow(warnings)]
#[allow(needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod router_container {
use super::*;
sol!(Router, "artifacts/Router.abi");
}
pub use router_container::Router as router;
pub(crate) mod router;

View File

@@ -1,188 +1,91 @@
use sha3::{Digest, Keccak256};
use group::ff::PrimeField;
use k256::{
elliptic_curve::{ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint},
ProjectivePoint, Scalar, U256 as KU256,
elliptic_curve::{
bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint,
},
ProjectivePoint, Scalar, U256,
};
#[cfg(test)]
use k256::{elliptic_curve::point::DecompressPoint, AffinePoint};
use frost::{
algorithm::{Hram, SchnorrSignature},
curve::{Ciphersuite, Secp256k1},
curve::Secp256k1,
};
use alloy_core::primitives::{Parity, Signature as AlloySignature};
use alloy_consensus::{SignableTransaction, Signed, TxLegacy};
use crate::abi::router::{Signature as AbiSignature};
pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] {
alloy_core::primitives::keccak256(data).into()
Keccak256::digest(data).into()
}
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(data).into())
}
pub fn address(point: &ProjectivePoint) -> [u8; 20] {
pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] {
let encoded_point = point.to_encoded_point(false);
// Last 20 bytes of the hash of the concatenated x and y coordinates
// We obtain the concatenated x and y coordinates via the uncompressed encoding of the point
keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap()
}
/// Deterministically sign a transaction.
///
/// This function panics if passed a transaction with a non-None chain ID.
pub fn deterministically_sign(tx: &TxLegacy) -> Signed<TxLegacy> {
assert!(
tx.chain_id.is_none(),
"chain ID was Some when deterministically signing a TX (causing a non-deterministic signer)"
);
let sig_hash = tx.signature_hash().0;
let mut r = hash_to_scalar(&[sig_hash.as_slice(), b"r"].concat());
let mut s = hash_to_scalar(&[sig_hash.as_slice(), b"s"].concat());
loop {
let r_bytes: [u8; 32] = r.to_repr().into();
let s_bytes: [u8; 32] = s.to_repr().into();
let v = Parity::NonEip155(false);
let signature =
AlloySignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), v).unwrap();
let tx = tx.clone().into_signed(signature);
if tx.recover_signer().is_ok() {
return tx;
}
// Re-hash until valid
r = hash_to_scalar(r_bytes.as_ref());
s = hash_to_scalar(s_bytes.as_ref());
}
}
/// The public key for a Schnorr-signing account.
#[allow(non_snake_case)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct PublicKey {
pub(crate) A: ProjectivePoint,
pub(crate) px: Scalar,
pub A: ProjectivePoint,
pub px: Scalar,
pub parity: u8,
}
impl PublicKey {
/// Construct a new `PublicKey`.
///
/// This will return None if the provided point isn't eligible to be a public key (due to
/// bounds such as parity).
#[allow(non_snake_case)]
pub fn new(A: ProjectivePoint) -> Option<PublicKey> {
let affine = A.to_affine();
// Only allow even keys to save a word within Ethereum
let is_odd = bool::from(affine.y_is_odd());
if is_odd {
let parity = u8::from(bool::from(affine.y_is_odd())) + 27;
if parity != 27 {
None?;
}
let x_coord = affine.x();
let x_coord_scalar = <Scalar as Reduce<KU256>>::reduce_bytes(&x_coord);
let x_coord_scalar = <Scalar as Reduce<U256>>::reduce_bytes(&x_coord);
// Return None if a reduction would occur
// Reductions would be incredibly unlikely and shouldn't be an issue, yet it's one less
// headache/concern to have
// This does ban a trivial amoount of public keys
if x_coord_scalar.to_repr() != x_coord {
None?;
}
Some(PublicKey { A, px: x_coord_scalar })
}
pub fn point(&self) -> ProjectivePoint {
self.A
}
pub(crate) fn eth_repr(&self) -> [u8; 32] {
self.px.to_repr().into()
}
#[cfg(test)]
pub(crate) fn from_eth_repr(repr: [u8; 32]) -> Option<Self> {
#[allow(non_snake_case)]
let A = Option::<AffinePoint>::from(AffinePoint::decompress(&repr.into(), 0.into()))?.into();
Option::from(Scalar::from_repr(repr.into())).map(|px| PublicKey { A, px })
Some(PublicKey { A, px: x_coord_scalar, parity })
}
}
/// The HRAm to use for the Schnorr contract.
#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
#[allow(non_snake_case)]
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
let x_coord = A.to_affine().x();
let a_encoded_point = A.to_encoded_point(true);
let mut a_encoded = a_encoded_point.as_ref().to_owned();
a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
assert!((a_encoded[0] == 27) || (a_encoded[0] == 28));
let mut data = address(R).to_vec();
data.extend(x_coord.as_slice());
data.append(&mut a_encoded);
data.extend(m);
<Scalar as Reduce<KU256>>::reduce_bytes(&keccak256(&data).into())
Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
}
}
/// A signature for the Schnorr contract.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Signature {
pub(crate) c: Scalar,
pub(crate) s: Scalar,
}
impl Signature {
pub fn verify(&self, public_key: &PublicKey, message: &[u8]) -> bool {
#[allow(non_snake_case)]
let R = (Secp256k1::generator() * self.s) - (public_key.A * self.c);
EthereumHram::hram(&R, &public_key.A, message) == self.c
}
/// Construct a new `Signature`.
///
/// This will return None if the signature is invalid.
pub fn new(
public_key: &PublicKey,
message: &[u8],
chain_id: U256,
m: &[u8],
signature: SchnorrSignature<Secp256k1>,
) -> Option<Signature> {
let c = EthereumHram::hram(&signature.R, &public_key.A, message);
let c = EthereumHram::hram(
&signature.R,
&public_key.A,
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
);
if !signature.verify(public_key.A, c) {
None?;
}
let res = Signature { c, s: signature.s };
assert!(res.verify(public_key, message));
Some(res)
}
pub fn c(&self) -> Scalar {
self.c
}
pub fn s(&self) -> Scalar {
self.s
}
pub fn to_bytes(&self) -> [u8; 64] {
let mut res = [0; 64];
res[.. 32].copy_from_slice(self.c.to_repr().as_ref());
res[32 ..].copy_from_slice(self.s.to_repr().as_ref());
res
}
pub fn from_bytes(bytes: [u8; 64]) -> std::io::Result<Self> {
let mut reader = bytes.as_slice();
let c = Secp256k1::read_F(&mut reader)?;
let s = Secp256k1::read_F(&mut reader)?;
Ok(Signature { c, s })
}
}
impl From<&Signature> for AbiSignature {
fn from(sig: &Signature) -> AbiSignature {
let c: [u8; 32] = sig.c.to_repr().into();
let s: [u8; 32] = sig.s.to_repr().into();
AbiSignature { c: c.into(), s: s.into() }
Some(Signature { c, s: signature.s })
}
}

View File

@@ -1,113 +0,0 @@
use std::sync::Arc;
use alloy_core::primitives::{hex::FromHex, Address, B256, U256, Bytes, TxKind};
use alloy_consensus::{Signed, TxLegacy};
use alloy_sol_types::{SolCall, SolEvent};
use alloy_rpc_types_eth::{BlockNumberOrTag, Filter};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::{
Error,
crypto::{self, keccak256, PublicKey},
router::Router,
};
pub use crate::abi::deployer as abi;
/// The Deployer contract for the Router contract.
///
/// This Deployer has a deterministic address, letting it be immediately identified on any
/// compatible chain. It then supports retrieving the Router contract's address (which isn't
/// deterministic) using a single log query.
#[derive(Clone, Debug)]
pub struct Deployer;
impl Deployer {
/// Obtain the transaction to deploy this contract, already signed.
///
/// The account this transaction is sent from (which is populated in `from`) must be sufficiently
/// funded for this transaction to be submitted. This account has no known private key to anyone,
/// so ETH sent can be neither misappropriated nor returned.
pub fn deployment_tx() -> Signed<TxLegacy> {
let bytecode = include_str!("../artifacts/Deployer.bin");
let bytecode =
Bytes::from_hex(bytecode).expect("compiled-in Deployer bytecode wasn't valid hex");
let tx = TxLegacy {
chain_id: None,
nonce: 0,
gas_price: 100_000_000_000u128,
// TODO: Use a more accurate gas limit
gas_limit: 1_000_000u128,
to: TxKind::Create,
value: U256::ZERO,
input: bytecode,
};
crypto::deterministically_sign(&tx)
}
/// Obtain the deterministic address for this contract.
pub fn address() -> [u8; 20] {
let deployer_deployer =
Self::deployment_tx().recover_signer().expect("deployment_tx didn't have a valid signature");
**Address::create(&deployer_deployer, 0)
}
/// Construct a new view of the `Deployer`.
pub async fn new(provider: Arc<RootProvider<SimpleRequest>>) -> Result<Option<Self>, Error> {
let address = Self::address();
let code = provider.get_code_at(address.into()).await.map_err(|_| Error::ConnectionError)?;
// Contract has yet to be deployed
if code.is_empty() {
return Ok(None);
}
Ok(Some(Self))
}
/// Yield the `ContractCall` necessary to deploy the Router.
pub fn deploy_router(&self, key: &PublicKey) -> TxLegacy {
TxLegacy {
to: TxKind::Call(Self::address().into()),
input: abi::deployCall::new((Router::init_code(key).into(),)).abi_encode().into(),
gas_limit: 1_000_000,
..Default::default()
}
}
/// Find the first Router deployed with the specified key as its first key.
///
/// This is the Router Serai will use, and is the only way to construct a `Router`.
pub async fn find_router(
&self,
provider: Arc<RootProvider<SimpleRequest>>,
key: &PublicKey,
) -> Result<Option<Router>, Error> {
let init_code = Router::init_code(key);
let init_code_hash = keccak256(&init_code);
#[cfg(not(test))]
let to_block = BlockNumberOrTag::Finalized;
#[cfg(test)]
let to_block = BlockNumberOrTag::Latest;
// Find the first log using this init code (where the init code is binding to the key)
// TODO: Make an abstraction for event filtering (de-duplicating common code)
let filter =
Filter::new().from_block(0).to_block(to_block).address(Address::from(Self::address()));
let filter = filter.event_signature(abi::Deployment::SIGNATURE_HASH);
let filter = filter.topic1(B256::from(init_code_hash));
let logs = provider.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let Some(first_log) = logs.first() else { return Ok(None) };
let router = first_log
.log_decode::<abi::Deployment>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.created;
Ok(Some(Router::new(provider, router)))
}
}

View File

@@ -1,105 +0,0 @@
use std::{sync::Arc, collections::HashSet};
use alloy_core::primitives::{Address, B256, U256};
use alloy_sol_types::{SolInterface, SolEvent};
use alloy_rpc_types_eth::Filter;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::Error;
pub use crate::abi::erc20 as abi;
use abi::{IERC20Calls, Transfer, transferCall, transferFromCall};
/// A top-level ERC20 transfer, extracted from a transaction which directly called the token.
#[derive(Clone, Debug)]
pub struct TopLevelErc20Transfer {
  /// The ID of this transfer, set to the hash of the transaction which caused it.
  pub id: [u8; 32],
  /// The address the tokens were transferred from.
  pub from: [u8; 20],
  /// The amount of tokens transferred.
  pub amount: U256,
  /// The data appended after the transfer call's arguments.
  pub data: Vec<u8>,
}
/// A view for an ERC20 contract.
#[derive(Clone, Debug)]
// Fields: the RPC provider to query with, and the address of the token contract
pub struct Erc20(Arc<RootProvider<SimpleRequest>>, Address);
impl Erc20 {
  /// Construct a new view of the specified ERC20 contract.
  pub fn new(provider: Arc<RootProvider<SimpleRequest>>, address: [u8; 20]) -> Self {
    Self(provider, Address::from(&address))
  }

  /// Fetch all top-level transfers to `to` within the specified block.
  ///
  /// A transfer is "top-level" when the transaction itself called the token's
  /// `transfer`/`transferFrom`, with extra data appended after the encoded call. At most one
  /// transfer is yielded per transaction.
  pub async fn top_level_transfers(
    &self,
    block: u64,
    to: [u8; 20],
  ) -> Result<Vec<TopLevelErc20Transfer>, Error> {
    // Only fetch Transfer logs emitted by this token within this single block
    let filter = Filter::new().from_block(block).to_block(block).address(self.1);
    let filter = filter.event_signature(Transfer::SIGNATURE_HASH);
    // topic2 is the indexed `to` argument of Transfer; addresses are left-padded to 32 bytes
    let mut to_topic = [0; 32];
    to_topic[12 ..].copy_from_slice(&to);
    let filter = filter.topic2(B256::from(to_topic));
    let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
    // TXs we already yielded a transfer for, so each TX yields at most one transfer
    let mut handled = HashSet::new();
    let mut top_level_transfers = vec![];
    for log in logs {
      // Double check the address which emitted this log
      if log.address() != self.1 {
        Err(Error::ConnectionError)?;
      }
      let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?;
      let tx =
        self.0.get_transaction_by_hash(tx_id).await.ok().flatten().ok_or(Error::ConnectionError)?;
      // If this is a top-level call...
      if tx.to == Some(self.1) {
        // And we recognize the call...
        // Don't validate the encoding as this can't be re-encoded to an identical bytestring due
        // to the InInstruction appended
        if let Ok(call) = IERC20Calls::abi_decode(&tx.input, false) {
          // Extract the top-level call's from/to/value
          let (from, call_to, value) = match call {
            IERC20Calls::transfer(transferCall { to: call_to, value }) => (tx.from, call_to, value),
            IERC20Calls::transferFrom(transferFromCall { from, to: call_to, value }) => {
              (from, call_to, value)
            }
            // Treat any other function selectors as unrecognized
            _ => continue,
          };
          let log = log.log_decode::<Transfer>().map_err(|_| Error::ConnectionError)?.inner.data;
          // Ensure the top-level transfer is equivalent, and this presumably isn't a log for an
          // internal transfer
          if (log.from != from) || (call_to != to) || (value != log.value) {
            continue;
          }
          // Now that the top-level transfer is confirmed to be equivalent to the log, ensure it's
          // the only log we handle
          if handled.contains(&tx_id) {
            continue;
          }
          handled.insert(tx_id);
          // Read the data appended after
          let encoded = call.abi_encode();
          let data = tx.input.as_ref()[encoded.len() ..].to_vec();
          // Push the transfer
          top_level_transfers.push(TopLevelErc20Transfer {
            // Since we'll only handle one log for this TX, set the ID to the TX ID
            id: *tx_id,
            from: *log.from.0,
            amount: log.value,
            data,
          });
        }
      }
    }
    Ok(top_level_transfers)
  }
}

View File

@@ -1,35 +1,16 @@
use thiserror::Error;
pub mod alloy {
pub use alloy_core::primitives;
pub use alloy_core as core;
pub use alloy_sol_types as sol_types;
pub use alloy_consensus as consensus;
pub use alloy_network as network;
pub use alloy_rpc_types_eth as rpc_types;
pub use alloy_simple_request_transport as simple_request_transport;
pub use alloy_rpc_client as rpc_client;
pub use alloy_provider as provider;
}
pub mod crypto;
pub(crate) mod abi;
pub mod erc20;
pub mod deployer;
pub mod schnorr;
pub mod router;
pub mod machine;
#[cfg(test)]
mod tests;
#[cfg(any(test, feature = "tests"))]
pub mod tests;
#[derive(Clone, Copy, PartialEq, Eq, Debug, Error)]
#[derive(Error, Debug)]
pub enum Error {
#[error("failed to verify Schnorr signature")]
InvalidSignature,
#[error("couldn't make call/send TX")]
ConnectionError,
}

View File

@@ -1,414 +0,0 @@
use std::{
io::{self, Read},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng};
use transcript::{Transcript, RecommendedTranscript};
use group::GroupEncoding;
use frost::{
curve::{Ciphersuite, Secp256k1},
Participant, ThresholdKeys, FrostError,
algorithm::Schnorr,
sign::*,
};
use alloy_core::primitives::U256;
use crate::{
crypto::{PublicKey, EthereumHram, Signature},
router::{
abi::{Call as AbiCall, OutInstruction as AbiOutInstruction},
Router,
},
};
/// A single call to be made from the Router.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Call {
  /// The address to call.
  pub to: [u8; 20],
  /// The value to send with the call.
  pub value: U256,
  /// The calldata for the call.
  pub data: Vec<u8>,
}
impl Call {
  /// Read a `Call` from a reader.
  ///
  /// The encoding is: 20-byte address, 32-byte little-endian value, 4-byte little-endian data
  /// length, then the data itself.
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let mut to = [0; 20];
    reader.read_exact(&mut to)?;
    let value = {
      let mut value_bytes = [0; 32];
      reader.read_exact(&mut value_bytes)?;
      U256::from_le_slice(&value_bytes)
    };
    let mut data_len = {
      let mut data_len = [0; 4];
      reader.read_exact(&mut data_len)?;
      usize::try_from(u32::from_le_bytes(data_len)).expect("u32 couldn't fit within a usize")
    };
    // A valid DoS would be to claim a 4 GB data is present for only 4 bytes
    // We read this in 1 KB chunks to only read data actually present (with a max DoS of 1 KB)
    let mut data = vec![];
    while data_len > 0 {
      let chunk_len = data_len.min(1024);
      let mut chunk = vec![0; chunk_len];
      reader.read_exact(&mut chunk)?;
      data.extend(&chunk);
      data_len -= chunk_len;
    }
    Ok(Call { to, value, data })
  }

  /// Write a `Call` to a writer, using the encoding documented on `read`.
  fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(&self.to)?;
    writer.write_all(&self.value.as_le_bytes())?;
    // Error, rather than truncate, if the data doesn't fit in the u32 length prefix
    let data_len = u32::try_from(self.data.len())
      .map_err(|_| io::Error::other("call data length exceeded 2**32"))?;
    writer.write_all(&data_len.to_le_bytes())?;
    writer.write_all(&self.data)
  }
}
impl From<Call> for AbiCall {
fn from(call: Call) -> AbiCall {
AbiCall { to: call.to.into(), value: call.value, data: call.data.into() }
}
}
/// The target of an `OutInstruction`.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum OutInstructionTarget {
  /// Send directly to the specified address.
  Direct([u8; 20]),
  /// Make the specified calls.
  Calls(Vec<Call>),
}
impl OutInstructionTarget {
  /// Read an `OutInstructionTarget` from a reader.
  fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    // A kind byte of 0 is Direct, 1 is Calls
    let mut kind = [0xff];
    reader.read_exact(&mut kind)?;
    match kind[0] {
      0 => {
        let mut addr = [0; 20];
        reader.read_exact(&mut addr)?;
        Ok(OutInstructionTarget::Direct(addr))
      }
      1 => {
        // A 4-byte little-endian count, followed by that many serialized `Call`s
        let mut calls_len = [0; 4];
        reader.read_exact(&mut calls_len)?;
        let calls_len = u32::from_le_bytes(calls_len);
        let mut calls = vec![];
        for _ in 0 .. calls_len {
          calls.push(Call::read(reader)?);
        }
        Ok(OutInstructionTarget::Calls(calls))
      }
      _ => Err(io::Error::other("unrecognized OutInstructionTarget"))?,
    }
  }

  /// Write an `OutInstructionTarget` to a writer, using the encoding documented on `read`.
  fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    match self {
      OutInstructionTarget::Direct(addr) => {
        writer.write_all(&[0])?;
        writer.write_all(addr)?;
      }
      OutInstructionTarget::Calls(calls) => {
        writer.write_all(&[1])?;
        // Error, rather than truncate, if the count doesn't fit in the u32 prefix
        let call_len = u32::try_from(calls.len())
          .map_err(|_| io::Error::other("amount of calls exceeded 2**32"))?;
        writer.write_all(&call_len.to_le_bytes())?;
        for call in calls {
          call.write(writer)?;
        }
      }
    }
    Ok(())
  }
}
/// An instruction for the Router to send value out.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OutInstruction {
  /// Where to send the value: a direct recipient or a set of calls.
  pub target: OutInstructionTarget,
  /// The value to send.
  pub value: U256,
}
impl OutInstruction {
  /// Read an `OutInstruction` from a reader.
  fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let target = OutInstructionTarget::read(reader)?;
    // The value is encoded as 32 little-endian bytes
    let value = {
      let mut value_bytes = [0; 32];
      reader.read_exact(&mut value_bytes)?;
      U256::from_le_slice(&value_bytes)
    };
    Ok(OutInstruction { target, value })
  }
  /// Write an `OutInstruction` to a writer.
  fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    self.target.write(writer)?;
    writer.write_all(&self.value.as_le_bytes())
  }
}
impl From<OutInstruction> for AbiOutInstruction {
  fn from(instruction: OutInstruction) -> AbiOutInstruction {
    match instruction.target {
      // A direct target becomes the `to` address with no calls
      OutInstructionTarget::Direct(addr) => {
        AbiOutInstruction { to: addr.into(), calls: vec![], value: instruction.value }
      }
      // A calls target uses the zero address as `to`, with the calls attached
      OutInstructionTarget::Calls(calls) => AbiOutInstruction {
        to: [0; 20].into(),
        calls: calls.into_iter().map(Into::into).collect(),
        value: instruction.value,
      },
    }
  }
}
/// A command to be signed for, and executed by, the Router.
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum RouterCommand {
  /// Update the key representing Serai.
  UpdateSeraiKey { chain_id: U256, nonce: U256, key: PublicKey },
  /// Execute a batch of `OutInstruction`s.
  Execute { chain_id: U256, nonce: U256, outs: Vec<OutInstruction> },
}
impl RouterCommand {
  /// The message to be signed in order to execute this command.
  pub fn msg(&self) -> Vec<u8> {
    match self {
      RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
        Router::update_serai_key_message(*chain_id, *nonce, key)
      }
      RouterCommand::Execute { chain_id, nonce, outs } => Router::execute_message(
        *chain_id,
        *nonce,
        outs.iter().map(|out| out.clone().into()).collect(),
      ),
    }
  }

  /// Read a `RouterCommand` from a reader.
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    // A kind byte of 0 is UpdateSeraiKey, 1 is Execute
    let mut kind = [0xff];
    reader.read_exact(&mut kind)?;
    match kind[0] {
      0 => {
        let mut chain_id = [0; 32];
        reader.read_exact(&mut chain_id)?;
        let mut nonce = [0; 32];
        reader.read_exact(&mut nonce)?;
        // Not every point has an Ethereum representation; reject those which don't
        let key = PublicKey::new(Secp256k1::read_G(reader)?)
          .ok_or(io::Error::other("key for RouterCommand doesn't have an eth representation"))?;
        Ok(RouterCommand::UpdateSeraiKey {
          chain_id: U256::from_le_slice(&chain_id),
          nonce: U256::from_le_slice(&nonce),
          key,
        })
      }
      1 => {
        let mut chain_id = [0; 32];
        reader.read_exact(&mut chain_id)?;
        let chain_id = U256::from_le_slice(&chain_id);
        let mut nonce = [0; 32];
        reader.read_exact(&mut nonce)?;
        let nonce = U256::from_le_slice(&nonce);
        // A 4-byte little-endian count, followed by that many serialized `OutInstruction`s
        let mut outs_len = [0; 4];
        reader.read_exact(&mut outs_len)?;
        let outs_len = u32::from_le_bytes(outs_len);
        let mut outs = vec![];
        for _ in 0 .. outs_len {
          outs.push(OutInstruction::read(reader)?);
        }
        Ok(RouterCommand::Execute { chain_id, nonce, outs })
      }
      _ => Err(io::Error::other("reading unknown type of RouterCommand"))?,
    }
  }

  /// Write a `RouterCommand` to a writer, using the encoding documented on `read`.
  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    match self {
      RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => {
        writer.write_all(&[0])?;
        writer.write_all(&chain_id.as_le_bytes())?;
        writer.write_all(&nonce.as_le_bytes())?;
        writer.write_all(&key.A.to_bytes())
      }
      RouterCommand::Execute { chain_id, nonce, outs } => {
        writer.write_all(&[1])?;
        writer.write_all(&chain_id.as_le_bytes())?;
        writer.write_all(&nonce.as_le_bytes())?;
        // Error instead of panicking if the count exceeds u32, consistent with `Call::write` and
        // `OutInstructionTarget::write` (previously an `unwrap`)
        let outs_len = u32::try_from(outs.len())
          .map_err(|_| io::Error::other("amount of OutInstructions exceeded 2**32"))?;
        writer.write_all(&outs_len.to_le_bytes())?;
        for out in outs {
          out.write(writer)?;
        }
        Ok(())
      }
    }
  }

  /// Serialize this command to a byte vector.
  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    self.write(&mut res).unwrap();
    res
  }
}
/// A `RouterCommand` with a signature verified against its message.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SignedRouterCommand {
  // The command which was signed
  command: RouterCommand,
  // The signature for the command
  signature: Signature,
}
impl SignedRouterCommand {
  /// Construct a `SignedRouterCommand`, returning None if the signature doesn't verify.
  ///
  /// The 64-byte signature is encoded as the `c` scalar followed by the `s` scalar.
  pub fn new(key: &PublicKey, command: RouterCommand, signature: &[u8; 64]) -> Option<Self> {
    let c = Secp256k1::read_F(&mut &signature[.. 32]).ok()?;
    let s = Secp256k1::read_F(&mut &signature[32 ..]).ok()?;
    let signature = Signature { c, s };
    if !signature.verify(key, &command.msg()) {
      None?
    }
    Some(SignedRouterCommand { command, signature })
  }
  /// The command which was signed.
  pub fn command(&self) -> &RouterCommand {
    &self.command
  }
  /// The signature for the command.
  pub fn signature(&self) -> &Signature {
    &self.signature
  }
  /// Read a `SignedRouterCommand` from a reader.
  ///
  /// NOTE(review): unlike `new`, this doesn't re-verify the signature against the command's
  /// message — presumably callers only read values they previously wrote; confirm
  pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
    let command = RouterCommand::read(reader)?;
    let mut sig = [0; 64];
    reader.read_exact(&mut sig)?;
    let signature = Signature::from_bytes(sig)?;
    Ok(SignedRouterCommand { command, signature })
  }
  /// Write a `SignedRouterCommand` to a writer.
  pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
    self.command.write(writer)?;
    writer.write_all(&self.signature.to_bytes())
  }
}
/// A FROST machine to sign a `RouterCommand` (initial, preprocess, stage).
pub struct RouterCommandMachine {
  // The public key the resulting signature will be verified against
  key: PublicKey,
  // The command being signed
  command: RouterCommand,
  // The underlying FROST Schnorr signing machine
  machine: AlgorithmMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl RouterCommandMachine {
  /// Create a machine to sign the given command with the given keys.
  ///
  /// Returns None if the group key doesn't have an Ethereum representation
  /// (per `PublicKey::new`).
  pub fn new(keys: ThresholdKeys<Secp256k1>, command: RouterCommand) -> Option<Self> {
    // The Schnorr algorithm should be fine without this, even when using the IETF variant
    // If this is better and more comprehensive, we should do it, even if not necessary
    let mut transcript = RecommendedTranscript::new(b"ethereum-serai RouterCommandMachine v0.1");
    let key = keys.group_key();
    // Bind the key and the command being signed into the transcript
    transcript.append_message(b"key", key.to_bytes());
    transcript.append_message(b"command", command.serialize());
    Some(Self {
      key: PublicKey::new(key)?,
      command,
      machine: AlgorithmMachine::new(Schnorr::new(transcript), keys),
    })
  }
}
impl PreprocessMachine for RouterCommandMachine {
  type Preprocess = Preprocess<Secp256k1, ()>;
  type Signature = SignedRouterCommand;
  type SignMachine = RouterCommandSignMachine;

  // Delegates to the inner machine, carrying the key/command through to the next stage
  fn preprocess<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
  ) -> (Self::SignMachine, Self::Preprocess) {
    let (machine, preprocess) = self.machine.preprocess(rng);
    (RouterCommandSignMachine { key: self.key, command: self.command, machine }, preprocess)
  }
}
/// The post-preprocess stage of signing a `RouterCommand`.
pub struct RouterCommandSignMachine {
  // The public key the resulting signature will be verified against
  key: PublicKey,
  // The command being signed
  command: RouterCommand,
  // The underlying FROST Schnorr sign machine
  machine: AlgorithmSignMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignMachine<SignedRouterCommand> for RouterCommandSignMachine {
  type Params = ();
  type Keys = ThresholdKeys<Secp256k1>;
  type Preprocess = Preprocess<Secp256k1, ()>;
  type SignatureShare = SignatureShare<Secp256k1>;
  type SignatureMachine = RouterCommandSignatureMachine;

  // Caching is intentionally unsupported: the machine is already bound to a specific command
  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "RouterCommand machines don't support caching their preprocesses due to {}",
      "being already bound to a specific command"
    );
  }

  fn from_cache(
    (): (),
    _: ThresholdKeys<Secp256k1>,
    _: CachedPreprocess,
  ) -> (Self, Self::Preprocess) {
    unimplemented!(
      "RouterCommand machines don't support caching their preprocesses due to {}",
      "being already bound to a specific command"
    );
  }

  fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
    self.machine.read_preprocess(reader)
  }

  // The message is derived from the bound command, so callers must pass an empty `msg`
  fn sign(
    self,
    commitments: HashMap<Participant, Self::Preprocess>,
    msg: &[u8],
  ) -> Result<(RouterCommandSignatureMachine, Self::SignatureShare), FrostError> {
    if !msg.is_empty() {
      panic!("message was passed to a RouterCommand machine when it generates its own");
    }
    let (machine, share) = self.machine.sign(commitments, &self.command.msg())?;
    Ok((RouterCommandSignatureMachine { key: self.key, command: self.command, machine }, share))
  }
}
/// The final stage of signing a `RouterCommand`: combining signature shares.
pub struct RouterCommandSignatureMachine {
  // The public key the resulting signature will be verified against
  key: PublicKey,
  // The command being signed
  command: RouterCommand,
  // The underlying FROST Schnorr signature machine
  machine:
    AlgorithmSignatureMachine<Secp256k1, Schnorr<Secp256k1, RecommendedTranscript, EthereumHram>>,
}
impl SignatureMachine<SignedRouterCommand> for RouterCommandSignatureMachine {
  type SignatureShare = SignatureShare<Secp256k1>;

  fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
    self.machine.read_share(reader)
  }

  // Completes the FROST signature and wraps it, verifying it against the command's message
  fn complete(
    self,
    shares: HashMap<Participant, Self::SignatureShare>,
  ) -> Result<SignedRouterCommand, FrostError> {
    let sig = self.machine.complete(shares)?;
    // `Signature::new` verifies the signature; a failure here is a bug in the machine
    let signature = Signature::new(&self.key, &self.command.msg(), sig)
      .expect("machine produced an invalid signature");
    Ok(SignedRouterCommand { command: self.command, signature })
  }
}

View File

@@ -1,443 +1,30 @@
use std::{sync::Arc, io, collections::HashSet};
pub use crate::abi::router::*;
use k256::{
elliptic_curve::{group::GroupEncoding, sec1},
ProjectivePoint,
};
/*
use crate::crypto::{ProcessedSignature, PublicKey};
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::Result;
use std::{convert::From, fs::File, sync::Arc};
use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind};
#[cfg(test)]
use alloy_core::primitives::B256;
use alloy_consensus::TxLegacy;
use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent};
use alloy_rpc_types_eth::Filter;
#[cfg(test)]
use alloy_rpc_types_eth::{BlockId, TransactionRequest, TransactionInput};
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
pub use crate::{
Error,
crypto::{PublicKey, Signature},
abi::{erc20::Transfer, router as abi},
};
use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent};
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Coin {
Ether,
Erc20([u8; 20]),
pub async fn router_update_public_key<M: Middleware + 'static>(
contract: &Router<M>,
public_key: &PublicKey,
signature: &ProcessedSignature,
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into());
let pending_tx = tx.send().await?;
let receipt = pending_tx.await?;
Ok(receipt)
}
impl Coin {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let mut kind = [0xff];
reader.read_exact(&mut kind)?;
Ok(match kind[0] {
0 => Coin::Ether,
1 => {
let mut address = [0; 20];
reader.read_exact(&mut address)?;
Coin::Erc20(address)
}
_ => Err(io::Error::other("unrecognized Coin type"))?,
})
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
match self {
Coin::Ether => writer.write_all(&[0]),
Coin::Erc20(token) => {
writer.write_all(&[1])?;
writer.write_all(token)
}
}
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct InInstruction {
pub id: ([u8; 32], u64),
pub from: [u8; 20],
pub coin: Coin,
pub amount: U256,
pub data: Vec<u8>,
pub key_at_end_of_block: ProjectivePoint,
}
impl InInstruction {
pub fn read<R: io::Read>(reader: &mut R) -> io::Result<Self> {
let id = {
let mut id_hash = [0; 32];
reader.read_exact(&mut id_hash)?;
let mut id_pos = [0; 8];
reader.read_exact(&mut id_pos)?;
let id_pos = u64::from_le_bytes(id_pos);
(id_hash, id_pos)
};
let mut from = [0; 20];
reader.read_exact(&mut from)?;
let coin = Coin::read(reader)?;
let mut amount = [0; 32];
reader.read_exact(&mut amount)?;
let amount = U256::from_le_slice(&amount);
let mut data_len = [0; 4];
reader.read_exact(&mut data_len)?;
let data_len = usize::try_from(u32::from_le_bytes(data_len))
.map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?;
let mut data = vec![0; data_len];
reader.read_exact(&mut data)?;
let mut key_at_end_of_block = <ProjectivePoint as GroupEncoding>::Repr::default();
reader.read_exact(&mut key_at_end_of_block)?;
let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block))
.ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?;
Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block })
}
pub fn write<W: io::Write>(&self, writer: &mut W) -> io::Result<()> {
writer.write_all(&self.id.0)?;
writer.write_all(&self.id.1.to_le_bytes())?;
writer.write_all(&self.from)?;
self.coin.write(writer)?;
writer.write_all(&self.amount.as_le_bytes())?;
writer.write_all(
&u32::try_from(self.data.len())
.map_err(|_| {
io::Error::other("InInstruction being written had data exceeding 2**32 in length")
})?
.to_le_bytes(),
)?;
writer.write_all(&self.data)?;
writer.write_all(&self.key_at_end_of_block.to_bytes())
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Executed {
pub tx_id: [u8; 32],
pub nonce: u64,
pub signature: [u8; 64],
}
/// The contract Serai uses to manage its state.
#[derive(Clone, Debug)]
pub struct Router(Arc<RootProvider<SimpleRequest>>, Address);
impl Router {
pub(crate) fn code() -> Vec<u8> {
let bytecode = include_str!("../artifacts/Router.bin");
Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec()
}
pub(crate) fn init_code(key: &PublicKey) -> Vec<u8> {
let mut bytecode = Self::code();
// Append the constructor arguments
bytecode.extend((abi::constructorCall { _seraiKey: key.eth_repr().into() }).abi_encode());
bytecode
}
// This isn't pub in order to force users to use `Deployer::find_router`.
pub(crate) fn new(provider: Arc<RootProvider<SimpleRequest>>, address: Address) -> Self {
Self(provider, address)
}
pub fn address(&self) -> [u8; 20] {
**self.1
}
/// Get the key for Serai at the specified block.
#[cfg(test)]
pub async fn serai_key(&self, at: [u8; 32]) -> Result<PublicKey, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError)
}
/// Get the message to be signed in order to update the key for Serai.
pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec<u8> {
let mut buffer = b"updateSeraiKey".to_vec();
buffer.extend(&chain_id.to_be_bytes::<32>());
buffer.extend(&nonce.to_be_bytes::<32>());
buffer.extend(&key.eth_repr());
buffer
}
/// Update the key representing Serai.
pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy {
// TODO: Set a more accurate gas
TxLegacy {
to: TxKind::Call(self.1),
input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into()))
.abi_encode()
.into(),
gas_limit: 100_000,
..Default::default()
}
}
/// Get the current nonce for the published batches.
#[cfg(test)]
pub async fn nonce(&self, at: [u8; 32]) -> Result<U256, Error> {
let call = TransactionRequest::default()
.to(self.1)
.input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into()));
let bytes = self
.0
.call(&call)
.block(BlockId::Hash(B256::from(at).into()))
.await
.map_err(|_| Error::ConnectionError)?;
let res =
abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
Ok(res._0)
}
/// Get the message to be signed in order to update the key for Serai.
pub(crate) fn execute_message(
chain_id: U256,
nonce: U256,
outs: Vec<abi::OutInstruction>,
) -> Vec<u8> {
("execute".to_string(), chain_id, nonce, outs).abi_encode_params()
}
/// Execute a batch of `OutInstruction`s.
pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy {
TxLegacy {
to: TxKind::Call(self.1),
input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(),
// TODO
gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()),
..Default::default()
}
}
pub async fn key_at_end_of_block(&self, block: u64) -> Result<Option<ProjectivePoint>, Error> {
let filter = Filter::new().from_block(0).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
if all_keys.is_empty() {
return Ok(None);
};
let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?;
let last_key_x_coordinate = last_key_x_coordinate_log
.log_decode::<SeraiKeyUpdated>()
.map_err(|_| Error::ConnectionError)?
.inner
.data
.key;
let mut compressed_point = <ProjectivePoint as GroupEncoding>::Repr::default();
compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY);
compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice());
let key =
Option::from(ProjectivePoint::from_bytes(&compressed_point)).ok_or(Error::ConnectionError)?;
Ok(Some(key))
}
pub async fn in_instructions(
&self,
block: u64,
allowed_tokens: &HashSet<[u8; 20]>,
) -> Result<Vec<InInstruction>, Error> {
let Some(key_at_end_of_block) = self.key_at_end_of_block(block).await? else {
return Ok(vec![]);
};
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
let mut transfer_check = HashSet::new();
let mut in_instructions = vec![];
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let id = (
log.block_hash.ok_or(Error::ConnectionError)?.into(),
log.log_index.ok_or(Error::ConnectionError)?,
);
let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?;
let tx = self
.0
.get_transaction_by_hash(tx_hash)
.await
.ok()
.flatten()
.ok_or(Error::ConnectionError)?;
let log =
log.log_decode::<InInstructionEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let coin = if log.coin.0 == [0; 20] {
Coin::Ether
} else {
let token = *log.coin.0;
if !allowed_tokens.contains(&token) {
continue;
}
// If this also counts as a top-level transfer via the token, drop it
//
// Necessary in order to handle a potential edge case with some theoretical token
// implementations
//
// This will either let it be handled by the top-level transfer hook or will drop it
// entirely on the side of caution
if tx.to == Some(token.into()) {
continue;
}
// Get all logs for this TX
let receipt = self
.0
.get_transaction_receipt(tx_hash)
.await
.map_err(|_| Error::ConnectionError)?
.ok_or(Error::ConnectionError)?;
let tx_logs = receipt.inner.logs();
// Find a matching transfer log
let mut found_transfer = false;
for tx_log in tx_logs {
let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?;
// Ensure we didn't already use this transfer to check a distinct InInstruction event
if transfer_check.contains(&log_index) {
continue;
}
// Check if this log is from the token we expected to be transferred
if tx_log.address().0 != token {
continue;
}
// Check if this is a transfer log
// https://github.com/alloy-rs/core/issues/589
if tx_log.topics()[0] != Transfer::SIGNATURE_HASH {
continue;
}
let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue };
// Check if this is a transfer to us for the expected amount
if (transfer.to == self.1) && (transfer.value == log.amount) {
transfer_check.insert(log_index);
found_transfer = true;
break;
}
}
if !found_transfer {
// This shouldn't be a ConnectionError
// This is an exploit, a non-conforming ERC20, or an invalid connection
// This should halt the process which is sufficient, yet this is sub-optimal
// TODO
Err(Error::ConnectionError)?;
}
Coin::Erc20(token)
};
in_instructions.push(InInstruction {
id,
from: *log.from.0,
coin,
amount: log.amount,
data: log.instruction.as_ref().to_vec(),
key_at_end_of_block,
});
}
Ok(in_instructions)
}
pub async fn executed_commands(&self, block: u64) -> Result<Vec<Executed>, Error> {
let mut res = vec![];
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log =
log.log_decode::<SeraiKeyUpdated>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
{
let filter = Filter::new().from_block(block).to_block(block).address(self.1);
let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH);
let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?;
for log in logs {
// Double check the address which emitted this log
if log.address() != self.1 {
Err(Error::ConnectionError)?;
}
let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into();
let log = log.log_decode::<ExecutedEvent>().map_err(|_| Error::ConnectionError)?.inner.data;
let mut signature = [0; 64];
signature[.. 32].copy_from_slice(log.signature.c.as_ref());
signature[32 ..].copy_from_slice(log.signature.s.as_ref());
res.push(Executed {
tx_id,
nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?,
signature,
});
}
}
Ok(res)
}
#[cfg(feature = "tests")]
pub fn key_updated_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH)
}
#[cfg(feature = "tests")]
pub fn executed_filter(&self) -> Filter {
Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH)
}
pub async fn router_execute<M: Middleware + 'static>(
contract: &Router<M>,
txs: Vec<Rtransaction>,
signature: &ProcessedSignature,
) -> std::result::Result<Option<TransactionReceipt>, eyre::ErrReport> {
let tx = contract.execute(txs, signature.into()).send();
let pending_tx = tx.send().await?;
let receipt = pending_tx.await?;
Ok(receipt)
}
*/

View File

@@ -0,0 +1,34 @@
use eyre::{eyre, Result};
use group::ff::PrimeField;
use ethers_providers::{Provider, Http};
use crate::{
Error,
crypto::{keccak256, PublicKey, Signature},
};
pub use crate::abi::schnorr::*;
/// Call the Schnorr contract's `verify` function, erroring if the signature doesn't verify.
///
/// Returns Ok(()) when the contract reports the signature as valid, and
/// `Error::InvalidSignature` (wrapped in an eyre report) otherwise.
pub async fn call_verify(
  contract: &Schnorr<Provider<Http>>,
  public_key: &PublicKey,
  message: &[u8],
  signature: &Signature,
) -> Result<()> {
  // Forward the key (parity byte and px), the keccak256 of the message, and the signature's
  // c/s scalars to the on-chain verifier
  if contract
    .verify(
      public_key.parity,
      public_key.px.to_repr().into(),
      keccak256(message),
      signature.c.to_repr().into(),
      signature.s.to_repr().into(),
    )
    .call()
    .await?
  {
    Ok(())
  } else {
    Err(eyre!(Error::InvalidSignature))
  }
}

View File

@@ -1,13 +0,0 @@
use alloy_sol_types::sol;
// Bindings generated by the `sol!` macro for the test Schnorr contract.
#[rustfmt::skip]
#[allow(warnings)]
// `needless_pass_by_value` is a Clippy lint, not a rustc lint; the bare name was an
// unknown lint (previously only silenced by the blanket `allow(warnings)`)
#[allow(clippy::needless_pass_by_value)]
#[allow(clippy::all)]
#[allow(clippy::ignored_unit_patterns)]
#[allow(clippy::redundant_closure_for_method_calls)]
mod schnorr_container {
  use super::*;
  sol!("src/tests/contracts/Schnorr.sol");
}
pub(crate) use schnorr_container::TestSchnorr as schnorr;

View File

@@ -1,51 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
// A minimal ERC20 implementation, solely intended for use in tests.
contract TestERC20 {
  event Transfer(address indexed from, address indexed to, uint256 value);
  event Approval(address indexed owner, address indexed spender, uint256 value);

  function name() public pure returns (string memory) {
    return "Test ERC20";
  }
  function symbol() public pure returns (string memory) {
    return "TEST";
  }
  function decimals() public pure returns (uint8) {
    return 18;
  }
  function totalSupply() public pure returns (uint256) {
    // NOTE(review): 10e18 is 10 * 10**18; if one million whole tokens with 18 decimals was
    // intended, this should presumably be 1e18 — confirm
    return 1_000_000 * 10e18;
  }

  mapping(address => uint256) balances;
  mapping(address => mapping(address => uint256)) allowances;

  constructor() {
    // Mint the entire supply to the deployer, emitting the conventional mint Transfer
    balances[msg.sender] = totalSupply();
    emit Transfer(address(0), msg.sender, totalSupply());
  }

  function balanceOf(address owner) public view returns (uint256) {
    return balances[owner];
  }
  function transfer(address to, uint256 value) public returns (bool) {
    balances[msg.sender] -= value;
    balances[to] += value;
    // ERC-20 requires emitting Transfer (the event was declared yet never emitted)
    emit Transfer(msg.sender, to, value);
    return true;
  }
  function transferFrom(address from, address to, uint256 value) public returns (bool) {
    allowances[from][msg.sender] -= value;
    balances[from] -= value;
    balances[to] += value;
    emit Transfer(from, to, value);
    return true;
  }
  function approve(address spender, uint256 value) public returns (bool) {
    allowances[msg.sender][spender] = value;
    // ERC-20 requires emitting Approval on a successful approve
    emit Approval(msg.sender, spender, value);
    return true;
  }
  function allowance(address owner, address spender) public view returns (uint256) {
    return allowances[owner][spender];
  }
}

View File

@@ -1,15 +0,0 @@
// SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;
import "../../../contracts/Schnorr.sol";
// Expose the Schnorr library's verify function so tests can call it as an external function.
contract TestSchnorr {
  function verify(
    bytes32 px,
    bytes calldata message,
    bytes32 c,
    bytes32 s
  ) external pure returns (bool) {
    // Forwards directly to the library under test
    return Schnorr.verify(px, message, c, s);
  }
}

View File

@@ -1,33 +1,49 @@
use rand_core::OsRng;
use group::ff::{Field, PrimeField};
use sha2::Sha256;
use sha3::{Digest, Keccak256};
use group::Group;
use k256::{
ecdsa::{
self, hazmat::SignPrimitive, signature::hazmat::PrehashVerifier, SigningKey, VerifyingKey,
},
Scalar, ProjectivePoint,
ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey},
elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint},
U256, Scalar, AffinePoint, ProjectivePoint,
};
use frost::{
curve::{Ciphersuite, Secp256k1},
curve::Secp256k1,
algorithm::{Hram, IetfSchnorr},
tests::{algorithm_machines, sign},
};
use crate::{crypto::*, tests::key_gen};
// The ecrecover opcode, yet with parity replacing v
pub(crate) fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
let sig = ecdsa::Signature::from_scalars(r, s).ok()?;
let message: [u8; 32] = message.to_repr().into();
alloy_core::primitives::Signature::from_signature_and_parity(
sig,
alloy_core::primitives::Parity::Parity(odd_y),
)
.ok()?
.recover_address_from_prehash(&alloy_core::primitives::B256::from(message))
.ok()
.map(Into::into)
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}
pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
if r.is_zero().into() || s.is_zero().into() || !((v == 27) || (v == 28)) {
return None;
}
#[allow(non_snake_case)]
let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into());
#[allow(non_snake_case)]
if let Some(R) = Option::<AffinePoint>::from(R) {
#[allow(non_snake_case)]
let R = ProjectivePoint::from(R);
let r = r.invert().unwrap();
let u1 = ProjectivePoint::GENERATOR * (-message * r);
let u2 = R * (s * r);
let key: ProjectivePoint = u1 + u2;
if !bool::from(key.is_identity()) {
return Some(address(&key));
}
}
None
}
#[test]
@@ -39,23 +55,20 @@ fn test_ecrecover() {
const MESSAGE: &[u8] = b"Hello, World!";
let (sig, recovery_id) = private
.as_nonzero_scalar()
.try_sign_prehashed(
<Secp256k1 as Ciphersuite>::F::random(&mut OsRng),
&keccak256(MESSAGE).into(),
)
.try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
.unwrap();
// Sanity check the signature verifies
#[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
{
assert_eq!(public.verify_prehash(&keccak256(MESSAGE), &sig).unwrap(), ());
assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
}
// Perform the ecrecover
assert_eq!(
ecrecover(
hash_to_scalar(MESSAGE),
u8::from(recovery_id.unwrap().is_y_odd()) == 1,
u8::from(recovery_id.unwrap().is_y_odd()) + 27,
*sig.r(),
*sig.s()
)
@@ -80,13 +93,18 @@ fn test_signing() {
pub fn preprocess_signature_for_ecrecover(
R: ProjectivePoint,
public_key: &PublicKey,
chain_id: U256,
m: &[u8],
s: Scalar,
) -> (Scalar, Scalar) {
let c = EthereumHram::hram(&R, &public_key.A, m);
) -> (u8, Scalar, Scalar) {
let c = EthereumHram::hram(
&R,
&public_key.A,
&[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(),
);
let sa = -(s * public_key.px);
let ca = -(c * public_key.px);
(sa, ca)
(public_key.parity, sa, ca)
}
#[test]
@@ -94,12 +112,21 @@ fn test_ecrecover_hack() {
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let chain_id = U256::ONE;
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let (sa, ca) = preprocess_signature_for_ecrecover(sig.R, &public_key, MESSAGE, sig.s);
let q = ecrecover(sa, false, public_key.px, ca).unwrap();
let (parity, sa, ca) =
preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s);
let q = ecrecover(sa, parity, public_key.px, ca).unwrap();
assert_eq!(q, address(&sig.R));
}

View File

@@ -1,30 +1,22 @@
use std::{sync::Arc, collections::HashMap};
use std::{sync::Arc, time::Duration, fs::File, collections::HashMap};
use rand_core::OsRng;
use group::ff::PrimeField;
use k256::{Scalar, ProjectivePoint};
use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen};
use alloy_core::{
primitives::{Address, U256, Bytes, TxKind},
hex::FromHex,
use ethers_core::{
types::{H160, Signature as EthersSignature},
abi::Abi,
};
use alloy_consensus::{SignableTransaction, TxLegacy};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};
use alloy_rpc_types_eth::TransactionReceipt;
use alloy_simple_request_transport::SimpleRequest;
use alloy_provider::{Provider, RootProvider};
use crate::crypto::PublicKey;
use crate::crypto::{address, deterministically_sign, PublicKey};
#[cfg(test)]
mod crypto;
#[cfg(test)]
mod abi;
#[cfg(test)]
mod schnorr;
#[cfg(test)]
mod router;
pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey) {
@@ -44,88 +36,57 @@ pub fn key_gen() -> (HashMap<Participant, ThresholdKeys<Secp256k1>>, PublicKey)
(keys, public_key)
}
// TODO: Use a proper error here
pub async fn send(
provider: &RootProvider<SimpleRequest>,
wallet: &k256::ecdsa::SigningKey,
mut tx: TxLegacy,
) -> Option<TransactionReceipt> {
let verifying_key = *wallet.verifying_key().as_affine();
let address = Address::from(address(&verifying_key.into()));
// https://github.com/alloy-rs/alloy/issues/539
// let chain_id = provider.get_chain_id().await.unwrap();
// tx.chain_id = Some(chain_id);
tx.chain_id = None;
tx.nonce = provider.get_transaction_count(address).await.unwrap();
// 100 gwei
tx.gas_price = 100_000_000_000u128;
let sig = wallet.sign_prehash_recoverable(tx.signature_hash().as_ref()).unwrap();
assert_eq!(address, tx.clone().into_signed(sig.into()).recover_signer().unwrap());
assert!(
provider.get_balance(address).await.unwrap() >
((U256::from(tx.gas_price) * U256::from(tx.gas_limit)) + tx.value)
);
let mut bytes = vec![];
tx.encode_with_signature_fields(&sig.into(), &mut bytes);
let pending_tx = provider.send_raw_transaction(&bytes).await.ok()?;
pending_tx.get_receipt().await.ok()
}
pub async fn fund_account(
provider: &RootProvider<SimpleRequest>,
wallet: &k256::ecdsa::SigningKey,
to_fund: Address,
value: U256,
) -> Option<()> {
let funding_tx =
TxLegacy { to: TxKind::Call(to_fund), gas_limit: 21_000, value, ..Default::default() };
assert!(send(provider, wallet, funding_tx).await.unwrap().status());
Some(())
}
// TODO: Use a proper error here
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
// TODO: Deterministic deployments across chains
pub async fn deploy_contract(
client: Arc<RootProvider<SimpleRequest>>,
chain_id: u32,
client: Arc<Provider<Http>>,
wallet: &k256::ecdsa::SigningKey,
name: &str,
) -> Option<Address> {
) -> eyre::Result<H160> {
let abi: Abi =
serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap();
let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap();
let hex_bin =
if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
let bin = Bytes::from_hex(hex_bin).unwrap();
let bin = hex::decode(hex_bin).unwrap();
let factory = ContractFactory::new(abi, bin.into(), client.clone());
let deployment_tx = TxLegacy {
chain_id: None,
nonce: 0,
// 100 gwei
gas_price: 100_000_000_000u128,
gas_limit: 1_000_000,
to: TxKind::Create,
value: U256::ZERO,
input: bin,
};
let mut deployment_tx = factory.deploy(())?.tx;
deployment_tx.set_chain_id(chain_id);
deployment_tx.set_gas(1_000_000);
let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
let deployment_tx = deterministically_sign(&deployment_tx);
let sig_hash = deployment_tx.sighash();
let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();
// Fund the deployer address
fund_account(
&client,
wallet,
deployment_tx.recover_signer().unwrap(),
U256::from(deployment_tx.tx().gas_limit) * U256::from(deployment_tx.tx().gas_price),
)
.await?;
// EIP-155 v
let mut v = u64::from(rid.to_byte());
assert!((v == 0) || (v == 1));
v += u64::from((chain_id * 2) + 35);
let (deployment_tx, sig, _) = deployment_tx.into_parts();
let mut bytes = vec![];
deployment_tx.encode_with_signature_fields(&sig, &mut bytes);
let pending_tx = client.send_raw_transaction(&bytes).await.ok()?;
let receipt = pending_tx.get_receipt().await.ok()?;
assert!(receipt.status());
let r = sig.r().to_repr();
let r_ref: &[u8] = r.as_ref();
let s = sig.s().to_repr();
let s_ref: &[u8] = s.as_ref();
let deployment_tx =
deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v });
Some(receipt.contract_address.unwrap())
let pending_tx = client.send_raw_transaction(deployment_tx).await?;
let mut receipt;
while {
receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
receipt.is_none()
} {
tokio::time::sleep(Duration::from_secs(6)).await;
}
let receipt = receipt.unwrap();
assert!(receipt.status == Some(1.into()));
Ok(receipt.contract_address.unwrap())
}

View File

@@ -2,8 +2,7 @@ use std::{convert::TryFrom, sync::Arc, collections::HashMap};
use rand_core::OsRng;
use group::Group;
use k256::ProjectivePoint;
use group::ff::PrimeField;
use frost::{
curve::Secp256k1,
Participant, ThresholdKeys,
@@ -11,174 +10,100 @@ use frost::{
tests::{algorithm_machines, sign},
};
use alloy_core::primitives::{Address, U256};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_types_eth::BlockTransactionsKind;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::{Anvil, AnvilInstance};
use ethers_core::{
types::{H160, U256, Bytes},
abi::AbiEncode,
utils::{Anvil, AnvilInstance},
};
use ethers_providers::{Middleware, Provider, Http};
use crate::{
crypto::*,
deployer::Deployer,
router::{Router, abi as router},
tests::{key_gen, send, fund_account},
crypto::{keccak256, PublicKey, EthereumHram, Signature},
router::{self, *},
tests::{key_gen, deploy_contract},
};
async fn setup_test() -> (
u32,
AnvilInstance,
Arc<RootProvider<SimpleRequest>>,
u64,
Router,
Router<Provider<Http>>,
HashMap<Participant, ThresholdKeys<Secp256k1>>,
PublicKey,
) {
let anvil = Anvil::new().spawn();
let provider = RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
);
let chain_id = provider.get_chain_id().await.unwrap();
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
// Make sure the Deployer constructor returns None, as it doesn't exist yet
assert!(Deployer::new(client.clone()).await.unwrap().is_none());
// Deploy the Deployer
let tx = Deployer::deployment_tx();
fund_account(
&client,
&wallet,
tx.recover_signer().unwrap(),
U256::from(tx.tx().gas_limit) * U256::from(tx.tx().gas_price),
)
.await
.unwrap();
let (tx, sig, _) = tx.into_parts();
let mut bytes = vec![];
tx.encode_with_signature_fields(&sig, &mut bytes);
let pending_tx = client.send_raw_transaction(&bytes).await.unwrap();
let receipt = pending_tx.get_receipt().await.unwrap();
assert!(receipt.status());
let deployer =
Deployer::new(client.clone()).await.expect("network error").expect("deployer wasn't deployed");
let contract_address =
deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap();
let contract = Router::new(contract_address, client.clone());
let (keys, public_key) = key_gen();
// Verify the Router constructor returns None, as it doesn't exist yet
assert!(deployer.find_router(client.clone(), &public_key).await.unwrap().is_none());
// Set the key to the threshold keys
let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000);
let pending_tx = tx.send().await.unwrap();
let receipt = pending_tx.await.unwrap().unwrap();
assert!(receipt.status == Some(1.into()));
// Deploy the router
let receipt = send(&client, &anvil.keys()[0].clone().into(), deployer.deploy_router(&public_key))
.await
.unwrap();
assert!(receipt.status());
let contract = deployer.find_router(client.clone(), &public_key).await.unwrap().unwrap();
(anvil, client, chain_id, contract, keys, public_key)
}
async fn latest_block_hash(client: &RootProvider<SimpleRequest>) -> [u8; 32] {
client
.get_block(client.get_block_number().await.unwrap().into(), BlockTransactionsKind::Hashes)
.await
.unwrap()
.unwrap()
.header
.hash
.unwrap()
.0
(chain_id, anvil, contract, keys, public_key)
}
#[tokio::test]
async fn test_deploy_contract() {
let (_anvil, client, _, router, _, public_key) = setup_test().await;
let block_hash = latest_block_hash(&client).await;
assert_eq!(router.serai_key(block_hash).await.unwrap(), public_key);
assert_eq!(router.nonce(block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
// TODO: Check it emitted SeraiKeyUpdated(public_key) at its genesis
setup_test().await;
}
pub fn hash_and_sign(
keys: &HashMap<Participant, ThresholdKeys<Secp256k1>>,
public_key: &PublicKey,
chain_id: U256,
message: &[u8],
) -> Signature {
let hashed_message = keccak256(message);
let mut chain_id_bytes = [0; 32];
chain_id.to_big_endian(&mut chain_id_bytes);
let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, keys), message);
Signature::new(public_key, message, sig).unwrap()
}
#[tokio::test]
async fn test_router_update_serai_key() {
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
let next_key = loop {
let point = ProjectivePoint::random(&mut OsRng);
let Some(next_key) = PublicKey::new(point) else { continue };
break next_key;
};
let message = Router::update_serai_key_message(
U256::try_from(chain_id).unwrap(),
U256::try_from(1u64).unwrap(),
&next_key,
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, keys),
full_message,
);
let sig = hash_and_sign(&keys, &public_key, &message);
let first_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
let receipt =
send(&client, &anvil.keys()[0].clone().into(), contract.update_serai_key(&next_key, &sig))
.await
.unwrap();
assert!(receipt.status());
let second_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.serai_key(second_block_hash).await.unwrap(), next_key);
// Check this does still offer the historical state
assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key);
// TODO: Check logs
println!("gas used: {:?}", receipt.gas_used);
// println!("logs: {:?}", receipt.logs);
Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap()
}
#[tokio::test]
async fn test_router_execute() {
let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await;
let (chain_id, _anvil, contract, keys, public_key) = setup_test().await;
let to = Address::from([0; 20]);
let value = U256::ZERO;
let tx = router::OutInstruction { to, value, calls: vec![] };
let txs = vec![tx];
let to = H160([0u8; 20]);
let value = U256([0u64; 4]);
let data = Bytes::from([0]);
let tx = OutInstruction { to, value, data: data.clone() };
let first_block_hash = latest_block_hash(&client).await;
let nonce = contract.nonce(first_block_hash).await.unwrap();
assert_eq!(nonce, U256::try_from(1u64).unwrap());
let nonce_call = contract.nonce();
let nonce = nonce_call.call().await.unwrap();
let message = Router::execute_message(U256::try_from(chain_id).unwrap(), nonce, txs.clone());
let sig = hash_and_sign(&keys, &public_key, &message);
let encoded =
("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode();
let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded);
let receipt =
send(&client, &anvil.keys()[0].clone().into(), contract.execute(&txs, &sig)).await.unwrap();
assert!(receipt.status());
let tx = contract
.execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() })
.gas(300_000);
let pending_tx = tx.send().await.unwrap();
let receipt = dbg!(pending_tx.await.unwrap().unwrap());
assert!(receipt.status == Some(1.into()));
let second_block_hash = latest_block_hash(&client).await;
assert_eq!(contract.nonce(second_block_hash).await.unwrap(), U256::try_from(2u64).unwrap());
// Check this does still offer the historical state
assert_eq!(contract.nonce(first_block_hash).await.unwrap(), U256::try_from(1u64).unwrap());
// TODO: Check logs
println!("gas used: {:?}", receipt.gas_used);
// println!("logs: {:?}", receipt.logs);
println!("gas used: {:?}", receipt.cumulative_gas_used);
println!("logs: {:?}", receipt.logs);
}

View File

@@ -1,9 +1,11 @@
use std::sync::Arc;
use std::{convert::TryFrom, sync::Arc};
use rand_core::OsRng;
use group::ff::PrimeField;
use k256::Scalar;
use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar};
use ethers_core::utils::{keccak256, Anvil, AnvilInstance};
use ethers_providers::{Middleware, Provider, Http};
use frost::{
curve::Secp256k1,
@@ -11,34 +13,24 @@ use frost::{
tests::{algorithm_machines, sign},
};
use alloy_core::primitives::Address;
use alloy_sol_types::SolCall;
use alloy_rpc_types_eth::{TransactionInput, TransactionRequest};
use alloy_simple_request_transport::SimpleRequest;
use alloy_rpc_client::ClientBuilder;
use alloy_provider::{Provider, RootProvider};
use alloy_node_bindings::{Anvil, AnvilInstance};
use crate::{
Error,
crypto::*,
tests::{key_gen, deploy_contract, abi::schnorr as abi},
schnorr::*,
tests::{key_gen, deploy_contract},
};
async fn setup_test() -> (AnvilInstance, Arc<RootProvider<SimpleRequest>>, Address) {
async fn setup_test() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
let anvil = Anvil::new().spawn();
let provider = RootProvider::new(
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true),
);
let provider = Provider::<Http>::try_from(anvil.endpoint()).unwrap();
let chain_id = provider.get_chainid().await.unwrap().as_u32();
let wallet = anvil.keys()[0].clone().into();
let client = Arc::new(provider);
let address = deploy_contract(client.clone(), &wallet, "TestSchnorr").await.unwrap();
(anvil, client, address)
let contract_address =
deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap();
let contract = Schnorr::new(contract_address, client.clone());
(chain_id, anvil, contract)
}
#[tokio::test]
@@ -46,48 +38,30 @@ async fn test_deploy_contract() {
setup_test().await;
}
pub async fn call_verify(
provider: &RootProvider<SimpleRequest>,
contract: Address,
public_key: &PublicKey,
message: &[u8],
signature: &Signature,
) -> Result<(), Error> {
let px: [u8; 32] = public_key.px.to_repr().into();
let c_bytes: [u8; 32] = signature.c.to_repr().into();
let s_bytes: [u8; 32] = signature.s.to_repr().into();
let call = TransactionRequest::default().to(contract).input(TransactionInput::new(
abi::verifyCall::new((px.into(), message.to_vec().into(), c_bytes.into(), s_bytes.into()))
.abi_encode()
.into(),
));
let bytes = provider.call(&call).await.map_err(|_| Error::ConnectionError)?;
let res =
abi::verifyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?;
if res._0 {
Ok(())
} else {
Err(Error::InvalidSignature)
}
}
#[tokio::test]
async fn test_ecrecover_hack() {
let (_anvil, client, contract) = setup_test().await;
let (chain_id, _anvil, contract) = setup_test().await;
let chain_id = U256::from(chain_id);
let (keys, public_key) = key_gen();
const MESSAGE: &[u8] = b"Hello, World!";
let hashed_message = keccak256(MESSAGE);
let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();
let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
let sig =
sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE);
let sig = Signature::new(&public_key, MESSAGE, sig).unwrap();
let sig = sign(
&mut OsRng,
&algo,
keys.clone(),
algorithm_machines(&mut OsRng, &algo, &keys),
full_message,
);
let sig = Signature::new(&public_key, chain_id, MESSAGE, sig).unwrap();
call_verify(&client, contract, &public_key, MESSAGE, &sig).await.unwrap();
call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap();
// Test an invalid signature fails
let mut sig = sig;
sig.s += Scalar::ONE;
assert!(call_verify(&client, contract, &public_key, MESSAGE, &sig).await.is_err());
assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err());
}

View File

@@ -6,7 +6,7 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
rust-version = "1.74"
[package.metadata.docs.rs]
all-features = true
@@ -18,35 +18,96 @@ workspace = true
[dependencies]
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }
async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }
# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }
monero-io = { path = "io", version = "0.1", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }
monero-primitives = { path = "primitives", version = "0.1", default-features = false }
monero-mlsag = { path = "ringct/mlsag", version = "0.1", default-features = false }
monero-clsag = { path = "ringct/clsag", version = "0.1", default-features = false }
monero-borromean = { path = "ringct/borromean", version = "0.1", default-features = false }
monero-bulletproofs = { path = "ringct/bulletproofs", version = "0.1", default-features = false }
async-lock = { version = "3", default-features = false, optional = true }
hex-literal = "0.4"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
base58-monero = { version = "2", default-features = false, features = ["check"] }
# Used for the provided HTTP RPC
digest_auth = { version = "0.3", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
tokio = { version = "1", default-features = false, optional = true }
[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }
[dev-dependencies]
tokio = { version = "1", features = ["sync", "macros"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
[features]
std = [
"std-shims/std",
"zeroize/std",
"thiserror",
"zeroize/std",
"subtle/std",
"rand_core/std",
"rand/std",
"rand_chacha/std",
"rand_distr/std",
"sha3/std",
"pbkdf2/std",
"multiexp/std",
"transcript/std",
"dleq/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
"monero-mlsag/std",
"monero-clsag/std",
"monero-borromean/std",
"monero-bulletproofs/std",
"async-lock?/std",
"hex/std",
"serde/std",
"serde_json/std",
"base58-monero/std",
]
compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"]
multisig = ["monero-clsag/multisig", "std"]
default = ["std", "compile-time-generators"]
cache-distribution = ["async-lock"]
http-rpc = ["digest_auth", "simple-request", "tokio"]
multisig = ["transcript", "frost", "dleq", "std"]
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
experimental = []
default = ["std", "http-rpc"]

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,28 +1,49 @@
# monero-serai
A modern Monero transaction library. It provides a modern, Rust-friendly view of
the Monero protocol.
A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pit falls developers may
face.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
monero-serai also offers the following features:
### Wallet Functionality
- Featured Addresses
- A FROST-based multisig orders of magnitude more performant than Monero's
monero-serai originally included wallet functionality. That has been moved to
monero-wallet.
### Purpose and Support
### Purpose and support
monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet does not include any functionality specific to Serai.
yet will not deprive functionality from other users.
### Cargo Features
Various legacy transaction formats are not currently implemented, yet we are
willing to add support for them. There aren't active development efforts around
them however.
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `compile-time-generators` (on by default): Derives the generators at
compile-time so they don't need to be derived at runtime. This is recommended
if program size doesn't need to be kept minimal.
- `multisig`: Enables the `multisig` feature for all dependencies.
### Caveats
This library DOES attempt to do the following:
- Create on-chain transactions identical to how wallet2 would (unless told not
to)
- Not be detectable as monero-serai when scanning outputs
- Not reveal spent outputs to the connected RPC node
This library DOES NOT attempt to do the following:
- Have identical RPC behavior when creating transactions
- Be a wallet
This means that monero-serai shouldn't be fingerprintable on-chain. It also
shouldn't be fingerprintable if a targeted attack occurs to detect if the
receiving wallet is monero-serai or wallet2. It also should be generally safe
for usage with remote nodes.
It won't hide from remote nodes it's monero-serai however, potentially
allowing a remote node to profile you. The implications of this are left to the
user to consider.
It also won't act as a wallet, just as a transaction library. wallet2 has
several *non-transaction-level* policies, such as always attempting to use two
inputs to create transactions. These are considered out of scope to
monero-serai.

67
coins/monero/build.rs Normal file
View File

@@ -0,0 +1,67 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
use dalek_ff_group::EdwardsPoint;
use monero_generators::bulletproofs_generators;
fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
for generator in points {
generators_string.extend(
format!(
"
dalek_ff_group::EdwardsPoint(
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
),
",
generator.compress().to_bytes()
)
.chars(),
);
}
}
fn generators(prefix: &'static str, path: &str) {
let generators = bulletproofs_generators(prefix.as_bytes());
#[allow(non_snake_case)]
let mut G_str = String::new();
serialize(&mut G_str, &generators.G);
#[allow(non_snake_case)]
let mut H_str = String::new();
serialize(&mut H_str, &generators.H);
let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
let _ = remove_file(&path);
File::create(&path)
.unwrap()
.write_all(
format!(
"
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub fn GENERATORS() -> &'static Generators {{
GENERATORS_CELL.get_or_init(|| Generators {{
G: vec![
{G_str}
],
H: vec![
{H_str}
],
}})
}}
",
)
.as_bytes(),
)
.unwrap();
}
fn main() {
println!("cargo:rerun-if-changed=build.rs");
generators("bulletproof", "generators.rs");
generators("bulletproof_plus", "generators_plus.rs");
}

View File

@@ -1,7 +1,7 @@
[package]
name = "monero-generators"
version = "0.4.0"
description = "Monero's hash to point function and generators"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
@@ -20,27 +20,15 @@ std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-fe
subtle = { version = "^2.4", default-features = false }
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }
monero-io = { path = "../io", version = "0.1", default-features = false }
[dev-dependencies]
hex = "0.4"
[features]
std = [
"std-shims/std",
"subtle/std",
"sha3/std",
"group/alloc",
"dalek-ff-group/std",
"monero-io/std"
]
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
default = ["std"]

View File

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Copyright (c) 2022-2023 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,13 +1,7 @@
# Monero Generators
Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `hash_to_ec` is included, as needed to generate
the generators.
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
This library is usable under no-std when the `std` feature is disabled.

View File

@@ -1,20 +1,27 @@
use subtle::ConditionallySelectable;
use curve25519_dalek::edwards::EdwardsPoint;
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};
use group::ff::{Field, PrimeField};
use dalek_ff_group::FieldElement;
use monero_io::decompress_point;
use crate::hash;
use crate::keccak256;
/// Decompress canonically encoded ed25519 point
/// It does not check if the point is in the prime order subgroup
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
}
/// Monero's `hash_to_ec` function.
/// Monero's hash to point function, as named `hash_to_ec`.
pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
#[allow(non_snake_case)]
let A = FieldElement::from(486662u64);
let v = FieldElement::from_square(keccak256(&bytes)).double();
let v = FieldElement::from_square(hash(&bytes)).double();
let w = v + FieldElement::ONE;
let x = w.square() + (-A.square() * v);

View File

@@ -1,46 +1,45 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//!
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.
#![cfg_attr(not(feature = "std"), no_std)]
use std_shims::{sync::OnceLock, vec::Vec};
use sha3::{Digest, Keccak256};
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, edwards::EdwardsPoint};
use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint};
use monero_io::{write_varint, decompress_point};
use group::{Group, GroupEncoding};
use dalek_ff_group::EdwardsPoint;
mod varint;
use varint::write_varint;
mod hash_to_point;
pub use hash_to_point::hash_to_point;
pub use hash_to_point::{hash_to_point, decompress_point};
#[cfg(test)]
mod tests;
fn keccak256(data: &[u8]) -> [u8; 32] {
fn hash(data: &[u8]) -> [u8; 32] {
Keccak256::digest(data).into()
}
static H_CELL: OnceLock<EdwardsPoint> = OnceLock::new();
/// Monero's `H` generator.
///
/// Contrary to convention (`G` for values, `H` for randomness), `H` is used by Monero for amounts
/// within Pedersen commitments.
static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
pub fn H() -> EdwardsPoint {
pub fn H() -> DalekPoint {
*H_CELL.get_or_init(|| {
decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
.unwrap()
.mul_by_cofactor()
decompress_point(hash(&EdwardsPoint::generator().to_bytes())).unwrap().mul_by_cofactor()
})
}
static H_POW_2_CELL: OnceLock<[EdwardsPoint; 64]> = OnceLock::new();
/// Monero's `H` generator, multiplied by 2**i for i in 1 ..= 64.
///
/// This table is useful when working with amounts, which are u64s.
static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
/// Monero's alternate generator `H`, multiplied by 2**i for i in 1 ..= 64.
#[allow(non_snake_case)]
pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
H_POW_2_CELL.get_or_init(|| {
let mut res = [H(); 64];
for i in 1 .. 64 {
@@ -50,45 +49,31 @@ pub fn H_pow_2() -> &'static [EdwardsPoint; 64] {
})
}
/// The maximum amount of commitments provable for within a single range proof.
pub const MAX_COMMITMENTS: usize = 16;
/// The amount of bits a value within a commitment may use.
pub const COMMITMENT_BITS: usize = 64;
/// The logarithm (over 2) of the amount of bits a value within a commitment may use.
pub const LOG_COMMITMENT_BITS: usize = 6; // 2 ** 6 == N
const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;
/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
/// The G (bold) vector of generators.
pub G: Vec<EdwardsPoint>,
/// The H (bold) vector of generators.
pub H: Vec<EdwardsPoint>,
}
/// Generate generators as needed for Bulletproofs(+), as Monero does.
///
/// Consumers should not call this function ad-hoc, yet call it within a build script or use a
/// once-initialized static.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
// The maximum amount of bits used within a single range proof.
const MAX_MN: usize = MAX_COMMITMENTS * COMMITMENT_BITS;
let mut preimage = H().compress().to_bytes().to_vec();
preimage.extend(dst);
let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
for i in 0 .. MAX_MN {
// We generate a pair of generators per iteration
let i = 2 * i;
let mut even = preimage.clone();
write_varint(&i, &mut even).unwrap();
res.H.push(hash_to_point(keccak256(&even)));
let mut even = H().compress().to_bytes().to_vec();
even.extend(dst);
let mut odd = even.clone();
let mut odd = preimage.clone();
write_varint(&(i + 1), &mut odd).unwrap();
res.G.push(hash_to_point(keccak256(&odd)));
write_varint(&i.try_into().unwrap(), &mut even).unwrap();
write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
}
res
}

View File

@@ -0,0 +1,38 @@
use crate::{decompress_point, hash_to_point};
#[test]
fn crypto_tests() {
// tests.txt file copied from monero repo
// https://github.com/monero-project/monero/
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
let reader = include_str!("./tests.txt");
for line in reader.lines() {
let mut words = line.split_whitespace();
let command = words.next().unwrap();
match command {
"check_key" => {
let key = words.next().unwrap();
let expected = match words.next().unwrap() {
"true" => true,
"false" => false,
_ => unreachable!("invalid result"),
};
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
assert_eq!(actual.is_some(), expected);
}
"hash_to_ec" => {
let bytes = words.next().unwrap();
let expected = words.next().unwrap();
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
}
_ => unreachable!("unknown command"),
}
}
}

View File

@@ -1,36 +1 @@
use crate::{decompress_point, hash_to_point};
#[test]
fn test_vectors() {
// tests.txt file copied from monero repo
// https://github.com/monero-project/monero/
// blob/ac02af92867590ca80b2779a7bbeafa99ff94dcb/tests/crypto/tests.txt
let reader = include_str!("./tests.txt");
for line in reader.lines() {
let mut words = line.split_whitespace();
let command = words.next().unwrap();
match command {
"check_key" => {
let key = words.next().unwrap();
let expected = match words.next().unwrap() {
"true" => true,
"false" => false,
_ => unreachable!("invalid result"),
};
let actual = decompress_point(hex::decode(key).unwrap().try_into().unwrap());
assert_eq!(actual.is_some(), expected);
}
"hash_to_ec" => {
let bytes = words.next().unwrap();
let expected = words.next().unwrap();
let actual = hash_to_point(hex::decode(bytes).unwrap().try_into().unwrap());
assert_eq!(hex::encode(actual.compress().to_bytes()), expected);
}
_ => unreachable!("unknown command"),
}
}
}
mod hash_to_point;

View File

@@ -0,0 +1,16 @@
use std_shims::io::{self, Write};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
w.write_all(&[b])?;
varint != 0
} {}
Ok(())
}

View File

@@ -1,24 +0,0 @@
[package]
name = "monero-io"
version = "0.1.0"
description = "Serialization functions, as within the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/io"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc"] }
[features]
std = ["std-shims/std"]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,11 +0,0 @@
# Monero IO
Serialization functions, as within the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,219 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use core::fmt::Debug;
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;
mod sealed {
/// A trait for a number readable/writable as a VarInt.
///
/// This is sealed to prevent unintended implementations.
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
const BITS: usize;
}
impl VarInt for u8 {
const BITS: usize = 8;
}
impl VarInt for u32 {
const BITS: usize = 32;
}
impl VarInt for u64 {
const BITS: usize = 64;
}
impl VarInt for usize {
const BITS: usize = core::mem::size_of::<usize>() * 8;
}
}
/// The amount of bytes this number will take when serialized as a VarInt.
///
/// This function will panic if the VarInt exceeds u64::MAX.
pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}
/// Write a byte.
///
/// This is used as a building block within generic functions.
pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}
/// Write a number, VarInt-encoded.
///
/// This will panic if the VarInt exceeds u64::MAX.
pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
if varint != 0 {
b |= VARINT_CONTINUATION_MASK;
}
write_byte(&b, w)?;
varint != 0
} {}
Ok(())
}
/// Write a scalar.
pub fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
w.write_all(&scalar.to_bytes())
}
/// Write a point.
pub fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
w.write_all(&point.compress().to_bytes())
}
/// Write a list of elements, without length-prefixing.
pub fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
for value in values {
f(value, w)?;
}
Ok(())
}
/// Write a list of elements, with length-prefixing.
pub fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
f: F,
values: &[T],
w: &mut W,
) -> io::Result<()> {
write_varint(&values.len(), w)?;
write_raw_vec(f, values, w)
}
/// Read a constant amount of bytes.
pub fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
let mut res = [0; N];
r.read_exact(&mut res)?;
Ok(res)
}
/// Read a single byte.
pub fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
Ok(read_bytes::<_, 1>(r)?[0])
}
/// Read a u16, little-endian encoded.
pub fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
read_bytes(r).map(u16::from_le_bytes)
}
/// Read a u32, little-endian encoded.
pub fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
read_bytes(r).map(u32::from_le_bytes)
}
/// Read a u64, little-endian encoded.
pub fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}
/// Read a canonically-encoded VarInt.
pub fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
if (bits != 0) && (b == 0) {
Err(io::Error::other("non-canonical varint"))?;
}
if ((bits + 7) >= U::BITS) && (b >= (1 << (U::BITS - bits))) {
Err(io::Error::other("varint overflow"))?;
}
res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
}
/// Read a canonically-encoded scalar.
///
/// Some scalars within the Monero protocol are not enforced to be canonically encoded. For such
/// scalars, they should be represented as `[u8; 32]` and later converted to scalars as relevant.
pub fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
.ok_or_else(|| io::Error::other("unreduced scalar"))
}
/// Decompress a canonically-encoded Ed25519 point.
///
/// Ed25519 is of order `8 * l`. This function ensures each of those `8 * l` points have a singular
/// encoding by checking points aren't encoded with an unreduced field element, and aren't negative
/// when the negative is equivalent (0 == -0).
///
/// Since this decodes an Ed25519 point, it does not check the point is in the prime-order
/// subgroup. Torsioned points do have a canonical encoding, and only aren't canonical when
/// considered in relation to the prime-order subgroup.
pub fn decompress_point(bytes: [u8; 32]) -> Option<EdwardsPoint> {
CompressedEdwardsY(bytes)
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
}
/// Read a canonically-encoded Ed25519 point.
///
/// This internally calls `decompress_point` and has the same definition of canonicity. This
/// function does not check the resulting point is within the prime-order subgroup.
pub fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
let bytes = read_bytes(r)?;
decompress_point(bytes).ok_or_else(|| io::Error::other("invalid point"))
}
/// Read a canonically-encoded Ed25519 point, within the prime-order subgroup.
pub fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
.filter(EdwardsPoint::is_torsion_free)
.ok_or_else(|| io::Error::other("invalid point"))
}
/// Read a variable-length list of elements, without length-prefixing.
pub fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
len: usize,
r: &mut R,
) -> io::Result<Vec<T>> {
let mut res = vec![];
for _ in 0 .. len {
res.push(f(r)?);
}
Ok(res)
}
/// Read a constant-length list of elements.
pub fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
f: F,
r: &mut R,
) -> io::Result<[T; N]> {
read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
}
/// Read a length-prefixed variable-length list of elements.
pub fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(f: F, r: &mut R) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?, r)
}

View File

@@ -1,44 +0,0 @@
[package]
name = "monero-primitives"
version = "0.1.0"
description = "Primitives for the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/primitives"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
sha3 = { version = "0.10", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../io", version = "0.1", default-features = false }
monero-generators = { path = "../generators", version = "0.4", default-features = false }
[dev-dependencies]
hex = { version = "0.4", default-features = false, features = ["alloc"] }
[features]
std = [
"std-shims/std",
"zeroize/std",
"sha3/std",
"monero-generators/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,11 +0,0 @@
# Monero Primitives
Primitive structures and functions for the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,248 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
use std_shims::{io, vec::Vec};
#[cfg(feature = "std")]
use std_shims::sync::OnceLock;
use zeroize::{Zeroize, ZeroizeOnDrop};
use sha3::{Digest, Keccak256};
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
traits::VartimePrecomputedMultiscalarMul,
scalar::Scalar,
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};
use monero_io::*;
use monero_generators::H;
mod unreduced_scalar;
pub use unreduced_scalar::UnreducedScalar;
#[cfg(test)]
mod tests;
// On std, we cache some variables in statics.
#[cfg(feature = "std")]
static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
/// The inverse of 8 over l.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
pub fn INV_EIGHT() -> Scalar {
*INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}
// In no-std environments, we prefer the reduced memory use and calculate it ad-hoc.
/// The inverse of 8 over l.
#[cfg(not(feature = "std"))]
#[allow(non_snake_case)]
pub fn INV_EIGHT() -> Scalar {
Scalar::from(8u8).invert()
}
#[cfg(feature = "std")]
static G_PRECOMP_CELL: OnceLock<VartimeEdwardsPrecomputation> = OnceLock::new();
/// A cached (if std) pre-computation of the Ed25519 generator, G.
#[cfg(feature = "std")]
#[allow(non_snake_case)]
pub fn G_PRECOMP() -> &'static VartimeEdwardsPrecomputation {
G_PRECOMP_CELL.get_or_init(|| VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT]))
}
/// A cached (if std) pre-computation of the Ed25519 generator, G.
#[cfg(not(feature = "std"))]
#[allow(non_snake_case)]
pub fn G_PRECOMP() -> VartimeEdwardsPrecomputation {
VartimeEdwardsPrecomputation::new([ED25519_BASEPOINT_POINT])
}
/// The Keccak-256 hash function.
pub fn keccak256(data: impl AsRef<[u8]>) -> [u8; 32] {
Keccak256::digest(data.as_ref()).into()
}
/// Hash the provided data to a scalar via keccak256(data) % l.
///
/// This function panics if it finds the Keccak-256 preimage for [0; 32].
pub fn keccak256_to_scalar(data: impl AsRef<[u8]>) -> Scalar {
let scalar = Scalar::from_bytes_mod_order(keccak256(data.as_ref()));
// Monero will explicitly error in this case
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
// not generate/verify a proof we believe to be valid when it isn't
assert!(scalar != Scalar::ZERO, "ZERO HASH: {:?}", data.as_ref());
scalar
}
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
/// The mask for this commitment.
pub mask: Scalar,
/// The amount committed to by this commitment.
pub amount: u64,
}
impl core::fmt::Debug for Commitment {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
}
}
impl Commitment {
/// A commitment to zero, defined with a mask of 1 (as to not be the identity).
pub fn zero() -> Commitment {
Commitment { mask: Scalar::ONE, amount: 0 }
}
/// Create a new Commitment.
pub fn new(mask: Scalar, amount: u64) -> Commitment {
Commitment { mask, amount }
}
/// Calculate the Pedersen commitment, as a point, from this transparent structure.
pub fn calculate(&self) -> EdwardsPoint {
EdwardsPoint::vartime_double_scalar_mul_basepoint(&Scalar::from(self.amount), &H(), &self.mask)
}
/// Write the Commitment.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.mask.to_bytes())?;
w.write_all(&self.amount.to_le_bytes())
}
/// Serialize the Commitment to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 8);
self.write(&mut res).unwrap();
res
}
/// Read a Commitment.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read<R: io::Read>(r: &mut R) -> io::Result<Commitment> {
Ok(Commitment::new(read_scalar(r)?, read_u64(r)?))
}
}
/// Decoy data, as used for producing Monero's ring signatures.
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
offsets: Vec<u64>,
signer_index: u8,
ring: Vec<[EdwardsPoint; 2]>,
}
impl core::fmt::Debug for Decoys {
fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
fmt
.debug_struct("Decoys")
.field("offsets", &self.offsets)
.field("ring", &self.ring)
.finish_non_exhaustive()
}
}
#[allow(clippy::len_without_is_empty)]
impl Decoys {
/// Create a new instance of decoy data.
///
/// `offsets` are the positions of each ring member within the Monero blockchain, offset from the
/// prior member's position (with the initial ring member offset from 0).
pub fn new(offsets: Vec<u64>, signer_index: u8, ring: Vec<[EdwardsPoint; 2]>) -> Option<Self> {
if (offsets.len() != ring.len()) || (usize::from(signer_index) >= ring.len()) {
None?;
}
Some(Decoys { offsets, signer_index, ring })
}
/// The length of the ring.
pub fn len(&self) -> usize {
self.offsets.len()
}
/// The positions of the ring members within the Monero blockchain, as their offsets.
///
/// The list is formatted as the position of the first ring member, then the offset from each
/// ring member to its prior.
pub fn offsets(&self) -> &[u64] {
&self.offsets
}
/// The positions of the ring members within the Monero blockchain.
pub fn positions(&self) -> Vec<u64> {
let mut res = Vec::with_capacity(self.len());
res.push(self.offsets[0]);
for m in 1 .. self.len() {
res.push(res[m - 1] + self.offsets[m]);
}
res
}
/// The index of the signer within the ring.
pub fn signer_index(&self) -> u8 {
self.signer_index
}
/// The ring.
pub fn ring(&self) -> &[[EdwardsPoint; 2]] {
&self.ring
}
/// The [key, commitment] pair of the signer.
pub fn signer_ring_members(&self) -> [EdwardsPoint; 2] {
self.ring[usize::from(self.signer_index)]
}
/// Write the Decoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn write(&self, w: &mut impl io::Write) -> io::Result<()> {
write_vec(write_varint, &self.offsets, w)?;
w.write_all(&[self.signer_index])?;
write_vec(
|pair, w| {
write_point(&pair[0], w)?;
write_point(&pair[1], w)
},
&self.ring,
w,
)
}
/// Serialize the Decoys to a `Vec<u8>`.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut res =
Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
self.write(&mut res).unwrap();
res
}
/// Read a set of Decoys.
///
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
pub fn read(r: &mut impl io::Read) -> io::Result<Decoys> {
Decoys::new(
read_vec(read_varint, r)?,
read_byte(r)?,
read_vec(|r| Ok([read_point(r)?, read_point(r)?]), r)?,
)
.ok_or_else(|| io::Error::other("invalid Decoys"))
}
}

View File

@@ -1,41 +0,0 @@
[package]
name = "monero-borromean"
version = "0.1.0"
description = "Borromean ring signatures arranged into a range proof, as done by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/borromean"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[features]
std = [
"std-shims/std",
"zeroize/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,12 +0,0 @@
# Monero Borromean
Borromean ring signatures arranged into a range proof, as done by the Monero
protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,57 +0,0 @@
[package]
name = "monero-bulletproofs"
version = "0.1.0"
description = "Bulletproofs(+) range proofs, as defined by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/bulletproofs"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[build-dependencies]
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
[dev-dependencies]
hex-literal = "0.4"
[features]
std = [
"std-shims/std",
"thiserror",
"rand_core/std",
"zeroize/std",
"subtle/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
compile-time-generators = ["curve25519-dalek/precomputed-tables"]
default = ["std", "compile-time-generators"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,14 +0,0 @@
# Monero Bulletproofs(+)
Bulletproofs(+) range proofs, as defined by the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `compile-time-generators` (on by default): Derives the generators at
compile-time so they don't need to be derived at runtime. This is recommended
if program size doesn't need to be kept minimal.

View File

@@ -1,88 +0,0 @@
use std::{
io::Write,
env,
path::Path,
fs::{File, remove_file},
};
// Compile-time path: derive the Bulletproofs generators during the build and emit them into
// OUT_DIR as Rust source, so the binary never derives them at runtime.
#[cfg(feature = "compile-time-generators")]
fn generators(prefix: &'static str, path: &str) {
  use curve25519_dalek::EdwardsPoint;
  use monero_generators::bulletproofs_generators;

  // Append each point to the generated source string as a compressed-point literal followed by
  // a decompression call (decompression cannot fail for points we just compressed).
  fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
    for generator in points {
      generators_string.extend(
        format!(
          "
curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
",
          generator.compress().to_bytes()
        )
        .chars(),
      );
    }
  }

  // Derive the generator set for this protocol's domain-separation prefix.
  let generators = bulletproofs_generators(prefix.as_bytes());
  #[allow(non_snake_case)]
  let mut G_str = String::new();
  serialize(&mut G_str, &generators.G);
  #[allow(non_snake_case)]
  let mut H_str = String::new();
  serialize(&mut H_str, &generators.H);

  // Remove any stale output from a prior build before writing the fresh file.
  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        "
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub(crate) fn GENERATORS() -> &'static Generators {{
  GENERATORS_CELL.get_or_init(|| Generators {{
    G: std_shims::vec![
      {G_str}
    ],
    H: std_shims::vec![
      {H_str}
    ],
  }})
}}
",
      )
      .as_bytes(),
    )
    .unwrap();
}
// Runtime path: emit a GENERATORS() which lazily derives the generators on first use, keeping
// program size minimal at the cost of a one-time runtime derivation.
#[cfg(not(feature = "compile-time-generators"))]
fn generators(prefix: &'static str, path: &str) {
  // Remove any stale output from a prior build before writing the fresh file.
  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        r#"
static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub(crate) fn GENERATORS() -> &'static Generators {{
  GENERATORS_CELL.get_or_init(|| {{
    monero_generators::bulletproofs_generators(b"{prefix}")
  }})
}}
"#,
      )
      .as_bytes(),
    )
    .unwrap();
}
fn main() {
  // Only re-run this build script when it itself changes.
  println!("cargo:rerun-if-changed=build.rs");
  // Emit the generator tables for both the original Bulletproofs and Bulletproofs+.
  generators("bulletproof", "generators.rs");
  generators("bulletproof_plus", "generators_plus.rs");
}

View File

@@ -1,101 +0,0 @@
use std_shims::vec::Vec;
use curve25519_dalek::{
constants::ED25519_BASEPOINT_POINT,
traits::{IsIdentity, VartimeMultiscalarMul},
scalar::Scalar,
edwards::EdwardsPoint,
};
use monero_generators::{H, Generators};
use crate::{original, plus};
// Accumulator for a single deferred multiscalar multiplication.
//
// Scalar weights are summed per fixed point (the two protocol generators and the per-index
// generator vectors), while variable points (commitments, proof elements) are collected in
// `other`. The batch verifies iff the whole sum is the identity.
#[derive(Default)]
pub(crate) struct InternalBatchVerifier {
  // Accumulated weight for the first fixed generator (passed to `verify` as G).
  pub(crate) g: Scalar,
  // Accumulated weight for the second fixed generator (passed to `verify` as H).
  pub(crate) h: Scalar,
  // Per-index weights for the G generator vector.
  pub(crate) g_bold: Vec<Scalar>,
  // Per-index weights for the H generator vector.
  pub(crate) h_bold: Vec<Scalar>,
  // Arbitrary (weight, point) terms for points not shared across proofs.
  pub(crate) other: Vec<(Scalar, EdwardsPoint)>,
}
impl InternalBatchVerifier {
  // Evaluate the accumulated multiscalar multiplication, returning true iff it sums to the
  // identity (as it will for a batch of honest proofs under random weights).
  #[must_use]
  fn verify(self, G: EdwardsPoint, H: EdwardsPoint, generators: &Generators) -> bool {
    // Two terms for G/H, plus one term per accumulated vector weight and `other` entry.
    let capacity = 2 + self.g_bold.len() + self.h_bold.len() + self.other.len();
    let mut scalars = Vec::with_capacity(capacity);
    let mut points = Vec::with_capacity(capacity);
    scalars.push(self.g);
    points.push(G);
    scalars.push(self.h);
    points.push(H);
    for (i, g_bold) in self.g_bold.into_iter().enumerate() {
      scalars.push(g_bold);
      points.push(generators.G[i]);
    }
    for (i, h_bold) in self.h_bold.into_iter().enumerate() {
      scalars.push(h_bold);
      points.push(generators.H[i]);
    }
    for (scalar, point) in self.other {
      scalars.push(scalar);
      points.push(point);
    }
    // Variable-time is acceptable here: all inputs to verification are public.
    EdwardsPoint::vartime_multiscalar_mul(scalars, points).is_identity()
  }
}
// Batch verifier bound to the original Bulletproofs' fixed points and generators.
#[derive(Default)]
pub(crate) struct BulletproofsBatchVerifier(pub(crate) InternalBatchVerifier);
impl BulletproofsBatchVerifier {
  #[must_use]
  pub(crate) fn verify(self) -> bool {
    // The Ed25519 basepoint and Monero's H, with the generators derived for original BPs.
    self.0.verify(ED25519_BASEPOINT_POINT, H(), original::GENERATORS())
  }
}
// Batch verifier bound to the Bulletproofs+ fixed points and generators.
#[derive(Default)]
pub(crate) struct BulletproofsPlusBatchVerifier(pub(crate) InternalBatchVerifier);
impl BulletproofsPlusBatchVerifier {
  #[must_use]
  pub(crate) fn verify(self) -> bool {
    // Bulletproofs+ is written as per the paper, with G for the value and H for the mask
    // Monero uses H for the value and G for the mask
    // Hence the swapped argument order relative to BulletproofsBatchVerifier above.
    self.0.verify(H(), ED25519_BASEPOINT_POINT, plus::GENERATORS())
  }
}
/// A batch verifier for Bulletproofs(+).
///
/// This uses a fixed layout such that all fixed points only incur a single point scaling,
/// regardless of the amounts of proofs verified. For all variable points (commitments), they're
/// accumulated with the fixed points into a single multiscalar multiplication.
#[derive(Default)]
pub struct BatchVerifier {
pub(crate) original: BulletproofsBatchVerifier,
pub(crate) plus: BulletproofsPlusBatchVerifier,
}
impl BatchVerifier {
/// Create a new batch verifier.
pub fn new() -> Self {
Self {
original: BulletproofsBatchVerifier(InternalBatchVerifier::default()),
plus: BulletproofsPlusBatchVerifier(InternalBatchVerifier::default()),
}
}
/// Verify all of the proofs queued within this batch verifier.
///
/// This uses a variable-time multiscalar multiplication internally.
#[must_use]
pub fn verify(self) -> bool {
self.original.verify() && self.plus.verify()
}
}

View File

@@ -1,74 +0,0 @@
use std_shims::{vec, vec::Vec};
use curve25519_dalek::{
traits::{MultiscalarMul, VartimeMultiscalarMul},
scalar::Scalar,
edwards::EdwardsPoint,
};
pub(crate) use monero_generators::{MAX_COMMITMENTS, COMMITMENT_BITS, LOG_COMMITMENT_BITS};
// Constant-time multiexponentiation over a slice of (scalar, point) pairs.
pub(crate) fn multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  // Split the pairs into the two parallel collections the dalek API expects.
  let (scalars, points): (Vec<_>, Vec<_>) =
    pairs.iter().map(|(scalar, point)| (scalar, point)).unzip();
  EdwardsPoint::multiscalar_mul(scalars, points)
}
// Variable-time multiexponentiation; only use with public scalars/points.
pub(crate) fn multiexp_vartime(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  // Split the pairs into the two parallel collections the dalek API expects.
  let (scalars, points): (Vec<_>, Vec<_>) =
    pairs.iter().map(|(scalar, point)| (scalar, point)).unzip();
  EdwardsPoint::vartime_multiscalar_mul(scalars, points)
}
/*
This has room for optimization worth investigating further. It currently takes
an iterative approach. It can be optimized further via divide and conquer.
Assume there are 4 challenges.
Iterative approach (current):
1. Do the optimal multiplications across challenge column 0 and 1.
2. Do the optimal multiplications across that result and column 2.
3. Do the optimal multiplications across that result and column 3.
Divide and conquer (worth investigating further):
1. Do the optimal multiplications across challenge column 0 and 1.
2. Do the optimal multiplications across challenge column 2 and 3.
3. Multiply both results together.
When there are 4 challenges (n=16), the iterative approach does 28 multiplications
versus divide and conquer's 24.
*/
// Compute, for every index of a 2^n-element generator vector, the product of the n round
// challenges/challenge-inverses selecting that index (bit j of i set -> challenge j, else its
// inverse). `challenges` holds (challenge, inverse) pairs per round.
pub(crate) fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
  let mut products = vec![Scalar::ONE; 1 << challenges.len()];
  if !challenges.is_empty() {
    products[0] = challenges[0].1;
    products[1] = challenges[0].0;
    for (j, challenge) in challenges.iter().enumerate().skip(1) {
      // Fill from the top down so products[slots / 2] still holds the prior round's value.
      let mut slots = (1 << (j + 1)) - 1;
      while slots > 0 {
        products[slots] = products[slots / 2] * challenge.0;
        products[slots - 1] = products[slots / 2] * challenge.1;
        slots = slots.saturating_sub(2);
      }
    }
    // Sanity check since if the above failed to populate, it'd be critical
    for product in &products {
      debug_assert!(*product != Scalar::ZERO);
    }
  }
  products
}

View File

@@ -1,292 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(non_snake_case)]
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroizing;
use curve25519_dalek::edwards::EdwardsPoint;
use monero_io::*;
pub use monero_generators::MAX_COMMITMENTS;
use monero_primitives::Commitment;
pub(crate) mod scalar_vector;
pub(crate) mod point_vector;
pub(crate) mod core;
use crate::core::LOG_COMMITMENT_BITS;
pub(crate) mod batch_verifier;
use batch_verifier::{BulletproofsBatchVerifier, BulletproofsPlusBatchVerifier};
pub use batch_verifier::BatchVerifier;
pub(crate) mod original;
use crate::original::{
IpProof, AggregateRangeStatement as OriginalStatement, AggregateRangeWitness as OriginalWitness,
AggregateRangeProof as OriginalProof,
};
pub(crate) mod plus;
use crate::plus::{
WipProof, AggregateRangeStatement as PlusStatement, AggregateRangeWitness as PlusWitness,
AggregateRangeProof as PlusProof,
};
#[cfg(test)]
mod tests;
/// An error from proving/verifying Bulletproofs(+).
// thiserror is only available with std; under no-std this is a plain enum.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum BulletproofError {
  /// Proving/verifying a Bulletproof(+) range proof with no commitments.
  #[cfg_attr(feature = "std", error("no commitments to prove the range for"))]
  NoCommitments,
  /// Proving/verifying a Bulletproof(+) range proof with more commitments than supported.
  #[cfg_attr(feature = "std", error("too many commitments to prove the range for"))]
  TooManyCommitments,
}
/// A Bulletproof(+).
///
/// This encapsulates either a Bulletproof or a Bulletproof+.
// The variants differ notably in size, hence the allow.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproof {
  /// A Bulletproof.
  Original(OriginalProof),
  /// A Bulletproof+.
  Plus(PlusProof),
}
impl Bulletproof {
  // Amount of fixed-size (32-byte) fields in each proof variant's serialization, as used by
  // the clawback calculation below.
  fn bp_fields(plus: bool) -> usize {
    if plus {
      6
    } else {
      9
    }
  }
  /// Calculate the weight penalty for the Bulletproof(+).
  ///
  /// Bulletproofs(+) are logarithmically sized yet linearly timed. Evaluating by their size alone
  /// accordingly doesn't properly represent the burden of the proof. Monero 'claws back' some of
  /// the weight lost by using a proof smaller than it is fast to compensate for this.
  // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
  // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
  pub fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
    // Round the amount of outputs up to the next power of two, tracking its log2 in LR_len.
    #[allow(non_snake_case)]
    let mut LR_len = 0;
    let mut n_padded_outputs = 1;
    while n_padded_outputs < n_outputs {
      LR_len += 1;
      n_padded_outputs = 1 << LR_len;
    }
    LR_len += LOG_COMMITMENT_BITS;
    let mut bp_clawback = 0;
    if n_padded_outputs > 2 {
      let fields = Bulletproof::bp_fields(plus);
      // `base` is half the serialized size of a two-output proof; `size` the actual size.
      let base = ((fields + (2 * (LOG_COMMITMENT_BITS + 1))) * 32) / 2;
      let size = (fields + (2 * LR_len)) * 32;
      // 80% of the size saved relative to individual proofs is clawed back.
      bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
    }
    (bp_clawback, LR_len)
  }
  /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof.
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: Vec<Commitment>,
  ) -> Result<Bulletproof, BulletproofError> {
    if outputs.is_empty() {
      Err(BulletproofError::NoCommitments)?;
    }
    if outputs.len() > MAX_COMMITMENTS {
      Err(BulletproofError::TooManyCommitments)?;
    }
    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // The unwraps are safe: the bounds were just checked, and the statement's commitments were
    // derived from the witness itself.
    Ok(Bulletproof::Original(
      OriginalStatement::new(&commitments)
        .unwrap()
        .prove(rng, OriginalWitness::new(outputs).unwrap())
        .unwrap(),
    ))
  }
  /// Prove the list of commitments are within [0 .. 2^64) with an aggregate Bulletproof+.
  pub fn prove_plus<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: Vec<Commitment>,
  ) -> Result<Bulletproof, BulletproofError> {
    if outputs.is_empty() {
      Err(BulletproofError::NoCommitments)?;
    }
    if outputs.len() > MAX_COMMITMENTS {
      Err(BulletproofError::TooManyCommitments)?;
    }
    let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
    // As with `prove`, the unwraps are safe given the above checks and derivation.
    Ok(Bulletproof::Plus(
      PlusStatement::new(&commitments)
        .unwrap()
        .prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
        .unwrap(),
    ))
  }
  /// Verify the given Bulletproof(+).
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    // A throwaway batch verifier is used, making this a batch of one.
    match self {
      Bulletproof::Original(bp) => {
        let mut verifier = BulletproofsBatchVerifier::default();
        let Some(statement) = OriginalStatement::new(commitments) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, bp.clone()) {
          return false;
        }
        verifier.verify()
      }
      Bulletproof::Plus(bp) => {
        let mut verifier = BulletproofsPlusBatchVerifier::default();
        let Some(statement) = PlusStatement::new(commitments) else {
          return false;
        };
        if !statement.verify(rng, &mut verifier, bp.clone()) {
          return false;
        }
        verifier.verify()
      }
    }
  }
  /// Accumulate the verification for the given Bulletproof(+) into the specified BatchVerifier.
  ///
  /// Returns false if the Bulletproof(+) isn't sane, leaving the BatchVerifier in an undefined
  /// state.
  ///
  /// Returns true if the Bulletproof(+) is sane, regardless of its validity.
  ///
  /// The BatchVerifier must have its verification function executed to actually verify this proof.
  #[must_use]
  pub fn batch_verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Bulletproof::Original(bp) => {
        let Some(statement) = OriginalStatement::new(commitments) else {
          return false;
        };
        statement.verify(rng, &mut verifier.original, bp.clone())
      }
      Bulletproof::Plus(bp) => {
        let Some(statement) = PlusStatement::new(commitments) else {
          return false;
        };
        statement.verify(rng, &mut verifier.plus, bp.clone())
      }
    }
  }
  // Shared serialization core; `specific_write_vec` controls how the L/R point vectors are
  // written (length-prefixed or raw), the sole difference between the two encodings.
  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Bulletproof::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.tau_x, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.ip.L, w)?;
        specific_write_vec(&bp.ip.R, w)?;
        write_scalar(&bp.ip.a, w)?;
        write_scalar(&bp.ip.b, w)?;
        write_scalar(&bp.t_hat, w)
      }
      Bulletproof::Plus(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.wip.A, w)?;
        write_point(&bp.wip.B, w)?;
        write_scalar(&bp.wip.r_answer, w)?;
        write_scalar(&bp.wip.s_answer, w)?;
        write_scalar(&bp.wip.delta_answer, w)?;
        specific_write_vec(&bp.wip.L, w)?;
        specific_write_vec(&bp.wip.R, w)
      }
    }
  }
  /// Write a Bulletproof(+) for the message signed by a transaction's signature.
  ///
  /// This has a distinct encoding from the standard encoding.
  pub fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    // Raw (unprefixed) vectors, per the signed-message encoding.
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }
  /// Write a Bulletproof(+).
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    // Length-prefixed vectors, per the standard encoding.
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }
  /// Serialize a Bulletproof(+) to a `Vec<u8>`.
  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    // Writing to a Vec is infallible, hence the unwrap.
    self.write(&mut serialized).unwrap();
    serialized
  }
  /// Read a Bulletproof.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
    Ok(Bulletproof::Original(OriginalProof {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      tau_x: read_scalar(r)?,
      mu: read_scalar(r)?,
      ip: IpProof {
        L: read_vec(read_point, r)?,
        R: read_vec(read_point, r)?,
        a: read_scalar(r)?,
        b: read_scalar(r)?,
      },
      t_hat: read_scalar(r)?,
    }))
  }
  /// Read a Bulletproof+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproof> {
    Ok(Bulletproof::Plus(PlusProof {
      A: read_point(r)?,
      wip: WipProof {
        A: read_point(r)?,
        B: read_point(r)?,
        r_answer: read_scalar(r)?,
        s_answer: read_scalar(r)?,
        delta_answer: read_scalar(r)?,
        L: read_vec(read_point, r)?.into_iter().collect(),
        R: read_vec(read_point, r)?.into_iter().collect(),
      },
    }))
  }
}

View File

@@ -1,303 +0,0 @@
use std_shims::{vec, vec::Vec};
use zeroize::Zeroize;
use curve25519_dalek::{Scalar, EdwardsPoint};
use monero_generators::H;
use monero_primitives::{INV_EIGHT, keccak256_to_scalar};
use crate::{
core::{multiexp_vartime, challenge_products},
scalar_vector::ScalarVector,
point_vector::PointVector,
BulletproofsBatchVerifier,
};
/// An error from proving/verifying Inner-Product statements.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub(crate) enum IpError {
  /// The amount of provided weights/proof elements didn't match the amount of generators in use.
  IncorrectAmountOfGenerators,
  /// The proof's L and R vectors weren't of equal length.
  DifferingLrLengths,
}
/// The Bulletproofs Inner-Product statement.
///
/// This is for usage with Protocol 2 from the Bulletproofs paper.
#[derive(Clone, Debug)]
pub(crate) struct IpStatement {
  // Weights for h_bold
  h_bold_weights: ScalarVector,
  // u as the discrete logarithm of G
  u: Scalar,
}
/// The witness for the Bulletproofs Inner-Product statement.
// The two vectors whose inner product is being proven.
#[derive(Clone, Debug)]
pub(crate) struct IpWitness {
  // a
  a: ScalarVector,
  // b
  b: ScalarVector,
}
impl IpWitness {
  /// Construct a new witness for an Inner-Product statement.
  ///
  /// This function returns None if the lengths of a, b are mismatched, not a power of two, or
  /// are empty.
  pub(crate) fn new(a: ScalarVector, b: ScalarVector) -> Option<Self> {
    if a.0.is_empty() || (a.len() != b.len()) {
      None?;
    }
    // The protocol halves the vectors every round, so their length must be a power of two.
    // (For non-empty lengths, this is equivalent to the original doubling loop.)
    if !a.len().is_power_of_two() {
      None?;
    }
    Some(Self { a, b })
  }
}
/// A proof for the Bulletproofs Inner-Product statement.
// L/R are the per-round cross terms; a/b the final folded scalars.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct IpProof {
  pub(crate) L: Vec<EdwardsPoint>,
  pub(crate) R: Vec<EdwardsPoint>,
  pub(crate) a: Scalar,
  pub(crate) b: Scalar,
}
impl IpStatement {
  /// Create a new Inner-Product statement which won't transcript P.
  ///
  /// This MUST only be called when P is deterministic to already transcripted elements.
  pub(crate) fn new_without_P_transcript(h_bold_weights: ScalarVector, u: Scalar) -> Self {
    Self { h_bold_weights, u }
  }
  // Transcript a round of the protocol
  fn transcript_L_R(transcript: Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
    let mut transcript = transcript.to_bytes().to_vec();
    transcript.extend(L.compress().to_bytes());
    transcript.extend(R.compress().to_bytes());
    keccak256_to_scalar(transcript)
  }
  /// Prove for this Inner-Product statement.
  ///
  /// Returns an error if this statement couldn't be proven for (such as if the witness isn't
  /// consistent).
  pub(crate) fn prove(
    self,
    mut transcript: Scalar,
    witness: IpWitness,
  ) -> Result<IpProof, IpError> {
    let generators = crate::original::GENERATORS();
    let g_bold_slice = &generators.G[.. witness.a.len()];
    let h_bold_slice = &generators.H[.. witness.a.len()];
    let (mut g_bold, mut h_bold, u, mut a, mut b) = {
      let IpStatement { h_bold_weights, u } = self;
      // u is a discrete log; scale H by it to obtain the actual point.
      let u = H() * u;
      // Ensure we have the exact amount of weights
      if h_bold_weights.len() != g_bold_slice.len() {
        Err(IpError::IncorrectAmountOfGenerators)?;
      }
      // Acquire a local copy of the generators
      let g_bold = PointVector(g_bold_slice.to_vec());
      let h_bold = PointVector(h_bold_slice.to_vec()).mul_vec(&h_bold_weights);
      let IpWitness { a, b } = witness;
      (g_bold, h_bold, u, a, b)
    };
    let mut L_vec = vec![];
    let mut R_vec = vec![];
    // `else: (n > 1)` case, lines 18-35 of the Bulletproofs paper
    // This interprets `g_bold.len()` as `n`
    while g_bold.len() > 1 {
      // Split a, b, g_bold, h_bold as needed for lines 20-24
      let (a1, a2) = a.clone().split();
      let (b1, b2) = b.clone().split();
      let (g_bold1, g_bold2) = g_bold.split();
      let (h_bold1, h_bold2) = h_bold.split();
      let n_hat = g_bold1.len();
      // Sanity
      debug_assert_eq!(a1.len(), n_hat);
      debug_assert_eq!(a2.len(), n_hat);
      debug_assert_eq!(b1.len(), n_hat);
      debug_assert_eq!(b2.len(), n_hat);
      debug_assert_eq!(g_bold1.len(), n_hat);
      debug_assert_eq!(g_bold2.len(), n_hat);
      debug_assert_eq!(h_bold1.len(), n_hat);
      debug_assert_eq!(h_bold2.len(), n_hat);
      // cl, cr, lines 21-22
      let cl = a1.clone().inner_product(&b2);
      let cr = a2.clone().inner_product(&b1);
      let L = {
        let mut L_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
        for (a, g) in a1.0.iter().zip(g_bold2.0.iter()) {
          L_terms.push((*a, *g));
        }
        for (b, h) in b2.0.iter().zip(h_bold1.0.iter()) {
          L_terms.push((*b, *h));
        }
        L_terms.push((cl, u));
        // Uses vartime since this isn't a ZK proof
        multiexp_vartime(&L_terms)
      };
      // Scale by INV_EIGHT per Monero's formatting (points are serialized * 1/8).
      L_vec.push(L * INV_EIGHT());
      let R = {
        let mut R_terms = Vec::with_capacity(1 + (2 * g_bold1.len()));
        for (a, g) in a2.0.iter().zip(g_bold1.0.iter()) {
          R_terms.push((*a, *g));
        }
        for (b, h) in b1.0.iter().zip(h_bold2.0.iter()) {
          R_terms.push((*b, *h));
        }
        R_terms.push((cr, u));
        multiexp_vartime(&R_terms)
      };
      R_vec.push(R * INV_EIGHT());
      // Now that we've calculate L, R, transcript them to receive x (26-27)
      transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
      let x = transcript;
      let x_inv = x.invert();
      // The prover and verifier now calculate the following (28-31)
      g_bold = PointVector(Vec::with_capacity(g_bold1.len()));
      for (a, b) in g_bold1.0.into_iter().zip(g_bold2.0.into_iter()) {
        g_bold.0.push(multiexp_vartime(&[(x_inv, a), (x, b)]));
      }
      h_bold = PointVector(Vec::with_capacity(h_bold1.len()));
      for (a, b) in h_bold1.0.into_iter().zip(h_bold2.0.into_iter()) {
        h_bold.0.push(multiexp_vartime(&[(x, a), (x_inv, b)]));
      }
      // 32-34
      a = (a1 * x) + &(a2 * x_inv);
      b = (b1 * x_inv) + &(b2 * x);
    }
    // `if n = 1` case from line 14-17
    // Sanity
    debug_assert_eq!(g_bold.len(), 1);
    debug_assert_eq!(h_bold.len(), 1);
    debug_assert_eq!(a.len(), 1);
    debug_assert_eq!(b.len(), 1);
    // We simply send a/b
    Ok(IpProof { L: L_vec, R: R_vec, a: a[0], b: b[0] })
  }
  /// Queue an Inner-Product proof for batch verification.
  ///
  /// This will return Err if there is an error. This will return Ok if the proof was successfully
  /// queued for batch verification. The caller is required to verify the batch in order to ensure
  /// the proof is actually correct.
  // `verifier_weight` is the random weight isolating this proof within the batch; `ip_rows` the
  // amount of generator rows this proof uses.
  pub(crate) fn verify(
    self,
    verifier: &mut BulletproofsBatchVerifier,
    ip_rows: usize,
    mut transcript: Scalar,
    verifier_weight: Scalar,
    proof: IpProof,
  ) -> Result<(), IpError> {
    let generators = crate::original::GENERATORS();
    let g_bold_slice = &generators.G[.. ip_rows];
    let h_bold_slice = &generators.H[.. ip_rows];
    let IpStatement { h_bold_weights, u } = self;
    // Verify the L/R lengths
    {
      // Calculate the discrete log w.r.t. 2 for the amount of generators present
      let mut lr_len = 0;
      while (1 << lr_len) < g_bold_slice.len() {
        lr_len += 1;
      }
      // This proof has less/more terms than the passed in generators are for
      if proof.L.len() != lr_len {
        Err(IpError::IncorrectAmountOfGenerators)?;
      }
      if proof.L.len() != proof.R.len() {
        Err(IpError::DifferingLrLengths)?;
      }
    }
    // Again, we start with the `else: (n > 1)` case
    // We need x, x_inv per lines 25-27 for lines 28-31
    let mut xs = Vec::with_capacity(proof.L.len());
    for (L, R) in proof.L.iter().zip(proof.R.iter()) {
      transcript = Self::transcript_L_R(transcript, *L, *R);
      xs.push(transcript);
    }
    // We calculate their inverse in batch
    let mut x_invs = xs.clone();
    Scalar::batch_invert(&mut x_invs);
    // Now, with x and x_inv, we need to calculate g_bold', h_bold', P'
    //
    // For the sake of performance, we solely want to calculate all of these in terms of scalings
    // for g_bold, h_bold, P, and don't want to actually perform intermediary scalings of the
    // points
    //
    // L and R are easy, as it's simply x**2, x**-2
    //
    // For the series of g_bold, h_bold, we use the `challenge_products` function
    // For how that works, please see its own documentation
    let product_cache = {
      let mut challenges = Vec::with_capacity(proof.L.len());
      let x_iter = xs.into_iter().zip(x_invs);
      let lr_iter = proof.L.into_iter().zip(proof.R);
      for ((x, x_inv), (L, R)) in x_iter.zip(lr_iter) {
        challenges.push((x, x_inv));
        // mul_by_cofactor undoes the prover's INV_EIGHT scaling while clearing small torsion.
        verifier.0.other.push((verifier_weight * (x * x), L.mul_by_cofactor()));
        verifier.0.other.push((verifier_weight * (x_inv * x_inv), R.mul_by_cofactor()));
      }
      challenge_products(&challenges)
    };
    // And now for the `if n = 1` case
    let c = proof.a * proof.b;
    // The multiexp of these terms equate to the final permutation of P
    // We now add terms for a * g_bold' + b * h_bold' b + c * u, with the scalars negative such
    // that the terms sum to 0 for an honest prover
    // The g_bold * a term case from line 16
    #[allow(clippy::needless_range_loop)]
    for i in 0 .. g_bold_slice.len() {
      verifier.0.g_bold[i] -= verifier_weight * product_cache[i] * proof.a;
    }
    // The h_bold * b term case from line 16
    for i in 0 .. h_bold_slice.len() {
      verifier.0.h_bold[i] -=
        verifier_weight * product_cache[product_cache.len() - 1 - i] * proof.b * h_bold_weights[i];
    }
    // The c * u term case from line 16
    verifier.0.h -= verifier_weight * c * u;
    Ok(())
  }
}

View File

@@ -1,344 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec};
use rand_core::{RngCore, CryptoRng};
use zeroize::Zeroize;
use curve25519_dalek::{constants::ED25519_BASEPOINT_POINT, Scalar, EdwardsPoint};
use monero_generators::{H, Generators, MAX_COMMITMENTS, COMMITMENT_BITS};
use monero_primitives::{Commitment, INV_EIGHT, keccak256_to_scalar};
use crate::{core::multiexp, scalar_vector::ScalarVector, BulletproofsBatchVerifier};
pub(crate) mod inner_product;
use inner_product::*;
pub(crate) use inner_product::IpProof;
include!(concat!(env!("OUT_DIR"), "/generators.rs"));
// The aggregate range statement: the commitments claimed to commit to in-range values.
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement<'a> {
  commitments: &'a [EdwardsPoint],
}
// The witness: the openings (amount, mask) of the statement's commitments.
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeWitness {
  commitments: Vec<Commitment>,
}
// An original-protocol aggregate Bulletproof: the commitment-phase points (A, S, T1, T2), the
// response scalars (tau_x, mu, t_hat), and the inner-product argument.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
  pub(crate) A: EdwardsPoint,
  pub(crate) S: EdwardsPoint,
  pub(crate) T1: EdwardsPoint,
  pub(crate) T2: EdwardsPoint,
  pub(crate) tau_x: Scalar,
  pub(crate) mu: Scalar,
  pub(crate) t_hat: Scalar,
  pub(crate) ip: IpProof,
}
impl<'a> AggregateRangeStatement<'a> {
  // Between 1 and MAX_COMMITMENTS commitments (inclusive) are supported.
  pub(crate) fn new(commitments: &'a [EdwardsPoint]) -> Option<Self> {
    if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
      return None;
    }
    Some(Self { commitments })
  }
}
impl AggregateRangeWitness {
  // Between 1 and MAX_COMMITMENTS openings (inclusive) are supported.
  pub(crate) fn new(commitments: Vec<Commitment>) -> Option<Self> {
    if commitments.is_empty() || (commitments.len() > MAX_COMMITMENTS) {
      return None;
    }
    Some(Self { commitments })
  }
}
impl<'a> AggregateRangeStatement<'a> {
  // Seed the Fiat-Shamir transcript with the commitments (scaled by 1/8, matching their wire
  // format), returning the initial transcript scalar and the scaled commitments.
  fn initial_transcript(&self) -> (Scalar, Vec<EdwardsPoint>) {
    let V = self.commitments.iter().map(|c| c * INV_EIGHT()).collect::<Vec<_>>();
    (keccak256_to_scalar(V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
  }
  // Derive the y, z challenges from A and S.
  fn transcript_A_S(transcript: Scalar, A: EdwardsPoint, S: EdwardsPoint) -> (Scalar, Scalar) {
    let mut buf = Vec::with_capacity(96);
    buf.extend(transcript.to_bytes());
    buf.extend(A.compress().to_bytes());
    buf.extend(S.compress().to_bytes());
    let y = keccak256_to_scalar(buf);
    // z is simply the hash of y.
    let z = keccak256_to_scalar(y.to_bytes());
    (y, z)
  }
  // Derive the x challenge from T1 and T2.
  fn transcript_T12(transcript: Scalar, T1: EdwardsPoint, T2: EdwardsPoint) -> Scalar {
    let mut buf = Vec::with_capacity(128);
    // NOTE(review): the transcript is extended twice, presumably mirroring Monero's existing
    // hash-to-scalar transcript format — confirm against monero-project before changing.
    buf.extend(transcript.to_bytes());
    buf.extend(transcript.to_bytes());
    buf.extend(T1.compress().to_bytes());
    buf.extend(T2.compress().to_bytes());
    keccak256_to_scalar(buf)
  }
  // Derive the challenge for the inner-product argument from tau_x, mu, t_hat.
  fn transcript_tau_x_mu_t_hat(
    transcript: Scalar,
    tau_x: Scalar,
    mu: Scalar,
    t_hat: Scalar,
  ) -> Scalar {
    let mut buf = Vec::with_capacity(128);
    // NOTE(review): double transcript extension, as in transcript_T12 above.
    buf.extend(transcript.to_bytes());
    buf.extend(transcript.to_bytes());
    buf.extend(tau_x.to_bytes());
    buf.extend(mu.to_bytes());
    buf.extend(t_hat.to_bytes());
    keccak256_to_scalar(buf)
  }
  // Prove this statement, returning None if the witness doesn't open the statement's
  // commitments.
  #[allow(clippy::needless_pass_by_value)]
  pub(crate) fn prove(
    self,
    rng: &mut (impl RngCore + CryptoRng),
    witness: AggregateRangeWitness,
  ) -> Option<AggregateRangeProof> {
    // The witness must actually open the statement's commitments.
    if self.commitments != witness.commitments.iter().map(Commitment::calculate).collect::<Vec<_>>()
    {
      None?
    };
    let generators = GENERATORS();
    let (mut transcript, _) = self.initial_transcript();
    // Find out the padded amount of commitments
    let mut padded_pow_of_2 = 1;
    while padded_pow_of_2 < witness.commitments.len() {
      padded_pow_of_2 <<= 1;
    }
    // aL is the bit decomposition of the amounts, concatenated per commitment.
    let mut aL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    for (i, commitment) in witness.commitments.iter().enumerate() {
      let mut amount = commitment.amount;
      for j in 0 .. COMMITMENT_BITS {
        aL[(i * COMMITMENT_BITS) + j] = Scalar::from(amount & 1);
        amount >>= 1;
      }
    }
    // aR = aL - 1, so aL ∘ aR = 0 for a valid bit decomposition.
    let aR = aL.clone() - Scalar::ONE;
    let alpha = Scalar::random(&mut *rng);
    let A = {
      let mut terms = Vec::with_capacity(1 + (2 * aL.len()));
      terms.push((alpha, ED25519_BASEPOINT_POINT));
      for (aL, G) in aL.0.iter().zip(&generators.G) {
        terms.push((*aL, *G));
      }
      for (aR, H) in aR.0.iter().zip(&generators.H) {
        terms.push((*aR, *H));
      }
      // Scaled by 1/8 per Monero's point-serialization format.
      let res = multiexp(&terms) * INV_EIGHT();
      terms.zeroize();
      res
    };
    // sL, sR blind the bit vectors.
    let mut sL = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    let mut sR = ScalarVector::new(padded_pow_of_2 * COMMITMENT_BITS);
    for i in 0 .. (padded_pow_of_2 * COMMITMENT_BITS) {
      sL[i] = Scalar::random(&mut *rng);
      sR[i] = Scalar::random(&mut *rng);
    }
    let rho = Scalar::random(&mut *rng);
    let S = {
      let mut terms = Vec::with_capacity(1 + (2 * sL.len()));
      terms.push((rho, ED25519_BASEPOINT_POINT));
      for (sL, G) in sL.0.iter().zip(&generators.G) {
        terms.push((*sL, *G));
      }
      for (sR, H) in sR.0.iter().zip(&generators.H) {
        terms.push((*sR, *H));
      }
      let res = multiexp(&terms) * INV_EIGHT();
      terms.zeroize();
      res
    };
    let (y, z) = Self::transcript_A_S(transcript, A, S);
    transcript = z;
    let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);
    // l(X), r(X) as [constant term, X coefficient].
    let l = [aL - z, sL];
    let y_pow_n = ScalarVector::powers(y, aR.len());
    let mut r = [((aR + z) * &y_pow_n), sR * &y_pow_n];
    {
      // Add z^(2+j) * 2^i terms binding each commitment's value.
      let mut z_current = z * z;
      for j in 0 .. padded_pow_of_2 {
        for i in 0 .. COMMITMENT_BITS {
          r[0].0[(j * COMMITMENT_BITS) + i] += z_current * twos[i];
        }
        z_current *= z;
      }
    }
    // t(X) = <l(X), r(X)>; t1/t2 are its X and X^2 coefficients.
    let t1 = (l[0].clone().inner_product(&r[1])) + (r[0].clone().inner_product(&l[1]));
    let t2 = l[1].clone().inner_product(&r[1]);
    let tau_1 = Scalar::random(&mut *rng);
    let T1 = {
      let mut T1_terms = [(t1, H()), (tau_1, ED25519_BASEPOINT_POINT)];
      for term in &mut T1_terms {
        term.0 *= INV_EIGHT();
      }
      let T1 = multiexp(&T1_terms);
      T1_terms.zeroize();
      T1
    };
    let tau_2 = Scalar::random(&mut *rng);
    let T2 = {
      let mut T2_terms = [(t2, H()), (tau_2, ED25519_BASEPOINT_POINT)];
      for term in &mut T2_terms {
        term.0 *= INV_EIGHT();
      }
      let T2 = multiexp(&T2_terms);
      T2_terms.zeroize();
      T2
    };
    transcript = Self::transcript_T12(transcript, T1, T2);
    let x = transcript;
    // Evaluate l, r, t at x.
    let [l0, l1] = l;
    let l = l0 + &(l1 * x);
    let [r0, r1] = r;
    let r = r0 + &(r1 * x);
    let t_hat = l.clone().inner_product(&r);
    let mut tau_x = ((tau_2 * x) + tau_1) * x;
    {
      // Fold in the commitments' masks, weighted by their z powers.
      let mut z_current = z * z;
      for commitment in &witness.commitments {
        tau_x += z_current * commitment.mask;
        z_current *= z;
      }
    }
    let mu = alpha + (rho * x);
    let y_inv_pow_n = ScalarVector::powers(y.invert(), l.len());
    transcript = Self::transcript_tau_x_mu_t_hat(transcript, tau_x, mu, t_hat);
    let x_ip = transcript;
    // Delegate <l, r> = t_hat to the inner-product argument.
    let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
      .prove(transcript, IpWitness::new(l, r).unwrap())
      .unwrap();
    let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
    // Sanity-check our own proof in debug builds.
    #[cfg(debug_assertions)]
    {
      let mut verifier = BulletproofsBatchVerifier::default();
      debug_assert!(self.verify(rng, &mut verifier, res.clone()));
      debug_assert!(verifier.verify());
    }
    Some(res)
  }
  // Queue this proof into the batch verifier, returning false if it's malformed. The batch
  // must still be verified for the proof to be considered valid.
  #[must_use]
  pub(crate) fn verify(
    self,
    rng: &mut (impl RngCore + CryptoRng),
    verifier: &mut BulletproofsBatchVerifier,
    mut proof: AggregateRangeProof,
  ) -> bool {
    let mut padded_pow_of_2 = 1;
    while padded_pow_of_2 < self.commitments.len() {
      padded_pow_of_2 <<= 1;
    }
    let ip_rows = padded_pow_of_2 * COMMITMENT_BITS;
    // Grow the verifier's per-generator weight vectors to cover this proof's rows.
    while verifier.0.g_bold.len() < ip_rows {
      verifier.0.g_bold.push(Scalar::ZERO);
      verifier.0.h_bold.push(Scalar::ZERO);
    }
    let (mut transcript, mut commitments) = self.initial_transcript();
    // Clear small torsion (and undo the 1/8 scaling) on all received points.
    for commitment in &mut commitments {
      *commitment = commitment.mul_by_cofactor();
    }
    // Replay the transcript to recover the challenges.
    let (y, z) = Self::transcript_A_S(transcript, proof.A, proof.S);
    transcript = z;
    transcript = Self::transcript_T12(transcript, proof.T1, proof.T2);
    let x = transcript;
    transcript = Self::transcript_tau_x_mu_t_hat(transcript, proof.tau_x, proof.mu, proof.t_hat);
    let x_ip = transcript;
    proof.A = proof.A.mul_by_cofactor();
    proof.S = proof.S.mul_by_cofactor();
    proof.T1 = proof.T1.mul_by_cofactor();
    proof.T2 = proof.T2.mul_by_cofactor();
    let y_pow_n = ScalarVector::powers(y, ip_rows);
    let y_inv_pow_n = ScalarVector::powers(y.invert(), ip_rows);
    let twos = ScalarVector::powers(Scalar::from(2u8), COMMITMENT_BITS);
    // 65 (numbers reference the paper's verification equations)
    {
      // A fresh random weight isolates this equation within the batch.
      let weight = Scalar::random(&mut *rng);
      verifier.0.h += weight * proof.t_hat;
      verifier.0.g += weight * proof.tau_x;
      // Now that we've accumulated the lhs, negate the weight and accumulate the rhs
      // These will now sum to 0 if equal
      let weight = -weight;
      verifier.0.h += weight * (z - (z * z)) * y_pow_n.sum();
      let mut z_current = z * z;
      for commitment in &commitments {
        verifier.0.other.push((weight * z_current, *commitment));
        z_current *= z;
      }
      let mut z_current = z * z * z;
      for _ in 0 .. padded_pow_of_2 {
        verifier.0.h -= weight * z_current * twos.clone().sum();
        z_current *= z;
      }
      verifier.0.other.push((weight * x, proof.T1));
      verifier.0.other.push((weight * (x * x), proof.T2));
    }
    let ip_weight = Scalar::random(&mut *rng);
    // 66
    verifier.0.other.push((ip_weight, proof.A));
    verifier.0.other.push((ip_weight * x, proof.S));
    // TODO: g_sum
    for i in 0 .. ip_rows {
      verifier.0.g_bold[i] += ip_weight * -z;
    }
    // TODO: h_sum
    for i in 0 .. ip_rows {
      verifier.0.h_bold[i] += ip_weight * z;
    }
    {
      let mut z_current = z * z;
      for j in 0 .. padded_pow_of_2 {
        for i in 0 .. COMMITMENT_BITS {
          let full_i = (j * COMMITMENT_BITS) + i;
          verifier.0.h_bold[full_i] += ip_weight * y_inv_pow_n[full_i] * z_current * twos[i];
        }
        z_current *= z;
      }
    }
    verifier.0.h += ip_weight * x_ip * proof.t_hat;
    // 67, 68
    verifier.0.g += ip_weight * -proof.mu;
    // Queue the inner-product argument itself; a malformed IP proof fails here.
    let res = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
      .verify(verifier, ip_rows, transcript, ip_weight, proof.ip);
    res.is_ok()
  }
}

View File

@@ -1,20 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use monero_generators::hash_to_point;
use monero_primitives::{keccak256, keccak256_to_scalar};
// Monero starts BP+ transcripts with the following constant.
// Lazily initialized once, then shared by every proof.
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
// The 32-byte initialization constant for BP+ transcripts.
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
  // Why this uses a hash_to_point is completely unknown.
  *TRANSCRIPT_CELL
    .get_or_init(|| hash_to_point(keccak256(b"bulletproof_plus_transcript")).compress().to_bytes())
}
// Derive the initial transcript scalar: hash the compressed commitments into a scalar, then
// bind that hash to the protocol's transcript constant.
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
  let mut commitment_bytes = Vec::new();
  for V in commitments {
    commitment_bytes.extend(V.compress().to_bytes());
  }
  let commitments_hash = keccak256_to_scalar(commitment_bytes);
  keccak256_to_scalar([TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
}

View File

@@ -1,56 +0,0 @@
use rand_core::{RngCore, OsRng};
use curve25519_dalek::scalar::Scalar;
use monero_primitives::Commitment;
use crate::{batch_verifier::BatchVerifier, Bulletproof, BulletproofError};
mod original;
mod plus;
// Generates a pair of tests for a Bulletproof variant ($plus selects BP+ over the original):
// - `$name` proves/verifies for every supported amount of outputs (1 ..= 16)
// - `$max` checks proving for 17 outputs errors with TooManyCommitments
macro_rules! bulletproofs_tests {
  ($name: ident, $max: ident, $plus: literal) => {
    #[test]
    fn $name() {
      // Create Bulletproofs for all possible output quantities
      let mut verifier = BatchVerifier::new();
      for i in 1 ..= 16 {
        // Random commitments for this many outputs
        let commitments = (1 ..= i)
          .map(|_| Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()))
          .collect::<Vec<_>>();
        let bp = if $plus {
          Bulletproof::prove_plus(&mut OsRng, commitments.clone()).unwrap()
        } else {
          Bulletproof::prove(&mut OsRng, commitments.clone()).unwrap()
        };

        let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
        // Each proof must pass both individual and batched verification
        assert!(bp.verify(&mut OsRng, &commitments));
        assert!(bp.batch_verify(&mut OsRng, &mut verifier, &commitments));
      }
      assert!(verifier.verify());
    }

    #[test]
    fn $max() {
      // Check Bulletproofs errors if we try to prove for too many outputs
      let mut commitments = vec![];
      for _ in 0 .. 17 {
        commitments.push(Commitment::new(Scalar::ZERO, 0));
      }
      assert_eq!(
        (if $plus {
          Bulletproof::prove_plus(&mut OsRng, commitments)
        } else {
          Bulletproof::prove(&mut OsRng, commitments)
        })
        .unwrap_err(),
        BulletproofError::TooManyCommitments,
      );
    }
  };
}
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);

View File

@@ -1,75 +0,0 @@
// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * b))
use rand_core::OsRng;
use curve25519_dalek::Scalar;
use monero_generators::H;
use crate::{
scalar_vector::ScalarVector,
point_vector::PointVector,
original::{
GENERATORS,
inner_product::{IpStatement, IpWitness},
},
BulletproofsBatchVerifier,
};
#[test]
fn test_zero_inner_product() {
  // A length-one, all-zero witness should still satisfy the inner-product relation
  let transcript = Scalar::random(&mut OsRng);
  let statement =
    IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; 1]), Scalar::ONE);
  let witness = IpWitness::new(ScalarVector::new(1), ScalarVector::new(1)).unwrap();
  let proof = statement.clone().prove(transcript, witness).unwrap();

  // Verify against a batch verifier with zeroed generator coefficients
  let mut verifier = BulletproofsBatchVerifier::default();
  verifier.0.g_bold = vec![Scalar::ZERO; 1];
  verifier.0.h_bold = vec![Scalar::ZERO; 1];
  statement.verify(&mut verifier, 1, transcript, Scalar::random(&mut OsRng), proof).unwrap();
  assert!(verifier.verify());
}
#[test]
fn test_inner_product() {
  // P = sum(g_bold * a, h_bold * b, g * u * <a, b>)
  let generators = GENERATORS();
  let mut verifier = BulletproofsBatchVerifier::default();
  verifier.0.g_bold = vec![Scalar::ZERO; 32];
  verifier.0.h_bold = vec![Scalar::ZERO; 32];

  // Exercise every power-of-two size up to 32
  for n in [1, 2, 4, 8, 16, 32] {
    let g = H();

    // Take the first n generators for this iteration
    let mut g_bold = vec![];
    let mut h_bold = vec![];
    for i in 0 .. n {
      g_bold.push(generators.G[i]);
      h_bold.push(generators.H[i]);
    }
    let g_bold = PointVector(g_bold);
    let h_bold = PointVector(h_bold);

    // Random witness vectors of length n
    let mut a = ScalarVector::new(n);
    let mut b = ScalarVector::new(n);
    for i in 0 .. n {
      a[i] = Scalar::random(&mut OsRng);
      b[i] = Scalar::random(&mut OsRng);
    }

    // Build P per the relation above, then prove and verify for it
    let P = g_bold.multiexp(&a) + h_bold.multiexp(&b) + (g * a.clone().inner_product(&b));
    let statement =
      IpStatement::new_without_P_transcript(ScalarVector(vec![Scalar::ONE; n]), Scalar::ONE);
    let witness = IpWitness::new(a, b).unwrap();

    let transcript = Scalar::random(&mut OsRng);
    let proof = statement.clone().prove(transcript, witness).unwrap();

    let weight = Scalar::random(&mut OsRng);
    verifier.0.other.push((weight, P));
    statement.verify(&mut verifier, n, transcript, weight, proof).unwrap();
  }
  assert!(verifier.verify());
}

View File

@@ -1,62 +0,0 @@
use hex_literal::hex;
use rand_core::OsRng;
use curve25519_dalek::scalar::Scalar;
use monero_io::decompress_point;
use crate::{
original::{IpProof, AggregateRangeProof as OriginalProof},
Bulletproof,
};
mod inner_product;
#[test]
fn bulletproofs_vector() {
  // Helpers to decode the hard-coded hex into scalars/points, panicking on invalid encodings
  let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
  let point = |point| decompress_point(point).unwrap();

  // Generated from Monero
  // Verifies an original-Bulletproof test vector to ensure compatibility with Monero's
  // implementation
  assert!(Bulletproof::Original(OriginalProof {
    A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
    S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
    T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
    T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
    tau_x: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
    mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
    ip: IpProof {
      L: vec![
        point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
        point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
        point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
        point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
        point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
        point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
        point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
      ],
      R: vec![
        point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
        point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
        point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
        point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
        point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
        point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
        point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
      ],
      a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
      b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
    },
    t_hat: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
  })
  .verify(
    &mut OsRng,
    &[
      // For some reason, these vectors are * INV_EIGHT
      point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
        .mul_by_cofactor(),
      point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
        .mul_by_cofactor(),
    ]
  ));
}

View File

@@ -1,28 +0,0 @@
use rand_core::{RngCore, OsRng};
use curve25519_dalek::Scalar;
use monero_primitives::Commitment;
use crate::{
batch_verifier::BulletproofsPlusBatchVerifier,
plus::aggregate_range_proof::{AggregateRangeStatement, AggregateRangeWitness},
};
#[test]
fn test_aggregate_range_proof() {
  // Prove and verify BP+ aggregate range proofs for every supported amount of outputs
  let mut verifier = BulletproofsPlusBatchVerifier::default();
  for outputs in 1 ..= 16 {
    // Random commitments for this aggregation size
    let commitments = (0 .. outputs)
      .map(|_| Commitment::new(Scalar::random(&mut OsRng), OsRng.next_u64()))
      .collect::<Vec<_>>();
    let commitment_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();

    let statement = AggregateRangeStatement::new(&commitment_points).unwrap();
    let witness = AggregateRangeWitness::new(commitments).unwrap();

    let proof = statement.clone().prove(&mut OsRng, &witness).unwrap();
    statement.verify(&mut OsRng, &mut verifier, proof);
  }
  assert!(verifier.verify());
}

View File

@@ -1,65 +0,0 @@
[package]
name = "monero-clsag"
version = "0.1.0"
description = "The CLSAG linkable ring signature, as defined by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/clsag"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
rand_core = { version = "0.6", default-features = false }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Multisig dependencies
rand_chacha = { version = "0.3", default-features = false, optional = true }
transcript = { package = "flexible-transcript", path = "../../../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
group = { version = "0.13", default-features = false, optional = true }
dalek-ff-group = { path = "../../../../crypto/dalek-ff-group", version = "0.4", default-features = false, optional = true }
frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519"], optional = true }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[dev-dependencies]
frost = { package = "modular-frost", path = "../../../../crypto/frost", default-features = false, features = ["ed25519", "tests"] }
[features]
std = [
"std-shims/std",
"thiserror",
"rand_core/std",
"zeroize/std",
"subtle/std",
"rand_chacha?/std",
"transcript?/std",
"group?/alloc",
"dalek-ff-group?/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
multisig = ["rand_chacha", "transcript", "group", "dalek-ff-group", "frost", "std"]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,15 +0,0 @@
# Monero CLSAG
The CLSAG linkable ring signature, as defined by the Monero protocol.
Additionally included is a FROST-inspired threshold multisignature algorithm.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).
- `multisig`: Provides a FROST-inspired threshold multisignature algorithm for
use.

View File

@@ -1,400 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(non_snake_case)]
use core::ops::Deref;
use std_shims::{
vec,
vec::Vec,
io::{self, Read, Write},
};
use rand_core::{RngCore, CryptoRng};
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, ConditionallySelectable};
use curve25519_dalek::{
constants::{ED25519_BASEPOINT_TABLE, ED25519_BASEPOINT_POINT},
scalar::Scalar,
traits::{IsIdentity, MultiscalarMul, VartimePrecomputedMultiscalarMul},
edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};
use monero_io::*;
use monero_generators::hash_to_point;
use monero_primitives::{INV_EIGHT, G_PRECOMP, Commitment, Decoys, keccak256_to_scalar};
#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{ClsagMultisigMaskSender, ClsagAddendum, ClsagMultisig};
#[cfg(all(feature = "std", test))]
mod tests;
/// Errors when working with CLSAGs.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ClsagError {
  /// The ring was invalid (such as being too small or too large).
  #[cfg_attr(feature = "std", error("invalid ring"))]
  InvalidRing,
  /// The discrete logarithm of the key, scaling G, wasn't equivalent to the signing ring member.
  // Fix: this previously displayed as "invalid commitment", duplicating InvalidCommitment's
  // message and misreporting the actual error.
  #[cfg_attr(feature = "std", error("invalid key"))]
  InvalidKey,
  /// The commitment opening provided did not match the ring member's.
  #[cfg_attr(feature = "std", error("invalid commitment"))]
  InvalidCommitment,
  /// The key image was invalid (such as being identity or torsioned)
  #[cfg_attr(feature = "std", error("invalid key image"))]
  InvalidImage,
  /// The `D` component was invalid.
  #[cfg_attr(feature = "std", error("invalid D"))]
  InvalidD,
  /// The `s` vector was invalid.
  #[cfg_attr(feature = "std", error("invalid s"))]
  InvalidS,
  /// The `c1` variable was invalid.
  #[cfg_attr(feature = "std", error("invalid c1"))]
  InvalidC1,
}
/// Context on the input being signed for.
///
/// Constructed via `ClsagContext::new`, which validates the commitment opening against the ring.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagContext {
  // The opening for the commitment of the signing ring member
  commitment: Commitment,
  // Selected ring members' positions, signer index, and ring
  decoys: Decoys,
}
impl ClsagContext {
  /// Create a new context, as necessary for signing.
  pub fn new(decoys: Decoys, commitment: Commitment) -> Result<ClsagContext, ClsagError> {
    // Rings larger than 255 members are rejected
    if decoys.len() > u8::MAX.into() {
      return Err(ClsagError::InvalidRing);
    }

    // The provided opening must match the signer's ring member's commitment
    if commitment.calculate() != decoys.signer_ring_members()[1] {
      return Err(ClsagError::InvalidCommitment);
    }

    Ok(ClsagContext { commitment, decoys })
  }
}
// Whether core() is being run to sign or to verify.
//
// Sign carries (signer index, nonce commitment over G, nonce commitment over the key image
// generator). Verify carries the claimed c1 (the first challenge in the ring).
#[allow(clippy::large_enum_variant)]
enum Mode {
  Sign(usize, EdwardsPoint, EdwardsPoint),
  Verify(Scalar),
}
// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
//
// Said differences are covered via the above Mode
//
// Returns ((D * INV_EIGHT, c * mu_P, c * mu_C), c1), where c is the final challenge produced by
// the loop and c1 is the challenge for the first ring member.
fn core(
  ring: &[[EdwardsPoint; 2]],
  I: &EdwardsPoint,
  pseudo_out: &EdwardsPoint,
  msg: &[u8; 32],
  D: &EdwardsPoint,
  s: &[Scalar],
  A_c1: &Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
  let n = ring.len();

  // Only verification uses a variable-time precomputation over (I, D)
  let images_precomp = match A_c1 {
    Mode::Sign(..) => None,
    Mode::Verify(..) => Some(VartimeEdwardsPrecomputation::new([I, D])),
  };
  // The transcripted (and serialized) D is D * INV_EIGHT
  let D_INV_EIGHT = D * INV_EIGHT();

  // Generate the transcript
  // Instead of generating multiple, a single transcript is created and then edited as needed
  const PREFIX: &[u8] = b"CLSAG_";
  #[rustfmt::skip]
  const AGG_0: &[u8] = b"agg_0";
  #[rustfmt::skip]
  const ROUND: &[u8] = b"round";
  const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();

  // Capacity: a 32-byte DST block, 2n * 32 bytes of ring members, and three more 32-byte items
  let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
  to_hash.extend(PREFIX);
  to_hash.extend(AGG_0);
  // Pad the DST block out to a full 32 bytes
  to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);

  // P: the ring members' keys
  let mut P = Vec::with_capacity(n);
  for member in ring {
    P.push(member[0]);
    to_hash.extend(member[0].compress().to_bytes());
  }

  // C: the ring members' commitments, offset by the pseudo-out
  let mut C = Vec::with_capacity(n);
  for member in ring {
    C.push(member[1] - pseudo_out);
    to_hash.extend(member[1].compress().to_bytes());
  }

  to_hash.extend(I.compress().to_bytes());
  to_hash.extend(D_INV_EIGHT.compress().to_bytes());
  to_hash.extend(pseudo_out.compress().to_bytes());

  // mu_P with agg_0
  let mu_P = keccak256_to_scalar(&to_hash);
  // mu_C with agg_1
  // Only the final byte of the DST differs, so edit it in place
  to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
  let mu_C = keccak256_to_scalar(&to_hash);

  // Truncate it for the round transcript, altering the DST as needed
  // This retains the DST block and the ring members (P, C)
  to_hash.truncate(((2 * n) + 1) * 32);
  for i in 0 .. ROUND.len() {
    to_hash[PREFIX.len() + i] = ROUND[i];
  }
  // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
  // truncated just to add it back
  to_hash.extend(pseudo_out.compress().to_bytes());
  to_hash.extend(msg);

  // Configure the loop based on if we're signing or verifying
  let start;
  let end;
  let mut c;
  match A_c1 {
    Mode::Sign(r, A, AH) => {
      // When signing, start after the signer's index, deriving the initial challenge from the
      // nonce commitments
      start = r + 1;
      end = r + n;
      to_hash.extend(A.compress().to_bytes());
      to_hash.extend(AH.compress().to_bytes());
      c = keccak256_to_scalar(&to_hash);
    }
    Mode::Verify(c1) => {
      // When verifying, walk the entire ring starting from the claimed c1
      start = 0;
      end = n;
      c = *c1;
    }
  }

  // Perform the core loop
  let mut c1 = c;
  for i in (start .. end).map(|i| i % n) {
    // Aggregated challenges for this member
    let c_p = mu_P * c;
    let c_c = mu_C * c;

    // (s_i * G) + (c_p * P_i) + (c_c * C_i)
    let L = match A_c1 {
      Mode::Sign(..) => {
        EdwardsPoint::multiscalar_mul([s[i], c_p, c_c], [ED25519_BASEPOINT_POINT, P[i], C[i]])
      }
      Mode::Verify(..) => {
        G_PRECOMP().vartime_mixed_multiscalar_mul([s[i]], [c_p, c_c], [P[i], C[i]])
      }
    };

    let PH = hash_to_point(P[i].compress().0);

    // (c_p * I) + (c_c * D) + (s_i * PH)
    let R = match A_c1 {
      Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]),
      Mode::Verify(..) => {
        images_precomp.as_ref().unwrap().vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH])
      }
    };

    // Drop any prior round's (L, R) while retaining everything through the message
    to_hash.truncate(((2 * n) + 3) * 32);
    to_hash.extend(L.compress().to_bytes());
    to_hash.extend(R.compress().to_bytes());
    c = keccak256_to_scalar(&to_hash);

    // This will only execute once and shouldn't need to be constant time. Making it constant time
    // removes the risk of branch prediction creating timing differences depending on ring index
    // however
    c1.conditional_assign(&c, i.ct_eq(&(n - 1)));
  }

  // This first tuple is needed to continue signing, the latter is the c to be tested/worked with
  ((D_INV_EIGHT, c * mu_P, c * mu_C), c1)
}
/// The CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Clsag {
  /// The difference of the commitment randomnesses, scaling the key image generator.
  ///
  /// Stored as the point multiplied by INV_EIGHT; verification multiplies it back by the
  /// cofactor.
  pub D: EdwardsPoint,
  /// The responses for each ring member.
  pub s: Vec<Scalar>,
  /// The first challenge in the ring.
  pub c1: Scalar,
}
// Intermediate values produced by sign_core, consumed by both single-signer and multisig signing.
struct ClsagSignCore {
  // CLSAG with a placeholder (random) response at the signer's index
  incomplete_clsag: Clsag,
  // The rerandomized commitment for this input
  pseudo_out: EdwardsPoint,
  // c_p: the challenge the private key is multiplied by
  key_challenge: Scalar,
  // c_c * mask_delta: subtracted (along with key_challenge * key) from the nonce
  challenged_mask: Scalar,
}
impl Clsag {
  // Sign core is the extension of core as needed for signing, yet is shared between single signer
  // and multisig, hence why it's still core
  //
  // (A, AH) are the commitments to the nonce, over G and the key image generator respectively
  fn sign_core<R: RngCore + CryptoRng>(
    rng: &mut R,
    I: &EdwardsPoint,
    input: &ClsagContext,
    mask: Scalar,
    msg: &[u8; 32],
    A: EdwardsPoint,
    AH: EdwardsPoint,
  ) -> ClsagSignCore {
    let r: usize = input.decoys.signer_index().into();

    // Rerandomize the commitment to the specified mask
    let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
    // z: the difference between the ring member's mask and the pseudo-out's mask
    let mask_delta = input.commitment.mask - mask;

    // D = z * hash_to_point(signer's key)
    let H = hash_to_point(input.decoys.ring()[r][0].compress().0);
    let D = H * mask_delta;

    // Random responses for every ring member; the signer's is overwritten by the caller
    let mut s = Vec::with_capacity(input.decoys.ring().len());
    for _ in 0 .. input.decoys.ring().len() {
      s.push(Scalar::random(rng));
    }

    let ((D, c_p, c_c), c1) =
      core(input.decoys.ring(), I, &pseudo_out, msg, &D, &s, &Mode::Sign(r, A, AH));

    ClsagSignCore {
      incomplete_clsag: Clsag { D, s, c1 },
      pseudo_out,
      key_challenge: c_p,
      challenged_mask: c_c * mask_delta,
    }
  }

  /// Sign CLSAG signatures for the provided inputs.
  ///
  /// Monero ensures the rerandomized input commitments have the same value as the outputs by
  /// checking `sum(rerandomized_input_commitments) - sum(output_commitments) == 0`. This requires
  /// not only the amounts balance, yet also
  /// `sum(input_commitment_masks) - sum(output_commitment_masks)`.
  ///
  /// Monero solves this by following the wallet protocol to determine each output commitment's
  /// randomness, then using random masks for all but the last input. The last input is
  /// rerandomized to the necessary mask for the equation to balance.
  ///
  /// Due to Monero having this behavior, it only makes sense to sign CLSAGs as a list, hence this
  /// API being the way it is.
  ///
  /// `inputs` is of the form (discrete logarithm of the key, context).
  ///
  /// `sum_outputs` is for the sum of the output commitments' masks.
  pub fn sign<R: RngCore + CryptoRng>(
    rng: &mut R,
    mut inputs: Vec<(Zeroizing<Scalar>, ClsagContext)>,
    sum_outputs: Scalar,
    msg: [u8; 32],
  ) -> Result<Vec<(Clsag, EdwardsPoint)>, ClsagError> {
    // Create the key images
    let mut key_image_generators = vec![];
    let mut key_images = vec![];
    for input in &inputs {
      let key = input.1.decoys.signer_ring_members()[0];

      // Check the key is consistent
      if (ED25519_BASEPOINT_TABLE * input.0.deref()) != key {
        Err(ClsagError::InvalidKey)?;
      }

      let key_image_generator = hash_to_point(key.compress().0);
      key_image_generators.push(key_image_generator);
      key_images.push(key_image_generator * input.0.deref());
    }

    let mut res = Vec::with_capacity(inputs.len());
    let mut sum_pseudo_outs = Scalar::ZERO;
    for i in 0 .. inputs.len() {
      let mask;
      // If this is the last input, set the mask as described above
      if i == (inputs.len() - 1) {
        mask = sum_outputs - sum_pseudo_outs;
      } else {
        mask = Scalar::random(rng);
        sum_pseudo_outs += mask;
      }

      // The nonce, committed to over G and the key image generator
      let mut nonce = Zeroizing::new(Scalar::random(rng));
      let ClsagSignCore { mut incomplete_clsag, pseudo_out, key_challenge, challenged_mask } =
        Clsag::sign_core(
          rng,
          &key_images[i],
          &inputs[i].1,
          mask,
          &msg,
          nonce.deref() * ED25519_BASEPOINT_TABLE,
          nonce.deref() * key_image_generators[i],
        );
      // Effectively r - c x, except c x is (c_p x) + (c_c z), where z is the delta between the
      // ring member's commitment and our pseudo-out commitment (which will only have a known
      // discrete log over G if the amounts cancel out)
      incomplete_clsag.s[usize::from(inputs[i].1.decoys.signer_index())] =
        nonce.deref() - ((key_challenge * inputs[i].0.deref()) + challenged_mask);
      let clsag = incomplete_clsag;

      // Zeroize private keys and nonces.
      inputs[i].0.zeroize();
      nonce.zeroize();

      // Sanity check the produced signature actually verifies
      debug_assert!(clsag
        .verify(inputs[i].1.decoys.ring(), &key_images[i], &pseudo_out, &msg)
        .is_ok());

      res.push((clsag, pseudo_out));
    }

    Ok(res)
  }

  /// Verify a CLSAG signature for the provided context.
  pub fn verify(
    &self,
    ring: &[[EdwardsPoint; 2]],
    I: &EdwardsPoint,
    pseudo_out: &EdwardsPoint,
    msg: &[u8; 32],
  ) -> Result<(), ClsagError> {
    // Preliminary checks
    // s, c1, and points must also be encoded canonically, which is checked at time of decode
    if ring.is_empty() {
      Err(ClsagError::InvalidRing)?;
    }
    if ring.len() != self.s.len() {
      Err(ClsagError::InvalidS)?;
    }
    if I.is_identity() || (!I.is_torsion_free()) {
      Err(ClsagError::InvalidImage)?;
    }

    // D is serialized * INV_EIGHT, so multiply by the cofactor to recover it
    let D = self.D.mul_by_cofactor();
    if D.is_identity() {
      Err(ClsagError::InvalidD)?;
    }

    // Recompute the ring's challenges and check they cycle back to the claimed c1
    let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, &Mode::Verify(self.c1));
    if c1 != self.c1 {
      Err(ClsagError::InvalidC1)?;
    }
    Ok(())
  }

  /// Write a CLSAG.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_raw_vec(write_scalar, &self.s, w)?;
    w.write_all(&self.c1.to_bytes())?;
    write_point(&self.D, w)
  }

  /// Read a CLSAG.
  ///
  /// `decoys` is the ring length, fixing how many `s` values are read.
  pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Clsag> {
    Ok(Clsag { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
  }
}

View File

@@ -1,379 +0,0 @@
use core::{ops::Deref, fmt::Debug};
use std_shims::{
sync::{Arc, Mutex},
io::{self, Read, Write},
collections::HashMap,
};
use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use zeroize::{Zeroize, Zeroizing};
use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
use group::{
ff::{Field, PrimeField},
Group, GroupEncoding,
};
use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use frost::{
dkg::lagrange,
curve::Ed25519,
Participant, FrostError, ThresholdKeys, ThresholdView,
algorithm::{WriteAddendum, Algorithm},
};
use monero_generators::hash_to_point;
use crate::{ClsagContext, Clsag};
impl ClsagContext {
  // Append this context (signer index and full ring) to the multisig transcript.
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    // Doesn't domain separate as this is considered part of the larger CLSAG proof

    // Ring index
    transcript.append_message(b"signer_index", [self.decoys.signer_index()]);

    // Ring
    for (i, pair) in self.decoys.ring().iter().enumerate() {
      // Doesn't include global output indexes as CLSAG doesn't care/won't be affected by it
      // They're just a unreliable reference to this data which will be included in the message
      // if somehow relevant
      transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
      // This also transcripts the key image generator since it's derived from this key
      transcript.append_message(b"key", pair[0].compress().to_bytes());
      transcript.append_message(b"commitment", pair[1].compress().to_bytes())
    }

    // Doesn't include the commitment's parts as the above ring + index includes the commitment
    // The only potential malleability would be if the G/H relationship is known, breaking the
    // discrete log problem, which breaks everything already
  }
}
/// A channel to send the mask to use for the pseudo-out (rerandomized commitment) with.
///
/// A mask must be sent along this channel before any preprocess addendums are handled. Breaking
/// this rule will cause a panic.
#[derive(Clone, Debug)]
pub struct ClsagMultisigMaskSender {
  // Buffer shared with the receiving half
  buf: Arc<Mutex<Option<Scalar>>>,
}
// The receiving half of the mask channel, held by the ClsagMultisig instance.
#[derive(Clone, Debug)]
struct ClsagMultisigMaskReceiver {
  // Buffer shared with the sending half
  buf: Arc<Mutex<Option<Scalar>>>,
}
impl ClsagMultisigMaskSender {
  // Construct a connected sender/receiver pair over a shared buffer.
  fn new() -> (ClsagMultisigMaskSender, ClsagMultisigMaskReceiver) {
    let shared = Arc::new(Mutex::new(None));
    let sender = ClsagMultisigMaskSender { buf: shared.clone() };
    let receiver = ClsagMultisigMaskReceiver { buf: shared };
    (sender, receiver)
  }

  /// Send a mask to a CLSAG multisig instance.
  pub fn send(self, mask: Scalar) {
    *self.buf.lock() = Some(mask);
  }
}
impl ClsagMultisigMaskReceiver {
  // Read the mask out of the shared buffer.
  //
  // Panics (on the unwrap) if no mask was sent before this is called.
  fn recv(self) -> Scalar {
    self.buf.lock().unwrap()
  }
}
/// Addendum produced during the signing process.
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
pub struct ClsagAddendum {
  // This participant's share of the key image
  key_image_share: dfg::EdwardsPoint,
}
impl ClsagAddendum {
  /// The key image share within this addendum.
  pub fn key_image_share(&self) -> dfg::EdwardsPoint {
    // Returned by value (the point is Copy)
    self.key_image_share
  }
}
impl WriteAddendum for ClsagAddendum {
  // Serialize the addendum as the compressed key image share.
  fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
    writer.write_all(self.key_image_share.compress().to_bytes().as_ref())
  }
}
// Intermediate values saved by sign_share, consumed by verify/verify_share.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug)]
struct Interim {
  // The challenge for the key (sign_core's key_challenge)
  p: Scalar,
  // The challenged mask (sign_core's challenged_mask), subtracted from the summed shares
  c: Scalar,
  // CLSAG with a placeholder response at the signer's index
  clsag: Clsag,
  // The rerandomized commitment for this input
  pseudo_out: EdwardsPoint,
}
/// FROST-inspired algorithm for producing a CLSAG signature.
///
/// Before this has its `process_addendum` called, a mask must be set. Else this will panic.
///
/// The message signed is expected to be a 32-byte value. Per Monero, it's the keccak256 hash of
/// the transaction data which is signed. This will panic if the message is not a 32-byte value.
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct ClsagMultisig {
  // Transcript for the signing protocol
  transcript: RecommendedTranscript,
  // hash_to_point of the signer's ring member's key
  key_image_generator: EdwardsPoint,
  // Interpolated key image shares, keyed by each participant's verification share's bytes
  key_image_shares: HashMap<[u8; 32], dfg::EdwardsPoint>,
  // Accumulated key image: the offset's contribution plus every processed share
  image: Option<dfg::EdwardsPoint>,
  // The ring/commitment context being signed for
  context: ClsagContext,
  // Receiver for the pseudo-out mask, consumed upon the first processed addendum
  mask_recv: Option<ClsagMultisigMaskReceiver>,
  // The received mask, set when the first addendum is processed
  mask: Option<Scalar>,
  // The message signed, set within sign_share
  msg: Option<[u8; 32]>,
  // Intermediate signing values, set within sign_share
  interim: Option<Interim>,
}
impl ClsagMultisig {
  /// Construct a new instance of multisignature CLSAG signing.
  ///
  /// Also returns the sender used to provide the pseudo-out mask, which must be used before any
  /// addendums are processed.
  pub fn new(
    transcript: RecommendedTranscript,
    context: ClsagContext,
  ) -> (ClsagMultisig, ClsagMultisigMaskSender) {
    let (mask_send, mask_recv) = ClsagMultisigMaskSender::new();
    (
      ClsagMultisig {
        transcript,
        // The key image generator is derived from the signer's ring member's key
        key_image_generator: hash_to_point(context.decoys.signer_ring_members()[0].compress().0),
        key_image_shares: HashMap::new(),
        image: None,
        context,
        mask_recv: Some(mask_recv),
        mask: None,
        msg: None,
        interim: None,
      },
      mask_send,
    )
  }

  /// The key image generator used by the signer.
  pub fn key_image_generator(&self) -> EdwardsPoint {
    self.key_image_generator
  }
}
impl Algorithm<Ed25519> for ClsagMultisig {
type Transcript = RecommendedTranscript;
type Addendum = ClsagAddendum;
// We output the CLSAG and the key image, which requires an interactive protocol to obtain
type Signature = (Clsag, EdwardsPoint);
// We need the nonce represented against both G and the key image generator
// A single nonce, with two generator representations
fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
  vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.key_image_generator)]]
}
// We also publish our share of the key image
fn preprocess_addendum<R: RngCore + CryptoRng>(
  &mut self,
  _rng: &mut R,
  keys: &ThresholdKeys<Ed25519>,
) -> ClsagAddendum {
  // Our share is our secret share scaling the key image generator
  ClsagAddendum {
    key_image_share: dfg::EdwardsPoint(self.key_image_generator) * keys.secret_share().deref(),
  }
}
fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
let mut bytes = [0; 32];
reader.read_exact(&mut bytes)?;
// dfg ensures the point is torsion free
let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
.ok_or_else(|| io::Error::other("invalid key image"))?;
// Ensure this is a canonical point
if xH.to_bytes() != bytes {
Err(io::Error::other("non-canonical key image"))?;
}
Ok(ClsagAddendum { key_image_share: xH })
}
fn process_addendum(
  &mut self,
  view: &ThresholdView<Ed25519>,
  l: Participant,
  addendum: ClsagAddendum,
) -> Result<(), FrostError> {
  // One-time setup, performed when the first addendum is processed
  if self.image.is_none() {
    self.transcript.domain_separate(b"CLSAG");
    // Transcript the ring
    self.context.transcript(&mut self.transcript);
    // Fetch the mask from the Mutex
    // We set it to a variable to ensure our view of it is consistent
    // It was this or a mpsc channel... std doesn't have oneshot :/
    self.mask = Some(self.mask_recv.take().unwrap().recv());
    // Transcript the mask
    self.transcript.append_message(b"mask", self.mask.expect("mask wasn't set").to_bytes());
    // Init the image to the offset
    self.image = Some(dfg::EdwardsPoint(self.key_image_generator) * view.offset());
  }

  // Transcript this participant's contribution
  self.transcript.append_message(b"participant", l.to_bytes());
  self
    .transcript
    .append_message(b"key_image_share", addendum.key_image_share.compress().to_bytes());

  // Accumulate the interpolated share
  let interpolated_key_image_share =
    addendum.key_image_share * lagrange::<dfg::Scalar>(l, view.included());
  *self.image.as_mut().unwrap() += interpolated_key_image_share;

  // Also index it by the participant's verification share, as used by verify_share
  self
    .key_image_shares
    .insert(view.verification_share(l).to_bytes(), interpolated_key_image_share);

  Ok(())
}
// Mutable access to the underlying transcript.
fn transcript(&mut self) -> &mut Self::Transcript {
  &mut self.transcript
}
fn sign_share(
  &mut self,
  view: &ThresholdView<Ed25519>,
  nonce_sums: &[Vec<dfg::EdwardsPoint>],
  nonces: Vec<Zeroizing<dfg::Scalar>>,
  msg: &[u8],
) -> dfg::Scalar {
  // Use the transcript to get a seeded random number generator
  //
  // The transcript contains private data, preventing passive adversaries from recreating this
  // process even if they have access to the commitments/key image share broadcast so far
  //
  // Specifically, the transcript contains the signer's index within the ring, along with the
  // opening of the commitment being re-randomized (and what it's re-randomized to)
  let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

  // Panics if the message isn't 32 bytes, as documented on ClsagMultisig
  self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));

  // Run the core signing algorithm with the summed nonce commitments
  let sign_core = Clsag::sign_core(
    &mut rng,
    &self.image.expect("verifying a share despite never processing any addendums").0,
    &self.context,
    self.mask.expect("mask wasn't set"),
    self.msg.as_ref().unwrap(),
    nonce_sums[0][0].0,
    nonce_sums[0][1].0,
  );
  // Save the intermediate values needed to later complete/verify the signature
  self.interim = Some(Interim {
    p: sign_core.key_challenge,
    c: sign_core.challenged_mask,
    clsag: sign_core.incomplete_clsag,
    pseudo_out: sign_core.pseudo_out,
  });

  // r - p x, where p is the challenge for the keys
  *nonces[0] - dfg::Scalar(sign_core.key_challenge) * view.secret_share().deref()
}
#[must_use]
fn verify(
  &self,
  _: dfg::EdwardsPoint,
  _: &[Vec<dfg::EdwardsPoint>],
  sum: dfg::Scalar,
) -> Option<Self::Signature> {
  let interim = self.interim.as_ref().unwrap();
  let mut clsag = interim.clsag.clone();
  // We produced shares as `r - p x`, yet the signature is actually `r - p x - c x`
  // Subtract `c x` (saved as `c`) now
  clsag.s[usize::from(self.context.decoys.signer_index())] = sum.0 - interim.c;
  // Only return the signature if it actually verifies
  if clsag
    .verify(
      self.context.decoys.ring(),
      &self.image.expect("verifying a signature despite never processing any addendums").0,
      &interim.pseudo_out,
      self.msg.as_ref().unwrap(),
    )
    .is_ok()
  {
    return Some((clsag, interim.pseudo_out));
  }
  None
}
// Emit the multiexp terms proving a signer's share (and their key image share)
// is well-formed; the caller batch-verifies that the terms sum to the identity.
//
// Panics if called before `sign_share` populated the interim state, or if no
// addendum was processed for `verification_share`'s signer.
fn verify_share(
  &self,
  verification_share: dfg::EdwardsPoint,
  nonces: &[Vec<dfg::EdwardsPoint>],
  share: dfg::Scalar,
) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
  let interim = self.interim.as_ref().unwrap();

  // For a share `r - p x`, the following two equalities should hold:
  // - `(r - p x)G == R.0 - pV`, where `V = xG`
  // - `(r - p x)H == R.1 - pK`, where `K = xH` (the key image share)
  //
  // This is effectively a discrete log equality proof for:
  // V, K over G, H
  // with nonces
  // R.0, R.1
  // and solution
  // s
  //
  // Which is a batch-verifiable rewrite of the traditional CP93 proof
  // (and also writable as Generalized Schnorr Protocol)
  //
  // That means that given a proper challenge, this alone can be certainly argued to prove the
  // key image share is well-formed and the provided signature so proves for that.

  // This is a bit funky as it doesn't prove the nonces are well-formed however. They're part of
  // the prover data/transcript for a CP93/GSP proof, not part of the statement. This practically
  // is fine, for a variety of reasons (given a consistent `x`, a consistent `r` can be
  // extracted, and the nonces as used in CLSAG are also part of its prover data/transcript).
  let key_image_share = self.key_image_shares[&verification_share.to_bytes()];

  // Derive the random weight for the second statement by hashing every variable
  // relevant here
  let mut transcript =
    RecommendedTranscript::new(b"monero-serai v0.1 ClsagMultisig::verify_share");
  transcript.append_message(b"G", dfg::EdwardsPoint::generator().to_bytes());
  transcript.append_message(b"H", self.key_image_generator.to_bytes());
  transcript.append_message(b"xG", verification_share.to_bytes());
  transcript.append_message(b"xH", key_image_share.to_bytes());
  transcript.append_message(b"rG", nonces[0][0].to_bytes());
  transcript.append_message(b"rH", nonces[0][1].to_bytes());
  transcript.append_message(b"c", dfg::Scalar(interim.p).to_repr());
  transcript.append_message(b"s", share.to_repr());
  let weight =
    dfg::Scalar(Scalar::from_bytes_mod_order_wide(&transcript.challenge(b"weight").into()));

  Ok(vec![
    // First statement, over `G`: `sG - (R.0 - pV) == sG - R.0 + pV`
    (share, dfg::EdwardsPoint::generator()),
    (-dfg::Scalar::ONE, nonces[0][0]),
    (dfg::Scalar(interim.p), verification_share),
    // Second statement, over `H`, scaled by the weight:
    // `w(sH - (R.1 - pK)) == w sH - w R.1 + w pK`
    (weight * share, dfg::EdwardsPoint(self.key_image_generator)),
    (-weight, nonces[0][1]),
    (weight * dfg::Scalar(interim.p), key_image_share),
  ])
}
}

View File

@@ -1,45 +0,0 @@
[package]
name = "monero-mlsag"
version = "0.1.0"
description = "The MLSAG linkable ring signature, as defined by the Monero protocol"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/ringct/mlsag"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../../common/std-shims", version = "^0.1.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
# Cryptographic dependencies
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
# Other Monero dependencies
monero-io = { path = "../../io", version = "0.1", default-features = false }
monero-generators = { path = "../../generators", version = "0.4", default-features = false }
monero-primitives = { path = "../../primitives", version = "0.1", default-features = false }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"monero-io/std",
"monero-generators/std",
"monero-primitives/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,11 +0,0 @@
# Monero MLSAG
The MLSAG linkable ring signature, as defined by the Monero protocol.
This library is usable under no-std when the `std` feature (on by default) is
disabled.
### Cargo Features
- `std` (on by default): Enables `std` (and with it, more efficient internal
implementations).

View File

@@ -1,48 +0,0 @@
[package]
name = "monero-rpc"
version = "0.1.0"
description = "Trait for an RPC connection to a Monero daemon, built around monero-serai"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/rpc"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[lints]
workspace = true
[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }
async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", default-features = false, optional = true }
zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }
monero-serai = { path = "..", default-features = false }
monero-address = { path = "../wallet/address", default-features = false }
[features]
std = [
"std-shims/std",
"thiserror",
"zeroize/std",
"hex/std",
"serde/std",
"serde_json/std",
"monero-serai/std",
"monero-address/std",
]
default = ["std"]

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022-2024 Luke Parker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

Some files were not shown because too many files have changed in this diff Show More