mirror of https://github.com/serai-dex/serai.git (synced 2025-12-11 21:49:26 +00:00)
Restore the reserialize chain binary
Cargo.lock (generated, 16 lines changed)

@@ -4900,6 +4900,22 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "monero-serai-verify-chain"
+version = "0.1.0"
+dependencies = [
+ "curve25519-dalek",
+ "hex",
+ "monero-rpc",
+ "monero-serai",
+ "monero-simple-request-rpc",
+ "rand_core",
+ "serde",
+ "serde_json",
+ "std-shims",
+ "tokio",
+]
+
 [[package]]
 name = "monero-simple-request-rpc"
 version = "0.1.0"
@@ -58,6 +58,7 @@ members = [
   "coins/monero/wallet/seed",
   "coins/monero/wallet/polyseed",
   "coins/monero/wallet/util",
+  "coins/monero/verify-chain",
 
   "message-queue",
@@ -50,5 +50,4 @@ std = [
 
 compile-time-generators = ["curve25519-dalek/precomputed-tables", "monero-bulletproofs/compile-time-generators"]
 multisig = ["monero-clsag/multisig", "std"]
-#binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
 default = ["std", "compile-time-generators"]
@@ -257,9 +257,11 @@ impl OriginalStruct {
     let res = OriginalStruct { A, S, T1, T2, tau_x, mu, L, R, a: a[0], b: b[0], t };
 
     #[cfg(debug_assertions)]
-    let mut verifier = BulletproofsBatchVerifier::default();
-    debug_assert!(res.verify(rng, &mut verifier, &commitments_points));
-    debug_assert!(verifier.verify());
+    {
+      let mut verifier = BulletproofsBatchVerifier::default();
+      debug_assert!(res.verify(rng, &mut verifier, &commitments_points));
+      debug_assert!(verifier.verify());
+    }
 
     res
   }
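A likely motivation for this hunk (my reading, not stated in the commit): an outer #[cfg(debug_assertions)] attribute only gates the single statement that follows it, while the argument of debug_assert! is still type-checked in release builds, so the old form left `verifier` undeclared for the two debug_assert! lines when debug assertions were off. Gating one block keeps the declaration and both assertions together. A self-contained sketch of the gated-block pattern:

fn main() {
  let value = 2 + 2;

  // The entire block is compiled only when debug assertions are enabled, so
  // release builds never see `expected` or the assertion referencing it.
  #[cfg(debug_assertions)]
  {
    let expected = 4;
    debug_assert!(value == expected);
  }

  println!("{value}");
}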
(deleted file, 321 lines)

@@ -1,321 +0,0 @@
/* TODO
#[cfg(feature = "binaries")]
mod binaries {
  pub(crate) use std::sync::Arc;

  pub(crate) use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};

  pub(crate) use serde::Deserialize;
  pub(crate) use serde_json::json;

  pub(crate) use monero_serai::{
    primitives::Commitment,
    ringct::{RctPrunable, bulletproofs::BatchVerifier},
    transaction::{Input, Transaction},
    block::Block,
    rpc::{RpcError, Rpc, SimpleRequestRpc},
  };

  pub(crate) use monero_io::decompress_point;

  pub(crate) use tokio::task::JoinHandle;

  pub(crate) async fn check_block(rpc: Arc<SimpleRequestRpc>, block_i: usize) {
    let hash = loop {
      match rpc.get_block_hash(block_i).await {
        Ok(hash) => break hash,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block_hash ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
      }
    };

    // TODO: Grab the JSON to also check it was deserialized correctly
    #[derive(Deserialize, Debug)]
    struct BlockResponse {
      blob: String,
    }
    let res: BlockResponse = loop {
      match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
        Ok(res) => break res,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
      }
    };

    let blob = hex::decode(res.blob).expect("node returned non-hex block");
    let block = Block::read(&mut blob.as_slice())
      .unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
    assert_eq!(block.hash(), hash, "hash differs");
    assert_eq!(block.serialize(), blob, "serialization differs");

    let txs_len = 1 + block.txs.len();

    if !block.txs.is_empty() {
      #[derive(Deserialize, Debug)]
      struct TransactionResponse {
        tx_hash: String,
        as_hex: String,
      }
      #[derive(Deserialize, Debug)]
      struct TransactionsResponse {
        #[serde(default)]
        missed_tx: Vec<String>,
        txs: Vec<TransactionResponse>,
      }

      let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
      let mut all_txs = vec![];
      while !hashes_hex.is_empty() {
        let txs: TransactionsResponse = loop {
          match rpc
            .rpc_call(
              "get_transactions",
              Some(json!({
                "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
              })),
            )
            .await
          {
            Ok(txs) => break txs,
            Err(RpcError::ConnectionError(e)) => {
              println!("get_transactions ConnectionError: {e}");
              continue;
            }
            Err(e) => panic!("couldn't call get_transactions: {e:?}"),
          }
        };
        assert!(txs.missed_tx.is_empty());
        all_txs.extend(txs.txs);
      }

      let mut batch = BatchVerifier::new();
      for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
        assert_eq!(
          tx_res.tx_hash,
          hex::encode(tx_hash),
          "node returned a transaction with different hash"
        );

        let tx = Transaction::read(
          &mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
        )
        .expect("couldn't deserialize transaction");

        assert_eq!(
          hex::encode(tx.serialize()),
          tx_res.as_hex,
          "Transaction serialization was different"
        );
        assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");

        if matches!(tx.proofs.prunable, RctPrunable::Null) {
          assert_eq!(tx.prefix.version, 1);
          assert!(!tx.signatures.is_empty());
          continue;
        }

        let sig_hash = tx.signature_hash();
        // Verify all proofs we support proving for
        // This is due to having debug_asserts calling verify within their proving, and CLSAG
        // multisig explicitly calling verify as part of its signing process
        // Accordingly, making sure our signature_hash algorithm is correct is great, and further
        // making sure the verification functions are valid is appreciated
        match tx.proofs.prunable {
          RctPrunable::Null |
          RctPrunable::AggregateMlsagBorromean { .. } |
          RctPrunable::MlsagBorromean { .. } => {}
          RctPrunable::MlsagBulletproofs { bulletproofs, .. } => {
            assert!(bulletproofs.batch_verify(
              &mut rand_core::OsRng,
              &mut batch,
              &tx.proofs.base.commitments
            ));
          }
          RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
            assert!(bulletproofs.batch_verify(
              &mut rand_core::OsRng,
              &mut batch,
              &tx.proofs.base.commitments
            ));

            for (i, clsag) in clsags.into_iter().enumerate() {
              let (amount, key_offsets, image) = match &tx.prefix.inputs[i] {
                Input::Gen(_) => panic!("Input::Gen"),
                Input::ToKey { amount, key_offsets, key_image } => (amount, key_offsets, key_image),
              };

              let mut running_sum = 0;
              let mut actual_indexes = vec![];
              for offset in key_offsets {
                running_sum += offset;
                actual_indexes.push(running_sum);
              }

              async fn get_outs(
                rpc: &SimpleRequestRpc,
                amount: u64,
                indexes: &[u64],
              ) -> Vec<[EdwardsPoint; 2]> {
                #[derive(Deserialize, Debug)]
                struct Out {
                  key: String,
                  mask: String,
                }

                #[derive(Deserialize, Debug)]
                struct Outs {
                  outs: Vec<Out>,
                }

                let outs: Outs = loop {
                  match rpc
                    .rpc_call(
                      "get_outs",
                      Some(json!({
                        "get_txid": true,
                        "outputs": indexes.iter().map(|o| json!({
                          "amount": amount,
                          "index": o
                        })).collect::<Vec<_>>()
                      })),
                    )
                    .await
                  {
                    Ok(outs) => break outs,
                    Err(RpcError::ConnectionError(e)) => {
                      println!("get_outs ConnectionError: {e}");
                      continue;
                    }
                    Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
                  }
                };

                let rpc_point = |point: &str| {
                  decompress_point(
                    hex::decode(point)
                      .expect("invalid hex for ring member")
                      .try_into()
                      .expect("invalid point len for ring member"),
                  )
                  .expect("invalid point for ring member")
                };

                outs
                  .outs
                  .iter()
                  .map(|out| {
                    let mask = rpc_point(&out.mask);
                    if amount != 0 {
                      assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
                    }
                    [rpc_point(&out.key), mask]
                  })
                  .collect()
              }

              clsag
                .verify(
                  &get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
                  image,
                  &pseudo_outs[i],
                  &sig_hash,
                )
                .unwrap();
            }
          }
        }
      }
      assert!(batch.verify());
    }

    println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs");
  }
}

#[cfg(feature = "binaries")]
#[tokio::main]
async fn main() {
  use binaries::*;

  let args = std::env::args().collect::<Vec<String>>();

  // Read start block as the first arg
  let mut block_i = args[1].parse::<usize>().expect("invalid start block");

  // How many blocks to work on at once
  let async_parallelism: usize =
    args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");

  // Read further args as RPC URLs
  let default_nodes = vec![
    "http://xmr-node.cakewallet.com:18081".to_string(),
    "https://node.sethforprivacy.com".to_string(),
  ];
  let mut specified_nodes = vec![];
  {
    let mut i = 0;
    loop {
      let Some(node) = args.get(3 + i) else { break };
      specified_nodes.push(node.clone());
      i += 1;
    }
  }
  let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };

  let rpc = |url: String| async move {
    SimpleRequestRpc::new(url.clone())
      .await
      .unwrap_or_else(|_| panic!("couldn't create SimpleRequestRpc connected to {url}"))
  };
  let main_rpc = rpc(nodes[0].clone()).await;
  let mut rpcs = vec![];
  for i in 0 .. async_parallelism {
    rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone()).await));
  }

  let mut rpc_i = 0;
  let mut handles: Vec<JoinHandle<()>> = vec![];
  let mut height = 0;
  loop {
    let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
    if new_height == height {
      break;
    }
    height = new_height;

    while block_i < height {
      if handles.len() >= async_parallelism {
        // Guarantee one handle is complete
        handles.swap_remove(0).await.unwrap();

        // Remove all of the finished handles
        let mut i = 0;
        while i < handles.len() {
          if handles[i].is_finished() {
            handles.swap_remove(i).await.unwrap();
            continue;
          }
          i += 1;
        }
      }

      handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
      rpc_i = (rpc_i + 1) % rpcs.len();
      block_i += 1;
    }
  }
}

#[cfg(not(feature = "binaries"))]
fn main() {
  panic!("To run binaries, please build with `--feature binaries`.");
}
*/

fn main() {}
coins/monero/verify-chain/Cargo.toml (new file, 33 lines)

@@ -0,0 +1,33 @@
[package]
name = "monero-serai-verify-chain"
version = "0.1.0"
description = "A binary to deserialize and verify the Monero blockchain"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/verify-chain"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.79"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }

rand_core = { version = "0.6", default-features = false, features = ["std"] }

curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize"] }

hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc", "std"] }
serde_json = { version = "1", default-features = false, features = ["alloc", "std"] }

monero-serai = { path = "..", default-features = false, features = ["std", "compile-time-generators"] }
monero-rpc = { path = "../rpc", default-features = false, features = ["std"] }
monero-simple-request-rpc = { path = "../rpc/simple-request", default-features = false }

tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "macros"] }
coins/monero/verify-chain/LICENSE (new file, 21 lines)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2022-2024 Luke Parker

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
coins/monero/verify-chain/README.md (new file, 7 lines)

@@ -0,0 +1,7 @@
# monero-serai Verify Chain

A binary to deserialize and verify the Monero blockchain.

This is not complete. This is not intended to be complete. This is intended to
test monero-serai against actual blockchain data. Do not use this as an
inflation checker.
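Hypothetical invocation, based on the argument handling in src/main.rs below (the start height and node URL here are placeholders, not values from the commit); the arguments are the start block, an optional parallelism count (default 8), and any number of node URLs:

cargo run --release -p monero-serai-verify-chain -- 3000000 8 http://127.0.0.1:18081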
coins/monero/verify-chain/src/main.rs (new file, 316 lines)

@@ -0,0 +1,316 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![deny(missing_docs)]

use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};

use serde::Deserialize;
use serde_json::json;

use monero_serai::{
  io::decompress_point,
  primitives::Commitment,
  ringct::{RctPrunable, bulletproofs::BatchVerifier},
  transaction::{Input, Transaction},
  block::Block,
};

use monero_rpc::{RpcError, Rpc};
use monero_simple_request_rpc::SimpleRequestRpc;

use tokio::task::JoinHandle;

async fn check_block(rpc: impl Rpc, block_i: usize) {
  let hash = loop {
    match rpc.get_block_hash(block_i).await {
      Ok(hash) => break hash,
      Err(RpcError::ConnectionError(e)) => {
        println!("get_block_hash ConnectionError: {e}");
        continue;
      }
      Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
    }
  };

  // TODO: Grab the JSON to also check it was deserialized correctly
  #[derive(Deserialize, Debug)]
  struct BlockResponse {
    blob: String,
  }
  let res: BlockResponse = loop {
    match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
      Ok(res) => break res,
      Err(RpcError::ConnectionError(e)) => {
        println!("get_block ConnectionError: {e}");
        continue;
      }
      Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
    }
  };

  let blob = hex::decode(res.blob).expect("node returned non-hex block");
  let block = Block::read(&mut blob.as_slice())
    .unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
  assert_eq!(block.hash(), hash, "hash differs");
  assert_eq!(block.serialize(), blob, "serialization differs");

  let txs_len = 1 + block.transactions.len();

  if !block.transactions.is_empty() {
    #[derive(Deserialize, Debug)]
    struct TransactionResponse {
      tx_hash: String,
      as_hex: String,
    }
    #[derive(Deserialize, Debug)]
    struct TransactionsResponse {
      #[serde(default)]
      missed_tx: Vec<String>,
      txs: Vec<TransactionResponse>,
    }

    let mut hashes_hex = block.transactions.iter().map(hex::encode).collect::<Vec<_>>();
    let mut all_txs = vec![];
    while !hashes_hex.is_empty() {
      let txs: TransactionsResponse = loop {
        match rpc
          .rpc_call(
            "get_transactions",
            Some(json!({
              "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
            })),
          )
          .await
        {
          Ok(txs) => break txs,
          Err(RpcError::ConnectionError(e)) => {
            println!("get_transactions ConnectionError: {e}");
            continue;
          }
          Err(e) => panic!("couldn't call get_transactions: {e:?}"),
        }
      };
      assert!(txs.missed_tx.is_empty());
      all_txs.extend(txs.txs);
    }

    let mut batch = BatchVerifier::new();
    for (tx_hash, tx_res) in block.transactions.into_iter().zip(all_txs) {
      assert_eq!(
        tx_res.tx_hash,
        hex::encode(tx_hash),
        "node returned a transaction with different hash"
      );

      let tx = Transaction::read(
        &mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
      )
      .expect("couldn't deserialize transaction");

      assert_eq!(
        hex::encode(tx.serialize()),
        tx_res.as_hex,
        "Transaction serialization was different"
      );
      assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");

      match tx {
        Transaction::V1 { prefix: _, signatures } => {
          assert!(!signatures.is_empty());
          continue;
        }
        Transaction::V2 { prefix: _, proofs: None } => {
          panic!("proofs were empty in non-miner v2 transaction");
        }
        Transaction::V2 { ref prefix, proofs: Some(ref proofs) } => {
          let sig_hash = tx.signature_hash().expect("no signature hash for TX with proofs");
          // Verify all proofs we support proving for
          // This is due to having debug_asserts calling verify within their proving, and CLSAG
          // multisig explicitly calling verify as part of its signing process
          // Accordingly, making sure our signature_hash algorithm is correct is great, and further
          // making sure the verification functions are valid is appreciated
          match &proofs.prunable {
            RctPrunable::AggregateMlsagBorromean { .. } | RctPrunable::MlsagBorromean { .. } => {}
            RctPrunable::MlsagBulletproofs { bulletproof, .. } |
            RctPrunable::MlsagBulletproofsCompactAmount { bulletproof, .. } => {
              assert!(bulletproof.batch_verify(
                &mut rand_core::OsRng,
                &mut batch,
                &proofs.base.commitments
              ));
            }
            RctPrunable::Clsag { bulletproof, clsags, pseudo_outs } => {
              assert!(bulletproof.batch_verify(
                &mut rand_core::OsRng,
                &mut batch,
                &proofs.base.commitments
              ));

              for (i, clsag) in clsags.iter().enumerate() {
                let (amount, key_offsets, image) = match &prefix.inputs[i] {
                  Input::Gen(_) => panic!("Input::Gen"),
                  Input::ToKey { amount, key_offsets, key_image } => {
                    (amount, key_offsets, key_image)
                  }
                };

                let mut running_sum = 0;
                let mut actual_indexes = vec![];
                for offset in key_offsets {
                  running_sum += offset;
                  actual_indexes.push(running_sum);
                }

                async fn get_outs(
                  rpc: &impl Rpc,
                  amount: u64,
                  indexes: &[u64],
                ) -> Vec<[EdwardsPoint; 2]> {
                  #[derive(Deserialize, Debug)]
                  struct Out {
                    key: String,
                    mask: String,
                  }

                  #[derive(Deserialize, Debug)]
                  struct Outs {
                    outs: Vec<Out>,
                  }

                  let outs: Outs = loop {
                    match rpc
                      .rpc_call(
                        "get_outs",
                        Some(json!({
                          "get_txid": true,
                          "outputs": indexes.iter().map(|o| json!({
                            "amount": amount,
                            "index": o
                          })).collect::<Vec<_>>()
                        })),
                      )
                      .await
                    {
                      Ok(outs) => break outs,
                      Err(RpcError::ConnectionError(e)) => {
                        println!("get_outs ConnectionError: {e}");
                        continue;
                      }
                      Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
                    }
                  };

                  let rpc_point = |point: &str| {
                    decompress_point(
                      hex::decode(point)
                        .expect("invalid hex for ring member")
                        .try_into()
                        .expect("invalid point len for ring member"),
                    )
                    .expect("invalid point for ring member")
                  };

                  outs
                    .outs
                    .iter()
                    .map(|out| {
                      let mask = rpc_point(&out.mask);
                      if amount != 0 {
                        assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
                      }
                      [rpc_point(&out.key), mask]
                    })
                    .collect()
                }

                clsag
                  .verify(
                    &get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
                    image,
                    &pseudo_outs[i],
                    &sig_hash,
                  )
                  .unwrap();
              }
            }
          }
        }
      }
    }
    assert!(batch.verify());
  }

  println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs");
}

#[tokio::main]
async fn main() {
  let args = std::env::args().collect::<Vec<String>>();

  // Read start block as the first arg
  let mut block_i = args.get(1).expect("no start block specified").parse::<usize>().expect("invalid start block");

  // How many blocks to work on at once
  let async_parallelism: usize =
    args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");

  // Read further args as RPC URLs
  let default_nodes = vec![
    // "http://xmr-node.cakewallet.com:18081".to_string(),
    "http://node.tools.rino.io:18081".to_string(),
    // "https://node.sethforprivacy.com".to_string(),
  ];
  let mut specified_nodes = vec![];
  {
    let mut i = 0;
    loop {
      let Some(node) = args.get(3 + i) else { break };
      specified_nodes.push(node.clone());
      i += 1;
    }
  }
  let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };

  let rpc = |url: String| async move {
    SimpleRequestRpc::new(url.clone())
      .await
      .unwrap_or_else(|_| panic!("couldn't create SimpleRequestRpc connected to {url}"))
  };
  let main_rpc = rpc(nodes[0].clone()).await;
  let mut rpcs = vec![];
  for i in 0 .. async_parallelism {
    rpcs.push(rpc(nodes[i % nodes.len()].clone()).await);
  }

  let mut rpc_i = 0;
  let mut handles: Vec<JoinHandle<()>> = vec![];
  let mut height = 0;
  loop {
    let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
    if new_height == height {
      break;
    }
    height = new_height;

    while block_i < height {
      if handles.len() >= async_parallelism {
        // Guarantee one handle is complete
        handles.swap_remove(0).await.unwrap();

        // Remove all of the finished handles
        let mut i = 0;
        while i < handles.len() {
          if handles[i].is_finished() {
            handles.swap_remove(i).await.unwrap();
            continue;
          }
          i += 1;
        }
      }

      handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
      rpc_i = (rpc_i + 1) % rpcs.len();
      block_i += 1;
    }
  }
}
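The running_sum loop in check_block converts Monero's delta-encoded key_offsets into the absolute output indexes passed to get_outs: each offset after the first is stored relative to the previous ring member's index. A minimal standalone sketch of that decoding (the sample offsets are made up for illustration):

fn decode_key_offsets(key_offsets: &[u64]) -> Vec<u64> {
  // A running sum over the deltas recovers the absolute output indexes.
  let mut running_sum = 0;
  let mut actual_indexes = Vec::with_capacity(key_offsets.len());
  for offset in key_offsets {
    running_sum += offset;
    actual_indexes.push(running_sum);
  }
  actual_indexes
}

fn main() {
  assert_eq!(decode_key_offsets(&[5000, 3, 10]), vec![5000, 5003, 5013]);
}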
@@ -58,6 +58,8 @@ impl Client {
     res.set_nodelay(true);
     res.set_reuse_address(true);
     #[cfg(feature = "tls")]
+    res.enforce_http(false);
+    #[cfg(feature = "tls")]
     let res = HttpsConnectorBuilder::new()
       .with_native_roots()
       .expect("couldn't fetch system's SSL roots")