diff --git a/.github/actions/bitcoin/action.yml b/.github/actions/bitcoin/action.yml index 5008b690..2a6dbce3 100644 --- a/.github/actions/bitcoin/action.yml +++ b/.github/actions/bitcoin/action.yml @@ -5,7 +5,7 @@ inputs: version: description: "Version to download and run" required: false - default: 27.0 + default: "27.0" runs: using: "composite" diff --git a/.github/actions/test-dependencies/action.yml b/.github/actions/test-dependencies/action.yml index 7487a33b..49c2fa64 100644 --- a/.github/actions/test-dependencies/action.yml +++ b/.github/actions/test-dependencies/action.yml @@ -10,7 +10,7 @@ inputs: bitcoin-version: description: "Bitcoin version to download and run as a regtest node" required: false - default: 27.0 + default: "27.0" runs: using: "composite" @@ -19,9 +19,9 @@ runs: uses: ./.github/actions/build-dependencies - name: Install Foundry - uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf + uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773 with: - version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2 + version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9 cache: false - name: Run a Monero Regtest Node diff --git a/.github/workflows/coins-tests.yml b/.github/workflows/coins-tests.yml index a0437c61..f94e9fd5 100644 --- a/.github/workflows/coins-tests.yml +++ b/.github/workflows/coins-tests.yml @@ -30,6 +30,7 @@ jobs: run: | GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \ -p bitcoin-serai \ + -p alloy-simple-request-transport \ -p ethereum-serai \ -p monero-generators \ -p monero-serai diff --git a/Cargo.lock b/Cargo.lock index ee2ecdcf..edc46693 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -95,9 +95,344 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = 
"5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" + +[[package]] +name = "alloy-consensus" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "c-kzg", + "serde", + "sha2", +] + +[[package]] +name = "alloy-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcbd9ee412dfb4e81d23cd1ae816d828c494a77d1eb00358035043695d4c5808" +dependencies = [ + "alloy-primitives", +] + +[[package]] +name = "alloy-eips" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "c-kzg", + "once_cell", + "serde", +] + +[[package]] +name = "alloy-genesis" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-json-abi" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a35ddfd27576474322a5869e4c123e5f3e7b2177297c18e4e82ea501cb125b" +dependencies = [ + "alloy-primitives", + "alloy-sol-type-parser", + "serde", +] + +[[package]] +name = "alloy-json-rpc" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", + "thiserror", + "tracing", +] + +[[package]] +name = "alloy-network" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + 
"alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives", + "alloy-rpc-types", + "alloy-signer", + "async-trait", + "futures-utils-wasm", + "thiserror", +] + +[[package]] +name = "alloy-node-bindings" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-genesis", + "alloy-primitives", + "k256", + "serde_json", + "tempfile", + "thiserror", + "tracing", + "url", +] + +[[package]] +name = "alloy-primitives" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99bbad0a6b588ef4aec1b5ddbbfdacd9ef04e00b979617765b03174318ee1f3a" +dependencies = [ + "alloy-rlp", + "bytes", + "cfg-if", + "const-hex", + "derive_more", + "hex-literal", + "itoa", + "k256", + "keccak-asm", + "proptest", + "rand", + "ruint", + "serde", + "tiny-keccak", +] + +[[package]] +name = "alloy-provider" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-rpc-types-trace", + "alloy-transport", + "async-stream", + "async-trait", + "auto_impl", + "dashmap", + "futures", + "futures-utils-wasm", + "lru", + "serde_json", + "tokio", + "tracing", +] + +[[package]] +name = "alloy-rlp" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d58d9f5da7b40e9bfff0b7e7816700be4019db97d4b6359fe7f94a9e22e42ac" +dependencies = [ + "alloy-rlp-derive", + "arrayvec", + "bytes", +] + +[[package]] +name = "alloy-rlp-derive" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a047897373be4bbb0224c1afdabca92648dc57a9c9ef6e7b0be3aff7a859c83" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", +] + 
+[[package]] +name = "alloy-rpc-client" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "alloy-transport-http", + "futures", + "pin-project", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower", + "tracing", +] + +[[package]] +name = "alloy-rpc-types" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-sol-types", + "itertools 0.12.1", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "alloy-rpc-types-trace" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types", + "alloy-serde", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-serde" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-signer" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "elliptic-curve", + "k256", + "thiserror", +] + +[[package]] +name = "alloy-simple-request-transport" +version = "0.1.0" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "serde_json", + "simple-request", + "tower", +] + +[[package]] +name = "alloy-sol-macro" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "452d929748ac948a10481fff4123affead32c553cf362841c5103dd508bdfc16" +dependencies = [ + "alloy-json-abi", + "alloy-sol-macro-input", + "const-hex", + "heck 0.4.1", + "indexmap 2.2.6", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.60", + "syn-solidity", + "tiny-keccak", +] + +[[package]] +name = "alloy-sol-macro-input" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df64e094f6d2099339f9e82b5b38440b159757b6920878f28316243f8166c8d1" +dependencies = [ + "alloy-json-abi", + "const-hex", + "dunce", + "heck 0.5.0", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.60", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-type-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715f4d09a330cc181fc7c361b5c5c2766408fa59a0bac60349dcb7baabd404cc" +dependencies = [ + "winnow 0.6.6", +] + +[[package]] +name = "alloy-sol-types" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43bc2d6dfc2a19fd56644494479510f98b1ee929e04cf0d4aa45e98baa3e545b" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-macro", + "const-hex", +] + +[[package]] +name = "alloy-transport" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-json-rpc", + "base64 0.22.0", + "futures-util", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower", + "url", + "wasm-bindgen-futures", +] + +[[package]] +name = "alloy-transport-http" +version = "0.1.0" +source = "git+https://github.com/alloy-rs/alloy?rev=037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6#037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6" +dependencies = [ + "alloy-transport", + "url", +] [[package]] name = "android-tzdata" @@ -192,6 +527,130 @@ version = "1.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm 0.3.0", + "ark-ff-macros 0.3.0", + "ark-serialize 0.3.0", + "ark-std 0.3.0", + "derivative", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.3.3", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm 0.4.2", + "ark-ff-macros 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.4.0", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" +dependencies = [ + "num-bigint", + "num-traits", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + 
"num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-serialize" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" +dependencies = [ + "ark-std 0.3.0", + "digest 0.9.0", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-std 0.4.0", + "digest 0.10.7", + "num-bigint", +] + +[[package]] +name = "ark-std" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand", +] + [[package]] name = "array-bytes" version = "6.2.2" @@ -291,25 +750,36 @@ dependencies = [ ] [[package]] -name = "async-trait" -version = "0.1.79" +name = "async-stream" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", + "async-stream-impl", + "futures-core", + "pin-project-lite 0.2.14", ] [[package]] -name = "async_io_stream" -version = "0.3.3" +name = "async-stream-impl" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - 
"futures", - "pharos", - "rustc_version", + "proc-macro2", + "quote", + "syn 2.0.60", +] + +[[package]] +name = "async-trait" +version = "0.1.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.60", ] [[package]] @@ -344,7 +814,7 @@ checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -402,6 +872,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64ct" version = "1.6.0" @@ -441,7 +917,7 @@ dependencies = [ "bitflags 2.5.0", "cexpr", "clang-sys", - "itertools", + "itertools 0.12.1", "lazy_static", "lazycell", "proc-macro2", @@ -449,9 +925,24 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.58", + "syn 2.0.60", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitcoin" version = "0.31.2" @@ -617,6 +1108,18 @@ dependencies = [ "subtle", ] +[[package]] +name = "blst" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94087b935a822949d3291a9989ad2b2051ea141eda0fd4e478a75f6aa3e604b" +dependencies = [ + "cc", + "glob", + 
"threadpool", + "zeroize", +] + [[package]] name = "bollard" version = "0.15.0" @@ -677,7 +1180,7 @@ dependencies = [ "proc-macro-crate 3.1.0", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", "syn_derive", ] @@ -765,6 +1268,20 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "c-kzg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3130f3d8717cc02e668a896af24984d5d5d4e8bf12e278e982e0f1bd88a0f9af" +dependencies = [ + "blst", + "cc", + "glob", + "hex", + "libc", + "serde", +] + [[package]] name = "camino" version = "1.1.6" @@ -817,9 +1334,9 @@ dependencies = [ [[package]] name = "cfg-expr" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa50868b64a9a6fda9d593ce778849ea8715cd2a3d2cc17ffdb4a2f2f2f1961d" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" dependencies = [ "smallvec", ] @@ -862,9 +1379,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.35" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", @@ -935,9 +1452,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.2" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ "clap_builder", "clap_derive", @@ -957,14 +1474,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.0" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" +checksum = 
"528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1000,13 +1517,14 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.9.1" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c37be52ef5e3b394db27a2341010685ad5103c72ac15ce2e9420a7e8f93f342c" +checksum = "5ba00838774b4ab0233e355d26710fbfc8327a05c017f6dc4873f876d1f79f78" dependencies = [ "cfg-if", "cpufeatures", "hex", + "proptest", "serde", ] @@ -1042,6 +1560,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.9.4" @@ -1195,7 +1719,7 @@ dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", - "itertools", + "itertools 0.10.5", "log", "smallvec", "wasmparser", @@ -1288,7 +1812,7 @@ dependencies = [ "group", "platforms", "rand_core", - "rustc_version", + "rustc_version 0.4.0", "subtle", "zeroize", ] @@ -1301,7 +1825,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1328,7 +1852,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1345,7 +1869,7 @@ checksum = "ad08a837629ad949b73d032c637653d069e909cffe4ee7870b02301939ce39cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1364,6 +1888,19 @@ dependencies = [ "zeroize", ] +[[package]] +name = "dashmap" +version = "5.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +dependencies = [ + "cfg-if", + "hashbrown 0.14.3", + "lock_api", + "once_cell", + "parking_lot_core 0.9.9", +] + [[package]] name = "data-encoding" version = "2.5.0" @@ -1433,6 +1970,17 @@ dependencies = [ "serde", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "derive-syn-parse" version = "0.1.5" @@ -1450,8 +1998,10 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ + "convert_case", "proc-macro2", "quote", + "rustc_version 0.4.0", "syn 1.0.109", ] @@ -1531,7 +2081,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1692,9 +2242,9 @@ dependencies = [ [[package]] name = "either" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" +checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2" [[package]] name = "elliptic-curve" @@ -1716,40 +2266,13 @@ dependencies = [ "zeroize", ] -[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "enr" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe81b5c06ecfdbc71dd845216f225f53b62a10cb8a16c946836a3467f701d05b" 
-dependencies = [ - "base64 0.21.7", - "bytes", - "hex", - "k256", - "log", - "rand", - "rlp", - "serde", - "sha3", - "zeroize", -] - [[package]] name = "enum-as-inner" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9720bba047d567ffc8a3cba48bf19126600e249ab7f128e9233e6376976a116" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "syn 1.0.109", @@ -1761,10 +2284,10 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -1802,198 +2325,27 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - [[package]] name = "ethereum-serai" version = "0.1.0" dependencies = [ - "ethers-contract", - "ethers-core", - "ethers-providers", - "eyre", + "alloy-consensus", + "alloy-core", + "alloy-node-bindings", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-simple-request-transport", + "alloy-sol-types", + "flexible-transcript", "group", - "hex", "k256", "modular-frost", "rand_core", - "serde", - "serde_json", - "sha2", - "sha3", "thiserror", "tokio", ] -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint", -] - -[[package]] -name = "ethers-contract" -version = "2.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d79269278125006bb0552349c03593ffa9702112ca88bc7046cc669f148fb47c" -dependencies = [ - "const-hex", - "ethers-contract-abigen", - "ethers-contract-derive", - "ethers-core", - "ethers-providers", - "futures-util", - "once_cell", - "pin-project", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "ethers-contract-abigen" -version = "2.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce95a43c939b2e4e2f3191c5ad4a1f279780b8a39139c9905b43a7433531e2ab" -dependencies = [ - "Inflector", - "const-hex", - "dunce", - "ethers-core", - "eyre", - "prettyplease 0.2.16", - "proc-macro2", - "quote", - "regex", - "serde", - "serde_json", - "syn 2.0.58", - "toml 0.7.8", - "walkdir", -] - -[[package]] -name = "ethers-contract-derive" -version = "2.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9ce44906fc871b3ee8c69a695ca7ec7f70e50cb379c9b9cb5e532269e492f6" -dependencies = [ - "Inflector", - "const-hex", - "ethers-contract-abigen", - "ethers-core", - "proc-macro2", - "quote", - "serde_json", - "syn 2.0.58", -] - -[[package]] -name = "ethers-core" -version = "2.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0a17f0708692024db9956b31d7a20163607d2745953f5ae8125ab368ba280ad" -dependencies = [ - "arrayvec", - "bytes", - "cargo_metadata", - "chrono", - "const-hex", - "elliptic-curve", - "ethabi", - "generic-array 0.14.7", - "k256", - "num_enum", - "once_cell", - "open-fastrlp", - "rand", - "rlp", - "serde", - "serde_json", - "strum 0.25.0", - "syn 
2.0.58", - "tempfile", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ethers-providers" -version = "2.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6838fa110e57d572336178b7c79e94ff88ef976306852d8cb87d9e5b1fc7c0b5" -dependencies = [ - "async-trait", - "auto_impl", - "base64 0.21.7", - "bytes", - "const-hex", - "enr", - "ethers-core", - "futures-core", - "futures-timer", - "futures-util", - "hashers", - "http 0.2.12", - "instant", - "jsonwebtoken", - "once_cell", - "pin-project", - "reqwest", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", - "tracing-futures", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "ws_stream_wasm", -] - [[package]] name = "event-listener" version = "2.5.3" @@ -2040,17 +2392,7 @@ dependencies = [ "fs-err", "proc-macro2", "quote", - "syn 2.0.58", -] - -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", + "syn 2.0.60", ] [[package]] @@ -2065,6 +2407,17 @@ version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +[[package]] +name = "fastrlp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", +] + [[package]] name = "fdlimit" version = "0.2.1" @@ -2309,12 +2662,12 @@ dependencies = [ "derive-syn-parse", "expander", "frame-support-procedural-tools", - "itertools", + "itertools 0.10.5", "macro_magic", "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2326,7 +2679,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 
2.0.58", + "syn 2.0.60", ] [[package]] @@ -2336,7 +2689,7 @@ source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46 dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2495,7 +2848,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -2505,7 +2858,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35bd3cf68c183738046838e300353e4716c674dc5e56890de4826801a6622a28" dependencies = [ "futures-io", - "rustls 0.21.10", + "rustls 0.21.11", ] [[package]] @@ -2536,10 +2889,6 @@ name = "futures-timer" version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" -dependencies = [ - "gloo-timers", - "send_wrapper 0.4.0", -] [[package]] name = "futures-util" @@ -2559,6 +2908,12 @@ dependencies = [ "slab", ] +[[package]] +name = "futures-utils-wasm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" + [[package]] name = "fxhash" version = "0.2.1" @@ -2616,9 +2971,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -2671,18 +3026,6 @@ dependencies = [ "regex-syntax 0.8.3", ] -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - 
[[package]] name = "group" version = "0.13.0" @@ -2753,21 +3096,18 @@ dependencies = [ "allocator-api2", ] -[[package]] -name = "hashers" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" -dependencies = [ - "fxhash", -] - [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.3.9" @@ -2779,6 +3119,9 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "hex-conservative" @@ -2951,9 +3294,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" dependencies = [ "bytes", "futures-channel", @@ -2970,15 +3313,15 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736f15a50e749d033164c56c09783b6102c4ff8da79ad77dbddbbaea0f8567f7" +checksum = "908bb38696d7a037a01ebcc68a00634112ac2bbf8ca74e30a2c3d2f4f021302b" dependencies = [ "futures-util", "http 1.1.0", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-util", - "rustls 0.23.4", + "rustls 0.23.5", "rustls-native-certs", "rustls-pki-types", "tokio", @@ -2997,7 +3340,7 @@ dependencies = [ "futures-util", "http 1.1.0", 
"http-body 1.0.0", - "hyper 1.2.0", + "hyper 1.3.1", "pin-project-lite 0.2.14", "socket2 0.5.6", "tokio", @@ -3063,6 +3406,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "if-addrs" version = "0.10.2" @@ -3120,15 +3473,6 @@ dependencies = [ "parity-scale-codec", ] -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - [[package]] name = "impl-serde" version = "0.4.0" @@ -3149,12 +3493,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "indenter" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" - [[package]] name = "indexmap" version = "1.9.3" @@ -3242,6 +3580,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.11" @@ -3302,7 +3649,7 @@ version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44e8ab85614a08792b9bff6c8feee23be78c98d0182d4c622c05256ab553892a" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro-crate 1.3.1", "proc-macro2", "quote", @@ -3345,20 +3692,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "jsonwebtoken" -version = "8.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" 
-dependencies = [ - "base64 0.21.7", - "pem", - "ring 0.16.20", - "serde", - "serde_json", - "simple_asn1", -] - [[package]] name = "k256" version = "0.13.3" @@ -3370,7 +3703,6 @@ dependencies = [ "elliptic-curve", "once_cell", "sha2", - "signature", ] [[package]] @@ -3382,6 +3714,16 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "keccak-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb8515fff80ed850aea4a1595f2e519c003e2a00a82fe168ebf5269196caf444" +dependencies = [ + "digest 0.10.7", + "sha3-asm", +] + [[package]] name = "kvdb" version = "0.13.0" @@ -3448,7 +3790,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ "cfg-if", - "windows-targets 0.52.4", + "windows-targets 0.48.5", ] [[package]] @@ -3765,7 +4107,7 @@ dependencies = [ "quinn", "rand", "ring 0.16.20", - "rustls 0.21.10", + "rustls 0.21.11", "socket2 0.5.6", "thiserror", "tokio", @@ -3818,11 +4160,11 @@ version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4d5ec2a3df00c7836d7696c136274c9c59705bac69133253696a6c932cd1d74" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -3854,7 +4196,7 @@ dependencies = [ "libp2p-identity", "rcgen", "ring 0.16.20", - "rustls 0.21.10", + "rustls 0.21.11", "rustls-webpki 0.101.7", "thiserror", "x509-parser", @@ -3886,7 +4228,7 @@ dependencies = [ "futures", "js-sys", "libp2p-core", - "send_wrapper 0.6.0", + "send_wrapper", "wasm-bindgen", "wasm-bindgen-futures", ] @@ -4075,7 +4417,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4089,7 +4431,7 @@ dependencies = [ "macro_magic_core_macros", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4100,7 +4442,7 @@ checksum = 
"d710e1214dffbab3b5dacb21475dde7d6ed84c69ff722b3a47a782668d44fbac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4111,7 +4453,7 @@ checksum = "b8fb85ec1620619edf2984a7693497d4ec88a9665d8b87e942856884c92dbf2a" dependencies = [ "macro_magic_core", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -4216,12 +4558,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "mini-serai" version = "0.1.0" @@ -4732,27 +5068,6 @@ dependencies = [ "libc", ] -[[package]] -name = "num_enum" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" -dependencies = [ - "num_enum_derive", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" -dependencies = [ - "proc-macro-crate 3.1.0", - "proc-macro2", - "quote", - "syn 2.0.58", -] - [[package]] name = "object" version = "0.31.1" @@ -4795,31 +5110,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - 
"bytes", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "openssl-probe" version = "0.1.5" @@ -5182,6 +5472,17 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "pest" +version = "2.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "311fb059dee1a7b802f036316d790138c613a4e8b180c822e3925a662e9f0c95" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + [[package]] name = "petgraph" version = "0.6.4" @@ -5192,16 +5493,6 @@ dependencies = [ "indexmap 2.2.6", ] -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version", -] - [[package]] name = "pin-project" version = "1.1.5" @@ -5219,7 +5510,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5320,7 +5611,7 @@ checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" dependencies = [ "difflib", "float-cmp", - "itertools", + "itertools 0.10.5", "normalize-line-endings", "predicates-core", "regex", @@ -5352,16 +5643,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "prettyplease" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5" -dependencies = [ - "proc-macro2", - "syn 2.0.58", -] - [[package]] name = "primeorder" version = "0.13.6" @@ -5379,7 +5660,6 @@ checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", - "impl-rlp", "impl-serde", "scale-info", "uint", @@ -5443,14 +5723,14 @@ checksum = 
"3d1eaa7fa0aa1929ffdf7eeb6eac234dde6268914a14ad44d23521ab6a9b258e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" dependencies = [ "unicode-ident", ] @@ -5489,7 +5769,27 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", +] + +[[package]] +name = "proptest" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.5.0", + "lazy_static", + "num-traits", + "rand", + "rand_chacha", + "rand_xorshift", + "regex-syntax 0.8.3", + "rusty-fork", + "tempfile", + "unarray", ] [[package]] @@ -5509,13 +5809,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", - "heck", - "itertools", + "heck 0.4.1", + "itertools 0.10.5", "lazy_static", "log", "multimap", "petgraph", - "prettyplease 0.1.25", + "prettyplease", "prost", "prost-types", "regex", @@ -5531,7 +5831,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools", + "itertools 0.10.5", "proc-macro2", "quote", "syn 1.0.109", @@ -5606,7 +5906,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash", - "rustls 0.21.10", + "rustls 0.21.11", "thiserror", "tokio", "tracing", @@ -5622,7 +5922,7 @@ dependencies = [ "rand", "ring 0.16.20", "rustc-hash", - "rustls 0.21.10", + "rustls 0.21.11", "slab", 
"thiserror", "tinyvec", @@ -5706,6 +6006,15 @@ dependencies = [ "rand_core", ] +[[package]] +name = "rand_xorshift" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +dependencies = [ + "rand_core", +] + [[package]] name = "rawpointer" version = "0.2.1" @@ -5790,7 +6099,7 @@ checksum = "5fddb4f8d99b0a2ebafc65a87a69a7b9875e4b1ae1f00db265d300ef7f28bccc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -5850,41 +6159,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" -[[package]] -name = "reqwest" -version = "0.11.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b1ae8d9ac08420c66222fb9096fc5de435c3c48542bc5336c51892cffafb41" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.28", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite 0.2.14", - "serde", - "serde_json", - "serde_urlencoded", - "system-configuration", - "tokio", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg", -] - [[package]] name = "resolv-conf" version = "0.7.0" @@ -5951,21 +6225,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" dependencies = [ "bytes", - "rlp-derive", "rustc-hex", ] -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "rocksdb" version = "0.21.0" @@ -6019,6 
+6281,36 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "ruint" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f308135fef9fc398342da5472ce7c484529df23743fb7c734e0f3d472971e62" +dependencies = [ + "alloy-rlp", + "ark-ff 0.3.0", + "ark-ff 0.4.2", + "bytes", + "fastrlp", + "num-bigint", + "num-traits", + "parity-scale-codec", + "primitive-types", + "proptest", + "rand", + "rlp", + "ruint-macro", + "serde", + "valuable", + "zeroize", +] + +[[package]] +name = "ruint-macro" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f86854cf50259291520509879a5c294c3c9a4c334e9ff65071c51e42ef1e2343" + [[package]] name = "rustc-demangle" version = "0.1.23" @@ -6037,6 +6329,15 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + [[package]] name = "rustc_version" version = "0.4.0" @@ -6070,9 +6371,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" dependencies = [ "log", "ring 0.17.8", @@ -6082,9 +6383,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.4" +version = "0.23.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c4d6d8ad9f2492485e13453acbb291dd08f64441b6609c491f1c2cd2c6b4fe1" +checksum = "afabcee0551bd1aa3e18e5adbf2c0544722014b899adb31bd186ec638d3da97e" dependencies = [ "once_cell", "ring 0.17.8", @@ 
-6109,11 +6410,11 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f48172685e6ff52a556baa527774f61fcaa884f59daf3375c62a3f1cd2549dab" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "rustls-pki-types", ] @@ -6150,6 +6451,18 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "rw-stream-sink" version = "0.4.0" @@ -6289,7 +6602,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -7050,7 +7363,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -7290,7 +7603,16 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537" dependencies = [ - "semver-parser", + "semver-parser 0.7.0", +] + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser 0.10.2", ] [[package]] @@ -7309,10 +7631,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] -name = "send_wrapper" -version = "0.4.0" +name = "semver-parser" +version = "0.10.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] [[package]] name = "send_wrapper" @@ -7688,9 +8013,11 @@ dependencies = [ "bitcoin-serai", "borsh", "ciphersuite", + "const-hex", "dalek-ff-group", "dockertest", "env_logger", + "ethereum-serai", "flexible-transcript", "frost-schnorrkel", "hex", @@ -7877,9 +8204,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "9846a40c979031340571da2545a4e5b7c4163bdae79b301d5f86d03979451fcc" dependencies = [ "serde_derive", ] @@ -7895,20 +8222,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.198" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "e88edab869b01783ba905e7d0153f9fc1a6505a96e4ad3018011eedb838566d9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" dependencies = [ "itoa", "ryu", @@ -7923,7 +8250,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8009,6 +8336,16 @@ dependencies = [ "keccak", ] +[[package]] +name = "sha3-asm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bac61da6b35ad76b195eb4771210f947734321a8d81d7738e1580d953bc7a15e" +dependencies = [ + "cc", + "cfg-if", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -8062,7 +8399,7 @@ version = "0.1.0" dependencies = [ "base64ct", "http-body-util", - "hyper 1.2.0", + "hyper 1.3.1", "hyper-rustls", "hyper-util", "tokio", @@ -8070,18 +8407,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "simple_asn1" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" -dependencies = [ - "num-bigint", - "num-traits", - "thiserror", - "time", -] - [[package]] name = "siphasher" version = "0.3.11" @@ -8127,7 +8452,7 @@ dependencies = [ "curve25519-dalek", "rand_core", "ring 0.17.8", - "rustc_version", + "rustc_version 0.4.0", "sha2", "subtle", ] @@ -8200,7 +8525,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8396,7 +8721,7 @@ source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46 dependencies = [ "quote", "sp-core-hashing", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8415,7 +8740,7 @@ source = "git+https://github.com/serai-dex/substrate#6e3f07bf5c98a6a3ec15f2b1a46 dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8587,7 +8912,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8740,7 +9065,7 @@ dependencies = [ "parity-scale-codec", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -8902,7 +9227,7 @@ version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "rustversion", @@ -8915,11 +9240,11 @@ version = "0.25.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", "rustversion", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9007,15 +9332,27 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.58" +version = "2.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" +checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "syn-solidity" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4497156948bd342b52038035a6fa514a89626e37af9d2c52a5e8d8ebcc7ee479" +dependencies = [ + "paste", + "proc-macro2", + "quote", + "syn 2.0.60", +] + [[package]] name = "syn_derive" version = "0.1.8" @@ -9025,7 +9362,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9117,22 +9454,22 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9253,7 +9590,7 @@ checksum = 
"5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9262,7 +9599,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.4", + "rustls 0.23.5", "rustls-pki-types", "tokio", ] @@ -9334,7 +9671,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] @@ -9345,7 +9682,7 @@ checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ "indexmap 2.2.6", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] @@ -9414,7 +9751,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -9647,6 +9984,12 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "uint" version = "0.9.5" @@ -9659,6 +10002,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + [[package]] name = "unicode-bidi" version = "0.3.15" @@ -9728,12 +10077,12 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = 
"31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", ] @@ -9773,6 +10122,15 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -9819,7 +10177,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-shared", ] @@ -9853,7 +10211,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10162,7 +10520,7 @@ checksum = "ca7af9bb3ee875c4907835e607a275d10b04d15623d3aebe01afe8fbd3f85050" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -10195,9 +10553,9 @@ dependencies = [ [[package]] name = "wide" -version = "0.7.15" +version = "0.7.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89beec544f246e679fc25490e3f8e08003bc4bf612068f325120dad4cea02c1c" +checksum = "81a1851a719f11d1d2fea40e15c72f6c00de8c142d7ac47c1441cc7e4d0d5bc6" dependencies = [ "bytemuck", "safe_arch", @@ -10409,6 +10767,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c976aaaa0e1f90dbb21e9587cdaf1d9679a1cde8875c0d6bd83ab96a208352" + [[package]] name = "winreg" version = "0.50.0" @@ -10419,25 +10783,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "ws_stream_wasm" -version = "0.7.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" -dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version", - "send_wrapper 0.6.0", - "thiserror", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - [[package]] name = "wyz" version = "0.5.1" @@ -10539,7 +10884,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] @@ -10559,7 +10904,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.58", + "syn 2.0.60", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index bcc344ed..8a19d159 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,6 +36,7 @@ members = [ "crypto/schnorrkel", "coins/bitcoin", + "coins/ethereum/alloy-simple-request-transport", "coins/ethereum", "coins/monero/generators", "coins/monero", diff --git a/coins/bitcoin/src/wallet/send.rs b/coins/bitcoin/src/wallet/send.rs index f4cfa3b5..24594ab4 100644 --- a/coins/bitcoin/src/wallet/send.rs +++ b/coins/bitcoin/src/wallet/send.rs @@ -375,7 +375,7 @@ impl SignMachine for TransactionSignMachine { msg: &[u8], ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> { if !msg.is_empty() { - panic!("message was passed to the TransactionMachine when it generates its own"); + panic!("message was passed to the TransactionSignMachine when it generates its own"); } let commitments = (0 .. 
self.sigs.len()) diff --git a/coins/ethereum/.gitignore b/coins/ethereum/.gitignore index 46365e03..2dccdce9 100644 --- a/coins/ethereum/.gitignore +++ b/coins/ethereum/.gitignore @@ -1,7 +1,3 @@ # Solidity build outputs cache artifacts - -# Auto-generated ABI files -src/abi/schnorr.rs -src/abi/router.rs diff --git a/coins/ethereum/Cargo.toml b/coins/ethereum/Cargo.toml index bc60d3a4..4bb92fe4 100644 --- a/coins/ethereum/Cargo.toml +++ b/coins/ethereum/Cargo.toml @@ -18,28 +18,29 @@ workspace = true [dependencies] thiserror = { version = "1", default-features = false } -eyre = { version = "0.6", default-features = false } -sha3 = { version = "0.10", default-features = false, features = ["std"] } - -group = { version = "0.13", default-features = false } -k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] } -frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] } - -ethers-core = { version = "2", default-features = false } -ethers-providers = { version = "2", default-features = false } -ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] } - -[build-dependencies] -ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] } - -[dev-dependencies] rand_core = { version = "0.6", default-features = false, features = ["std"] } -hex = { version = "0.4", default-features = false, features = ["std"] } -serde = { version = "1", default-features = false, features = ["std"] } -serde_json = { version = "1", default-features = false, features = ["std"] } +transcript = { package = "flexible-transcript", path = "../../crypto/transcript", default-features = false, features = ["recommended"] } -sha2 = { version = "0.10", default-features = false, features = ["std"] } +group = { version = "0.13", default-features = false } +k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa", "arithmetic"] } 
+frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["secp256k1"] } + +alloy-core = { version = "0.7", default-features = false } +alloy-sol-types = { version = "0.7", default-features = false, features = ["json"] } +alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false, features = ["k256"] } +alloy-rpc-types = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } +alloy-rpc-client = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } +alloy-simple-request-transport = { path = "./alloy-simple-request-transport", default-features = false } +alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } + +[dev-dependencies] +frost = { package = "modular-frost", path = "../../crypto/frost", default-features = false, features = ["tests"] } tokio = { version = "1", features = ["macros"] } + +alloy-node-bindings = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } + +[features] +tests = [] diff --git a/coins/ethereum/README.md b/coins/ethereum/README.md index 13f1f2db..0090b26b 100644 --- a/coins/ethereum/README.md +++ b/coins/ethereum/README.md @@ -3,6 +3,12 @@ This package contains Ethereum-related functionality, specifically deploying and interacting with Serai contracts. +While `monero-serai` and `bitcoin-serai` are general purpose libraries, +`ethereum-serai` is Serai specific. If any of the utilities are generally +desired, please fork and maintain your own copy to ensure the desired +functionality is preserved, or open an issue to request we make this library +general purpose. 
+ ### Dependencies - solc diff --git a/coins/ethereum/alloy-simple-request-transport/Cargo.toml b/coins/ethereum/alloy-simple-request-transport/Cargo.toml new file mode 100644 index 00000000..115998e4 --- /dev/null +++ b/coins/ethereum/alloy-simple-request-transport/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "alloy-simple-request-transport" +version = "0.1.0" +description = "A transport for alloy based off simple-request" +license = "MIT" +repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum/alloy-simple-request-transport" +authors = ["Luke Parker "] +edition = "2021" +rust-version = "1.74" + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +tower = "0.4" + +serde_json = { version = "1", default-features = false } +simple-request = { path = "../../../common/request", default-features = false } + +alloy-json-rpc = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } +alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "037dd4b20ec8533d6b6d5cf5e9489bbb182c18c6", default-features = false } + +[features] +default = ["tls"] +tls = ["simple-request/tls"] diff --git a/coins/ethereum/alloy-simple-request-transport/LICENSE b/coins/ethereum/alloy-simple-request-transport/LICENSE new file mode 100644 index 00000000..659881f1 --- /dev/null +++ b/coins/ethereum/alloy-simple-request-transport/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Luke Parker + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following 
conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/coins/ethereum/alloy-simple-request-transport/README.md b/coins/ethereum/alloy-simple-request-transport/README.md new file mode 100644 index 00000000..372540d1 --- /dev/null +++ b/coins/ethereum/alloy-simple-request-transport/README.md @@ -0,0 +1,4 @@ +# Alloy Simple Request Transport + +A transport for alloy based on simple-request, a small HTTP client built around +hyper. 
diff --git a/coins/ethereum/alloy-simple-request-transport/src/lib.rs b/coins/ethereum/alloy-simple-request-transport/src/lib.rs new file mode 100644 index 00000000..93b35bc1 --- /dev/null +++ b/coins/ethereum/alloy-simple-request-transport/src/lib.rs @@ -0,0 +1,60 @@ +#![cfg_attr(docsrs, feature(doc_auto_cfg))] +#![doc = include_str!("../README.md")] + +use core::task; +use std::io; + +use alloy_json_rpc::{RequestPacket, ResponsePacket}; +use alloy_transport::{TransportError, TransportErrorKind, TransportFut}; + +use simple_request::{hyper, Request, Client}; + +use tower::Service; + +#[derive(Clone, Debug)] +pub struct SimpleRequest { + client: Client, + url: String, +} + +impl SimpleRequest { + pub fn new(url: String) -> Self { + Self { client: Client::with_connection_pool(), url } + } +} + +impl Service for SimpleRequest { + type Response = ResponsePacket; + type Error = TransportError; + type Future = TransportFut<'static>; + + #[inline] + fn poll_ready(&mut self, _cx: &mut task::Context<'_>) -> task::Poll> { + task::Poll::Ready(Ok(())) + } + + #[inline] + fn call(&mut self, req: RequestPacket) -> Self::Future { + let inner = self.clone(); + Box::pin(async move { + let packet = req.serialize().map_err(TransportError::SerError)?; + let request = Request::from( + hyper::Request::post(&inner.url) + .header("Content-Type", "application/json") + .body(serde_json::to_vec(&packet).map_err(TransportError::SerError)?.into()) + .unwrap(), + ); + + let mut res = inner + .client + .request(request) + .await + .map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))? 
+ .body() + .await + .map_err(|e| TransportErrorKind::custom(io::Error::other(format!("{e:?}"))))?; + + serde_json::from_reader(&mut res).map_err(|e| TransportError::deser_err(e, "")) + }) + } +} diff --git a/coins/ethereum/build.rs b/coins/ethereum/build.rs index 3590b12f..38fcfe00 100644 --- a/coins/ethereum/build.rs +++ b/coins/ethereum/build.rs @@ -1,7 +1,5 @@ use std::process::Command; -use ethers_contract::Abigen; - fn main() { println!("cargo:rerun-if-changed=contracts/*"); println!("cargo:rerun-if-changed=artifacts/*"); @@ -21,22 +19,23 @@ fn main() { "--base-path", ".", "-o", "./artifacts", "--overwrite", "--bin", "--abi", - "--optimize", - "./contracts/Schnorr.sol", "./contracts/Router.sol", + "--via-ir", "--optimize", + + "./contracts/IERC20.sol", + + "./contracts/Schnorr.sol", + "./contracts/Deployer.sol", + "./contracts/Sandbox.sol", + "./contracts/Router.sol", + + "./src/tests/contracts/Schnorr.sol", + "./src/tests/contracts/ERC20.sol", + + "--no-color", ]; - assert!(Command::new("solc").args(args).status().unwrap().success()); - - Abigen::new("Schnorr", "./artifacts/Schnorr.abi") - .unwrap() - .generate() - .unwrap() - .write_to_file("./src/abi/schnorr.rs") - .unwrap(); - - Abigen::new("Router", "./artifacts/Router.abi") - .unwrap() - .generate() - .unwrap() - .write_to_file("./src/abi/router.rs") - .unwrap(); + let solc = Command::new("solc").args(args).output().unwrap(); + assert!(solc.status.success()); + for line in String::from_utf8(solc.stderr).unwrap().lines() { + assert!(!line.starts_with("Error:")); + } } diff --git a/coins/ethereum/contracts/Deployer.sol b/coins/ethereum/contracts/Deployer.sol new file mode 100644 index 00000000..475be4c1 --- /dev/null +++ b/coins/ethereum/contracts/Deployer.sol @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: AGPLv3 +pragma solidity ^0.8.0; + +/* +The expected deployment process of the Router is as follows: + +1) A transaction deploying Deployer is made. 
Then, a deterministic signature is + created such that an account with an unknown private key is the creator of + the contract. Anyone can fund this address, and once anyone does, the + transaction deploying Deployer can be published by anyone. No other + transaction may be made from that account. + +2) Anyone deploys the Router through the Deployer. This uses a sequential nonce + such that meet-in-the-middle attacks, with complexity 2**80, aren't feasible. + While such attacks would still be feasible if the Deployer's address was + controllable, the usage of a deterministic signature with a NUMS method + prevents that. + +This doesn't have any denial-of-service risks and will resolve once anyone steps +forward as deployer. This does fail to guarantee an identical address across +every chain, though it enables letting anyone efficiently ask the Deployer for +the address (with the Deployer having an identical address on every chain). + +Unfortunately, guaranteeing identical addresses isn't feasible. We'd need the +Deployer contract to use a consistent salt for the Router, yet the Router must +be deployed with a specific public key for Serai. Since Ethereum isn't able to +determine a valid public key (one the result of a Serai DKG) from a dishonest +public key, we have to allow multiple deployments with Serai being the one to +determine which to use. + +The alternative would be to have a council publish the Serai key on-Ethereum, +with Serai verifying the published result. This would introduce a DoS risk in +the council not publishing the correct key/not publishing any key.
+*/ + +contract Deployer { + event Deployment(bytes32 indexed init_code_hash, address created); + + error DeploymentFailed(); + + function deploy(bytes memory init_code) external { + address created; + assembly { + created := create(0, add(init_code, 0x20), mload(init_code)) + } + if (created == address(0)) { + revert DeploymentFailed(); + } + // These may be emitted out of order upon re-entrancy + emit Deployment(keccak256(init_code), created); + } +} diff --git a/coins/ethereum/contracts/IERC20.sol b/coins/ethereum/contracts/IERC20.sol new file mode 100644 index 00000000..70f1f93c --- /dev/null +++ b/coins/ethereum/contracts/IERC20.sol @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: CC0 +pragma solidity ^0.8.0; + +interface IERC20 { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); + + function name() external view returns (string memory); + function symbol() external view returns (string memory); + function decimals() external view returns (uint8); + + function totalSupply() external view returns (uint256); + + function balanceOf(address owner) external view returns (uint256); + function transfer(address to, uint256 value) external returns (bool); + function transferFrom(address from, address to, uint256 value) external returns (bool); + + function approve(address spender, uint256 value) external returns (bool); + function allowance(address owner, address spender) external view returns (uint256); +} diff --git a/coins/ethereum/contracts/Router.sol b/coins/ethereum/contracts/Router.sol index 25775ec5..c5e1efa2 100644 --- a/coins/ethereum/contracts/Router.sol +++ b/coins/ethereum/contracts/Router.sol @@ -1,27 +1,24 @@ // SPDX-License-Identifier: AGPLv3 pragma solidity ^0.8.0; +import "./IERC20.sol"; + import "./Schnorr.sol"; +import "./Sandbox.sol"; -contract Router is Schnorr { - // Contract initializer - // TODO: Replace with a MuSig of the genesis validators 
- address public initializer; - - // Nonce is incremented for each batch of transactions executed +contract Router { + // Nonce is incremented for each batch of transactions executed/key update uint256 public nonce; - // fixed parity for the public keys used in this contract - uint8 constant public KEY_PARITY = 27; - - // current public key's x-coordinate - // note: this key must always use the fixed parity defined above + // Current public key's x-coordinate + // This key must always have the parity defined within the Schnorr contract bytes32 public seraiKey; struct OutInstruction { address to; + Call[] calls; + uint256 value; - bytes data; } struct Signature { @@ -29,62 +26,197 @@ contract Router is Schnorr { bytes32 s; } + event SeraiKeyUpdated( + uint256 indexed nonce, + bytes32 indexed key, + Signature signature + ); + event InInstruction( + address indexed from, + address indexed coin, + uint256 amount, + bytes instruction + ); // success is a uint256 representing a bitfield of transaction successes - event Executed(uint256 nonce, bytes32 batch, uint256 success); + event Executed( + uint256 indexed nonce, + bytes32 indexed batch, + uint256 success, + Signature signature + ); // error types - error NotInitializer(); - error AlreadyInitialized(); error InvalidKey(); + error InvalidSignature(); + error InvalidAmount(); + error FailedTransfer(); error TooManyTransactions(); - constructor() { - initializer = msg.sender; + modifier _updateSeraiKeyAtEndOfFn( + uint256 _nonce, + bytes32 key, + Signature memory sig + ) { + if ( + (key == bytes32(0)) || + ((bytes32(uint256(key) % Schnorr.Q)) != key) + ) { + revert InvalidKey(); + } + + _; + + seraiKey = key; + emit SeraiKeyUpdated(_nonce, key, sig); } - // initSeraiKey can be called by the contract initializer to set the first - // public key, only if the public key has yet to be set. 
- function initSeraiKey(bytes32 _seraiKey) external { - if (msg.sender != initializer) revert NotInitializer(); - if (seraiKey != 0) revert AlreadyInitialized(); - if (_seraiKey == bytes32(0)) revert InvalidKey(); - seraiKey = _seraiKey; + constructor(bytes32 _seraiKey) _updateSeraiKeyAtEndOfFn( + 0, + _seraiKey, + Signature({ c: bytes32(0), s: bytes32(0) }) + ) { + nonce = 1; } - // updateSeraiKey validates the given Schnorr signature against the current public key, - // and if successful, updates the contract's public key to the given one. + // updateSeraiKey validates the given Schnorr signature against the current + // public key, and if successful, updates the contract's public key to the + // given one. function updateSeraiKey( bytes32 _seraiKey, - Signature memory sig - ) public { - if (_seraiKey == bytes32(0)) revert InvalidKey(); - bytes32 message = keccak256(abi.encodePacked("updateSeraiKey", _seraiKey)); - if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature(); - seraiKey = _seraiKey; + Signature calldata sig + ) external _updateSeraiKeyAtEndOfFn(nonce, _seraiKey, sig) { + bytes memory message = + abi.encodePacked("updateSeraiKey", block.chainid, nonce, _seraiKey); + nonce++; + + if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) { + revert InvalidSignature(); + } } - // execute accepts a list of transactions to execute as well as a Schnorr signature. 
+ function inInstruction( + address coin, + uint256 amount, + bytes memory instruction + ) external payable { + if (coin == address(0)) { + if (amount != msg.value) { + revert InvalidAmount(); + } + } else { + (bool success, bytes memory res) = + address(coin).call( + abi.encodeWithSelector( + IERC20.transferFrom.selector, + msg.sender, + address(this), + amount + ) + ); + + // Require there was nothing returned, which is done by some non-standard + // tokens, or that the ERC20 contract did in fact return true + bool nonStandardResOrTrue = + (res.length == 0) || abi.decode(res, (bool)); + if (!(success && nonStandardResOrTrue)) { + revert FailedTransfer(); + } + } + + /* + Due to fee-on-transfer tokens, emitting the amount directly is frowned upon. + The amount instructed to transfer may not actually be the amount + transferred. + + If we add nonReentrant to every single function which can effect the + balance, we can check the amount exactly matches. This prevents transfers of + less value than expected occurring, at least, not without an additional + transfer to top up the difference (which isn't routed through this contract + and accordingly isn't trying to artificially create events). + + If we don't add nonReentrant, a transfer can be started, and then a new + transfer for the difference can follow it up (again and again until a + rounding error is reached). This contract would believe all transfers were + done in full, despite each only being done in part (except for the last + one). + + Given fee-on-transfer tokens aren't intended to be supported, the only + token planned to be supported is Dai and it doesn't have any fee-on-transfer + logic, fee-on-transfer tokens aren't even able to be supported at this time, + we simply classify this entire class of tokens as non-standard + implementations which induce undefined behavior. It is the Serai network's + role not to add support for any non-standard implementations. 
+ */ + emit InInstruction(msg.sender, coin, amount, instruction); + } + + // execute accepts a list of transactions to execute as well as a signature. // if signature verification passes, the given transactions are executed. // if signature verification fails, this function will revert. function execute( OutInstruction[] calldata transactions, - Signature memory sig - ) public { - if (transactions.length > 256) revert TooManyTransactions(); + Signature calldata sig + ) external { + if (transactions.length > 256) { + revert TooManyTransactions(); + } - bytes32 message = keccak256(abi.encode("execute", nonce, transactions)); + bytes memory message = + abi.encode("execute", block.chainid, nonce, transactions); + uint256 executed_with_nonce = nonce; // This prevents re-entrancy from causing double spends yet does allow // out-of-order execution via re-entrancy nonce++; - if (!verify(KEY_PARITY, seraiKey, message, sig.c, sig.s)) revert InvalidSignature(); + + if (!Schnorr.verify(seraiKey, message, sig.c, sig.s)) { + revert InvalidSignature(); + } uint256 successes; - for(uint256 i = 0; i < transactions.length; i++) { - (bool success, ) = transactions[i].to.call{value: transactions[i].value, gas: 200_000}(transactions[i].data); + for (uint256 i = 0; i < transactions.length; i++) { + bool success; + + // If there are no calls, send to `to` the value + if (transactions[i].calls.length == 0) { + (success, ) = transactions[i].to.call{ + value: transactions[i].value, + gas: 5_000 + }(""); + } else { + // If there are calls, ignore `to`. 
Deploy a new Sandbox and proxy the + // calls through that + // + // We could use a single sandbox in order to reduce gas costs, yet that + // risks one person creating an approval that's hooked before another + // user's intended action executes, in order to drain their coins + // + // While technically, that would be a flaw in the sandboxed flow, this + // is robust and prevents such flaws from being possible + // + // We also don't want people to set state via the Sandbox and expect it + // future available when anyone else could set a distinct value + Sandbox sandbox = new Sandbox(); + (success, ) = address(sandbox).call{ + value: transactions[i].value, + // TODO: Have the Call specify the gas up front + gas: 350_000 + }( + abi.encodeWithSelector( + Sandbox.sandbox.selector, + transactions[i].calls + ) + ); + } + assembly { successes := or(successes, shl(i, success)) } } - emit Executed(nonce, message, successes); + emit Executed( + executed_with_nonce, + keccak256(message), + successes, + sig + ); } } diff --git a/coins/ethereum/contracts/Sandbox.sol b/coins/ethereum/contracts/Sandbox.sol new file mode 100644 index 00000000..a82a3afd --- /dev/null +++ b/coins/ethereum/contracts/Sandbox.sol @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: AGPLv3 +pragma solidity ^0.8.24; + +struct Call { + address to; + uint256 value; + bytes data; +} + +// A minimal sandbox focused on gas efficiency. +// +// The first call is executed if any of the calls fail, making it a fallback. +// All other calls are executed sequentially. 
+contract Sandbox { + error AlreadyCalled(); + error CallsFailed(); + + function sandbox(Call[] calldata calls) external payable { + // Prevent re-entrancy due to this executing arbitrary calls from anyone + // and anywhere + bool called; + assembly { called := tload(0) } + if (called) { + revert AlreadyCalled(); + } + assembly { tstore(0, 1) } + + // Execute the calls, starting from 1 + for (uint256 i = 1; i < calls.length; i++) { + (bool success, ) = + calls[i].to.call{ value: calls[i].value }(calls[i].data); + + // If this call failed, execute the fallback (call 0) + if (!success) { + (success, ) = + calls[0].to.call{ value: address(this).balance }(calls[0].data); + // If this call also failed, revert entirely + if (!success) { + revert CallsFailed(); + } + return; + } + } + + // We don't clear the re-entrancy guard as this contract should never be + // called again, so there's no reason to spend the effort + } +} diff --git a/coins/ethereum/contracts/Schnorr.sol b/coins/ethereum/contracts/Schnorr.sol index 47263e66..8edcdffd 100644 --- a/coins/ethereum/contracts/Schnorr.sol +++ b/coins/ethereum/contracts/Schnorr.sol @@ -2,38 +2,43 @@ pragma solidity ^0.8.0; // see https://github.com/noot/schnorr-verify for implementation details -contract Schnorr { +library Schnorr { // secp256k1 group order uint256 constant public Q = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141; - error InvalidSOrA(); - error InvalidSignature(); + // Fixed parity for the public keys used in this contract + // This avoids spending a word passing the parity in a similar style to + // Bitcoin's Taproot + uint8 constant public KEY_PARITY = 27; - // parity := public key y-coord parity (27 or 28) - // px := public key x-coord + error InvalidSOrA(); + error MalformedSignature(); + + // px := public key x-coord, where the public key has a parity of KEY_PARITY // message := 32-byte hash of the message // c := schnorr signature challenge // s := schnorr signature function verify( 
- uint8 parity, bytes32 px, - bytes32 message, + bytes memory message, bytes32 c, bytes32 s - ) public view returns (bool) { - // ecrecover = (m, v, r, s); + ) internal pure returns (bool) { + // ecrecover = (m, v, r, s) -> key + // We instead pass the following to obtain the nonce (not the key) + // Then we hash it and verify it matches the challenge bytes32 sa = bytes32(Q - mulmod(uint256(s), uint256(px), Q)); bytes32 ca = bytes32(Q - mulmod(uint256(c), uint256(px), Q)); + // For safety, we want each input to ecrecover to be 0 (sa, px, ca) + // The ecreover precomple checks `r` and `s` (`px` and `ca`) are non-zero + // That leaves us to check `sa` are non-zero if (sa == 0) revert InvalidSOrA(); - // the ecrecover precompile implementation checks that the `r` and `s` - // inputs are non-zero (in this case, `px` and `ca`), thus we don't need to - // check if they're zero. - address R = ecrecover(sa, parity, px, ca); - if (R == address(0)) revert InvalidSignature(); - return c == keccak256( - abi.encodePacked(R, uint8(parity), px, block.chainid, message) - ); + address R = ecrecover(sa, KEY_PARITY, px, ca); + if (R == address(0)) revert MalformedSignature(); + + // Check the signature is correct by rebuilding the challenge + return c == keccak256(abi.encodePacked(R, px, message)); } } diff --git a/coins/ethereum/src/abi/mod.rs b/coins/ethereum/src/abi/mod.rs index 2d7dd47c..1ae23374 100644 --- a/coins/ethereum/src/abi/mod.rs +++ b/coins/ethereum/src/abi/mod.rs @@ -1,6 +1,37 @@ +use alloy_sol_types::sol; + #[rustfmt::skip] +#[allow(warnings)] +#[allow(needless_pass_by_value)] #[allow(clippy::all)] -pub(crate) mod schnorr; +#[allow(clippy::ignored_unit_patterns)] +#[allow(clippy::redundant_closure_for_method_calls)] +mod erc20_container { + use super::*; + sol!("contracts/IERC20.sol"); +} +pub use erc20_container::IERC20 as erc20; + #[rustfmt::skip] +#[allow(warnings)] +#[allow(needless_pass_by_value)] #[allow(clippy::all)] -pub(crate) mod router; 
+#[allow(clippy::ignored_unit_patterns)] +#[allow(clippy::redundant_closure_for_method_calls)] +mod deployer_container { + use super::*; + sol!("contracts/Deployer.sol"); +} +pub use deployer_container::Deployer as deployer; + +#[rustfmt::skip] +#[allow(warnings)] +#[allow(needless_pass_by_value)] +#[allow(clippy::all)] +#[allow(clippy::ignored_unit_patterns)] +#[allow(clippy::redundant_closure_for_method_calls)] +mod router_container { + use super::*; + sol!(Router, "artifacts/Router.abi"); +} +pub use router_container::Router as router; diff --git a/coins/ethereum/src/crypto.rs b/coins/ethereum/src/crypto.rs index 5f681cfa..ca228eb5 100644 --- a/coins/ethereum/src/crypto.rs +++ b/coins/ethereum/src/crypto.rs @@ -1,91 +1,185 @@ -use sha3::{Digest, Keccak256}; - use group::ff::PrimeField; use k256::{ - elliptic_curve::{ - bigint::ArrayEncoding, ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint, - }, - ProjectivePoint, Scalar, U256, + elliptic_curve::{ops::Reduce, point::AffineCoordinates, sec1::ToEncodedPoint}, + ProjectivePoint, Scalar, U256 as KU256, }; +#[cfg(test)] +use k256::{elliptic_curve::point::DecompressPoint, AffinePoint}; use frost::{ algorithm::{Hram, SchnorrSignature}, - curve::Secp256k1, + curve::{Ciphersuite, Secp256k1}, }; +use alloy_core::primitives::{Parity, Signature as AlloySignature}; +use alloy_consensus::{SignableTransaction, Signed, TxLegacy}; + +use crate::abi::router::{Signature as AbiSignature}; + pub(crate) fn keccak256(data: &[u8]) -> [u8; 32] { - Keccak256::digest(data).into() + alloy_core::primitives::keccak256(data).into() } -pub(crate) fn address(point: &ProjectivePoint) -> [u8; 20] { +pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar { + >::reduce_bytes(&keccak256(data).into()) +} + +pub fn address(point: &ProjectivePoint) -> [u8; 20] { let encoded_point = point.to_encoded_point(false); // Last 20 bytes of the hash of the concatenated x and y coordinates // We obtain the concatenated x and y coordinates via the 
uncompressed encoding of the point keccak256(&encoded_point.as_ref()[1 .. 65])[12 ..].try_into().unwrap() } +pub(crate) fn deterministically_sign(tx: &TxLegacy) -> Signed { + assert!( + tx.chain_id.is_none(), + "chain ID was Some when deterministically signing a TX (causing a non-deterministic signer)" + ); + + let sig_hash = tx.signature_hash().0; + let mut r = hash_to_scalar(&[sig_hash.as_slice(), b"r"].concat()); + let mut s = hash_to_scalar(&[sig_hash.as_slice(), b"s"].concat()); + loop { + let r_bytes: [u8; 32] = r.to_repr().into(); + let s_bytes: [u8; 32] = s.to_repr().into(); + let v = Parity::NonEip155(false); + let signature = + AlloySignature::from_scalars_and_parity(r_bytes.into(), s_bytes.into(), v).unwrap(); + let tx = tx.clone().into_signed(signature); + if tx.recover_signer().is_ok() { + return tx; + } + + // Re-hash until valid + r = hash_to_scalar(r_bytes.as_ref()); + s = hash_to_scalar(s_bytes.as_ref()); + } +} + +/// The public key for a Schnorr-signing account. #[allow(non_snake_case)] +#[derive(Clone, Copy, PartialEq, Eq, Debug)] pub struct PublicKey { - pub A: ProjectivePoint, - pub px: Scalar, - pub parity: u8, + pub(crate) A: ProjectivePoint, + pub(crate) px: Scalar, } impl PublicKey { + /// Construct a new `PublicKey`. + /// + /// This will return None if the provided point isn't eligible to be a public key (due to + /// bounds such as parity). 
#[allow(non_snake_case)] pub fn new(A: ProjectivePoint) -> Option { let affine = A.to_affine(); - let parity = u8::from(bool::from(affine.y_is_odd())) + 27; - if parity != 27 { + // Only allow even keys to save a word within Ethereum + let is_odd = bool::from(affine.y_is_odd()); + if is_odd { None?; } let x_coord = affine.x(); - let x_coord_scalar = >::reduce_bytes(&x_coord); + let x_coord_scalar = >::reduce_bytes(&x_coord); // Return None if a reduction would occur + // Reductions would be incredibly unlikely and shouldn't be an issue, yet it's one less + // headache/concern to have + // This does ban a trivial amoount of public keys if x_coord_scalar.to_repr() != x_coord { None?; } - Some(PublicKey { A, px: x_coord_scalar, parity }) + Some(PublicKey { A, px: x_coord_scalar }) + } + + pub fn point(&self) -> ProjectivePoint { + self.A + } + + pub(crate) fn eth_repr(&self) -> [u8; 32] { + self.px.to_repr().into() + } + + #[cfg(test)] + pub(crate) fn from_eth_repr(repr: [u8; 32]) -> Option { + #[allow(non_snake_case)] + let A = Option::::from(AffinePoint::decompress(&repr.into(), 0.into()))?.into(); + Option::from(Scalar::from_repr(repr.into())).map(|px| PublicKey { A, px }) } } +/// The HRAm to use for the Schnorr contract. #[derive(Clone, Default)] pub struct EthereumHram {} impl Hram for EthereumHram { #[allow(non_snake_case)] fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar { - let a_encoded_point = A.to_encoded_point(true); - let mut a_encoded = a_encoded_point.as_ref().to_owned(); - a_encoded[0] += 25; // Ethereum uses 27/28 for point parity - assert!((a_encoded[0] == 27) || (a_encoded[0] == 28)); + let x_coord = A.to_affine().x(); + let mut data = address(R).to_vec(); - data.append(&mut a_encoded); + data.extend(x_coord.as_slice()); data.extend(m); - Scalar::reduce(U256::from_be_slice(&keccak256(&data))) + + >::reduce_bytes(&keccak256(&data).into()) } } +/// A signature for the Schnorr contract. 
+#[derive(Clone, Copy, PartialEq, Eq, Debug)] pub struct Signature { pub(crate) c: Scalar, pub(crate) s: Scalar, } impl Signature { + pub fn verify(&self, public_key: &PublicKey, message: &[u8]) -> bool { + #[allow(non_snake_case)] + let R = (Secp256k1::generator() * self.s) - (public_key.A * self.c); + EthereumHram::hram(&R, &public_key.A, message) == self.c + } + + /// Construct a new `Signature`. + /// + /// This will return None if the signature is invalid. pub fn new( public_key: &PublicKey, - chain_id: U256, - m: &[u8], + message: &[u8], signature: SchnorrSignature, ) -> Option { - let c = EthereumHram::hram( - &signature.R, - &public_key.A, - &[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(), - ); + let c = EthereumHram::hram(&signature.R, &public_key.A, message); if !signature.verify(public_key.A, c) { None?; } - Some(Signature { c, s: signature.s }) + + let res = Signature { c, s: signature.s }; + assert!(res.verify(public_key, message)); + Some(res) + } + + pub fn c(&self) -> Scalar { + self.c + } + pub fn s(&self) -> Scalar { + self.s + } + + pub fn to_bytes(&self) -> [u8; 64] { + let mut res = [0; 64]; + res[.. 
32].copy_from_slice(self.c.to_repr().as_ref()); + res[32 ..].copy_from_slice(self.s.to_repr().as_ref()); + res + } + + pub fn from_bytes(bytes: [u8; 64]) -> std::io::Result { + let mut reader = bytes.as_slice(); + let c = Secp256k1::read_F(&mut reader)?; + let s = Secp256k1::read_F(&mut reader)?; + Ok(Signature { c, s }) + } +} +impl From<&Signature> for AbiSignature { + fn from(sig: &Signature) -> AbiSignature { + let c: [u8; 32] = sig.c.to_repr().into(); + let s: [u8; 32] = sig.s.to_repr().into(); + AbiSignature { c: c.into(), s: s.into() } } } diff --git a/coins/ethereum/src/deployer.rs b/coins/ethereum/src/deployer.rs new file mode 100644 index 00000000..d6cfeee9 --- /dev/null +++ b/coins/ethereum/src/deployer.rs @@ -0,0 +1,119 @@ +use std::sync::Arc; + +use alloy_core::primitives::{hex::FromHex, Address, B256, U256, Bytes, TxKind}; +use alloy_consensus::{Signed, TxLegacy}; + +use alloy_sol_types::{SolCall, SolEvent}; + +use alloy_rpc_types::{BlockNumberOrTag, Filter}; +use alloy_simple_request_transport::SimpleRequest; +use alloy_provider::{Provider, RootProvider}; + +use crate::{ + Error, + crypto::{self, keccak256, PublicKey}, + router::Router, +}; +pub use crate::abi::deployer as abi; + +/// The Deployer contract for the Router contract. +/// +/// This Deployer has a deterministic address, letting it be immediately identified on any +/// compatible chain. It then supports retrieving the Router contract's address (which isn't +/// deterministic) using a single log query. +#[derive(Clone, Debug)] +pub struct Deployer; +impl Deployer { + /// Obtain the transaction to deploy this contract, already signed. + /// + /// The account this transaction is sent from (which is populated in `from`) must be sufficiently + /// funded for this transaction to be submitted. This account has no known private key to anyone, + /// so ETH sent can be neither misappropriated nor returned. 
+ pub fn deployment_tx() -> Signed { + let bytecode = include_str!("../artifacts/Deployer.bin"); + let bytecode = + Bytes::from_hex(bytecode).expect("compiled-in Deployer bytecode wasn't valid hex"); + + let tx = TxLegacy { + chain_id: None, + nonce: 0, + gas_price: 100_000_000_000u128, + // TODO: Use a more accurate gas limit + gas_limit: 1_000_000u128, + to: TxKind::Create, + value: U256::ZERO, + input: bytecode, + }; + + crypto::deterministically_sign(&tx) + } + + /// Obtain the deterministic address for this contract. + pub fn address() -> [u8; 20] { + let deployer_deployer = + Self::deployment_tx().recover_signer().expect("deployment_tx didn't have a valid signature"); + **Address::create(&deployer_deployer, 0) + } + + /// Construct a new view of the `Deployer`. + pub async fn new(provider: Arc>) -> Result, Error> { + let address = Self::address(); + #[cfg(not(test))] + let required_block = BlockNumberOrTag::Finalized; + #[cfg(test)] + let required_block = BlockNumberOrTag::Latest; + let code = provider + .get_code_at(address.into(), required_block.into()) + .await + .map_err(|_| Error::ConnectionError)?; + // Contract has yet to be deployed + if code.is_empty() { + return Ok(None); + } + Ok(Some(Self)) + } + + /// Yield the `ContractCall` necessary to deploy the Router. + pub fn deploy_router(&self, key: &PublicKey) -> TxLegacy { + TxLegacy { + to: TxKind::Call(Self::address().into()), + input: abi::deployCall::new((Router::init_code(key).into(),)).abi_encode().into(), + gas_limit: 1_000_000, + ..Default::default() + } + } + + /// Find the first Router deployed with the specified key as its first key. + /// + /// This is the Router Serai will use, and is the only way to construct a `Router`. 
+ pub async fn find_router( + &self, + provider: Arc>, + key: &PublicKey, + ) -> Result, Error> { + let init_code = Router::init_code(key); + let init_code_hash = keccak256(&init_code); + + #[cfg(not(test))] + let to_block = BlockNumberOrTag::Finalized; + #[cfg(test)] + let to_block = BlockNumberOrTag::Latest; + + // Find the first log using this init code (where the init code is binding to the key) + let filter = + Filter::new().from_block(0).to_block(to_block).address(Address::from(Self::address())); + let filter = filter.event_signature(abi::Deployment::SIGNATURE_HASH); + let filter = filter.topic1(B256::from(init_code_hash)); + let logs = provider.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + let Some(first_log) = logs.first() else { return Ok(None) }; + let router = first_log + .log_decode::() + .map_err(|_| Error::ConnectionError)? + .inner + .data + .created; + + Ok(Some(Router::new(provider, router))) + } +} diff --git a/coins/ethereum/src/erc20.rs b/coins/ethereum/src/erc20.rs new file mode 100644 index 00000000..3b5bbee2 --- /dev/null +++ b/coins/ethereum/src/erc20.rs @@ -0,0 +1,118 @@ +use std::{sync::Arc, collections::HashSet}; + +use alloy_core::primitives::{Address, B256, U256}; + +use alloy_sol_types::{SolInterface, SolEvent}; + +use alloy_rpc_types::{BlockNumberOrTag, Filter}; +use alloy_simple_request_transport::SimpleRequest; +use alloy_provider::{Provider, RootProvider}; + +use crate::Error; +pub use crate::abi::erc20 as abi; +use abi::{IERC20Calls, Transfer, transferCall, transferFromCall}; + +#[derive(Clone, Debug)] +pub struct TopLevelErc20Transfer { + pub id: [u8; 32], + pub from: [u8; 20], + pub amount: U256, + pub data: Vec, +} + +/// A view for an ERC20 contract. +#[derive(Clone, Debug)] +pub struct ERC20(Arc>, Address); +impl ERC20 { + /// Construct a new view of the specified ERC20 contract. + /// + /// This checks a contract is deployed at that address yet does not check the contract is + /// actually an ERC20. 
+ pub async fn new( + provider: Arc>, + address: [u8; 20], + ) -> Result, Error> { + let code = provider + .get_code_at(address.into(), BlockNumberOrTag::Finalized.into()) + .await + .map_err(|_| Error::ConnectionError)?; + // Contract has yet to be deployed + if code.is_empty() { + return Ok(None); + } + Ok(Some(Self(provider.clone(), Address::from(&address)))) + } + + pub async fn top_level_transfers( + &self, + block: u64, + to: [u8; 20], + ) -> Result, Error> { + let filter = Filter::new().from_block(block).to_block(block).address(self.1); + let filter = filter.event_signature(Transfer::SIGNATURE_HASH); + let mut to_topic = [0; 32]; + to_topic[12 ..].copy_from_slice(&to); + let filter = filter.topic2(B256::from(to_topic)); + let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + let mut handled = HashSet::new(); + + let mut top_level_transfers = vec![]; + for log in logs { + // Double check the address which emitted this log + if log.address() != self.1 { + Err(Error::ConnectionError)?; + } + + let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?; + let tx = self.0.get_transaction_by_hash(tx_id).await.map_err(|_| Error::ConnectionError)?; + + // If this is a top-level call... + if tx.to == Some(self.1) { + // And we recognize the call... 
+ // Don't validate the encoding as this can't be re-encoded to an identical bytestring due + // to the InInstruction appended + if let Ok(call) = IERC20Calls::abi_decode(&tx.input, false) { + // Extract the top-level call's from/to/value + let (from, call_to, value) = match call { + IERC20Calls::transfer(transferCall { to: call_to, value }) => (tx.from, call_to, value), + IERC20Calls::transferFrom(transferFromCall { from, to: call_to, value }) => { + (from, call_to, value) + } + // Treat any other function selectors as unrecognized + _ => continue, + }; + + let log = log.log_decode::().map_err(|_| Error::ConnectionError)?.inner.data; + + // Ensure the top-level transfer is equivalent, and this presumably isn't a log for an + // internal transfer + if (log.from != from) || (call_to != to) || (value != log.value) { + continue; + } + + // Now that the top-level transfer is confirmed to be equivalent to the log, ensure it's + // the only log we handle + if handled.contains(&tx_id) { + continue; + } + handled.insert(tx_id); + + // Read the data appended after + let encoded = call.abi_encode(); + let data = tx.input.as_ref()[encoded.len() ..].to_vec(); + + // Push the transfer + top_level_transfers.push(TopLevelErc20Transfer { + // Since we'll only handle one log for this TX, set the ID to the TX ID + id: *tx_id, + from: *log.from.0, + amount: log.value, + data, + }); + } + } + } + Ok(top_level_transfers) + } +} diff --git a/coins/ethereum/src/lib.rs b/coins/ethereum/src/lib.rs index 505de38e..8d4a5312 100644 --- a/coins/ethereum/src/lib.rs +++ b/coins/ethereum/src/lib.rs @@ -1,16 +1,30 @@ use thiserror::Error; +pub use alloy_core; +pub use alloy_consensus; + +pub use alloy_rpc_types; +pub use alloy_simple_request_transport; +pub use alloy_rpc_client; +pub use alloy_provider; + pub mod crypto; pub(crate) mod abi; -pub mod schnorr; + +pub mod erc20; +pub mod deployer; pub mod router; +pub mod machine; + #[cfg(test)] mod tests; -#[derive(Error, Debug)] +#[derive(Clone, 
Copy, PartialEq, Eq, Debug, Error)] pub enum Error { #[error("failed to verify Schnorr signature")] InvalidSignature, + #[error("couldn't make call/send TX")] + ConnectionError, } diff --git a/coins/ethereum/src/machine.rs b/coins/ethereum/src/machine.rs new file mode 100644 index 00000000..0d5dc7a5 --- /dev/null +++ b/coins/ethereum/src/machine.rs @@ -0,0 +1,414 @@ +use std::{ + io::{self, Read}, + collections::HashMap, +}; + +use rand_core::{RngCore, CryptoRng}; + +use transcript::{Transcript, RecommendedTranscript}; + +use group::GroupEncoding; +use frost::{ + curve::{Ciphersuite, Secp256k1}, + Participant, ThresholdKeys, FrostError, + algorithm::Schnorr, + sign::*, +}; + +use alloy_core::primitives::U256; + +use crate::{ + crypto::{PublicKey, EthereumHram, Signature}, + router::{ + abi::{Call as AbiCall, OutInstruction as AbiOutInstruction}, + Router, + }, +}; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Call { + pub to: [u8; 20], + pub value: U256, + pub data: Vec, +} +impl Call { + pub fn read(reader: &mut R) -> io::Result { + let mut to = [0; 20]; + reader.read_exact(&mut to)?; + + let value = { + let mut value_bytes = [0; 32]; + reader.read_exact(&mut value_bytes)?; + U256::from_le_slice(&value_bytes) + }; + + let mut data_len = { + let mut data_len = [0; 4]; + reader.read_exact(&mut data_len)?; + usize::try_from(u32::from_le_bytes(data_len)).expect("u32 couldn't fit within a usize") + }; + + // A valid DoS would be to claim a 4 GB data is present for only 4 bytes + // We read this in 1 KB chunks to only read data actually present (with a max DoS of 1 KB) + let mut data = vec![]; + while data_len > 0 { + let chunk_len = data_len.min(1024); + let mut chunk = vec![0; chunk_len]; + reader.read_exact(&mut chunk)?; + data.extend(&chunk); + data_len -= chunk_len; + } + + Ok(Call { to, value, data }) + } + + fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&self.to)?; + writer.write_all(&self.value.as_le_bytes())?; + + let data_len = 
u32::try_from(self.data.len()) + .map_err(|_| io::Error::other("call data length exceeded 2**32"))?; + writer.write_all(&data_len.to_le_bytes())?; + writer.write_all(&self.data) + } +} +impl From for AbiCall { + fn from(call: Call) -> AbiCall { + AbiCall { to: call.to.into(), value: call.value, data: call.data.into() } + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum OutInstructionTarget { + Direct([u8; 20]), + Calls(Vec), +} +impl OutInstructionTarget { + fn read(reader: &mut R) -> io::Result { + let mut kind = [0xff]; + reader.read_exact(&mut kind)?; + + match kind[0] { + 0 => { + let mut addr = [0; 20]; + reader.read_exact(&mut addr)?; + Ok(OutInstructionTarget::Direct(addr)) + } + 1 => { + let mut calls_len = [0; 4]; + reader.read_exact(&mut calls_len)?; + let calls_len = u32::from_le_bytes(calls_len); + + let mut calls = vec![]; + for _ in 0 .. calls_len { + calls.push(Call::read(reader)?); + } + Ok(OutInstructionTarget::Calls(calls)) + } + _ => Err(io::Error::other("unrecognized OutInstructionTarget"))?, + } + } + + fn write(&self, writer: &mut W) -> io::Result<()> { + match self { + OutInstructionTarget::Direct(addr) => { + writer.write_all(&[0])?; + writer.write_all(addr)?; + } + OutInstructionTarget::Calls(calls) => { + writer.write_all(&[1])?; + let call_len = u32::try_from(calls.len()) + .map_err(|_| io::Error::other("amount of calls exceeded 2**32"))?; + writer.write_all(&call_len.to_le_bytes())?; + for call in calls { + call.write(writer)?; + } + } + } + Ok(()) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct OutInstruction { + pub target: OutInstructionTarget, + pub value: U256, +} +impl OutInstruction { + fn read(reader: &mut R) -> io::Result { + let target = OutInstructionTarget::read(reader)?; + + let value = { + let mut value_bytes = [0; 32]; + reader.read_exact(&mut value_bytes)?; + U256::from_le_slice(&value_bytes) + }; + + Ok(OutInstruction { target, value }) + } + fn write(&self, writer: &mut W) -> io::Result<()> { + 
self.target.write(writer)?; + writer.write_all(&self.value.as_le_bytes()) + } +} +impl From for AbiOutInstruction { + fn from(instruction: OutInstruction) -> AbiOutInstruction { + match instruction.target { + OutInstructionTarget::Direct(addr) => { + AbiOutInstruction { to: addr.into(), calls: vec![], value: instruction.value } + } + OutInstructionTarget::Calls(calls) => AbiOutInstruction { + to: [0; 20].into(), + calls: calls.into_iter().map(Into::into).collect(), + value: instruction.value, + }, + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum RouterCommand { + UpdateSeraiKey { chain_id: U256, nonce: U256, key: PublicKey }, + Execute { chain_id: U256, nonce: U256, outs: Vec }, +} + +impl RouterCommand { + pub fn msg(&self) -> Vec { + match self { + RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => { + Router::update_serai_key_message(*chain_id, *nonce, key) + } + RouterCommand::Execute { chain_id, nonce, outs } => Router::execute_message( + *chain_id, + *nonce, + outs.iter().map(|out| out.clone().into()).collect(), + ), + } + } + + pub fn read(reader: &mut R) -> io::Result { + let mut kind = [0xff]; + reader.read_exact(&mut kind)?; + + match kind[0] { + 0 => { + let mut chain_id = [0; 32]; + reader.read_exact(&mut chain_id)?; + + let mut nonce = [0; 32]; + reader.read_exact(&mut nonce)?; + + let key = PublicKey::new(Secp256k1::read_G(reader)?) 
+ .ok_or(io::Error::other("key for RouterCommand doesn't have an eth representation"))?; + Ok(RouterCommand::UpdateSeraiKey { + chain_id: U256::from_le_slice(&chain_id), + nonce: U256::from_le_slice(&nonce), + key, + }) + } + 1 => { + let mut chain_id = [0; 32]; + reader.read_exact(&mut chain_id)?; + let chain_id = U256::from_le_slice(&chain_id); + + let mut nonce = [0; 32]; + reader.read_exact(&mut nonce)?; + let nonce = U256::from_le_slice(&nonce); + + let mut outs_len = [0; 4]; + reader.read_exact(&mut outs_len)?; + let outs_len = u32::from_le_bytes(outs_len); + + let mut outs = vec![]; + for _ in 0 .. outs_len { + outs.push(OutInstruction::read(reader)?); + } + + Ok(RouterCommand::Execute { chain_id, nonce, outs }) + } + _ => Err(io::Error::other("reading unknown type of RouterCommand"))?, + } + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + match self { + RouterCommand::UpdateSeraiKey { chain_id, nonce, key } => { + writer.write_all(&[0])?; + writer.write_all(&chain_id.as_le_bytes())?; + writer.write_all(&nonce.as_le_bytes())?; + writer.write_all(&key.A.to_bytes()) + } + RouterCommand::Execute { chain_id, nonce, outs } => { + writer.write_all(&[1])?; + writer.write_all(&chain_id.as_le_bytes())?; + writer.write_all(&nonce.as_le_bytes())?; + writer.write_all(&u32::try_from(outs.len()).unwrap().to_le_bytes())?; + for out in outs { + out.write(writer)?; + } + Ok(()) + } + } + } + + pub fn serialize(&self) -> Vec { + let mut res = vec![]; + self.write(&mut res).unwrap(); + res + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct SignedRouterCommand { + command: RouterCommand, + signature: Signature, +} + +impl SignedRouterCommand { + pub fn new(key: &PublicKey, command: RouterCommand, signature: &[u8; 64]) -> Option { + let c = Secp256k1::read_F(&mut &signature[.. 32]).ok()?; + let s = Secp256k1::read_F(&mut &signature[32 ..]).ok()?; + let signature = Signature { c, s }; + + if !signature.verify(key, &command.msg()) { + None? 
+ } + Some(SignedRouterCommand { command, signature }) + } + + pub fn command(&self) -> &RouterCommand { + &self.command + } + + pub fn signature(&self) -> &Signature { + &self.signature + } + + pub fn read(reader: &mut R) -> io::Result { + let command = RouterCommand::read(reader)?; + + let mut sig = [0; 64]; + reader.read_exact(&mut sig)?; + let signature = Signature::from_bytes(sig)?; + + Ok(SignedRouterCommand { command, signature }) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + self.command.write(writer)?; + writer.write_all(&self.signature.to_bytes()) + } +} + +pub struct RouterCommandMachine { + key: PublicKey, + command: RouterCommand, + machine: AlgorithmMachine>, +} + +impl RouterCommandMachine { + pub fn new(keys: ThresholdKeys, command: RouterCommand) -> Option { + // The Schnorr algorithm should be fine without this, even when using the IETF variant + // If this is better and more comprehensive, we should do it, even if not necessary + let mut transcript = RecommendedTranscript::new(b"ethereum-serai RouterCommandMachine v0.1"); + let key = keys.group_key(); + transcript.append_message(b"key", key.to_bytes()); + transcript.append_message(b"command", command.serialize()); + + Some(Self { + key: PublicKey::new(key)?, + command, + machine: AlgorithmMachine::new(Schnorr::new(transcript), keys), + }) + } +} + +impl PreprocessMachine for RouterCommandMachine { + type Preprocess = Preprocess; + type Signature = SignedRouterCommand; + type SignMachine = RouterCommandSignMachine; + + fn preprocess( + self, + rng: &mut R, + ) -> (Self::SignMachine, Self::Preprocess) { + let (machine, preprocess) = self.machine.preprocess(rng); + + (RouterCommandSignMachine { key: self.key, command: self.command, machine }, preprocess) + } +} + +pub struct RouterCommandSignMachine { + key: PublicKey, + command: RouterCommand, + machine: AlgorithmSignMachine>, +} + +impl SignMachine for RouterCommandSignMachine { + type Params = (); + type Keys = ThresholdKeys; 
+ type Preprocess = Preprocess; + type SignatureShare = SignatureShare; + type SignatureMachine = RouterCommandSignatureMachine; + + fn cache(self) -> CachedPreprocess { + unimplemented!( + "RouterCommand machines don't support caching their preprocesses due to {}", + "being already bound to a specific command" + ); + } + + fn from_cache( + (): (), + _: ThresholdKeys, + _: CachedPreprocess, + ) -> (Self, Self::Preprocess) { + unimplemented!( + "RouterCommand machines don't support caching their preprocesses due to {}", + "being already bound to a specific command" + ); + } + + fn read_preprocess(&self, reader: &mut R) -> io::Result { + self.machine.read_preprocess(reader) + } + + fn sign( + self, + commitments: HashMap, + msg: &[u8], + ) -> Result<(RouterCommandSignatureMachine, Self::SignatureShare), FrostError> { + if !msg.is_empty() { + panic!("message was passed to a RouterCommand machine when it generates its own"); + } + + let (machine, share) = self.machine.sign(commitments, &self.command.msg())?; + + Ok((RouterCommandSignatureMachine { key: self.key, command: self.command, machine }, share)) + } +} + +pub struct RouterCommandSignatureMachine { + key: PublicKey, + command: RouterCommand, + machine: + AlgorithmSignatureMachine>, +} + +impl SignatureMachine for RouterCommandSignatureMachine { + type SignatureShare = SignatureShare; + + fn read_share(&self, reader: &mut R) -> io::Result { + self.machine.read_share(reader) + } + + fn complete( + self, + shares: HashMap, + ) -> Result { + let sig = self.machine.complete(shares)?; + let signature = Signature::new(&self.key, &self.command.msg(), sig) + .expect("machine produced an invalid signature"); + Ok(SignedRouterCommand { command: self.command, signature }) + } +} diff --git a/coins/ethereum/src/router.rs b/coins/ethereum/src/router.rs index 3696fd9b..c4399ae3 100644 --- a/coins/ethereum/src/router.rs +++ b/coins/ethereum/src/router.rs @@ -1,30 +1,426 @@ -pub use crate::abi::router::*; +use std::{sync::Arc, 
io, collections::HashSet}; -/* -use crate::crypto::{ProcessedSignature, PublicKey}; -use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode}; -use eyre::Result; -use std::{convert::From, fs::File, sync::Arc}; +use k256::{ + elliptic_curve::{group::GroupEncoding, sec1}, + ProjectivePoint, +}; -pub async fn router_update_public_key( - contract: &Router, - public_key: &PublicKey, - signature: &ProcessedSignature, -) -> std::result::Result, eyre::ErrReport> { - let tx = contract.update_public_key(public_key.px.to_bytes().into(), signature.into()); - let pending_tx = tx.send().await?; - let receipt = pending_tx.await?; - Ok(receipt) +use alloy_core::primitives::{hex::FromHex, Address, U256, Bytes, TxKind}; +#[cfg(test)] +use alloy_core::primitives::B256; +use alloy_consensus::TxLegacy; + +use alloy_sol_types::{SolValue, SolConstructor, SolCall, SolEvent}; + +use alloy_rpc_types::Filter; +#[cfg(test)] +use alloy_rpc_types::{BlockId, TransactionRequest, TransactionInput}; +use alloy_simple_request_transport::SimpleRequest; +use alloy_provider::{Provider, RootProvider}; + +pub use crate::{ + Error, + crypto::{PublicKey, Signature}, + abi::{erc20::Transfer, router as abi}, +}; +use abi::{SeraiKeyUpdated, InInstruction as InInstructionEvent, Executed as ExecutedEvent}; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum Coin { + Ether, + Erc20([u8; 20]), } -pub async fn router_execute( - contract: &Router, - txs: Vec, - signature: &ProcessedSignature, -) -> std::result::Result, eyre::ErrReport> { - let tx = contract.execute(txs, signature.into()).send(); - let pending_tx = tx.send().await?; - let receipt = pending_tx.await?; - Ok(receipt) +impl Coin { + pub fn read(reader: &mut R) -> io::Result { + let mut kind = [0xff]; + reader.read_exact(&mut kind)?; + Ok(match kind[0] { + 0 => Coin::Ether, + 1 => { + let mut address = [0; 20]; + reader.read_exact(&mut address)?; + Coin::Erc20(address) + } + _ => Err(io::Error::other("unrecognized 
Coin type"))?, + }) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + match self { + Coin::Ether => writer.write_all(&[0]), + Coin::Erc20(token) => { + writer.write_all(&[1])?; + writer.write_all(token) + } + } + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct InInstruction { + pub id: ([u8; 32], u64), + pub from: [u8; 20], + pub coin: Coin, + pub amount: U256, + pub data: Vec, + pub key_at_end_of_block: ProjectivePoint, +} + +impl InInstruction { + pub fn read(reader: &mut R) -> io::Result { + let id = { + let mut id_hash = [0; 32]; + reader.read_exact(&mut id_hash)?; + let mut id_pos = [0; 8]; + reader.read_exact(&mut id_pos)?; + let id_pos = u64::from_le_bytes(id_pos); + (id_hash, id_pos) + }; + + let mut from = [0; 20]; + reader.read_exact(&mut from)?; + + let coin = Coin::read(reader)?; + let mut amount = [0; 32]; + reader.read_exact(&mut amount)?; + let amount = U256::from_le_slice(&amount); + + let mut data_len = [0; 4]; + reader.read_exact(&mut data_len)?; + let data_len = usize::try_from(u32::from_le_bytes(data_len)) + .map_err(|_| io::Error::other("InInstruction data exceeded 2**32 in length"))?; + let mut data = vec![0; data_len]; + reader.read_exact(&mut data)?; + + let mut key_at_end_of_block = ::Repr::default(); + reader.read_exact(&mut key_at_end_of_block)?; + let key_at_end_of_block = Option::from(ProjectivePoint::from_bytes(&key_at_end_of_block)) + .ok_or(io::Error::other("InInstruction had key at end of block which wasn't valid"))?; + + Ok(InInstruction { id, from, coin, amount, data, key_at_end_of_block }) + } + + pub fn write(&self, writer: &mut W) -> io::Result<()> { + writer.write_all(&self.id.0)?; + writer.write_all(&self.id.1.to_le_bytes())?; + + writer.write_all(&self.from)?; + + self.coin.write(writer)?; + writer.write_all(&self.amount.as_le_bytes())?; + + writer.write_all( + &u32::try_from(self.data.len()) + .map_err(|_| { + io::Error::other("InInstruction being written had data exceeding 2**32 in length") + })? 
+ .to_le_bytes(), + )?; + writer.write_all(&self.data)?; + + writer.write_all(&self.key_at_end_of_block.to_bytes()) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Executed { + pub tx_id: [u8; 32], + pub nonce: u64, + pub signature: [u8; 64], +} + +/// The contract Serai uses to manage its state. +#[derive(Clone, Debug)] +pub struct Router(Arc>, Address); +impl Router { + pub(crate) fn code() -> Vec { + let bytecode = include_str!("../artifacts/Router.bin"); + Bytes::from_hex(bytecode).expect("compiled-in Router bytecode wasn't valid hex").to_vec() + } + + pub(crate) fn init_code(key: &PublicKey) -> Vec { + let mut bytecode = Self::code(); + // Append the constructor arguments + bytecode.extend((abi::constructorCall { _seraiKey: key.eth_repr().into() }).abi_encode()); + bytecode + } + + // This isn't pub in order to force users to use `Deployer::find_router`. + pub(crate) fn new(provider: Arc>, address: Address) -> Self { + Self(provider, address) + } + + pub fn address(&self) -> [u8; 20] { + **self.1 + } + + /// Get the key for Serai at the specified block. + #[cfg(test)] + pub async fn serai_key(&self, at: [u8; 32]) -> Result { + let call = TransactionRequest::default() + .to(Some(self.1)) + .input(TransactionInput::new(abi::seraiKeyCall::new(()).abi_encode().into())); + let bytes = self + .0 + .call(&call, Some(BlockId::Hash(B256::from(at).into()))) + .await + .map_err(|_| Error::ConnectionError)?; + let res = + abi::seraiKeyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?; + PublicKey::from_eth_repr(res._0.0).ok_or(Error::ConnectionError) + } + + /// Get the message to be signed in order to update the key for Serai. 
+ pub(crate) fn update_serai_key_message(chain_id: U256, nonce: U256, key: &PublicKey) -> Vec { + let mut buffer = b"updateSeraiKey".to_vec(); + buffer.extend(&chain_id.to_be_bytes::<32>()); + buffer.extend(&nonce.to_be_bytes::<32>()); + buffer.extend(&key.eth_repr()); + buffer + } + + /// Update the key representing Serai. + pub fn update_serai_key(&self, public_key: &PublicKey, sig: &Signature) -> TxLegacy { + // TODO: Set a more accurate gas + TxLegacy { + to: TxKind::Call(self.1), + input: abi::updateSeraiKeyCall::new((public_key.eth_repr().into(), sig.into())) + .abi_encode() + .into(), + gas_limit: 100_000, + ..Default::default() + } + } + + /// Get the current nonce for the published batches. + #[cfg(test)] + pub async fn nonce(&self, at: [u8; 32]) -> Result { + let call = TransactionRequest::default() + .to(Some(self.1)) + .input(TransactionInput::new(abi::nonceCall::new(()).abi_encode().into())); + let bytes = self + .0 + .call(&call, Some(BlockId::Hash(B256::from(at).into()))) + .await + .map_err(|_| Error::ConnectionError)?; + let res = + abi::nonceCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?; + Ok(res._0) + } + + /// Get the message to be signed in order to execute a batch of `OutInstruction`s. + pub(crate) fn execute_message( + chain_id: U256, + nonce: U256, + outs: Vec, + ) -> Vec { + ("execute".to_string(), chain_id, nonce, outs).abi_encode_params() + } + + /// Execute a batch of `OutInstruction`s. 
+ pub fn execute(&self, outs: &[abi::OutInstruction], sig: &Signature) -> TxLegacy { + TxLegacy { + to: TxKind::Call(self.1), + input: abi::executeCall::new((outs.to_vec(), sig.into())).abi_encode().into(), + // TODO + gas_limit: 100_000 + ((200_000 + 10_000) * u128::try_from(outs.len()).unwrap()), + ..Default::default() + } + } + + pub async fn in_instructions( + &self, + block: u64, + allowed_tokens: &HashSet<[u8; 20]>, + ) -> Result, Error> { + let key_at_end_of_block = { + let filter = Filter::new().from_block(0).to_block(block).address(self.1); + let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH); + let all_keys = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + let last_key_x_coordinate_log = all_keys.last().ok_or(Error::ConnectionError)?; + let last_key_x_coordinate = last_key_x_coordinate_log + .log_decode::() + .map_err(|_| Error::ConnectionError)? + .inner + .data + .key; + + let mut compressed_point = ::Repr::default(); + compressed_point[0] = u8::from(sec1::Tag::CompressedEvenY); + compressed_point[1 ..].copy_from_slice(last_key_x_coordinate.as_slice()); + + ProjectivePoint::from_bytes(&compressed_point).expect("router's last key wasn't a valid key") + }; + + let filter = Filter::new().from_block(block).to_block(block).address(self.1); + let filter = filter.event_signature(InInstructionEvent::SIGNATURE_HASH); + let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + let mut transfer_check = HashSet::new(); + let mut in_instructions = vec![]; + for log in logs { + // Double check the address which emitted this log + if log.address() != self.1 { + Err(Error::ConnectionError)?; + } + + let id = ( + log.block_hash.ok_or(Error::ConnectionError)?.into(), + log.log_index.ok_or(Error::ConnectionError)?, + ); + + let tx_hash = log.transaction_hash.ok_or(Error::ConnectionError)?; + let tx = self.0.get_transaction_by_hash(tx_hash).await.map_err(|_| Error::ConnectionError)?; + + let log = + 
log.log_decode::().map_err(|_| Error::ConnectionError)?.inner.data; + + let coin = if log.coin.0 == [0; 20] { + Coin::Ether + } else { + let token = *log.coin.0; + + if !allowed_tokens.contains(&token) { + continue; + } + + // If this also counts as a top-level transfer via the token, drop it + // + // Necessary in order to handle a potential edge case with some theoretical token + // implementations + // + // This will either let it be handled by the top-level transfer hook or will drop it + // entirely on the side of caution + if tx.to == Some(token.into()) { + continue; + } + + // Get all logs for this TX + let receipt = self + .0 + .get_transaction_receipt(tx_hash) + .await + .map_err(|_| Error::ConnectionError)? + .ok_or(Error::ConnectionError)?; + let tx_logs = receipt.inner.logs(); + + // Find a matching transfer log + let mut found_transfer = false; + for tx_log in tx_logs { + let log_index = tx_log.log_index.ok_or(Error::ConnectionError)?; + // Ensure we didn't already use this transfer to check a distinct InInstruction event + if transfer_check.contains(&log_index) { + continue; + } + + // Check if this log is from the token we expected to be transferred + if tx_log.address().0 != token { + continue; + } + // Check if this is a transfer log + // https://github.com/alloy-rs/core/issues/589 + if tx_log.topics()[0] != Transfer::SIGNATURE_HASH { + continue; + } + let Ok(transfer) = Transfer::decode_log(&tx_log.inner.clone(), true) else { continue }; + // Check if this is a transfer to us for the expected amount + if (transfer.to == self.1) && (transfer.value == log.amount) { + transfer_check.insert(log_index); + found_transfer = true; + break; + } + } + if !found_transfer { + // This shouldn't be a ConnectionError + // This is an exploit, a non-conforming ERC20, or an invalid connection + // This should halt the process which is sufficient, yet this is sub-optimal + // TODO + Err(Error::ConnectionError)?; + } + + Coin::Erc20(token) + }; + + 
in_instructions.push(InInstruction { + id, + from: *log.from.0, + coin, + amount: log.amount, + data: log.instruction.as_ref().to_vec(), + key_at_end_of_block, + }); + } + + Ok(in_instructions) + } + + pub async fn executed_commands(&self, block: u64) -> Result, Error> { + let mut res = vec![]; + + { + let filter = Filter::new().from_block(block).to_block(block).address(self.1); + let filter = filter.event_signature(SeraiKeyUpdated::SIGNATURE_HASH); + let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + for log in logs { + // Double check the address which emitted this log + if log.address() != self.1 { + Err(Error::ConnectionError)?; + } + + let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into(); + + let log = + log.log_decode::().map_err(|_| Error::ConnectionError)?.inner.data; + + let mut signature = [0; 64]; + signature[.. 32].copy_from_slice(log.signature.c.as_ref()); + signature[32 ..].copy_from_slice(log.signature.s.as_ref()); + res.push(Executed { + tx_id, + nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?, + signature, + }); + } + } + + { + let filter = Filter::new().from_block(block).to_block(block).address(self.1); + let filter = filter.event_signature(ExecutedEvent::SIGNATURE_HASH); + let logs = self.0.get_logs(&filter).await.map_err(|_| Error::ConnectionError)?; + + for log in logs { + // Double check the address which emitted this log + if log.address() != self.1 { + Err(Error::ConnectionError)?; + } + + let tx_id = log.transaction_hash.ok_or(Error::ConnectionError)?.into(); + + let log = log.log_decode::().map_err(|_| Error::ConnectionError)?.inner.data; + + let mut signature = [0; 64]; + signature[.. 
32].copy_from_slice(log.signature.c.as_ref()); + signature[32 ..].copy_from_slice(log.signature.s.as_ref()); + res.push(Executed { + tx_id, + nonce: log.nonce.try_into().map_err(|_| Error::ConnectionError)?, + signature, + }); + } + } + + Ok(res) + } + + #[cfg(feature = "tests")] + pub fn key_updated_filter(&self) -> Filter { + Filter::new().address(self.1).event_signature(SeraiKeyUpdated::SIGNATURE_HASH) + } + #[cfg(feature = "tests")] + pub fn executed_filter(&self) -> Filter { + Filter::new().address(self.1).event_signature(ExecutedEvent::SIGNATURE_HASH) + } } -*/ diff --git a/coins/ethereum/src/schnorr.rs b/coins/ethereum/src/schnorr.rs deleted file mode 100644 index 0e4495ec..00000000 --- a/coins/ethereum/src/schnorr.rs +++ /dev/null @@ -1,34 +0,0 @@ -use eyre::{eyre, Result}; - -use group::ff::PrimeField; - -use ethers_providers::{Provider, Http}; - -use crate::{ - Error, - crypto::{keccak256, PublicKey, Signature}, -}; -pub use crate::abi::schnorr::*; - -pub async fn call_verify( - contract: &Schnorr>, - public_key: &PublicKey, - message: &[u8], - signature: &Signature, -) -> Result<()> { - if contract - .verify( - public_key.parity, - public_key.px.to_repr().into(), - keccak256(message), - signature.c.to_repr().into(), - signature.s.to_repr().into(), - ) - .call() - .await? 
- { - Ok(()) - } else { - Err(eyre!(Error::InvalidSignature)) - } -} diff --git a/coins/ethereum/src/tests/abi/mod.rs b/coins/ethereum/src/tests/abi/mod.rs new file mode 100644 index 00000000..57ea8811 --- /dev/null +++ b/coins/ethereum/src/tests/abi/mod.rs @@ -0,0 +1,13 @@ +use alloy_sol_types::sol; + +#[rustfmt::skip] +#[allow(warnings)] +#[allow(needless_pass_by_value)] +#[allow(clippy::all)] +#[allow(clippy::ignored_unit_patterns)] +#[allow(clippy::redundant_closure_for_method_calls)] +mod schnorr_container { + use super::*; + sol!("src/tests/contracts/Schnorr.sol"); +} +pub(crate) use schnorr_container::TestSchnorr as schnorr; diff --git a/coins/ethereum/src/tests/contracts/ERC20.sol b/coins/ethereum/src/tests/contracts/ERC20.sol new file mode 100644 index 00000000..e157974c --- /dev/null +++ b/coins/ethereum/src/tests/contracts/ERC20.sol @@ -0,0 +1,51 @@ +// SPDX-License-Identifier: AGPLv3 +pragma solidity ^0.8.0; + +contract TestERC20 { + event Transfer(address indexed from, address indexed to, uint256 value); + event Approval(address indexed owner, address indexed spender, uint256 value); + + function name() public pure returns (string memory) { + return "Test ERC20"; + } + function symbol() public pure returns (string memory) { + return "TEST"; + } + function decimals() public pure returns (uint8) { + return 18; + } + + function totalSupply() public pure returns (uint256) { + return 1_000_000 * 10e18; + } + + mapping(address => uint256) balances; + mapping(address => mapping(address => uint256)) allowances; + + constructor() { + balances[msg.sender] = totalSupply(); + } + + function balanceOf(address owner) public view returns (uint256) { + return balances[owner]; + } + function transfer(address to, uint256 value) public returns (bool) { + balances[msg.sender] -= value; + balances[to] += value; + return true; + } + function transferFrom(address from, address to, uint256 value) public returns (bool) { + allowances[from][msg.sender] -= value; + 
balances[from] -= value; + balances[to] += value; + return true; + } + + function approve(address spender, uint256 value) public returns (bool) { + allowances[msg.sender][spender] = value; + return true; + } + function allowance(address owner, address spender) public view returns (uint256) { + return allowances[owner][spender]; + } +} diff --git a/coins/ethereum/src/tests/contracts/Schnorr.sol b/coins/ethereum/src/tests/contracts/Schnorr.sol new file mode 100644 index 00000000..832cd2fe --- /dev/null +++ b/coins/ethereum/src/tests/contracts/Schnorr.sol @@ -0,0 +1,15 @@ +// SPDX-License-Identifier: AGPLv3 +pragma solidity ^0.8.0; + +import "../../../contracts/Schnorr.sol"; + +contract TestSchnorr { + function verify( + bytes32 px, + bytes calldata message, + bytes32 c, + bytes32 s + ) external pure returns (bool) { + return Schnorr.verify(px, message, c, s); + } +} diff --git a/coins/ethereum/src/tests/crypto.rs b/coins/ethereum/src/tests/crypto.rs index 6dced933..a668b2d6 100644 --- a/coins/ethereum/src/tests/crypto.rs +++ b/coins/ethereum/src/tests/crypto.rs @@ -1,49 +1,33 @@ use rand_core::OsRng; -use sha2::Sha256; -use sha3::{Digest, Keccak256}; - -use group::Group; +use group::ff::{Field, PrimeField}; use k256::{ - ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey}, - elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint}, - U256, Scalar, AffinePoint, ProjectivePoint, + ecdsa::{ + self, hazmat::SignPrimitive, signature::hazmat::PrehashVerifier, SigningKey, VerifyingKey, + }, + Scalar, ProjectivePoint, }; use frost::{ - curve::Secp256k1, + curve::{Ciphersuite, Secp256k1}, algorithm::{Hram, IetfSchnorr}, tests::{algorithm_machines, sign}, }; use crate::{crypto::*, tests::key_gen}; -pub fn hash_to_scalar(data: &[u8]) -> Scalar { - Scalar::reduce(U256::from_be_slice(&keccak256(data))) -} - -pub(crate) fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> { - if r.is_zero().into() || 
s.is_zero().into() || !((v == 27) || (v == 28)) { - return None; - } - - #[allow(non_snake_case)] - let R = AffinePoint::decompress(&r.to_bytes(), (v - 27).into()); - #[allow(non_snake_case)] - if let Some(R) = Option::::from(R) { - #[allow(non_snake_case)] - let R = ProjectivePoint::from(R); - - let r = r.invert().unwrap(); - let u1 = ProjectivePoint::GENERATOR * (-message * r); - let u2 = R * (s * r); - let key: ProjectivePoint = u1 + u2; - if !bool::from(key.is_identity()) { - return Some(address(&key)); - } - } - - None +// The ecrecover opcode, yet with parity replacing v +pub(crate) fn ecrecover(message: Scalar, odd_y: bool, r: Scalar, s: Scalar) -> Option<[u8; 20]> { + let sig = ecdsa::Signature::from_scalars(r, s).ok()?; + let message: [u8; 32] = message.to_repr().into(); + alloy_core::primitives::Signature::from_signature_and_parity( + sig, + alloy_core::primitives::Parity::Parity(odd_y), + ) + .ok()? + .recover_address_from_prehash(&alloy_core::primitives::B256::from(message)) + .ok() + .map(Into::into) } #[test] @@ -55,20 +39,23 @@ fn test_ecrecover() { const MESSAGE: &[u8] = b"Hello, World!"; let (sig, recovery_id) = private .as_nonzero_scalar() - .try_sign_prehashed_rfc6979::(&Keccak256::digest(MESSAGE), b"") + .try_sign_prehashed( + ::F::random(&mut OsRng), + &keccak256(MESSAGE).into(), + ) .unwrap(); // Sanity check the signature verifies #[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result { - assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ()); + assert_eq!(public.verify_prehash(&keccak256(MESSAGE), &sig).unwrap(), ()); } // Perform the ecrecover assert_eq!( ecrecover( hash_to_scalar(MESSAGE), - u8::from(recovery_id.unwrap().is_y_odd()) + 27, + u8::from(recovery_id.unwrap().is_y_odd()) == 1, *sig.r(), *sig.s() ) @@ -93,18 +80,13 @@ fn test_signing() { pub fn preprocess_signature_for_ecrecover( R: ProjectivePoint, public_key: &PublicKey, - chain_id: U256, m: &[u8], s: Scalar, -) -> (u8, 
Scalar, Scalar) { - let c = EthereumHram::hram( - &R, - &public_key.A, - &[chain_id.to_be_byte_array().as_slice(), &keccak256(m)].concat(), - ); +) -> (Scalar, Scalar) { + let c = EthereumHram::hram(&R, &public_key.A, m); let sa = -(s * public_key.px); let ca = -(c * public_key.px); - (public_key.parity, sa, ca) + (sa, ca) } #[test] @@ -112,21 +94,12 @@ fn test_ecrecover_hack() { let (keys, public_key) = key_gen(); const MESSAGE: &[u8] = b"Hello, World!"; - let hashed_message = keccak256(MESSAGE); - let chain_id = U256::ONE; - let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat(); let algo = IetfSchnorr::::ietf(); - let sig = sign( - &mut OsRng, - &algo, - keys.clone(), - algorithm_machines(&mut OsRng, &algo, &keys), - full_message, - ); + let sig = + sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE); - let (parity, sa, ca) = - preprocess_signature_for_ecrecover(sig.R, &public_key, chain_id, MESSAGE, sig.s); - let q = ecrecover(sa, parity, public_key.px, ca).unwrap(); + let (sa, ca) = preprocess_signature_for_ecrecover(sig.R, &public_key, MESSAGE, sig.s); + let q = ecrecover(sa, false, public_key.px, ca).unwrap(); assert_eq!(q, address(&sig.R)); } diff --git a/coins/ethereum/src/tests/mod.rs b/coins/ethereum/src/tests/mod.rs index c468cfb6..3a381d42 100644 --- a/coins/ethereum/src/tests/mod.rs +++ b/coins/ethereum/src/tests/mod.rs @@ -1,21 +1,25 @@ -use std::{sync::Arc, time::Duration, fs::File, collections::HashMap}; +use std::{sync::Arc, collections::HashMap}; use rand_core::OsRng; -use group::ff::PrimeField; use k256::{Scalar, ProjectivePoint}; use frost::{curve::Secp256k1, Participant, ThresholdKeys, tests::key_gen as frost_key_gen}; -use ethers_core::{ - types::{H160, Signature as EthersSignature}, - abi::Abi, +use alloy_core::{ + primitives::{Address, U256, Bytes, TxKind}, + hex::FromHex, }; -use ethers_contract::ContractFactory; -use ethers_providers::{Middleware, Provider, Http}; 
+use alloy_consensus::{SignableTransaction, TxLegacy}; -use crate::crypto::PublicKey; +use alloy_rpc_types::TransactionReceipt; +use alloy_simple_request_transport::SimpleRequest; +use alloy_provider::{Provider, RootProvider}; + +use crate::crypto::{address, deterministically_sign, PublicKey}; mod crypto; + +mod abi; mod schnorr; mod router; @@ -36,57 +40,88 @@ pub fn key_gen() -> (HashMap>, PublicKey) (keys, public_key) } -// TODO: Replace with a contract deployment from an unknown account, so the environment solely has -// to fund the deployer, not create/pass a wallet -// TODO: Deterministic deployments across chains +// TODO: Use a proper error here +pub async fn send( + provider: &RootProvider, + wallet: &k256::ecdsa::SigningKey, + mut tx: TxLegacy, +) -> Option { + let verifying_key = *wallet.verifying_key().as_affine(); + let address = Address::from(address(&verifying_key.into())); + + // https://github.com/alloy-rs/alloy/issues/539 + // let chain_id = provider.get_chain_id().await.unwrap(); + // tx.chain_id = Some(chain_id); + tx.chain_id = None; + tx.nonce = provider.get_transaction_count(address, None).await.unwrap(); + // 100 gwei + tx.gas_price = 100_000_000_000u128; + + let sig = wallet.sign_prehash_recoverable(tx.signature_hash().as_ref()).unwrap(); + assert_eq!(address, tx.clone().into_signed(sig.into()).recover_signer().unwrap()); + assert!( + provider.get_balance(address, None).await.unwrap() > + ((U256::from(tx.gas_price) * U256::from(tx.gas_limit)) + tx.value) + ); + + let mut bytes = vec![]; + tx.encode_with_signature_fields(&sig.into(), &mut bytes); + let pending_tx = provider.send_raw_transaction(&bytes).await.ok()?; + pending_tx.get_receipt().await.ok() +} + +pub async fn fund_account( + provider: &RootProvider, + wallet: &k256::ecdsa::SigningKey, + to_fund: Address, + value: U256, +) -> Option<()> { + let funding_tx = + TxLegacy { to: TxKind::Call(to_fund), gas_limit: 21_000, value, ..Default::default() }; + assert!(send(provider, wallet, 
funding_tx).await.unwrap().status()); + + Some(()) +} + +// TODO: Use a proper error here pub async fn deploy_contract( - chain_id: u32, - client: Arc>, + client: Arc>, wallet: &k256::ecdsa::SigningKey, name: &str, -) -> eyre::Result { - let abi: Abi = - serde_json::from_reader(File::open(format!("./artifacts/{name}.abi")).unwrap()).unwrap(); - +) -> Option
{ let hex_bin_buf = std::fs::read_to_string(format!("./artifacts/{name}.bin")).unwrap(); let hex_bin = if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf }; - let bin = hex::decode(hex_bin).unwrap(); - let factory = ContractFactory::new(abi, bin.into(), client.clone()); + let bin = Bytes::from_hex(hex_bin).unwrap(); - let mut deployment_tx = factory.deploy(())?.tx; - deployment_tx.set_chain_id(chain_id); - deployment_tx.set_gas(1_000_000); - let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?; - deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas); - deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas); + let deployment_tx = TxLegacy { + chain_id: None, + nonce: 0, + // 100 gwei + gas_price: 100_000_000_000u128, + gas_limit: 1_000_000, + to: TxKind::Create, + value: U256::ZERO, + input: bin, + }; - let sig_hash = deployment_tx.sighash(); - let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap(); + let deployment_tx = deterministically_sign(&deployment_tx); - // EIP-155 v - let mut v = u64::from(rid.to_byte()); - assert!((v == 0) || (v == 1)); - v += u64::from((chain_id * 2) + 35); + // Fund the deployer address + fund_account( + &client, + wallet, + deployment_tx.recover_signer().unwrap(), + U256::from(deployment_tx.tx().gas_limit) * U256::from(deployment_tx.tx().gas_price), + ) + .await?; - let r = sig.r().to_repr(); - let r_ref: &[u8] = r.as_ref(); - let s = sig.s().to_repr(); - let s_ref: &[u8] = s.as_ref(); - let deployment_tx = - deployment_tx.rlp_signed(&EthersSignature { r: r_ref.into(), s: s_ref.into(), v }); + let (deployment_tx, sig, _) = deployment_tx.into_parts(); + let mut bytes = vec![]; + deployment_tx.encode_with_signature_fields(&sig, &mut bytes); + let pending_tx = client.send_raw_transaction(&bytes).await.ok()?; + let receipt = pending_tx.get_receipt().await.ok()?; + 
assert!(receipt.status()); - let pending_tx = client.send_raw_transaction(deployment_tx).await?; - - let mut receipt; - while { - receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?; - receipt.is_none() - } { - tokio::time::sleep(Duration::from_secs(6)).await; - } - let receipt = receipt.unwrap(); - assert!(receipt.status == Some(1.into())); - - Ok(receipt.contract_address.unwrap()) + Some(receipt.contract_address.unwrap()) } diff --git a/coins/ethereum/src/tests/router.rs b/coins/ethereum/src/tests/router.rs index c9be93be..39a865bd 100644 --- a/coins/ethereum/src/tests/router.rs +++ b/coins/ethereum/src/tests/router.rs @@ -2,7 +2,8 @@ use std::{convert::TryFrom, sync::Arc, collections::HashMap}; use rand_core::OsRng; -use group::ff::PrimeField; +use group::Group; +use k256::ProjectivePoint; use frost::{ curve::Secp256k1, Participant, ThresholdKeys, @@ -10,100 +11,173 @@ use frost::{ tests::{algorithm_machines, sign}, }; -use ethers_core::{ - types::{H160, U256, Bytes}, - abi::AbiEncode, - utils::{Anvil, AnvilInstance}, -}; -use ethers_providers::{Middleware, Provider, Http}; +use alloy_core::primitives::{Address, U256}; + +use alloy_simple_request_transport::SimpleRequest; +use alloy_rpc_client::ClientBuilder; +use alloy_provider::{Provider, RootProvider}; + +use alloy_node_bindings::{Anvil, AnvilInstance}; use crate::{ - crypto::{keccak256, PublicKey, EthereumHram, Signature}, - router::{self, *}, - tests::{key_gen, deploy_contract}, + crypto::*, + deployer::Deployer, + router::{Router, abi as router}, + tests::{key_gen, send, fund_account}, }; async fn setup_test() -> ( - u32, AnvilInstance, - Router>, + Arc>, + u64, + Router, HashMap>, PublicKey, ) { let anvil = Anvil::new().spawn(); - let provider = Provider::::try_from(anvil.endpoint()).unwrap(); - let chain_id = provider.get_chainid().await.unwrap().as_u32(); + let provider = RootProvider::new( + ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true), + ); + let 
chain_id = provider.get_chain_id().await.unwrap(); let wallet = anvil.keys()[0].clone().into(); let client = Arc::new(provider); - let contract_address = - deploy_contract(chain_id, client.clone(), &wallet, "Router").await.unwrap(); - let contract = Router::new(contract_address, client.clone()); + // Make sure the Deployer constructor returns None, as it doesn't exist yet + assert!(Deployer::new(client.clone()).await.unwrap().is_none()); + + // Deploy the Deployer + let tx = Deployer::deployment_tx(); + fund_account( + &client, + &wallet, + tx.recover_signer().unwrap(), + U256::from(tx.tx().gas_limit) * U256::from(tx.tx().gas_price), + ) + .await + .unwrap(); + + let (tx, sig, _) = tx.into_parts(); + let mut bytes = vec![]; + tx.encode_with_signature_fields(&sig, &mut bytes); + + let pending_tx = client.send_raw_transaction(&bytes).await.unwrap(); + let receipt = pending_tx.get_receipt().await.unwrap(); + assert!(receipt.status()); + let deployer = + Deployer::new(client.clone()).await.expect("network error").expect("deployer wasn't deployed"); let (keys, public_key) = key_gen(); - // Set the key to the threshold keys - let tx = contract.init_serai_key(public_key.px.to_repr().into()).gas(100_000); - let pending_tx = tx.send().await.unwrap(); - let receipt = pending_tx.await.unwrap().unwrap(); - assert!(receipt.status == Some(1.into())); + // Verify the Router constructor returns None, as it doesn't exist yet + assert!(deployer.find_router(client.clone(), &public_key).await.unwrap().is_none()); - (chain_id, anvil, contract, keys, public_key) + // Deploy the router + let receipt = send(&client, &anvil.keys()[0].clone().into(), deployer.deploy_router(&public_key)) + .await + .unwrap(); + assert!(receipt.status()); + let contract = deployer.find_router(client.clone(), &public_key).await.unwrap().unwrap(); + + (anvil, client, chain_id, contract, keys, public_key) +} + +async fn latest_block_hash(client: &RootProvider) -> [u8; 32] { + client + 
.get_block(client.get_block_number().await.unwrap().into(), false) + .await + .unwrap() + .unwrap() + .header + .hash + .unwrap() + .0 } #[tokio::test] async fn test_deploy_contract() { - setup_test().await; + let (_anvil, client, _, router, _, public_key) = setup_test().await; + + let block_hash = latest_block_hash(&client).await; + assert_eq!(router.serai_key(block_hash).await.unwrap(), public_key); + assert_eq!(router.nonce(block_hash).await.unwrap(), U256::try_from(1u64).unwrap()); + // TODO: Check it emitted SeraiKeyUpdated(public_key) at its genesis } pub fn hash_and_sign( keys: &HashMap>, public_key: &PublicKey, - chain_id: U256, message: &[u8], ) -> Signature { - let hashed_message = keccak256(message); - - let mut chain_id_bytes = [0; 32]; - chain_id.to_big_endian(&mut chain_id_bytes); - let full_message = &[chain_id_bytes.as_slice(), &hashed_message].concat(); - let algo = IetfSchnorr::::ietf(); - let sig = sign( - &mut OsRng, - &algo, - keys.clone(), - algorithm_machines(&mut OsRng, &algo, keys), - full_message, - ); + let sig = + sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, keys), message); - Signature::new(public_key, k256::U256::from_words(chain_id.0), message, sig).unwrap() + Signature::new(public_key, message, sig).unwrap() +} + +#[tokio::test] +async fn test_router_update_serai_key() { + let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await; + + let next_key = loop { + let point = ProjectivePoint::random(&mut OsRng); + let Some(next_key) = PublicKey::new(point) else { continue }; + break next_key; + }; + + let message = Router::update_serai_key_message( + U256::try_from(chain_id).unwrap(), + U256::try_from(1u64).unwrap(), + &next_key, + ); + let sig = hash_and_sign(&keys, &public_key, &message); + + let first_block_hash = latest_block_hash(&client).await; + assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key); + + let receipt = + send(&client, 
&anvil.keys()[0].clone().into(), contract.update_serai_key(&next_key, &sig)) + .await + .unwrap(); + assert!(receipt.status()); + + let second_block_hash = latest_block_hash(&client).await; + assert_eq!(contract.serai_key(second_block_hash).await.unwrap(), next_key); + // Check this does still offer the historical state + assert_eq!(contract.serai_key(first_block_hash).await.unwrap(), public_key); + // TODO: Check logs + + println!("gas used: {:?}", receipt.gas_used); + // println!("logs: {:?}", receipt.logs); } #[tokio::test] async fn test_router_execute() { - let (chain_id, _anvil, contract, keys, public_key) = setup_test().await; + let (anvil, client, chain_id, contract, keys, public_key) = setup_test().await; - let to = H160([0u8; 20]); - let value = U256([0u64; 4]); - let data = Bytes::from([0]); - let tx = OutInstruction { to, value, data: data.clone() }; + let to = Address::from([0; 20]); + let value = U256::ZERO; + let tx = router::OutInstruction { to, value, calls: vec![] }; + let txs = vec![tx]; - let nonce_call = contract.nonce(); - let nonce = nonce_call.call().await.unwrap(); + let first_block_hash = latest_block_hash(&client).await; + let nonce = contract.nonce(first_block_hash).await.unwrap(); + assert_eq!(nonce, U256::try_from(1u64).unwrap()); - let encoded = - ("execute".to_string(), nonce, vec![router::OutInstruction { to, value, data }]).encode(); - let sig = hash_and_sign(&keys, &public_key, chain_id.into(), &encoded); + let message = Router::execute_message(U256::try_from(chain_id).unwrap(), nonce, txs.clone()); + let sig = hash_and_sign(&keys, &public_key, &message); - let tx = contract - .execute(vec![tx], router::Signature { c: sig.c.to_repr().into(), s: sig.s.to_repr().into() }) - .gas(300_000); - let pending_tx = tx.send().await.unwrap(); - let receipt = dbg!(pending_tx.await.unwrap().unwrap()); - assert!(receipt.status == Some(1.into())); + let receipt = + send(&client, &anvil.keys()[0].clone().into(), contract.execute(&txs, 
&sig)).await.unwrap(); + assert!(receipt.status()); - println!("gas used: {:?}", receipt.cumulative_gas_used); - println!("logs: {:?}", receipt.logs); + let second_block_hash = latest_block_hash(&client).await; + assert_eq!(contract.nonce(second_block_hash).await.unwrap(), U256::try_from(2u64).unwrap()); + // Check this does still offer the historical state + assert_eq!(contract.nonce(first_block_hash).await.unwrap(), U256::try_from(1u64).unwrap()); + // TODO: Check logs + + println!("gas used: {:?}", receipt.gas_used); + // println!("logs: {:?}", receipt.logs); } diff --git a/coins/ethereum/src/tests/schnorr.rs b/coins/ethereum/src/tests/schnorr.rs index 9525e4d6..9311c292 100644 --- a/coins/ethereum/src/tests/schnorr.rs +++ b/coins/ethereum/src/tests/schnorr.rs @@ -1,11 +1,9 @@ -use std::{convert::TryFrom, sync::Arc}; +use std::sync::Arc; use rand_core::OsRng; -use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256, Scalar}; - -use ethers_core::utils::{keccak256, Anvil, AnvilInstance}; -use ethers_providers::{Middleware, Provider, Http}; +use group::ff::PrimeField; +use k256::Scalar; use frost::{ curve::Secp256k1, @@ -13,24 +11,34 @@ use frost::{ tests::{algorithm_machines, sign}, }; +use alloy_core::primitives::Address; + +use alloy_sol_types::SolCall; + +use alloy_rpc_types::{TransactionInput, TransactionRequest}; +use alloy_simple_request_transport::SimpleRequest; +use alloy_rpc_client::ClientBuilder; +use alloy_provider::{Provider, RootProvider}; + +use alloy_node_bindings::{Anvil, AnvilInstance}; + use crate::{ + Error, crypto::*, - schnorr::*, - tests::{key_gen, deploy_contract}, + tests::{key_gen, deploy_contract, abi::schnorr as abi}, }; -async fn setup_test() -> (u32, AnvilInstance, Schnorr>) { +async fn setup_test() -> (AnvilInstance, Arc>, Address) { let anvil = Anvil::new().spawn(); - let provider = Provider::::try_from(anvil.endpoint()).unwrap(); - let chain_id = provider.get_chainid().await.unwrap().as_u32(); + let provider = RootProvider::new( + 
ClientBuilder::default().transport(SimpleRequest::new(anvil.endpoint()), true), + ); let wallet = anvil.keys()[0].clone().into(); let client = Arc::new(provider); - let contract_address = - deploy_contract(chain_id, client.clone(), &wallet, "Schnorr").await.unwrap(); - let contract = Schnorr::new(contract_address, client.clone()); - (chain_id, anvil, contract) + let address = deploy_contract(client.clone(), &wallet, "TestSchnorr").await.unwrap(); + (anvil, client, address) } #[tokio::test] @@ -38,30 +46,48 @@ async fn test_deploy_contract() { setup_test().await; } +pub async fn call_verify( + provider: &RootProvider, + contract: Address, + public_key: &PublicKey, + message: &[u8], + signature: &Signature, +) -> Result<(), Error> { + let px: [u8; 32] = public_key.px.to_repr().into(); + let c_bytes: [u8; 32] = signature.c.to_repr().into(); + let s_bytes: [u8; 32] = signature.s.to_repr().into(); + let call = TransactionRequest::default().to(Some(contract)).input(TransactionInput::new( + abi::verifyCall::new((px.into(), message.to_vec().into(), c_bytes.into(), s_bytes.into())) + .abi_encode() + .into(), + )); + let bytes = provider.call(&call, None).await.map_err(|_| Error::ConnectionError)?; + let res = + abi::verifyCall::abi_decode_returns(&bytes, true).map_err(|_| Error::ConnectionError)?; + + if res._0 { + Ok(()) + } else { + Err(Error::InvalidSignature) + } +} + #[tokio::test] async fn test_ecrecover_hack() { - let (chain_id, _anvil, contract) = setup_test().await; - let chain_id = U256::from(chain_id); + let (_anvil, client, contract) = setup_test().await; let (keys, public_key) = key_gen(); const MESSAGE: &[u8] = b"Hello, World!"; - let hashed_message = keccak256(MESSAGE); - let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat(); let algo = IetfSchnorr::::ietf(); - let sig = sign( - &mut OsRng, - &algo, - keys.clone(), - algorithm_machines(&mut OsRng, &algo, &keys), - full_message, - ); - let sig = Signature::new(&public_key, 
chain_id, MESSAGE, sig).unwrap(); + let sig = + sign(&mut OsRng, &algo, keys.clone(), algorithm_machines(&mut OsRng, &algo, &keys), MESSAGE); + let sig = Signature::new(&public_key, MESSAGE, sig).unwrap(); - call_verify(&contract, &public_key, MESSAGE, &sig).await.unwrap(); + call_verify(&client, contract, &public_key, MESSAGE, &sig).await.unwrap(); // Test an invalid signature fails let mut sig = sig; sig.s += Scalar::ONE; - assert!(call_verify(&contract, &public_key, MESSAGE, &sig).await.is_err()); + assert!(call_verify(&client, contract, &public_key, MESSAGE, &sig).await.is_err()); } diff --git a/deny.toml b/deny.toml index 2e516b99..60331289 100644 --- a/deny.toml +++ b/deny.toml @@ -99,6 +99,7 @@ allow-git = [ "https://github.com/rust-lang-nursery/lazy-static.rs", "https://github.com/serai-dex/substrate-bip39", "https://github.com/serai-dex/substrate", + "https://github.com/alloy-rs/alloy", "https://github.com/monero-rs/base58-monero", "https://github.com/kayabaNerve/dockertest-rs", ] diff --git a/processor/Cargo.toml b/processor/Cargo.toml index 73a34efe..cbc022a1 100644 --- a/processor/Cargo.toml +++ b/processor/Cargo.toml @@ -28,6 +28,7 @@ rand_core = { version = "0.6", default-features = false, features = ["std", "get rand_chacha = { version = "0.3", default-features = false, features = ["std"] } # Encoders +const-hex = { version = "1", default-features = false } hex = { version = "0.4", default-features = false, features = ["std"] } scale = { package = "parity-scale-codec", version = "3", default-features = false, features = ["std"] } borsh = { version = "1", default-features = false, features = ["std", "derive", "de_strict_order"] } @@ -40,11 +41,16 @@ transcript = { package = "flexible-transcript", path = "../crypto/transcript", d frost = { package = "modular-frost", path = "../crypto/frost", default-features = false, features = ["ristretto"] } frost-schnorrkel = { path = "../crypto/schnorrkel", default-features = false } +# Bitcoin/Ethereum +k256 = { 
version = "^0.13.1", default-features = false, features = ["std"], optional = true } + # Bitcoin secp256k1 = { version = "0.28", default-features = false, features = ["std", "global-context", "rand-std"], optional = true } -k256 = { version = "^0.13.1", default-features = false, features = ["std"], optional = true } bitcoin-serai = { path = "../coins/bitcoin", default-features = false, features = ["std"], optional = true } +# Ethereum +ethereum-serai = { path = "../coins/ethereum", default-features = false, optional = true } + # Monero dalek-ff-group = { path = "../crypto/dalek-ff-group", default-features = false, features = ["std"], optional = true } monero-serai = { path = "../coins/monero", default-features = false, features = ["std", "http-rpc", "multisig"], optional = true } @@ -55,12 +61,12 @@ env_logger = { version = "0.10", default-features = false, features = ["humantim tokio = { version = "1", default-features = false, features = ["rt-multi-thread", "sync", "time", "macros"] } zalloc = { path = "../common/zalloc" } -serai-db = { path = "../common/db", optional = true } +serai-db = { path = "../common/db" } serai-env = { path = "../common/env", optional = true } # TODO: Replace with direct usage of primitives serai-client = { path = "../substrate/client", default-features = false, features = ["serai"] } -messages = { package = "serai-processor-messages", path = "./messages", optional = true } +messages = { package = "serai-processor-messages", path = "./messages" } message-queue = { package = "serai-message-queue", path = "../message-queue", optional = true } @@ -69,6 +75,8 @@ frost = { package = "modular-frost", path = "../crypto/frost", features = ["test sp-application-crypto = { git = "https://github.com/serai-dex/substrate", default-features = false, features = ["std"] } +ethereum-serai = { path = "../coins/ethereum", default-features = false, features = ["tests"] } + dockertest = "0.4" serai-docker-tests = { path = "../tests/docker" } @@ -76,9 +84,11 
@@ serai-docker-tests = { path = "../tests/docker" } secp256k1 = ["k256", "frost/secp256k1"] bitcoin = ["dep:secp256k1", "secp256k1", "bitcoin-serai", "serai-client/bitcoin"] +ethereum = ["secp256k1", "ethereum-serai"] + ed25519 = ["dalek-ff-group", "frost/ed25519"] monero = ["ed25519", "monero-serai", "serai-client/monero"] -binaries = ["env_logger", "serai-env", "messages", "message-queue"] +binaries = ["env_logger", "serai-env", "message-queue"] parity-db = ["serai-db/parity-db"] rocksdb = ["serai-db/rocksdb"] diff --git a/processor/src/lib.rs b/processor/src/lib.rs index 378b852d..19f67508 100644 --- a/processor/src/lib.rs +++ b/processor/src/lib.rs @@ -1,7 +1,15 @@ +#![allow(dead_code)] + mod plan; pub use plan::*; +mod db; +pub(crate) use db::*; + +mod key_gen; + pub mod networks; +pub(crate) mod multisigs; mod additional_key; pub use additional_key::additional_key; diff --git a/processor/src/main.rs b/processor/src/main.rs index a4e9552d..1a50effa 100644 --- a/processor/src/main.rs +++ b/processor/src/main.rs @@ -31,6 +31,8 @@ mod networks; use networks::{Block, Network}; #[cfg(feature = "bitcoin")] use networks::Bitcoin; +#[cfg(feature = "ethereum")] +use networks::Ethereum; #[cfg(feature = "monero")] use networks::Monero; @@ -735,6 +737,7 @@ async fn main() { }; let network_id = match env::var("NETWORK").expect("network wasn't specified").as_str() { "bitcoin" => NetworkId::Bitcoin, + "ethereum" => NetworkId::Ethereum, "monero" => NetworkId::Monero, _ => panic!("unrecognized network"), }; @@ -744,6 +747,8 @@ async fn main() { match network_id { #[cfg(feature = "bitcoin")] NetworkId::Bitcoin => run(db, Bitcoin::new(url).await, coordinator).await, + #[cfg(feature = "ethereum")] + NetworkId::Ethereum => run(db.clone(), Ethereum::new(db, url).await, coordinator).await, #[cfg(feature = "monero")] NetworkId::Monero => run(db, Monero::new(url).await, coordinator).await, _ => panic!("spawning a processor for an unsupported network"), diff --git 
a/processor/src/multisigs/db.rs b/processor/src/multisigs/db.rs index 51287a0e..339b7bdc 100644 --- a/processor/src/multisigs/db.rs +++ b/processor/src/multisigs/db.rs @@ -1,3 +1,5 @@ +use std::io; + use ciphersuite::Ciphersuite; pub use serai_db::*; @@ -6,9 +8,59 @@ use serai_client::{primitives::Balance, in_instructions::primitives::InInstructi use crate::{ Get, Plan, - networks::{Transaction, Network}, + networks::{Output, Transaction, Network}, }; +#[derive(Clone, PartialEq, Eq, Debug)] +pub enum PlanFromScanning { + Refund(N::Output, N::Address), + Forward(N::Output), +} + +impl PlanFromScanning { + fn read(reader: &mut R) -> io::Result { + let mut kind = [0xff]; + reader.read_exact(&mut kind)?; + match kind[0] { + 0 => { + let output = N::Output::read(reader)?; + + let mut address_vec_len = [0; 4]; + reader.read_exact(&mut address_vec_len)?; + let mut address_vec = + vec![0; usize::try_from(u32::from_le_bytes(address_vec_len)).unwrap()]; + reader.read_exact(&mut address_vec)?; + let address = + N::Address::try_from(address_vec).map_err(|_| "invalid address saved to disk").unwrap(); + + Ok(PlanFromScanning::Refund(output, address)) + } + 1 => { + let output = N::Output::read(reader)?; + Ok(PlanFromScanning::Forward(output)) + } + _ => panic!("reading unrecognized PlanFromScanning"), + } + } + fn write(&self, writer: &mut W) -> io::Result<()> { + match self { + PlanFromScanning::Refund(output, address) => { + writer.write_all(&[0])?; + output.write(writer)?; + + let address_vec: Vec = + address.clone().try_into().map_err(|_| "invalid address being refunded to").unwrap(); + writer.write_all(&u32::try_from(address_vec.len()).unwrap().to_le_bytes())?; + writer.write_all(&address_vec) + } + PlanFromScanning::Forward(output) => { + writer.write_all(&[1])?; + output.write(writer) + } + } + } +} + create_db!( MultisigsDb { NextBatchDb: () -> u32, @@ -80,7 +132,11 @@ impl PlanDb { ) -> bool { let plan = Plan::::read::<&[u8]>(&mut &Self::get(getter, &id).unwrap()[8 
..]).unwrap(); assert_eq!(plan.id(), id); - (key == plan.key) && (Some(N::change_address(plan.key)) == plan.change) + if let Some(change) = N::change_address(plan.key) { + (key == plan.key) && (Some(change) == plan.change) + } else { + false + } } } @@ -130,7 +186,7 @@ impl PlansFromScanningDb { pub fn set_plans_from_scanning( txn: &mut impl DbTxn, block_number: usize, - plans: Vec>, + plans: Vec>, ) { let mut buf = vec![]; for plan in plans { @@ -142,13 +198,13 @@ impl PlansFromScanningDb { pub fn take_plans_from_scanning( txn: &mut impl DbTxn, block_number: usize, - ) -> Option>> { + ) -> Option>> { let block_number = u64::try_from(block_number).unwrap(); let res = Self::get(txn, block_number).map(|plans| { let mut plans_ref = plans.as_slice(); let mut res = vec![]; while !plans_ref.is_empty() { - res.push(Plan::::read(&mut plans_ref).unwrap()); + res.push(PlanFromScanning::::read(&mut plans_ref).unwrap()); } res }); diff --git a/processor/src/multisigs/mod.rs b/processor/src/multisigs/mod.rs index a6e8bbc9..75c91675 100644 --- a/processor/src/multisigs/mod.rs +++ b/processor/src/multisigs/mod.rs @@ -7,7 +7,7 @@ use scale::{Encode, Decode}; use messages::SubstrateContext; use serai_client::{ - primitives::{MAX_DATA_LEN, NetworkId, Coin, ExternalAddress, BlockHash, Data}, + primitives::{MAX_DATA_LEN, ExternalAddress, BlockHash, Data}, in_instructions::primitives::{ InInstructionWithBalance, Batch, RefundableInInstruction, Shorthand, MAX_BATCH_SIZE, }, @@ -28,15 +28,12 @@ use scanner::{ScannerEvent, ScannerHandle, Scanner}; mod db; use db::*; -#[cfg(not(test))] -mod scheduler; -#[cfg(test)] -pub mod scheduler; +pub(crate) mod scheduler; use scheduler::Scheduler; use crate::{ Get, Db, Payment, Plan, - networks::{OutputType, Output, Transaction, SignableTransaction, Block, PreparedSend, Network}, + networks::{OutputType, Output, SignableTransaction, Eventuality, Block, PreparedSend, Network}, }; // InInstructionWithBalance from an external output @@ -95,6 +92,8 @@ 
enum RotationStep { ClosingExisting, } +// This explicitly shouldn't take the database as we prepare Plans we won't execute for fee +// estimates async fn prepare_send( network: &N, block_number: usize, @@ -122,7 +121,7 @@ async fn prepare_send( pub struct MultisigViewer { activation_block: usize, key: ::G, - scheduler: Scheduler, + scheduler: N::Scheduler, } #[allow(clippy::type_complexity)] @@ -131,7 +130,7 @@ pub enum MultisigEvent { // Batches to publish Batches(Option<(::G, ::G)>, Vec), // Eventuality completion found on-chain - Completed(Vec, [u8; 32], N::Transaction), + Completed(Vec, [u8; 32], ::Completion), } pub struct MultisigManager { @@ -157,20 +156,7 @@ impl MultisigManager { assert!(current_keys.len() <= 2); let mut actively_signing = vec![]; for (_, key) in ¤t_keys { - schedulers.push( - Scheduler::from_db( - raw_db, - *key, - match N::NETWORK { - NetworkId::Serai => panic!("adding a key for Serai"), - NetworkId::Bitcoin => Coin::Bitcoin, - // TODO: This is incomplete to DAI - NetworkId::Ethereum => Coin::Ether, - NetworkId::Monero => Coin::Monero, - }, - ) - .unwrap(), - ); + schedulers.push(N::Scheduler::from_db(raw_db, *key, N::NETWORK).unwrap()); // Load any TXs being actively signed let key = key.to_bytes(); @@ -245,17 +231,7 @@ impl MultisigManager { let viewer = Some(MultisigViewer { activation_block, key: external_key, - scheduler: Scheduler::::new::( - txn, - external_key, - match N::NETWORK { - NetworkId::Serai => panic!("adding a key for Serai"), - NetworkId::Bitcoin => Coin::Bitcoin, - // TODO: This is incomplete to DAI - NetworkId::Ethereum => Coin::Ether, - NetworkId::Monero => Coin::Monero, - }, - ), + scheduler: N::Scheduler::new::(txn, external_key, N::NETWORK), }); if self.existing.is_none() { @@ -352,48 +328,30 @@ impl MultisigManager { (existing_outputs, new_outputs) } - fn refund_plan(output: N::Output, refund_to: N::Address) -> Plan { + fn refund_plan( + scheduler: &mut N::Scheduler, + txn: &mut D::Transaction<'_>, + output: 
N::Output, + refund_to: N::Address, + ) -> Plan { log::info!("creating refund plan for {}", hex::encode(output.id())); assert_eq!(output.kind(), OutputType::External); - Plan { - key: output.key(), - // Uses a payment as this will still be successfully sent due to fee amortization, - // and because change is currently always a Serai key - payments: vec![Payment { address: refund_to, data: None, balance: output.balance() }], - inputs: vec![output], - change: None, - } + scheduler.refund_plan::(txn, output, refund_to) } - fn forward_plan(&self, output: N::Output) -> Plan { + // Returns the plan for forwarding if one is needed. + // Returns None if one is not needed to forward this output. + fn forward_plan(&mut self, txn: &mut D::Transaction<'_>, output: &N::Output) -> Option> { log::info!("creating forwarding plan for {}", hex::encode(output.id())); - - /* - Sending a Plan, with arbitrary data proxying the InInstruction, would require adding - a flow for networks which drop their data to still embed arbitrary data. It'd also have - edge cases causing failures (we'd need to manually provide the origin if it was implied, - which may exceed the encoding limit). - - Instead, we save the InInstruction as we scan this output. Then, when the output is - successfully forwarded, we simply read it from the local database. This also saves the - costs of embedding arbitrary data. - - Since we can't rely on the Eventuality system to detect if it's a forwarded transaction, - due to the asynchonicity of the Eventuality system, we instead interpret an Forwarded - output which has an amount associated with an InInstruction which was forwarded as having - been forwarded. 
- */ - - Plan { - key: self.existing.as_ref().unwrap().key, - payments: vec![Payment { - address: N::forward_address(self.new.as_ref().unwrap().key), - data: None, - balance: output.balance(), - }], - inputs: vec![output], - change: None, + let res = self.existing.as_mut().unwrap().scheduler.forward_plan::( + txn, + output.clone(), + self.new.as_ref().expect("forwarding plan yet no new multisig").key, + ); + if res.is_none() { + log::info!("no forwarding plan was necessary for {}", hex::encode(output.id())); } + res } // Filter newly received outputs due to the step being RotationStep::ClosingExisting. @@ -605,7 +563,31 @@ impl MultisigManager { block_number { // Load plans crated when we scanned the block - plans = PlansFromScanningDb::take_plans_from_scanning::(txn, block_number).unwrap(); + let scanning_plans = + PlansFromScanningDb::take_plans_from_scanning::(txn, block_number).unwrap(); + // Expand into actual plans + plans = scanning_plans + .into_iter() + .map(|plan| match plan { + PlanFromScanning::Refund(output, refund_to) => { + let existing = self.existing.as_mut().unwrap(); + if output.key() == existing.key { + Self::refund_plan(&mut existing.scheduler, txn, output, refund_to) + } else { + let new = self + .new + .as_mut() + .expect("new multisig didn't expect yet output wasn't for existing multisig"); + assert_eq!(output.key(), new.key, "output wasn't for existing nor new multisig"); + Self::refund_plan(&mut new.scheduler, txn, output, refund_to) + } + } + PlanFromScanning::Forward(output) => self + .forward_plan(txn, &output) + .expect("supposed to forward an output yet no forwarding plan"), + }) + .collect(); + for plan in &plans { plans_from_scanning.insert(plan.id()); } @@ -665,13 +647,23 @@ impl MultisigManager { }); for plan in &plans { - if plan.change == Some(N::change_address(plan.key)) { - // Assert these are only created during the expected step - match *step { - RotationStep::UseExisting => {} - RotationStep::NewAsChange | - 
RotationStep::ForwardFromExisting | - RotationStep::ClosingExisting => panic!("change was set to self despite rotating"), + // This first equality should 'never meaningfully' be false + // All created plans so far are by the existing multisig EXCEPT: + // A) If we created a refund plan from the new multisig (yet that wouldn't have change) + // B) The existing Scheduler returned a Plan for the new key (yet that happens with the SC + // scheduler, yet that doesn't have change) + // Despite being 'unnecessary' now, it's better to explicitly ensure and be robust + if plan.key == self.existing.as_ref().unwrap().key { + if let Some(change) = N::change_address(plan.key) { + if plan.change == Some(change) { + // Assert these (self-change) are only created during the expected step + match *step { + RotationStep::UseExisting => {} + RotationStep::NewAsChange | + RotationStep::ForwardFromExisting | + RotationStep::ClosingExisting => panic!("change was set to self despite rotating"), + } + } } } } @@ -853,15 +845,20 @@ impl MultisigManager { let plans_at_start = plans.len(); let (refund_to, instruction) = instruction_from_output::(output); if let Some(mut instruction) = instruction { - // Build a dedicated Plan forwarding this - let forward_plan = self.forward_plan(output.clone()); - plans.push(forward_plan.clone()); + let Some(shimmed_plan) = N::Scheduler::shim_forward_plan( + output.clone(), + self.new.as_ref().expect("forwarding from existing yet no new multisig").key, + ) else { + // If this network doesn't need forwarding, report the output now + return true; + }; + plans.push(PlanFromScanning::::Forward(output.clone())); // Set the instruction for this output to be returned // We need to set it under the amount it's forwarded with, so prepare its forwarding // TX to determine the fees involved let PreparedSend { tx, post_fee_branches: _, operating_costs } = - prepare_send(network, block_number, forward_plan, 0).await; + prepare_send(network, block_number, shimmed_plan, 
0).await; // operating_costs should not increase in a forwarding TX assert_eq!(operating_costs, 0); @@ -872,12 +869,28 @@ impl MultisigManager { // letting it die out if let Some(tx) = &tx { instruction.balance.amount.0 -= tx.0.fee(); + + /* + Sending a Plan, with arbitrary data proxying the InInstruction, would require + adding a flow for networks which drop their data to still embed arbitrary data. + It'd also have edge cases causing failures (we'd need to manually provide the + origin if it was implied, which may exceed the encoding limit). + + Instead, we save the InInstruction as we scan this output. Then, when the + output is successfully forwarded, we simply read it from the local database. + This also saves the costs of embedding arbitrary data. + + Since we can't rely on the Eventuality system to detect if it's a forwarded + transaction, due to the asynchonicity of the Eventuality system, we instead + interpret an Forwarded output which has an amount associated with an + InInstruction which was forwarded as having been forwarded. + */ ForwardedOutputDb::save_forwarded_output(txn, &instruction); } } else if let Some(refund_to) = refund_to { if let Ok(refund_to) = refund_to.consume().try_into() { // Build a dedicated Plan refunding this - plans.push(Self::refund_plan(output.clone(), refund_to)); + plans.push(PlanFromScanning::Refund(output.clone(), refund_to)); } } @@ -909,7 +922,7 @@ impl MultisigManager { let Some(instruction) = instruction else { if let Some(refund_to) = refund_to { if let Ok(refund_to) = refund_to.consume().try_into() { - plans.push(Self::refund_plan(output.clone(), refund_to)); + plans.push(PlanFromScanning::Refund(output.clone(), refund_to)); } } continue; @@ -999,9 +1012,9 @@ impl MultisigManager { // This must be emitted before ScannerEvent::Block for all completions of known Eventualities // within the block. Unknown Eventualities may have their Completed events emitted after // ScannerEvent::Block however. 
- ScannerEvent::Completed(key, block_number, id, tx) => { - ResolvedDb::resolve_plan::(txn, &key, id, &tx.id()); - (block_number, MultisigEvent::Completed(key, id, tx)) + ScannerEvent::Completed(key, block_number, id, tx_id, completion) => { + ResolvedDb::resolve_plan::(txn, &key, id, &tx_id); + (block_number, MultisigEvent::Completed(key, id, completion)) } }; diff --git a/processor/src/multisigs/scanner.rs b/processor/src/multisigs/scanner.rs index cefa8a25..20c61192 100644 --- a/processor/src/multisigs/scanner.rs +++ b/processor/src/multisigs/scanner.rs @@ -17,15 +17,25 @@ use tokio::{ use crate::{ Get, DbTxn, Db, - networks::{Output, Transaction, EventualitiesTracker, Block, Network}, + networks::{Output, Transaction, Eventuality, EventualitiesTracker, Block, Network}, }; #[derive(Clone, Debug)] pub enum ScannerEvent { // Block scanned - Block { is_retirement_block: bool, block: >::Id, outputs: Vec }, + Block { + is_retirement_block: bool, + block: >::Id, + outputs: Vec, + }, // Eventuality completion found on-chain - Completed(Vec, usize, [u8; 32], N::Transaction), + Completed( + Vec, + usize, + [u8; 32], + >::Id, + ::Completion, + ), } pub type ScannerEventChannel = mpsc::UnboundedReceiver>; @@ -555,19 +565,25 @@ impl Scanner { } } - for (id, (block_number, tx)) in network + for (id, (block_number, tx, completion)) in network .get_eventuality_completions(scanner.eventualities.get_mut(&key_vec).unwrap(), &block) .await { info!( "eventuality {} resolved by {}, as found on chain", hex::encode(id), - hex::encode(&tx.id()) + hex::encode(tx.as_ref()) ); completion_block_numbers.push(block_number); // This must be before the mission of ScannerEvent::Block, per commentary in mod.rs - if !scanner.emit(ScannerEvent::Completed(key_vec.clone(), block_number, id, tx)) { + if !scanner.emit(ScannerEvent::Completed( + key_vec.clone(), + block_number, + id, + tx, + completion, + )) { return; } } diff --git a/processor/src/multisigs/scheduler/mod.rs 
b/processor/src/multisigs/scheduler/mod.rs new file mode 100644 index 00000000..6ec95fc4 --- /dev/null +++ b/processor/src/multisigs/scheduler/mod.rs @@ -0,0 +1,95 @@ +use core::fmt::Debug; +use std::io; + +use ciphersuite::Ciphersuite; + +use serai_client::primitives::{NetworkId, Balance}; + +use crate::{networks::Network, Db, Payment, Plan}; + +pub(crate) mod utxo; +pub(crate) mod smart_contract; + +pub trait SchedulerAddendum: Send + Clone + PartialEq + Debug { + fn read(reader: &mut R) -> io::Result; + fn write(&self, writer: &mut W) -> io::Result<()>; +} + +impl SchedulerAddendum for () { + fn read(_: &mut R) -> io::Result { + Ok(()) + } + fn write(&self, _: &mut W) -> io::Result<()> { + Ok(()) + } +} + +pub trait Scheduler: Sized + Clone + PartialEq + Debug { + type Addendum: SchedulerAddendum; + + /// Check if this Scheduler is empty. + fn empty(&self) -> bool; + + /// Create a new Scheduler. + fn new( + txn: &mut D::Transaction<'_>, + key: ::G, + network: NetworkId, + ) -> Self; + + /// Load a Scheduler from the DB. + fn from_db( + db: &D, + key: ::G, + network: NetworkId, + ) -> io::Result; + + /// Check if a branch is usable. + fn can_use_branch(&self, balance: Balance) -> bool; + + /// Schedule a series of outputs/payments. + fn schedule( + &mut self, + txn: &mut D::Transaction<'_>, + utxos: Vec, + payments: Vec>, + key_for_any_change: ::G, + force_spend: bool, + ) -> Vec>; + + /// Consume all payments still pending within this Scheduler, without scheduling them. + fn consume_payments(&mut self, txn: &mut D::Transaction<'_>) -> Vec>; + + /// Note a branch output as having been created, with the amount it was actually created with, + /// or not having been created due to being too small. + fn created_output( + &mut self, + txn: &mut D::Transaction<'_>, + expected: u64, + actual: Option, + ); + + /// Refund a specific output. 
+ fn refund_plan( + &mut self, + txn: &mut D::Transaction<'_>, + output: N::Output, + refund_to: N::Address, + ) -> Plan; + + /// Shim the forwarding Plan as necessary to obtain a fee estimate. + /// + /// If this Scheduler is for a Network which requires forwarding, this must return Some with a + /// plan with identical fee behavior. If forwarding isn't necessary, returns None. + fn shim_forward_plan(output: N::Output, to: ::G) -> Option>; + + /// Forward a specific output to the new multisig. + /// + /// Returns None if no forwarding is necessary. Must return Some if forwarding is necessary. + fn forward_plan( + &mut self, + txn: &mut D::Transaction<'_>, + output: N::Output, + to: ::G, + ) -> Option>; +} diff --git a/processor/src/multisigs/scheduler/smart_contract.rs b/processor/src/multisigs/scheduler/smart_contract.rs new file mode 100644 index 00000000..27268b82 --- /dev/null +++ b/processor/src/multisigs/scheduler/smart_contract.rs @@ -0,0 +1,208 @@ +use std::{io, collections::HashSet}; + +use ciphersuite::{group::GroupEncoding, Ciphersuite}; + +use serai_client::primitives::{NetworkId, Coin, Balance}; + +use crate::{ + Get, DbTxn, Db, Payment, Plan, create_db, + networks::{Output, Network}, + multisigs::scheduler::{SchedulerAddendum, Scheduler as SchedulerTrait}, +}; + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Scheduler { + key: ::G, + coins: HashSet, + rotated: bool, +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub enum Addendum { + Nonce(u64), + RotateTo { nonce: u64, new_key: ::G }, +} + +impl SchedulerAddendum for Addendum { + fn read(reader: &mut R) -> io::Result { + let mut kind = [0xff]; + reader.read_exact(&mut kind)?; + match kind[0] { + 0 => { + let mut nonce = [0; 8]; + reader.read_exact(&mut nonce)?; + Ok(Addendum::Nonce(u64::from_le_bytes(nonce))) + } + 1 => { + let mut nonce = [0; 8]; + reader.read_exact(&mut nonce)?; + let nonce = u64::from_le_bytes(nonce); + + let new_key = N::Curve::read_G(reader)?; + Ok(Addendum::RotateTo 
{ nonce, new_key }) + } + _ => Err(io::Error::other("reading unknown Addendum type"))?, + } + } + fn write(&self, writer: &mut W) -> io::Result<()> { + match self { + Addendum::Nonce(nonce) => { + writer.write_all(&[0])?; + writer.write_all(&nonce.to_le_bytes()) + } + Addendum::RotateTo { nonce, new_key } => { + writer.write_all(&[1])?; + writer.write_all(&nonce.to_le_bytes())?; + writer.write_all(new_key.to_bytes().as_ref()) + } + } + } +} + +create_db! { + SchedulerDb { + LastNonce: () -> u64, + RotatedTo: (key: &[u8]) -> Vec, + } +} + +impl> SchedulerTrait for Scheduler { + type Addendum = Addendum; + + /// Check if this Scheduler is empty. + fn empty(&self) -> bool { + self.rotated + } + + /// Create a new Scheduler. + fn new( + _txn: &mut D::Transaction<'_>, + key: ::G, + network: NetworkId, + ) -> Self { + assert!(N::branch_address(key).is_none()); + assert!(N::change_address(key).is_none()); + assert!(N::forward_address(key).is_none()); + + Scheduler { key, coins: network.coins().iter().copied().collect(), rotated: false } + } + + /// Load a Scheduler from the DB. 
+ fn from_db( + db: &D, + key: ::G, + network: NetworkId, + ) -> io::Result { + Ok(Scheduler { + key, + coins: network.coins().iter().copied().collect(), + rotated: RotatedTo::get(db, key.to_bytes().as_ref()).is_some(), + }) + } + + fn can_use_branch(&self, _balance: Balance) -> bool { + false + } + + fn schedule( + &mut self, + txn: &mut D::Transaction<'_>, + utxos: Vec, + payments: Vec>, + key_for_any_change: ::G, + force_spend: bool, + ) -> Vec> { + for utxo in utxos { + assert!(self.coins.contains(&utxo.balance().coin)); + } + + let mut nonce = LastNonce::get(txn).map_or(0, |nonce| nonce + 1); + let mut plans = vec![]; + for chunk in payments.as_slice().chunks(N::MAX_OUTPUTS) { + // Once we rotate, all further payments should be scheduled via the new multisig + assert!(!self.rotated); + plans.push(Plan { + key: self.key, + inputs: vec![], + payments: chunk.to_vec(), + change: None, + scheduler_addendum: Addendum::Nonce(nonce), + }); + nonce += 1; + } + + // If we're supposed to rotate to the new key, create an empty Plan which will signify the key + // update + if force_spend && (!self.rotated) { + plans.push(Plan { + key: self.key, + inputs: vec![], + payments: vec![], + change: None, + scheduler_addendum: Addendum::RotateTo { nonce, new_key: key_for_any_change }, + }); + nonce += 1; + self.rotated = true; + RotatedTo::set( + txn, + self.key.to_bytes().as_ref(), + &key_for_any_change.to_bytes().as_ref().to_vec(), + ); + } + + LastNonce::set(txn, &nonce); + + plans + } + + fn consume_payments(&mut self, _txn: &mut D::Transaction<'_>) -> Vec> { + vec![] + } + + fn created_output( + &mut self, + _txn: &mut D::Transaction<'_>, + _expected: u64, + _actual: Option, + ) { + panic!("Smart Contract Scheduler created a Branch output") + } + + /// Refund a specific output. 
+ fn refund_plan( + &mut self, + txn: &mut D::Transaction<'_>, + output: N::Output, + refund_to: N::Address, + ) -> Plan { + let current_key = RotatedTo::get(txn, self.key.to_bytes().as_ref()) + .and_then(|key_bytes| ::read_G(&mut key_bytes.as_slice()).ok()) + .unwrap_or(self.key); + + let nonce = LastNonce::get(txn).map_or(0, |nonce| nonce + 1); + LastNonce::set(txn, &(nonce + 1)); + Plan { + key: current_key, + inputs: vec![], + payments: vec![Payment { address: refund_to, data: None, balance: output.balance() }], + change: None, + scheduler_addendum: Addendum::Nonce(nonce), + } + } + + fn shim_forward_plan(_output: N::Output, _to: ::G) -> Option> { + None + } + + /// Forward a specific output to the new multisig. + /// + /// Returns None if no forwarding is necessary. + fn forward_plan( + &mut self, + _txn: &mut D::Transaction<'_>, + _output: N::Output, + _to: ::G, + ) -> Option> { + None + } +} diff --git a/processor/src/multisigs/scheduler.rs b/processor/src/multisigs/scheduler/utxo.rs similarity index 80% rename from processor/src/multisigs/scheduler.rs rename to processor/src/multisigs/scheduler/utxo.rs index abc81a80..e9aa3351 100644 --- a/processor/src/multisigs/scheduler.rs +++ b/processor/src/multisigs/scheduler/utxo.rs @@ -5,16 +5,17 @@ use std::{ use ciphersuite::{group::GroupEncoding, Ciphersuite}; -use serai_client::primitives::{Coin, Amount, Balance}; +use serai_client::primitives::{NetworkId, Coin, Amount, Balance}; use crate::{ - networks::{OutputType, Output, Network}, DbTxn, Db, Payment, Plan, + networks::{OutputType, Output, Network, UtxoNetwork}, + multisigs::scheduler::Scheduler as SchedulerTrait, }; -/// Stateless, deterministic output/payment manager. -#[derive(PartialEq, Eq, Debug)] -pub struct Scheduler { +/// Deterministic output/payment manager. 
+#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Scheduler { key: ::G, coin: Coin, @@ -46,7 +47,7 @@ fn scheduler_key(key: &G) -> Vec { D::key(b"SCHEDULER", b"scheduler", key.to_bytes()) } -impl Scheduler { +impl> Scheduler { pub fn empty(&self) -> bool { self.queued_plans.is_empty() && self.plans.is_empty() && @@ -144,8 +145,18 @@ impl Scheduler { pub fn new( txn: &mut D::Transaction<'_>, key: ::G, - coin: Coin, + network: NetworkId, ) -> Self { + assert!(N::branch_address(key).is_some()); + assert!(N::change_address(key).is_some()); + assert!(N::forward_address(key).is_some()); + + let coin = { + let coins = network.coins(); + assert_eq!(coins.len(), 1); + coins[0] + }; + let res = Scheduler { key, coin, @@ -159,7 +170,17 @@ impl Scheduler { res } - pub fn from_db(db: &D, key: ::G, coin: Coin) -> io::Result { + pub fn from_db( + db: &D, + key: ::G, + network: NetworkId, + ) -> io::Result { + let coin = { + let coins = network.coins(); + assert_eq!(coins.len(), 1); + coins[0] + }; + let scheduler = db.get(scheduler_key::(&key)).unwrap_or_else(|| { panic!("loading scheduler from DB without scheduler for {}", hex::encode(key.to_bytes())) }); @@ -201,7 +222,7 @@ impl Scheduler { amount }; - let branch_address = N::branch_address(self.key); + let branch_address = N::branch_address(self.key).unwrap(); // If we have more payments than we can handle in a single TX, create plans for them // TODO2: This isn't perfect. For 258 outputs, and a MAX_OUTPUTS of 16, this will create: @@ -237,7 +258,8 @@ impl Scheduler { key: self.key, inputs, payments, - change: Some(N::change_address(key_for_any_change)).filter(|_| change), + change: Some(N::change_address(key_for_any_change).unwrap()).filter(|_| change), + scheduler_addendum: (), } } @@ -305,7 +327,7 @@ impl Scheduler { its *own* branch address, since created_output is called on the signer's Scheduler. 
*/ { - let branch_address = N::branch_address(self.key); + let branch_address = N::branch_address(self.key).unwrap(); payments = payments.drain(..).filter(|payment| payment.address != branch_address).collect::>(); } @@ -357,7 +379,8 @@ impl Scheduler { key: self.key, inputs: chunk, payments: vec![], - change: Some(N::change_address(key_for_any_change)), + change: Some(N::change_address(key_for_any_change).unwrap()), + scheduler_addendum: (), }) } @@ -403,7 +426,8 @@ impl Scheduler { key: self.key, inputs: self.utxos.drain(..).collect::>(), payments: vec![], - change: Some(N::change_address(key_for_any_change)), + change: Some(N::change_address(key_for_any_change).unwrap()), + scheduler_addendum: (), }); } @@ -435,9 +459,6 @@ impl Scheduler { // Note a branch output as having been created, with the amount it was actually created with, // or not having been created due to being too small - // This can be called whenever, so long as it's properly ordered - // (it's independent to Serai/the chain we're scheduling over, yet still expects outputs to be - // created in the same order Plans are returned in) pub fn created_output( &mut self, txn: &mut D::Transaction<'_>, @@ -501,3 +522,106 @@ impl Scheduler { txn.put(scheduler_key::(&self.key), self.serialize()); } } + +impl> SchedulerTrait for Scheduler { + type Addendum = (); + + /// Check if this Scheduler is empty. + fn empty(&self) -> bool { + Scheduler::empty(self) + } + + /// Create a new Scheduler. + fn new( + txn: &mut D::Transaction<'_>, + key: ::G, + network: NetworkId, + ) -> Self { + Scheduler::new::(txn, key, network) + } + + /// Load a Scheduler from the DB. + fn from_db( + db: &D, + key: ::G, + network: NetworkId, + ) -> io::Result { + Scheduler::from_db::(db, key, network) + } + + /// Check if a branch is usable. + fn can_use_branch(&self, balance: Balance) -> bool { + Scheduler::can_use_branch(self, balance) + } + + /// Schedule a series of outputs/payments. 
+ fn schedule( + &mut self, + txn: &mut D::Transaction<'_>, + utxos: Vec, + payments: Vec>, + key_for_any_change: ::G, + force_spend: bool, + ) -> Vec> { + Scheduler::schedule::(self, txn, utxos, payments, key_for_any_change, force_spend) + } + + /// Consume all payments still pending within this Scheduler, without scheduling them. + fn consume_payments(&mut self, txn: &mut D::Transaction<'_>) -> Vec> { + Scheduler::consume_payments::(self, txn) + } + + /// Note a branch output as having been created, with the amount it was actually created with, + /// or not having been created due to being too small. + // TODO: Move this to Balance. + fn created_output( + &mut self, + txn: &mut D::Transaction<'_>, + expected: u64, + actual: Option, + ) { + Scheduler::created_output::(self, txn, expected, actual) + } + + fn refund_plan( + &mut self, + _: &mut D::Transaction<'_>, + output: N::Output, + refund_to: N::Address, + ) -> Plan { + Plan { + key: output.key(), + // Uses a payment as this will still be successfully sent due to fee amortization, + // and because change is currently always a Serai key + payments: vec![Payment { address: refund_to, data: None, balance: output.balance() }], + inputs: vec![output], + change: None, + scheduler_addendum: (), + } + } + + fn shim_forward_plan(output: N::Output, to: ::G) -> Option> { + Some(Plan { + key: output.key(), + payments: vec![Payment { + address: N::forward_address(to).unwrap(), + data: None, + balance: output.balance(), + }], + inputs: vec![output], + change: None, + scheduler_addendum: (), + }) + } + + fn forward_plan( + &mut self, + _: &mut D::Transaction<'_>, + output: N::Output, + to: ::G, + ) -> Option> { + assert_eq!(self.key, output.key()); + // Call shim as shim returns the actual + Self::shim_forward_plan(output, to) + } +} diff --git a/processor/src/networks/bitcoin.rs b/processor/src/networks/bitcoin.rs index 606a3e12..96f76949 100644 --- a/processor/src/networks/bitcoin.rs +++ b/processor/src/networks/bitcoin.rs 
@@ -52,9 +52,10 @@ use crate::{ networks::{ NetworkError, Block as BlockTrait, OutputType, Output as OutputTrait, Transaction as TransactionTrait, SignableTransaction as SignableTransactionTrait, - Eventuality as EventualityTrait, EventualitiesTracker, Network, + Eventuality as EventualityTrait, EventualitiesTracker, Network, UtxoNetwork, }, Payment, + multisigs::scheduler::utxo::Scheduler, }; #[derive(Clone, PartialEq, Eq, Debug)] @@ -178,14 +179,6 @@ impl TransactionTrait for Transaction { hash.reverse(); hash } - fn serialize(&self) -> Vec { - let mut buf = vec![]; - self.consensus_encode(&mut buf).unwrap(); - buf - } - fn read(reader: &mut R) -> io::Result { - Transaction::consensus_decode(reader).map_err(|e| io::Error::other(format!("{e}"))) - } #[cfg(test)] async fn fee(&self, network: &Bitcoin) -> u64 { @@ -209,7 +202,23 @@ impl TransactionTrait for Transaction { #[derive(Clone, PartialEq, Eq, Debug)] pub struct Eventuality([u8; 32]); +#[derive(Clone, PartialEq, Eq, Default, Debug)] +pub struct EmptyClaim; +impl AsRef<[u8]> for EmptyClaim { + fn as_ref(&self) -> &[u8] { + &[] + } +} +impl AsMut<[u8]> for EmptyClaim { + fn as_mut(&mut self) -> &mut [u8] { + &mut [] + } +} + impl EventualityTrait for Eventuality { + type Claim = EmptyClaim; + type Completion = Transaction; + fn lookup(&self) -> Vec { self.0.to_vec() } @@ -224,6 +233,18 @@ impl EventualityTrait for Eventuality { fn serialize(&self) -> Vec { self.0.to_vec() } + + fn claim(_: &Transaction) -> EmptyClaim { + EmptyClaim + } + fn serialize_completion(completion: &Transaction) -> Vec { + let mut buf = vec![]; + completion.consensus_encode(&mut buf).unwrap(); + buf + } + fn read_completion(reader: &mut R) -> io::Result { + Transaction::consensus_decode(reader).map_err(|e| io::Error::other(format!("{e}"))) + } } #[derive(Clone, Debug)] @@ -374,8 +395,12 @@ impl Bitcoin { for input in &tx.input { let mut input_tx = input.previous_output.txid.to_raw_hash().to_byte_array(); input_tx.reverse(); - in_value 
+= self.get_transaction(&input_tx).await?.output - [usize::try_from(input.previous_output.vout).unwrap()] + in_value += self + .rpc + .get_transaction(&input_tx) + .await + .map_err(|_| NetworkError::ConnectionError)? + .output[usize::try_from(input.previous_output.vout).unwrap()] .value .to_sat(); } @@ -537,6 +562,25 @@ impl Bitcoin { } } +// Bitcoin has a max weight of 400,000 (MAX_STANDARD_TX_WEIGHT) +// A non-SegWit TX will have 4 weight units per byte, leaving a max size of 100,000 bytes +// While our inputs are entirely SegWit, such fine tuning is not necessary and could create +// issues in the future (if the size decreases or we misevaluate it) +// It also offers a minimal amount of benefit when we are able to logarithmically accumulate +// inputs +// For 128-byte inputs (36-byte output specification, 64-byte signature, whatever overhead) and +// 64-byte outputs (40-byte script, 8-byte amount, whatever overhead), they together take up 192 +// bytes +// 100,000 / 192 = 520 +// 520 * 192 leaves 160 bytes of overhead for the transaction structure itself +const MAX_INPUTS: usize = 520; +const MAX_OUTPUTS: usize = 520; + +fn address_from_key(key: ProjectivePoint) -> Address { + Address::new(BAddress::::new(BNetwork::Bitcoin, address_payload(key).unwrap())) + .unwrap() +} + #[async_trait] impl Network for Bitcoin { type Curve = Secp256k1; @@ -549,6 +593,8 @@ impl Network for Bitcoin { type Eventuality = Eventuality; type TransactionMachine = TransactionMachine; + type Scheduler = Scheduler; + type Address = Address; const NETWORK: NetworkId = NetworkId::Bitcoin; @@ -598,19 +644,7 @@ impl Network for Bitcoin { // aggregation TX const COST_TO_AGGREGATE: u64 = 800; - // Bitcoin has a max weight of 400,000 (MAX_STANDARD_TX_WEIGHT) - // A non-SegWit TX will have 4 weight units per byte, leaving a max size of 100,000 bytes - // While our inputs are entirely SegWit, such fine tuning is not necessary and could create - // issues in the future (if the size decreases or we 
misevaluate it) - // It also offers a minimal amount of benefit when we are able to logarithmically accumulate - // inputs - // For 128-byte inputs (36-byte output specification, 64-byte signature, whatever overhead) and - // 64-byte outputs (40-byte script, 8-byte amount, whatever overhead), they together take up 192 - // bytes - // 100,000 / 192 = 520 - // 520 * 192 leaves 160 bytes of overhead for the transaction structure itself - const MAX_INPUTS: usize = 520; - const MAX_OUTPUTS: usize = 520; + const MAX_OUTPUTS: usize = MAX_OUTPUTS; fn tweak_keys(keys: &mut ThresholdKeys) { *keys = tweak_keys(keys); @@ -618,24 +652,24 @@ impl Network for Bitcoin { scanner(keys.group_key()); } - fn external_address(key: ProjectivePoint) -> Address { - Address::new(BAddress::::new(BNetwork::Bitcoin, address_payload(key).unwrap())) - .unwrap() + #[cfg(test)] + async fn external_address(&self, key: ProjectivePoint) -> Address { + address_from_key(key) } - fn branch_address(key: ProjectivePoint) -> Address { + fn branch_address(key: ProjectivePoint) -> Option
{ let (_, offsets, _) = scanner(key); - Self::external_address(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Branch])) + Some(address_from_key(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Branch]))) } - fn change_address(key: ProjectivePoint) -> Address { + fn change_address(key: ProjectivePoint) -> Option
{ let (_, offsets, _) = scanner(key); - Self::external_address(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Change])) + Some(address_from_key(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Change]))) } - fn forward_address(key: ProjectivePoint) -> Address { + fn forward_address(key: ProjectivePoint) -> Option
{ let (_, offsets, _) = scanner(key); - Self::external_address(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Forwarded])) + Some(address_from_key(key + (ProjectivePoint::GENERATOR * offsets[&OutputType::Forwarded]))) } async fn get_latest_block_number(&self) -> Result { @@ -682,7 +716,7 @@ impl Network for Bitcoin { spent_tx.reverse(); let mut tx; while { - tx = self.get_transaction(&spent_tx).await; + tx = self.rpc.get_transaction(&spent_tx).await; tx.is_err() } { log::error!("couldn't get transaction from bitcoin node: {tx:?}"); @@ -710,7 +744,7 @@ impl Network for Bitcoin { &self, eventualities: &mut EventualitiesTracker, block: &Self::Block, - ) -> HashMap<[u8; 32], (usize, Transaction)> { + ) -> HashMap<[u8; 32], (usize, [u8; 32], Transaction)> { let mut res = HashMap::new(); if eventualities.map.is_empty() { return res; @@ -719,11 +753,11 @@ impl Network for Bitcoin { fn check_block( eventualities: &mut EventualitiesTracker, block: &Block, - res: &mut HashMap<[u8; 32], (usize, Transaction)>, + res: &mut HashMap<[u8; 32], (usize, [u8; 32], Transaction)>, ) { for tx in &block.txdata[1 ..] { if let Some((plan, _)) = eventualities.map.remove(tx.id().as_slice()) { - res.insert(plan, (eventualities.block_number, tx.clone())); + res.insert(plan, (eventualities.block_number, tx.id(), tx.clone())); } } @@ -770,7 +804,6 @@ impl Network for Bitcoin { async fn needed_fee( &self, block_number: usize, - _: &[u8; 32], inputs: &[Output], payments: &[Payment], change: &Option
, @@ -787,9 +820,11 @@ impl Network for Bitcoin { &self, block_number: usize, plan_id: &[u8; 32], + _key: ProjectivePoint, inputs: &[Output], payments: &[Payment], change: &Option
, + (): &(), ) -> Result, NetworkError> { Ok(self.make_signable_transaction(block_number, inputs, payments, change, false).await?.map( |signable| { @@ -803,7 +838,7 @@ impl Network for Bitcoin { )) } - async fn attempt_send( + async fn attempt_sign( &self, keys: ThresholdKeys, transaction: Self::SignableTransaction, @@ -817,7 +852,7 @@ impl Network for Bitcoin { ) } - async fn publish_transaction(&self, tx: &Self::Transaction) -> Result<(), NetworkError> { + async fn publish_completion(&self, tx: &Transaction) -> Result<(), NetworkError> { match self.rpc.send_raw_transaction(tx).await { Ok(_) => (), Err(RpcError::ConnectionError) => Err(NetworkError::ConnectionError)?, @@ -828,12 +863,14 @@ impl Network for Bitcoin { Ok(()) } - async fn get_transaction(&self, id: &[u8; 32]) -> Result { - self.rpc.get_transaction(id).await.map_err(|_| NetworkError::ConnectionError) - } - - fn confirm_completion(&self, eventuality: &Self::Eventuality, tx: &Transaction) -> bool { - eventuality.0 == tx.id() + async fn confirm_completion( + &self, + eventuality: &Self::Eventuality, + _: &EmptyClaim, + ) -> Result, NetworkError> { + Ok(Some( + self.rpc.get_transaction(&eventuality.0).await.map_err(|_| NetworkError::ConnectionError)?, + )) } #[cfg(test)] @@ -841,6 +878,20 @@ impl Network for Bitcoin { self.rpc.get_block_number(id).await.unwrap() } + #[cfg(test)] + async fn check_eventuality_by_claim( + &self, + eventuality: &Self::Eventuality, + _: &EmptyClaim, + ) -> bool { + self.rpc.get_transaction(&eventuality.0).await.is_ok() + } + + #[cfg(test)] + async fn get_transaction_by_eventuality(&self, _: usize, id: &Eventuality) -> Transaction { + self.rpc.get_transaction(&id.0).await.unwrap() + } + #[cfg(test)] async fn mine_block(&self) { self @@ -892,3 +943,7 @@ impl Network for Bitcoin { self.get_block(block).await.unwrap() } } + +impl UtxoNetwork for Bitcoin { + const MAX_INPUTS: usize = MAX_INPUTS; +} diff --git a/processor/src/networks/ethereum.rs b/processor/src/networks/ethereum.rs 
new file mode 100644 index 00000000..36051980 --- /dev/null +++ b/processor/src/networks/ethereum.rs @@ -0,0 +1,827 @@ +use core::{fmt::Debug, time::Duration}; +use std::{ + sync::Arc, + collections::{HashSet, HashMap}, + io, +}; + +use async_trait::async_trait; + +use ciphersuite::{group::GroupEncoding, Ciphersuite, Secp256k1}; +use frost::ThresholdKeys; + +use ethereum_serai::{ + alloy_core::primitives::U256, + alloy_rpc_types::{BlockNumberOrTag, Transaction}, + alloy_simple_request_transport::SimpleRequest, + alloy_rpc_client::ClientBuilder, + alloy_provider::{Provider, RootProvider}, + crypto::{PublicKey, Signature}, + deployer::Deployer, + router::{Router, Coin as EthereumCoin, InInstruction as EthereumInInstruction}, + machine::*, +}; +#[cfg(test)] +use ethereum_serai::alloy_core::primitives::B256; + +use tokio::{ + time::sleep, + sync::{RwLock, RwLockReadGuard}, +}; + +use serai_client::{ + primitives::{Coin, Amount, Balance, NetworkId}, + validator_sets::primitives::Session, +}; + +use crate::{ + Db, Payment, + networks::{ + OutputType, Output, Transaction as TransactionTrait, SignableTransaction, Block, + Eventuality as EventualityTrait, EventualitiesTracker, NetworkError, Network, + }, + key_gen::NetworkKeyDb, + multisigs::scheduler::{ + Scheduler as SchedulerTrait, + smart_contract::{Addendum, Scheduler}, + }, +}; + +#[cfg(not(test))] +const DAI: [u8; 20] = + match const_hex::const_decode_to_array(b"0x6B175474E89094C44Da98b954EedeAC495271d0F") { + Ok(res) => res, + Err(_) => panic!("invalid non-test DAI hex address"), + }; +#[cfg(test)] // TODO +const DAI: [u8; 20] = + match const_hex::const_decode_to_array(b"0000000000000000000000000000000000000000") { + Ok(res) => res, + Err(_) => panic!("invalid test DAI hex address"), + }; + +fn coin_to_serai_coin(coin: &EthereumCoin) -> Option { + match coin { + EthereumCoin::Ether => Some(Coin::Ether), + EthereumCoin::Erc20(token) => { + if *token == DAI { + return Some(Coin::Dai); + } + None + } + } +} + +fn 
amount_to_serai_amount(coin: Coin, amount: U256) -> Amount { + assert_eq!(coin.network(), NetworkId::Ethereum); + assert_eq!(coin.decimals(), 8); + // Remove 10 decimals so we go from 18 decimals to 8 decimals + let divisor = U256::from(10_000_000_000u64); + // This is valid up to 184b, which is assumed for the coins allowed + Amount(u64::try_from(amount / divisor).unwrap()) +} + +fn balance_to_ethereum_amount(balance: Balance) -> U256 { + assert_eq!(balance.coin.network(), NetworkId::Ethereum); + assert_eq!(balance.coin.decimals(), 8); + // Restore 10 decimals so we go from 8 decimals to 18 decimals + let factor = U256::from(10_000_000_000u64); + U256::from(balance.amount.0) * factor +} + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub struct Address(pub [u8; 20]); +impl TryFrom> for Address { + type Error = (); + fn try_from(bytes: Vec) -> Result { + if bytes.len() != 20 { + Err(())?; + } + let mut res = [0; 20]; + res.copy_from_slice(&bytes); + Ok(Address(res)) + } +} +impl TryInto> for Address { + type Error = (); + fn try_into(self) -> Result, ()> { + Ok(self.0.to_vec()) + } +} +impl ToString for Address { + fn to_string(&self) -> String { + ethereum_serai::alloy_core::primitives::Address::from(self.0).to_string() + } +} + +impl SignableTransaction for RouterCommand { + fn fee(&self) -> u64 { + // Return a fee of 0 as we'll handle amortization on our end + 0 + } +} + +#[async_trait] +impl TransactionTrait> for Transaction { + type Id = [u8; 32]; + fn id(&self) -> Self::Id { + self.hash.0 + } + + #[cfg(test)] + async fn fee(&self, _network: &Ethereum) -> u64 { + // Return a fee of 0 as we'll handle amortization on our end + 0 + } +} + +// We use 32-block Epochs to represent blocks. +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub struct Epoch { + // The hash of the block which ended the prior Epoch. + prior_end_hash: [u8; 32], + // The first block number within this Epoch. + start: u64, + // The hash of the last block within this Epoch. 
+ end_hash: [u8; 32], + // The monotonic time for this Epoch. + time: u64, +} + +impl Epoch { + fn end(&self) -> u64 { + self.start + 31 + } +} + +#[async_trait] +impl Block> for Epoch { + type Id = [u8; 32]; + fn id(&self) -> [u8; 32] { + self.end_hash + } + fn parent(&self) -> [u8; 32] { + self.prior_end_hash + } + async fn time(&self, _: &Ethereum) -> u64 { + self.time + } +} + +impl Output> for EthereumInInstruction { + type Id = [u8; 32]; + + fn kind(&self) -> OutputType { + OutputType::External + } + + fn id(&self) -> Self::Id { + let mut id = [0; 40]; + id[.. 32].copy_from_slice(&self.id.0); + id[32 ..].copy_from_slice(&self.id.1.to_le_bytes()); + *ethereum_serai::alloy_core::primitives::keccak256(id) + } + fn tx_id(&self) -> [u8; 32] { + self.id.0 + } + fn key(&self) -> ::G { + self.key_at_end_of_block + } + + fn presumed_origin(&self) -> Option
{ + Some(Address(self.from)) + } + + fn balance(&self) -> Balance { + let coin = coin_to_serai_coin(&self.coin).unwrap_or_else(|| { + panic!( + "requesting coin for an EthereumInInstruction with a coin {}", + "we don't handle. this never should have been yielded" + ) + }); + Balance { coin, amount: amount_to_serai_amount(coin, self.amount) } + } + fn data(&self) -> &[u8] { + &self.data + } + + fn write(&self, writer: &mut W) -> io::Result<()> { + EthereumInInstruction::write(self, writer) + } + fn read(reader: &mut R) -> io::Result { + EthereumInInstruction::read(reader) + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Claim { + signature: [u8; 64], +} +impl AsRef<[u8]> for Claim { + fn as_ref(&self) -> &[u8] { + &self.signature + } +} +impl AsMut<[u8]> for Claim { + fn as_mut(&mut self) -> &mut [u8] { + &mut self.signature + } +} +impl Default for Claim { + fn default() -> Self { + Self { signature: [0; 64] } + } +} +impl From<&Signature> for Claim { + fn from(sig: &Signature) -> Self { + Self { signature: sig.to_bytes() } + } +} + +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct Eventuality(PublicKey, RouterCommand); +impl EventualityTrait for Eventuality { + type Claim = Claim; + type Completion = SignedRouterCommand; + + fn lookup(&self) -> Vec { + match self.1 { + RouterCommand::UpdateSeraiKey { nonce, .. } | RouterCommand::Execute { nonce, .. 
} => { + nonce.as_le_bytes().to_vec() + } + } + } + + fn read(reader: &mut R) -> io::Result { + let point = Secp256k1::read_G(reader)?; + let command = RouterCommand::read(reader)?; + Ok(Eventuality( + PublicKey::new(point).ok_or(io::Error::other("unusable key within Eventuality"))?, + command, + )) + } + fn serialize(&self) -> Vec { + let mut res = vec![]; + res.extend(self.0.point().to_bytes().as_slice()); + self.1.write(&mut res).unwrap(); + res + } + + fn claim(completion: &Self::Completion) -> Self::Claim { + Claim::from(completion.signature()) + } + fn serialize_completion(completion: &Self::Completion) -> Vec { + let mut res = vec![]; + completion.write(&mut res).unwrap(); + res + } + fn read_completion(reader: &mut R) -> io::Result { + SignedRouterCommand::read(reader) + } +} + +#[derive(Clone, Debug)] +pub struct Ethereum { + // This DB is solely used to access the first key generated, as needed to determine the Router's + // address. Accordingly, all methods present are consistent to a Serai chain with a finalized + // first key (regardless of local state), and this is safe. 
+ db: D, + provider: Arc>, + deployer: Deployer, + router: Arc>>, +} +impl PartialEq for Ethereum { + fn eq(&self, _other: &Ethereum) -> bool { + true + } +} +impl Ethereum { + pub async fn new(db: D, url: String) -> Self { + let provider = Arc::new(RootProvider::new( + ClientBuilder::default().transport(SimpleRequest::new(url), true), + )); + + #[cfg(test)] // TODO: Move to test code + provider.raw_request::<_, ()>("evm_setAutomine".into(), false).await.unwrap(); + + let mut deployer = Deployer::new(provider.clone()).await; + while !matches!(deployer, Ok(Some(_))) { + log::error!("Deployer wasn't deployed yet or networking error"); + sleep(Duration::from_secs(5)).await; + deployer = Deployer::new(provider.clone()).await; + } + let deployer = deployer.unwrap().unwrap(); + + Ethereum { db, provider, deployer, router: Arc::new(RwLock::new(None)) } + } + + // Obtain a reference to the Router, sleeping until it's deployed if it hasn't already been. + // This is guaranteed to return Some. + pub async fn router(&self) -> RwLockReadGuard<'_, Option> { + // If we've already instantiated the Router, return a read reference + { + let router = self.router.read().await; + if router.is_some() { + return router; + } + } + + // Instantiate it + let mut router = self.router.write().await; + // If another attempt beat us to it, return + if router.is_some() { + drop(router); + return self.router.read().await; + } + + // Get the first key from the DB + let first_key = + NetworkKeyDb::get(&self.db, Session(0)).expect("getting outputs before confirming a key"); + let key = Secp256k1::read_G(&mut first_key.as_slice()).unwrap(); + let public_key = PublicKey::new(key).unwrap(); + + // Find the router + let mut found = self.deployer.find_router(self.provider.clone(), &public_key).await; + while !matches!(found, Ok(Some(_))) { + log::error!("Router wasn't deployed yet or networking error"); + sleep(Duration::from_secs(5)).await; + found = self.deployer.find_router(self.provider.clone(), 
&public_key).await; + } + + // Set it + *router = Some(found.unwrap().unwrap()); + + // Downgrade to a read lock + // Explicitly doesn't use `downgrade` so that another pending write txn can realize it's no + // longer necessary + drop(router); + self.router.read().await + } +} + +#[async_trait] +impl Network for Ethereum { + type Curve = Secp256k1; + + type Transaction = Transaction; + type Block = Epoch; + + type Output = EthereumInInstruction; + type SignableTransaction = RouterCommand; + type Eventuality = Eventuality; + type TransactionMachine = RouterCommandMachine; + + type Scheduler = Scheduler; + + type Address = Address; + + const NETWORK: NetworkId = NetworkId::Ethereum; + const ID: &'static str = "Ethereum"; + const ESTIMATED_BLOCK_TIME_IN_SECONDS: usize = 32 * 12; + const CONFIRMATIONS: usize = 1; + + const DUST: u64 = 0; // TODO + + const COST_TO_AGGREGATE: u64 = 0; + + // TODO: usize::max, with a merkle tree in the router + const MAX_OUTPUTS: usize = 256; + + fn tweak_keys(keys: &mut ThresholdKeys) { + while PublicKey::new(keys.group_key()).is_none() { + *keys = keys.offset(::F::ONE); + } + } + + #[cfg(test)] + async fn external_address(&self, _key: ::G) -> Address { + Address(self.router().await.as_ref().unwrap().address()) + } + + fn branch_address(_key: ::G) -> Option
{ + None + } + + fn change_address(_key: ::G) -> Option
{ + None + } + + fn forward_address(_key: ::G) -> Option
{ + None + } + + async fn get_latest_block_number(&self) -> Result { + let actual_number = self + .provider + .get_block(BlockNumberOrTag::Finalized.into(), false) + .await + .map_err(|_| NetworkError::ConnectionError)? + .expect("no blocks were finalized") + .header + .number + .unwrap(); + // Error if there hasn't been a full epoch yet + if actual_number < 32 { + Err(NetworkError::ConnectionError)? + } + // If this is 33, the division will return 1, yet 1 is the epoch in progress + let latest_full_epoch = (actual_number / 32).saturating_sub(1); + Ok(latest_full_epoch.try_into().unwrap()) + } + + async fn get_block(&self, number: usize) -> Result { + let latest_finalized = self.get_latest_block_number().await?; + if number > latest_finalized { + Err(NetworkError::ConnectionError)? + } + + let start = number * 32; + let prior_end_hash = if start == 0 { + [0; 32] + } else { + self + .provider + .get_block(u64::try_from(start - 1).unwrap().into(), false) + .await + .ok() + .flatten() + .ok_or(NetworkError::ConnectionError)? + .header + .hash + .unwrap() + .into() + }; + + let end_header = self + .provider + .get_block(u64::try_from(start + 31).unwrap().into(), false) + .await + .ok() + .flatten() + .ok_or(NetworkError::ConnectionError)? + .header; + + let end_hash = end_header.hash.unwrap().into(); + let time = end_header.timestamp; + + Ok(Epoch { prior_end_hash, start: start.try_into().unwrap(), end_hash, time }) + } + + async fn get_outputs( + &self, + block: &Self::Block, + _: ::G, + ) -> Vec { + let router = self.router().await; + let router = router.as_ref().unwrap(); + + // TODO: Top-level transfers + + let mut all_events = vec![]; + for block in block.start .. 
(block.start + 32) { + let mut events = router.in_instructions(block, &HashSet::from([DAI])).await; + while let Err(e) = events { + log::error!("couldn't connect to Ethereum node for the Router's events: {e:?}"); + sleep(Duration::from_secs(5)).await; + events = router.in_instructions(block, &HashSet::from([DAI])).await; + } + all_events.extend(events.unwrap()); + } + + for event in &all_events { + assert!( + coin_to_serai_coin(&event.coin).is_some(), + "router yielded events for unrecognized coins" + ); + } + all_events + } + + async fn get_eventuality_completions( + &self, + eventualities: &mut EventualitiesTracker, + block: &Self::Block, + ) -> HashMap< + [u8; 32], + ( + usize, + >::Id, + ::Completion, + ), + > { + let mut res = HashMap::new(); + if eventualities.map.is_empty() { + return res; + } + + let router = self.router().await; + let router = router.as_ref().unwrap(); + + let past_scanned_epoch = loop { + match self.get_block(eventualities.block_number).await { + Ok(block) => break block, + Err(e) => log::error!("couldn't get the last scanned block in the tracker: {}", e), + } + sleep(Duration::from_secs(10)).await; + }; + assert_eq!( + past_scanned_epoch.start / 32, + u64::try_from(eventualities.block_number).unwrap(), + "assumption of tracker block number's relation to epoch start is incorrect" + ); + + // Iterate from after the epoch number in the tracker to the end of this epoch + for block_num in (past_scanned_epoch.end() + 1) ..= block.end() { + let executed = loop { + match router.executed_commands(block_num).await { + Ok(executed) => break executed, + Err(e) => log::error!("couldn't get the executed commands in block {block_num}: {e}"), + } + sleep(Duration::from_secs(10)).await; + }; + + for executed in executed { + let lookup = executed.nonce.to_le_bytes().to_vec(); + if let Some((plan_id, eventuality)) = eventualities.map.get(&lookup) { + if let Some(command) = + SignedRouterCommand::new(&eventuality.0, eventuality.1.clone(), 
&executed.signature) + { + res.insert(*plan_id, (block_num.try_into().unwrap(), executed.tx_id, command)); + eventualities.map.remove(&lookup); + } + } + } + } + eventualities.block_number = (block.start / 32).try_into().unwrap(); + + res + } + + async fn needed_fee( + &self, + _block_number: usize, + inputs: &[Self::Output], + _payments: &[Payment], + _change: &Option, + ) -> Result, NetworkError> { + assert_eq!(inputs.len(), 0); + // Claim no fee is needed so we can perform amortization ourselves + Ok(Some(0)) + } + + async fn signable_transaction( + &self, + _block_number: usize, + _plan_id: &[u8; 32], + key: ::G, + inputs: &[Self::Output], + payments: &[Payment], + change: &Option, + scheduler_addendum: &>::Addendum, + ) -> Result, NetworkError> { + assert_eq!(inputs.len(), 0); + assert!(change.is_none()); + let chain_id = self.provider.get_chain_id().await.map_err(|_| NetworkError::ConnectionError)?; + + // TODO: Perform fee amortization (in scheduler? + // TODO: Make this function internal and have needed_fee properly return None as expected? + // TODO: signable_transaction is written as cannot return None if needed_fee returns Some + // TODO: Why can this return None at all if it isn't allowed to return None? + + let command = match scheduler_addendum { + Addendum::Nonce(nonce) => RouterCommand::Execute { + chain_id: U256::try_from(chain_id).unwrap(), + nonce: U256::try_from(*nonce).unwrap(), + outs: payments + .iter() + .filter_map(|payment| { + Some(OutInstruction { + target: if let Some(data) = payment.data.as_ref() { + // This introspects the Call serialization format, expecting the first 20 bytes to + // be the address + // This avoids wasting the 20-bytes allocated within address + let full_data = [payment.address.0.as_slice(), data].concat(); + let mut reader = full_data.as_slice(); + + let mut calls = vec![]; + while !reader.is_empty() { + calls.push(Call::read(&mut reader).ok()?) 
+ } + // The above must have executed at least once since reader contains the address + assert_eq!(calls[0].to, payment.address.0); + + OutInstructionTarget::Calls(calls) + } else { + OutInstructionTarget::Direct(payment.address.0) + }, + value: { + assert_eq!(payment.balance.coin, Coin::Ether); // TODO + balance_to_ethereum_amount(payment.balance) + }, + }) + }) + .collect(), + }, + Addendum::RotateTo { nonce, new_key } => { + assert!(payments.is_empty()); + RouterCommand::UpdateSeraiKey { + chain_id: U256::try_from(chain_id).unwrap(), + nonce: U256::try_from(*nonce).unwrap(), + key: PublicKey::new(*new_key).expect("new key wasn't a valid ETH public key"), + } + } + }; + Ok(Some(( + command.clone(), + Eventuality(PublicKey::new(key).expect("key wasn't a valid ETH public key"), command), + ))) + } + + async fn attempt_sign( + &self, + keys: ThresholdKeys, + transaction: Self::SignableTransaction, + ) -> Result { + Ok( + RouterCommandMachine::new(keys, transaction) + .expect("keys weren't usable to sign router commands"), + ) + } + + async fn publish_completion( + &self, + completion: &::Completion, + ) -> Result<(), NetworkError> { + // Publish this to the dedicated TX server for a solver to actually publish + #[cfg(not(test))] + { + let _ = completion; + todo!("TODO"); + } + + // Publish this using a dummy account we fund with magic RPC commands + #[cfg(test)] + { + use rand_core::OsRng; + use ciphersuite::group::ff::Field; + + let key = ::F::random(&mut OsRng); + let address = ethereum_serai::crypto::address(&(Secp256k1::generator() * key)); + + // Set a 1.1 ETH balance + self + .provider + .raw_request::<_, ()>( + "anvil_setBalance".into(), + [Address(address).to_string(), "1100000000000000000".into()], + ) + .await + .unwrap(); + + let router = self.router().await; + let router = router.as_ref().unwrap(); + + let mut tx = match completion.command() { + RouterCommand::UpdateSeraiKey { key, .. 
} => { + router.update_serai_key(key, completion.signature()) + } + RouterCommand::Execute { outs, .. } => router.execute( + &outs.iter().cloned().map(Into::into).collect::>(), + completion.signature(), + ), + }; + tx.gas_price = 100_000_000_000u128; + + use ethereum_serai::alloy_consensus::SignableTransaction; + let sig = + k256::ecdsa::SigningKey::from(k256::elliptic_curve::NonZeroScalar::new(key).unwrap()) + .sign_prehash_recoverable(tx.signature_hash().as_ref()) + .unwrap(); + + let mut bytes = vec![]; + tx.encode_with_signature_fields(&sig.into(), &mut bytes); + let _ = self.provider.send_raw_transaction(&bytes).await.ok().unwrap(); + + Ok(()) + } + } + + async fn confirm_completion( + &self, + eventuality: &Self::Eventuality, + claim: &::Claim, + ) -> Result::Completion>, NetworkError> { + Ok(SignedRouterCommand::new(&eventuality.0, eventuality.1.clone(), &claim.signature)) + } + + #[cfg(test)] + async fn get_block_number(&self, id: &>::Id) -> usize { + self + .provider + .get_block(B256::from(*id).into(), false) + .await + .unwrap() + .unwrap() + .header + .number + .unwrap() + .try_into() + .unwrap() + } + + #[cfg(test)] + async fn check_eventuality_by_claim( + &self, + eventuality: &Self::Eventuality, + claim: &::Claim, + ) -> bool { + SignedRouterCommand::new(&eventuality.0, eventuality.1.clone(), &claim.signature).is_some() + } + + #[cfg(test)] + async fn get_transaction_by_eventuality( + &self, + block: usize, + eventuality: &Self::Eventuality, + ) -> Self::Transaction { + match eventuality.1 { + RouterCommand::UpdateSeraiKey { nonce, .. } | RouterCommand::Execute { nonce, .. 
} => { + let router = self.router().await; + let router = router.as_ref().unwrap(); + + let block = u64::try_from(block).unwrap(); + let filter = router + .key_updated_filter() + .from_block(block * 32) + .to_block(((block + 1) * 32) - 1) + .topic1(nonce); + let logs = self.provider.get_logs(&filter).await.unwrap(); + if let Some(log) = logs.first() { + return self + .provider + .get_transaction_by_hash(log.clone().transaction_hash.unwrap()) + .await + .unwrap(); + }; + + let filter = router + .executed_filter() + .from_block(block * 32) + .to_block(((block + 1) * 32) - 1) + .topic1(nonce); + let logs = self.provider.get_logs(&filter).await.unwrap(); + self.provider.get_transaction_by_hash(logs[0].transaction_hash.unwrap()).await.unwrap() + } + } + } + + #[cfg(test)] + async fn mine_block(&self) { + self.provider.raw_request::<_, ()>("anvil_mine".into(), [32]).await.unwrap(); + } + + #[cfg(test)] + async fn test_send(&self, send_to: Self::Address) -> Self::Block { + use rand_core::OsRng; + use ciphersuite::group::ff::Field; + + let key = ::F::random(&mut OsRng); + let address = ethereum_serai::crypto::address(&(Secp256k1::generator() * key)); + + // Set a 1.1 ETH balance + self + .provider + .raw_request::<_, ()>( + "anvil_setBalance".into(), + [Address(address).to_string(), "1100000000000000000".into()], + ) + .await + .unwrap(); + + let tx = ethereum_serai::alloy_consensus::TxLegacy { + chain_id: None, + nonce: 0, + gas_price: 100_000_000_000u128, + gas_limit: 21_0000u128, + to: ethereum_serai::alloy_core::primitives::TxKind::Call(send_to.0.into()), + // 1 ETH + value: U256::from_str_radix("1000000000000000000", 10).unwrap(), + input: vec![].into(), + }; + + use ethereum_serai::alloy_consensus::SignableTransaction; + let sig = k256::ecdsa::SigningKey::from(k256::elliptic_curve::NonZeroScalar::new(key).unwrap()) + .sign_prehash_recoverable(tx.signature_hash().as_ref()) + .unwrap(); + + let mut bytes = vec![]; + tx.encode_with_signature_fields(&sig.into(), &mut 
bytes); + let pending_tx = self.provider.send_raw_transaction(&bytes).await.ok().unwrap(); + + // Mine an epoch containing this TX + self.mine_block().await; + assert!(pending_tx.get_receipt().await.unwrap().status()); + // Yield the freshly mined block + self.get_block(self.get_latest_block_number().await.unwrap()).await.unwrap() + } +} diff --git a/processor/src/networks/mod.rs b/processor/src/networks/mod.rs index d77d43f1..803ed40a 100644 --- a/processor/src/networks/mod.rs +++ b/processor/src/networks/mod.rs @@ -21,12 +21,17 @@ pub mod bitcoin; #[cfg(feature = "bitcoin")] pub use self::bitcoin::Bitcoin; +#[cfg(feature = "ethereum")] +pub mod ethereum; +#[cfg(feature = "ethereum")] +pub use ethereum::Ethereum; + #[cfg(feature = "monero")] pub mod monero; #[cfg(feature = "monero")] pub use monero::Monero; -use crate::{Payment, Plan}; +use crate::{Payment, Plan, multisigs::scheduler::Scheduler}; #[derive(Clone, Copy, Error, Debug)] pub enum NetworkError { @@ -105,7 +110,7 @@ pub trait Output: Send + Sync + Sized + Clone + PartialEq + Eq + Deb fn kind(&self) -> OutputType; fn id(&self) -> Self::Id; - fn tx_id(&self) -> >::Id; + fn tx_id(&self) -> >::Id; // TODO: Review use of fn key(&self) -> ::G; fn presumed_origin(&self) -> Option; @@ -118,25 +123,33 @@ pub trait Output: Send + Sync + Sized + Clone + PartialEq + Eq + Deb } #[async_trait] -pub trait Transaction: Send + Sync + Sized + Clone + Debug { +pub trait Transaction: Send + Sync + Sized + Clone + PartialEq + Debug { type Id: 'static + Id; fn id(&self) -> Self::Id; - fn serialize(&self) -> Vec; - fn read(reader: &mut R) -> io::Result; - + // TODO: Move to Balance #[cfg(test)] async fn fee(&self, network: &N) -> u64; } pub trait SignableTransaction: Send + Sync + Clone + Debug { + // TODO: Move to Balance fn fee(&self) -> u64; } -pub trait Eventuality: Send + Sync + Clone + Debug { +pub trait Eventuality: Send + Sync + Clone + PartialEq + Debug { + type Claim: Send + Sync + Clone + PartialEq + Default + 
AsRef<[u8]> + AsMut<[u8]> + Debug; + type Completion: Send + Sync + Clone + PartialEq + Debug; + fn lookup(&self) -> Vec; fn read(reader: &mut R) -> io::Result; fn serialize(&self) -> Vec; + + fn claim(completion: &Self::Completion) -> Self::Claim; + + // TODO: Make a dedicated Completion trait + fn serialize_completion(completion: &Self::Completion) -> Vec; + fn read_completion(reader: &mut R) -> io::Result; } #[derive(Clone, PartialEq, Eq, Debug)] @@ -211,7 +224,7 @@ fn drop_branches( ) -> Vec { let mut branch_outputs = vec![]; for payment in payments { - if payment.address == N::branch_address(key) { + if Some(&payment.address) == N::branch_address(key).as_ref() { branch_outputs.push(PostFeeBranch { expected: payment.balance.amount.0, actual: None }); } } @@ -227,12 +240,12 @@ pub struct PreparedSend { } #[async_trait] -pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { +pub trait Network: 'static + Send + Sync + Clone + PartialEq + Debug { /// The elliptic curve used for this network. type Curve: Curve; /// The type representing the transaction for this network. - type Transaction: Transaction; + type Transaction: Transaction; // TODO: Review use of /// The type representing the block for this network. type Block: Block; @@ -246,7 +259,12 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { /// This must be binding to both the outputs expected and the plan ID. type Eventuality: Eventuality; /// The FROST machine to sign a transaction. - type TransactionMachine: PreprocessMachine; + type TransactionMachine: PreprocessMachine< + Signature = ::Completion, + >; + + /// The scheduler for this network. + type Scheduler: Scheduler; /// The type representing an address. 
// This should NOT be a String, yet a tailored type representing an efficient binary encoding, @@ -269,10 +287,6 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { const ESTIMATED_BLOCK_TIME_IN_SECONDS: usize; /// The amount of confirmations required to consider a block 'final'. const CONFIRMATIONS: usize; - /// The maximum amount of inputs which will fit in a TX. - /// This should be equal to MAX_OUTPUTS unless one is specifically limited. - /// A TX with MAX_INPUTS and MAX_OUTPUTS must not exceed the max size. - const MAX_INPUTS: usize; /// The maximum amount of outputs which will fit in a TX. /// This should be equal to MAX_INPUTS unless one is specifically limited. /// A TX with MAX_INPUTS and MAX_OUTPUTS must not exceed the max size. @@ -293,13 +307,16 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { fn tweak_keys(key: &mut ThresholdKeys); /// Address for the given group key to receive external coins to. - fn external_address(key: ::G) -> Self::Address; + #[cfg(test)] + async fn external_address(&self, key: ::G) -> Self::Address; /// Address for the given group key to use for scheduled branches. - fn branch_address(key: ::G) -> Self::Address; + fn branch_address(key: ::G) -> Option; /// Address for the given group key to use for change. - fn change_address(key: ::G) -> Self::Address; + fn change_address(key: ::G) -> Option; /// Address for forwarded outputs from prior multisigs. - fn forward_address(key: ::G) -> Self::Address; + /// + /// forward_address must only return None if explicit forwarding isn't necessary. + fn forward_address(key: ::G) -> Option; /// Get the latest block's number. async fn get_latest_block_number(&self) -> Result; @@ -349,13 +366,24 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { /// registered eventualities may have been completed in. /// /// This may panic if not fed a block greater than the tracker's block number. 
+ /// + /// Plan ID -> (block number, TX ID, completion) // TODO: get_eventuality_completions_internal + provided get_eventuality_completions for common // code + // TODO: Consider having this return the Transaction + the Completion? + // Or Transaction with extract_completion? async fn get_eventuality_completions( &self, eventualities: &mut EventualitiesTracker, block: &Self::Block, - ) -> HashMap<[u8; 32], (usize, Self::Transaction)>; + ) -> HashMap< + [u8; 32], + ( + usize, + >::Id, + ::Completion, + ), + >; /// Returns the needed fee to fulfill this Plan at this fee rate. /// @@ -363,7 +391,6 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { async fn needed_fee( &self, block_number: usize, - plan_id: &[u8; 32], inputs: &[Self::Output], payments: &[Payment], change: &Option, @@ -375,16 +402,25 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { /// 1) Call needed_fee /// 2) If the Plan is fulfillable, amortize the fee /// 3) Call signable_transaction *which MUST NOT return None if the above was done properly* + /// + /// This takes a destructured Plan as some of these arguments are malleated from the original + /// Plan. + // TODO: Explicit AmortizedPlan? + #[allow(clippy::too_many_arguments)] async fn signable_transaction( &self, block_number: usize, plan_id: &[u8; 32], + key: ::G, inputs: &[Self::Output], payments: &[Payment], change: &Option, + scheduler_addendum: &>::Addendum, ) -> Result, NetworkError>; /// Prepare a SignableTransaction for a transaction. + /// + /// This must not persist anything as we will prepare Plans we never intend to execute. 
async fn prepare_send( &self, block_number: usize, @@ -395,13 +431,12 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { assert!((!plan.payments.is_empty()) || plan.change.is_some()); let plan_id = plan.id(); - let Plan { key, inputs, mut payments, change } = plan; + let Plan { key, inputs, mut payments, change, scheduler_addendum } = plan; let theoretical_change_amount = inputs.iter().map(|input| input.balance().amount.0).sum::() - payments.iter().map(|payment| payment.balance.amount.0).sum::(); - let Some(tx_fee) = self.needed_fee(block_number, &plan_id, &inputs, &payments, &change).await? - else { + let Some(tx_fee) = self.needed_fee(block_number, &inputs, &payments, &change).await? else { // This Plan is not fulfillable // TODO: Have Plan explicitly distinguish payments and branches in two separate Vecs? return Ok(PreparedSend { @@ -466,7 +501,7 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { // Note the branch outputs' new values let mut branch_outputs = vec![]; for (initial_amount, payment) in initial_payment_amounts.into_iter().zip(&payments) { - if payment.address == Self::branch_address(key) { + if Some(&payment.address) == Self::branch_address(key).as_ref() { branch_outputs.push(PostFeeBranch { expected: initial_amount, actual: if payment.balance.amount.0 == 0 { @@ -508,11 +543,20 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { ) })(); - let Some(tx) = - self.signable_transaction(block_number, &plan_id, &inputs, &payments, &change).await? + let Some(tx) = self + .signable_transaction( + block_number, + &plan_id, + key, + &inputs, + &payments, + &change, + &scheduler_addendum, + ) + .await? else { panic!( - "{}. {}: {}, {}: {:?}, {}: {:?}, {}: {:?}, {}: {}", + "{}. 
{}: {}, {}: {:?}, {}: {:?}, {}: {:?}, {}: {}, {}: {:?}", "signable_transaction returned None for a TX we prior successfully calculated the fee for", "id", hex::encode(plan_id), @@ -524,6 +568,8 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { change, "successfully amoritized fee", tx_fee, + "scheduler's addendum", + scheduler_addendum, ) }; @@ -546,31 +592,49 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { } /// Attempt to sign a SignableTransaction. - async fn attempt_send( + async fn attempt_sign( &self, keys: ThresholdKeys, transaction: Self::SignableTransaction, ) -> Result; - /// Publish a transaction. - async fn publish_transaction(&self, tx: &Self::Transaction) -> Result<(), NetworkError>; - - /// Get a transaction by its ID. - async fn get_transaction( + /// Publish a completion. + async fn publish_completion( &self, - id: &>::Id, - ) -> Result; + completion: &::Completion, + ) -> Result<(), NetworkError>; - /// Confirm a plan was completed by the specified transaction. - // This is allowed to take shortcuts. - // This may assume an honest multisig, solely checking the inputs specified were spent. - // This may solely check the outputs are equivalent *so long as it's locked to the plan ID*. - fn confirm_completion(&self, eventuality: &Self::Eventuality, tx: &Self::Transaction) -> bool; + /// Confirm a plan was completed by the specified transaction, per our bounds. + /// + /// Returns Err if there was an error with the confirmation methodology. + /// Returns Ok(None) if this is not a valid completion. + /// Returns Ok(Some(_)) with the completion if it's valid. + async fn confirm_completion( + &self, + eventuality: &Self::Eventuality, + claim: &::Claim, + ) -> Result::Completion>, NetworkError>; /// Get a block's number by its ID. #[cfg(test)] async fn get_block_number(&self, id: &>::Id) -> usize; + /// Check an Eventuality is fulfilled by a claim. 
+ #[cfg(test)] + async fn check_eventuality_by_claim( + &self, + eventuality: &Self::Eventuality, + claim: &::Claim, + ) -> bool; + + /// Get a transaction by the Eventuality it completes. + #[cfg(test)] + async fn get_transaction_by_eventuality( + &self, + block: usize, + eventuality: &Self::Eventuality, + ) -> Self::Transaction; + #[cfg(test)] async fn mine_block(&self); @@ -579,3 +643,10 @@ pub trait Network: 'static + Send + Sync + Clone + PartialEq + Eq + Debug { #[cfg(test)] async fn test_send(&self, key: Self::Address) -> Self::Block; } + +pub trait UtxoNetwork: Network { + /// The maximum amount of inputs which will fit in a TX. + /// This should be equal to MAX_OUTPUTS unless one is specifically limited. + /// A TX with MAX_INPUTS and MAX_OUTPUTS must not exceed the max size. + const MAX_INPUTS: usize; +} diff --git a/processor/src/networks/monero.rs b/processor/src/networks/monero.rs index 8d58ee1a..8d4d1760 100644 --- a/processor/src/networks/monero.rs +++ b/processor/src/networks/monero.rs @@ -39,8 +39,9 @@ use crate::{ networks::{ NetworkError, Block as BlockTrait, OutputType, Output as OutputTrait, Transaction as TransactionTrait, SignableTransaction as SignableTransactionTrait, - Eventuality as EventualityTrait, EventualitiesTracker, Network, + Eventuality as EventualityTrait, EventualitiesTracker, Network, UtxoNetwork, }, + multisigs::scheduler::utxo::Scheduler, }; #[derive(Clone, PartialEq, Eq, Debug)] @@ -117,12 +118,6 @@ impl TransactionTrait for Transaction { fn id(&self) -> Self::Id { self.hash() } - fn serialize(&self) -> Vec { - self.serialize() - } - fn read(reader: &mut R) -> io::Result { - Transaction::read(reader) - } #[cfg(test)] async fn fee(&self, _: &Monero) -> u64 { @@ -131,6 +126,9 @@ impl TransactionTrait for Transaction { } impl EventualityTrait for Eventuality { + type Claim = [u8; 32]; + type Completion = Transaction; + // Use the TX extra to look up potential matches // While anyone can forge this, a transaction with distinct 
outputs won't actually match // Extra includess the one time keys which are derived from the plan ID, so a collision here is a @@ -145,6 +143,16 @@ impl EventualityTrait for Eventuality { fn serialize(&self) -> Vec { self.serialize() } + + fn claim(tx: &Transaction) -> [u8; 32] { + tx.id() + } + fn serialize_completion(completion: &Transaction) -> Vec { + completion.serialize() + } + fn read_completion(reader: &mut R) -> io::Result { + Transaction::read(reader) + } } #[derive(Clone, Debug)] @@ -274,7 +282,8 @@ impl Monero { async fn median_fee(&self, block: &Block) -> Result { let mut fees = vec![]; for tx_hash in &block.txs { - let tx = self.get_transaction(tx_hash).await?; + let tx = + self.rpc.get_transaction(*tx_hash).await.map_err(|_| NetworkError::ConnectionError)?; // Only consider fees from RCT transactions, else the fee property read wouldn't be accurate if tx.rct_signatures.rct_type() != RctType::Null { continue; @@ -454,6 +463,8 @@ impl Network for Monero { type Eventuality = Eventuality; type TransactionMachine = TransactionMachine; + type Scheduler = Scheduler; + type Address = Address; const NETWORK: NetworkId = NetworkId::Monero; @@ -461,11 +472,6 @@ impl Network for Monero { const ESTIMATED_BLOCK_TIME_IN_SECONDS: usize = 120; const CONFIRMATIONS: usize = 10; - // wallet2 will not create a transaction larger than 100kb, and Monero won't relay a transaction - // larger than 150kb. 
This fits within the 100kb mark - // Technically, it can be ~124, yet a small bit of buffer is appreciated - // TODO: Test creating a TX this big - const MAX_INPUTS: usize = 120; const MAX_OUTPUTS: usize = 16; // 0.01 XMR @@ -478,20 +484,21 @@ impl Network for Monero { // Monero doesn't require/benefit from tweaking fn tweak_keys(_: &mut ThresholdKeys) {} - fn external_address(key: EdwardsPoint) -> Address { + #[cfg(test)] + async fn external_address(&self, key: EdwardsPoint) -> Address { Self::address_internal(key, EXTERNAL_SUBADDRESS) } - fn branch_address(key: EdwardsPoint) -> Address { - Self::address_internal(key, BRANCH_SUBADDRESS) + fn branch_address(key: EdwardsPoint) -> Option
{ + Some(Self::address_internal(key, BRANCH_SUBADDRESS)) } - fn change_address(key: EdwardsPoint) -> Address { - Self::address_internal(key, CHANGE_SUBADDRESS) + fn change_address(key: EdwardsPoint) -> Option
{ + Some(Self::address_internal(key, CHANGE_SUBADDRESS)) } - fn forward_address(key: EdwardsPoint) -> Address { - Self::address_internal(key, FORWARD_SUBADDRESS) + fn forward_address(key: EdwardsPoint) -> Option
{ + Some(Self::address_internal(key, FORWARD_SUBADDRESS)) } async fn get_latest_block_number(&self) -> Result { @@ -558,7 +565,7 @@ impl Network for Monero { &self, eventualities: &mut EventualitiesTracker, block: &Block, - ) -> HashMap<[u8; 32], (usize, Transaction)> { + ) -> HashMap<[u8; 32], (usize, [u8; 32], Transaction)> { let mut res = HashMap::new(); if eventualities.map.is_empty() { return res; @@ -568,13 +575,13 @@ impl Network for Monero { network: &Monero, eventualities: &mut EventualitiesTracker, block: &Block, - res: &mut HashMap<[u8; 32], (usize, Transaction)>, + res: &mut HashMap<[u8; 32], (usize, [u8; 32], Transaction)>, ) { for hash in &block.txs { let tx = { let mut tx; while { - tx = network.get_transaction(hash).await; + tx = network.rpc.get_transaction(*hash).await; tx.is_err() } { log::error!("couldn't get transaction {}: {}", hex::encode(hash), tx.err().unwrap()); @@ -587,7 +594,7 @@ impl Network for Monero { if eventuality.matches(&tx) { res.insert( eventualities.map.remove(&tx.prefix.extra).unwrap().0, - (usize::try_from(block.number().unwrap()).unwrap(), tx), + (usize::try_from(block.number().unwrap()).unwrap(), tx.id(), tx), ); } } @@ -625,14 +632,13 @@ impl Network for Monero { async fn needed_fee( &self, block_number: usize, - plan_id: &[u8; 32], inputs: &[Output], payments: &[Payment], change: &Option
, ) -> Result, NetworkError> { Ok( self - .make_signable_transaction(block_number, plan_id, inputs, payments, change, true) + .make_signable_transaction(block_number, &[0; 32], inputs, payments, change, true) .await? .map(|(_, signable)| signable.fee()), ) @@ -642,9 +648,11 @@ impl Network for Monero { &self, block_number: usize, plan_id: &[u8; 32], + _key: EdwardsPoint, inputs: &[Output], payments: &[Payment], change: &Option
, + (): &(), ) -> Result, NetworkError> { Ok( self @@ -658,7 +666,7 @@ impl Network for Monero { ) } - async fn attempt_send( + async fn attempt_sign( &self, keys: ThresholdKeys, transaction: SignableTransaction, @@ -669,7 +677,7 @@ impl Network for Monero { } } - async fn publish_transaction(&self, tx: &Self::Transaction) -> Result<(), NetworkError> { + async fn publish_completion(&self, tx: &Transaction) -> Result<(), NetworkError> { match self.rpc.publish_transaction(tx).await { Ok(()) => Ok(()), Err(RpcError::ConnectionError(e)) => { @@ -682,12 +690,17 @@ impl Network for Monero { } } - async fn get_transaction(&self, id: &[u8; 32]) -> Result { - self.rpc.get_transaction(*id).await.map_err(map_rpc_err) - } - - fn confirm_completion(&self, eventuality: &Eventuality, tx: &Transaction) -> bool { - eventuality.matches(tx) + async fn confirm_completion( + &self, + eventuality: &Eventuality, + id: &[u8; 32], + ) -> Result, NetworkError> { + let tx = self.rpc.get_transaction(*id).await.map_err(map_rpc_err)?; + if eventuality.matches(&tx) { + Ok(Some(tx)) + } else { + Ok(None) + } } #[cfg(test)] @@ -695,6 +708,31 @@ impl Network for Monero { self.rpc.get_block(*id).await.unwrap().number().unwrap().try_into().unwrap() } + #[cfg(test)] + async fn check_eventuality_by_claim( + &self, + eventuality: &Self::Eventuality, + claim: &[u8; 32], + ) -> bool { + return eventuality.matches(&self.rpc.get_transaction(*claim).await.unwrap()); + } + + #[cfg(test)] + async fn get_transaction_by_eventuality( + &self, + block: usize, + eventuality: &Eventuality, + ) -> Transaction { + let block = self.rpc.get_block_by_number(block).await.unwrap(); + for tx in &block.txs { + let tx = self.rpc.get_transaction(*tx).await.unwrap(); + if eventuality.matches(&tx) { + return tx; + } + } + panic!("block didn't have a transaction for this eventuality") + } + #[cfg(test)] async fn mine_block(&self) { // https://github.com/serai-dex/serai/issues/198 @@ -775,3 +813,11 @@ impl Network for Monero { 
self.get_block(block).await.unwrap() } } + +impl UtxoNetwork for Monero { + // wallet2 will not create a transaction larger than 100kb, and Monero won't relay a transaction + // larger than 150kb. This fits within the 100kb mark + // Technically, it can be ~124, yet a small bit of buffer is appreciated + // TODO: Test creating a TX this big + const MAX_INPUTS: usize = 120; +} diff --git a/processor/src/plan.rs b/processor/src/plan.rs index 3e10c7d3..58a8a5e1 100644 --- a/processor/src/plan.rs +++ b/processor/src/plan.rs @@ -8,7 +8,10 @@ use frost::curve::Ciphersuite; use serai_client::primitives::Balance; -use crate::networks::{Output, Network}; +use crate::{ + networks::{Output, Network}, + multisigs::scheduler::{SchedulerAddendum, Scheduler}, +}; #[derive(Clone, PartialEq, Eq, Debug)] pub struct Payment { @@ -73,7 +76,7 @@ impl Payment { } } -#[derive(Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq)] pub struct Plan { pub key: ::G, pub inputs: Vec, @@ -90,7 +93,11 @@ pub struct Plan { /// This MUST contain a Serai address. Operating costs may be deducted from the payments in this /// Plan on the premise that the change address is Serai's, and accordingly, Serai will recoup /// the operating costs. + // + // TODO: Consider moving to ::G? pub change: Option, + /// The scheduler's additional data. 
+ pub scheduler_addendum: >::Addendum, } impl core::fmt::Debug for Plan { fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { @@ -100,6 +107,7 @@ impl core::fmt::Debug for Plan { .field("inputs", &self.inputs) .field("payments", &self.payments) .field("change", &self.change.as_ref().map(ToString::to_string)) + .field("scheduler_addendum", &self.scheduler_addendum) .finish() } } @@ -125,6 +133,10 @@ impl Plan { transcript.append_message(b"change", change.to_string()); } + let mut addendum_bytes = vec![]; + self.scheduler_addendum.write(&mut addendum_bytes).unwrap(); + transcript.append_message(b"scheduler_addendum", addendum_bytes); + transcript } @@ -161,7 +173,8 @@ impl Plan { }; assert!(serai_client::primitives::MAX_ADDRESS_LEN <= u8::MAX.into()); writer.write_all(&[u8::try_from(change.len()).unwrap()])?; - writer.write_all(&change) + writer.write_all(&change)?; + self.scheduler_addendum.write(writer) } pub fn read(reader: &mut R) -> io::Result { @@ -193,6 +206,7 @@ impl Plan { })?) 
}; - Ok(Plan { key, inputs, payments, change }) + let scheduler_addendum = >::Addendum::read(reader)?; + Ok(Plan { key, inputs, payments, change, scheduler_addendum }) } } diff --git a/processor/src/signer.rs b/processor/src/signer.rs index 7a4fcbed..cab0bceb 100644 --- a/processor/src/signer.rs +++ b/processor/src/signer.rs @@ -2,7 +2,6 @@ use core::{marker::PhantomData, fmt}; use std::collections::HashMap; use rand_core::OsRng; -use ciphersuite::group::GroupEncoding; use frost::{ ThresholdKeys, FrostError, sign::{Writable, PreprocessMachine, SignMachine, SignatureMachine}, @@ -17,7 +16,7 @@ pub use serai_db::*; use crate::{ Get, DbTxn, Db, - networks::{Transaction, Eventuality, Network}, + networks::{Eventuality, Network}, }; create_db!( @@ -25,7 +24,7 @@ create_db!( CompletionsDb: (id: [u8; 32]) -> Vec, EventualityDb: (id: [u8; 32]) -> Vec, AttemptDb: (id: &SignId) -> (), - TransactionDb: (id: &[u8]) -> Vec, + CompletionDb: (claim: &[u8]) -> Vec, ActiveSignsDb: () -> Vec<[u8; 32]>, CompletedOnChainDb: (id: &[u8; 32]) -> (), } @@ -59,12 +58,20 @@ impl CompletionsDb { fn completions( getter: &impl Get, id: [u8; 32], - ) -> Vec<>::Id> { - let completions = Self::get(getter, id).unwrap_or_default(); + ) -> Vec<::Claim> { + let Some(completions) = Self::get(getter, id) else { return vec![] }; + + // If this was set yet is empty, it's because it's the encoding of a claim with a length of 0 + if completions.is_empty() { + let default = ::Claim::default(); + assert_eq!(default.as_ref().len(), 0); + return vec![default]; + } + let mut completions_ref = completions.as_slice(); let mut res = vec![]; while !completions_ref.is_empty() { - let mut id = >::Id::default(); + let mut id = ::Claim::default(); let id_len = id.as_ref().len(); id.as_mut().copy_from_slice(&completions_ref[.. 
id_len]); completions_ref = &completions_ref[id_len ..]; @@ -73,25 +80,37 @@ impl CompletionsDb { res } - fn complete(txn: &mut impl DbTxn, id: [u8; 32], tx: &N::Transaction) { - let tx_id = tx.id(); - // Transactions can be completed by multiple signatures + fn complete( + txn: &mut impl DbTxn, + id: [u8; 32], + completion: &::Completion, + ) { + // Completions can be completed by multiple signatures // Save every solution in order to be robust - TransactionDb::save_transaction::(txn, tx); - let mut existing = Self::get(txn, id).unwrap_or_default(); - // Don't add this TX if it's already present - let tx_len = tx_id.as_ref().len(); - assert_eq!(existing.len() % tx_len, 0); + CompletionDb::save_completion::(txn, completion); - let mut i = 0; - while i < existing.len() { - if &existing[i .. (i + tx_len)] == tx_id.as_ref() { - return; - } - i += tx_len; + let claim = N::Eventuality::claim(completion); + let claim: &[u8] = claim.as_ref(); + + // If claim has a 0-byte encoding, the set key, even if empty, is the claim + if claim.is_empty() { + Self::set(txn, id, &vec![]); + return; } - existing.extend(tx_id.as_ref()); + let mut existing = Self::get(txn, id).unwrap_or_default(); + assert_eq!(existing.len() % claim.len(), 0); + + // Don't add this completion if it's already present + let mut i = 0; + while i < existing.len() { + if &existing[i .. 
(i + claim.len())] == claim { + return; + } + i += claim.len(); + } + + existing.extend(claim); Self::set(txn, id, &existing); } } @@ -110,25 +129,33 @@ impl EventualityDb { } } -impl TransactionDb { - fn save_transaction(txn: &mut impl DbTxn, tx: &N::Transaction) { - Self::set(txn, tx.id().as_ref(), &tx.serialize()); +impl CompletionDb { + fn save_completion( + txn: &mut impl DbTxn, + completion: &::Completion, + ) { + let claim = N::Eventuality::claim(completion); + let claim: &[u8] = claim.as_ref(); + Self::set(txn, claim, &N::Eventuality::serialize_completion(completion)); } - fn transaction( + fn completion( getter: &impl Get, - id: &>::Id, - ) -> Option { - Self::get(getter, id.as_ref()).map(|tx| N::Transaction::read(&mut tx.as_slice()).unwrap()) + claim: &::Claim, + ) -> Option<::Completion> { + Self::get(getter, claim.as_ref()) + .map(|completion| N::Eventuality::read_completion::<&[u8]>(&mut completion.as_ref()).unwrap()) } } type PreprocessFor = <::TransactionMachine as PreprocessMachine>::Preprocess; type SignMachineFor = <::TransactionMachine as PreprocessMachine>::SignMachine; -type SignatureShareFor = - as SignMachine<::Transaction>>::SignatureShare; -type SignatureMachineFor = - as SignMachine<::Transaction>>::SignatureMachine; +type SignatureShareFor = as SignMachine< + <::Eventuality as Eventuality>::Completion, +>>::SignatureShare; +type SignatureMachineFor = as SignMachine< + <::Eventuality as Eventuality>::Completion, +>>::SignatureMachine; pub struct Signer { db: PhantomData, @@ -164,12 +191,11 @@ impl Signer { log::info!("rebroadcasting transactions for plans whose completions yet to be confirmed..."); loop { for active in ActiveSignsDb::get(&db).unwrap_or_default() { - for completion in CompletionsDb::completions::(&db, active) { - log::info!("rebroadcasting {}", hex::encode(&completion)); + for claim in CompletionsDb::completions::(&db, active) { + log::info!("rebroadcasting completion with claim {}", hex::encode(claim.as_ref())); // TODO: 
Don't drop the error entirely. Check for invariants - let _ = network - .publish_transaction(&TransactionDb::transaction::(&db, &completion).unwrap()) - .await; + let _ = + network.publish_completion(&CompletionDb::completion::(&db, &claim).unwrap()).await; } } // Only run every five minutes so we aren't frequently loading tens to hundreds of KB from @@ -242,7 +268,7 @@ impl Signer { fn complete( &mut self, id: [u8; 32], - tx_id: &>::Id, + claim: &::Claim, ) -> ProcessorMessage { // Assert we're actively signing for this TX assert!(self.signable.remove(&id).is_some(), "completed a TX we weren't signing for"); @@ -256,7 +282,7 @@ impl Signer { self.signing.remove(&id); // Emit the event for it - ProcessorMessage::Completed { session: self.session, id, tx: tx_id.as_ref().to_vec() } + ProcessorMessage::Completed { session: self.session, id, tx: claim.as_ref().to_vec() } } #[must_use] @@ -264,16 +290,16 @@ impl Signer { &mut self, txn: &mut D::Transaction<'_>, id: [u8; 32], - tx: &N::Transaction, + completion: &::Completion, ) -> Option { let first_completion = !Self::already_completed(txn, id); // Save this completion to the DB CompletedOnChainDb::complete_on_chain(txn, &id); - CompletionsDb::complete::(txn, id, tx); + CompletionsDb::complete::(txn, id, completion); if first_completion { - Some(self.complete(id, &tx.id())) + Some(self.complete(id, &N::Eventuality::claim(completion))) } else { None } @@ -286,49 +312,50 @@ impl Signer { &mut self, txn: &mut D::Transaction<'_>, id: [u8; 32], - tx_id: &>::Id, + claim: &::Claim, ) -> Option { if let Some(eventuality) = EventualityDb::eventuality::(txn, id) { - // Transaction hasn't hit our mempool/was dropped for a different signature - // The latter can happen given certain latency conditions/a single malicious signer - // In the case of a single malicious signer, they can drag multiple honest validators down - // with them, so we unfortunately can't slash on this case - let Ok(tx) = 
self.network.get_transaction(tx_id).await else { - warn!( - "a validator claimed {} completed {} yet we didn't have that TX in our mempool {}", - hex::encode(tx_id), - hex::encode(id), - "(or had another connectivity issue)", - ); - return None; - }; + match self.network.confirm_completion(&eventuality, claim).await { + Ok(Some(completion)) => { + info!( + "signer eventuality for {} resolved in {}", + hex::encode(id), + hex::encode(claim.as_ref()) + ); - if self.network.confirm_completion(&eventuality, &tx) { - info!("signer eventuality for {} resolved in TX {}", hex::encode(id), hex::encode(tx_id)); + let first_completion = !Self::already_completed(txn, id); - let first_completion = !Self::already_completed(txn, id); + // Save this completion to the DB + CompletionsDb::complete::(txn, id, &completion); - // Save this completion to the DB - CompletionsDb::complete::(txn, id, &tx); - - if first_completion { - return Some(self.complete(id, &tx.id())); + if first_completion { + return Some(self.complete(id, claim)); + } + } + Ok(None) => { + warn!( + "a validator claimed {} completed {} when it did not", + hex::encode(claim.as_ref()), + hex::encode(id), + ); + } + Err(_) => { + // Transaction hasn't hit our mempool/was dropped for a different signature + // The latter can happen given certain latency conditions/a single malicious signer + // In the case of a single malicious signer, they can drag multiple honest validators down + // with them, so we unfortunately can't slash on this case + warn!( + "a validator claimed {} completed {} yet we couldn't check that claim", + hex::encode(claim.as_ref()), + hex::encode(id), + ); } - } else { - warn!( - "a validator claimed {} completed {} when it did not", - hex::encode(tx_id), - hex::encode(id) - ); } } else { - // If we don't have this in RAM, it should be because we already finished signing it - assert!(!CompletionsDb::completions::(txn, id).is_empty()); - info!( - "signer {} informed of the eventuality completion for 
plan {}, {}", - hex::encode(self.keys[0].group_key().to_bytes()), + warn!( + "informed of completion {} for eventuality {}, when we didn't have that eventuality", + hex::encode(claim.as_ref()), hex::encode(id), - "which we already marked as completed", ); } None @@ -405,7 +432,7 @@ impl Signer { let mut preprocesses = vec![]; let mut serialized_preprocesses = vec![]; for keys in &self.keys { - let machine = match self.network.attempt_send(keys.clone(), tx.clone()).await { + let machine = match self.network.attempt_sign(keys.clone(), tx.clone()).await { Err(e) => { error!("failed to attempt {}, #{}: {:?}", hex::encode(id.id), id.attempt, e); return None; @@ -572,7 +599,7 @@ impl Signer { assert!(shares.insert(self.keys[i].params().i(), our_share).is_none()); } - let tx = match machine.complete(shares) { + let completion = match machine.complete(shares) { Ok(res) => res, Err(e) => match e { FrostError::InternalError(_) | @@ -588,40 +615,39 @@ impl Signer { }, }; - // Save the transaction in case it's needed for recovery - CompletionsDb::complete::(txn, id.id, &tx); + // Save the completion in case it's needed for recovery + CompletionsDb::complete::(txn, id.id, &completion); // Publish it - let tx_id = tx.id(); - if let Err(e) = self.network.publish_transaction(&tx).await { - error!("couldn't publish {:?}: {:?}", tx, e); + if let Err(e) = self.network.publish_completion(&completion).await { + error!("couldn't publish completion for plan {}: {:?}", hex::encode(id.id), e); } else { - info!("published {} for plan {}", hex::encode(&tx_id), hex::encode(id.id)); + info!("published completion for plan {}", hex::encode(id.id)); } // Stop trying to sign for this TX - Some(self.complete(id.id, &tx_id)) + Some(self.complete(id.id, &N::Eventuality::claim(&completion))) } CoordinatorMessage::Reattempt { id } => self.attempt(txn, id.id, id.attempt).await, - CoordinatorMessage::Completed { session: _, id, tx: mut tx_vec } => { - let mut tx = >::Id::default(); - if tx.as_ref().len() 
!= tx_vec.len() { - let true_len = tx_vec.len(); - tx_vec.truncate(2 * tx.as_ref().len()); + CoordinatorMessage::Completed { session: _, id, tx: mut claim_vec } => { + let mut claim = ::Claim::default(); + if claim.as_ref().len() != claim_vec.len() { + let true_len = claim_vec.len(); + claim_vec.truncate(2 * claim.as_ref().len()); warn!( "a validator claimed {}... (actual length {}) completed {} yet {}", - hex::encode(&tx_vec), + hex::encode(&claim_vec), true_len, hex::encode(id), - "that's not a valid TX ID", + "that's not a valid Claim", ); return None; } - tx.as_mut().copy_from_slice(&tx_vec); + claim.as_mut().copy_from_slice(&claim_vec); - self.claimed_eventuality_completion(txn, id, &tx).await + self.claimed_eventuality_completion(txn, id, &claim).await } } } diff --git a/processor/src/tests/addresses.rs b/processor/src/tests/addresses.rs index da20091b..8f730dbd 100644 --- a/processor/src/tests/addresses.rs +++ b/processor/src/tests/addresses.rs @@ -13,18 +13,23 @@ use serai_db::{DbTxn, MemDb}; use crate::{ Plan, Db, - networks::{OutputType, Output, Block, Network}, - multisigs::scanner::{ScannerEvent, Scanner, ScannerHandle}, + networks::{OutputType, Output, Block, UtxoNetwork}, + multisigs::{ + scheduler::Scheduler, + scanner::{ScannerEvent, Scanner, ScannerHandle}, + }, tests::sign, }; -async fn spend( +async fn spend( db: &mut D, network: &N, keys: &HashMap>, scanner: &mut ScannerHandle, outputs: Vec, -) { +) where + >::Addendum: From<()>, +{ let key = keys[&Participant::new(1).unwrap()].group_key(); let mut keys_txs = HashMap::new(); @@ -41,7 +46,8 @@ async fn spend( key, inputs: outputs.clone(), payments: vec![], - change: Some(N::change_address(key)), + change: Some(N::change_address(key).unwrap()), + scheduler_addendum: ().into(), }, 0, ) @@ -70,13 +76,16 @@ async fn spend( scanner.release_lock().await; txn.commit(); } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality 
completion"); } } } -pub async fn test_addresses(network: N) { +pub async fn test_addresses(network: N) +where + >::Addendum: From<()>, +{ let mut keys = frost::tests::key_gen::<_, N::Curve>(&mut OsRng); for keys in keys.values_mut() { N::tweak_keys(keys); @@ -101,10 +110,10 @@ pub async fn test_addresses(network: N) { // Receive funds to the various addresses and make sure they're properly identified let mut received_outputs = vec![]; for (kind, address) in [ - (OutputType::External, N::external_address(key)), - (OutputType::Branch, N::branch_address(key)), - (OutputType::Change, N::change_address(key)), - (OutputType::Forwarded, N::forward_address(key)), + (OutputType::External, N::external_address(&network, key).await), + (OutputType::Branch, N::branch_address(key).unwrap()), + (OutputType::Change, N::change_address(key).unwrap()), + (OutputType::Forwarded, N::forward_address(key).unwrap()), ] { let block_id = network.test_send(address).await.id(); @@ -123,7 +132,7 @@ pub async fn test_addresses(network: N) { txn.commit(); received_outputs.extend(outputs); } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } }; diff --git a/processor/src/tests/literal/mod.rs b/processor/src/tests/literal/mod.rs index 192214eb..e2bfdc8a 100644 --- a/processor/src/tests/literal/mod.rs +++ b/processor/src/tests/literal/mod.rs @@ -65,7 +65,7 @@ mod bitcoin { .unwrap(); ::tweak_keys(&mut keys); let group_key = keys.group_key(); - let serai_btc_address = ::external_address(group_key); + let serai_btc_address = ::external_address(&btc, group_key).await; // btc key pair to send from let private_key = PrivateKey::new(SecretKey::new(&mut rand_core::OsRng), BNetwork::Regtest); diff --git a/processor/src/tests/scanner.rs b/processor/src/tests/scanner.rs index 5aad5bb5..42756d8b 100644 --- a/processor/src/tests/scanner.rs +++ b/processor/src/tests/scanner.rs @@ -11,11 +11,11 @@ use tokio::{sync::Mutex, 
time::timeout}; use serai_db::{DbTxn, Db, MemDb}; use crate::{ - networks::{OutputType, Output, Block, Network}, + networks::{OutputType, Output, Block, UtxoNetwork}, multisigs::scanner::{ScannerEvent, Scanner, ScannerHandle}, }; -pub async fn new_scanner( +pub async fn new_scanner( network: &N, db: &D, group_key: ::G, @@ -40,7 +40,7 @@ pub async fn new_scanner( scanner } -pub async fn test_scanner(network: N) { +pub async fn test_scanner(network: N) { let mut keys = frost::tests::key_gen::<_, N::Curve>(&mut OsRng).remove(&Participant::new(1).unwrap()).unwrap(); N::tweak_keys(&mut keys); @@ -56,7 +56,7 @@ pub async fn test_scanner(network: N) { let scanner = new_scanner(&network, &db, group_key, &first).await; // Receive funds - let block = network.test_send(N::external_address(keys.group_key())).await; + let block = network.test_send(N::external_address(&network, keys.group_key()).await).await; let block_id = block.id(); // Verify the Scanner picked them up @@ -71,7 +71,7 @@ pub async fn test_scanner(network: N) { assert_eq!(outputs[0].kind(), OutputType::External); outputs } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } }; @@ -101,7 +101,7 @@ pub async fn test_scanner(network: N) { .is_err()); } -pub async fn test_no_deadlock_in_multisig_completed(network: N) { +pub async fn test_no_deadlock_in_multisig_completed(network: N) { // Mine blocks so there's a confirmed block for _ in 0 .. N::CONFIRMATIONS { network.mine_block().await; @@ -142,14 +142,14 @@ pub async fn test_no_deadlock_in_multisig_completed(network: N) { assert!(!is_retirement_block); block } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } }; match timeout(Duration::from_secs(30), scanner.events.recv()).await.unwrap().unwrap() { ScannerEvent::Block { .. 
} => {} - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } }; diff --git a/processor/src/tests/signer.rs b/processor/src/tests/signer.rs index 89d57bf3..524c5d29 100644 --- a/processor/src/tests/signer.rs +++ b/processor/src/tests/signer.rs @@ -17,19 +17,20 @@ use serai_client::{ use messages::sign::*; use crate::{ Payment, Plan, - networks::{Output, Transaction, Network}, + networks::{Output, Transaction, Eventuality, UtxoNetwork}, + multisigs::scheduler::Scheduler, signer::Signer, }; #[allow(clippy::type_complexity)] -pub async fn sign( +pub async fn sign( network: N, session: Session, mut keys_txs: HashMap< Participant, (ThresholdKeys, (N::SignableTransaction, N::Eventuality)), >, -) -> >::Id { +) -> ::Claim { let actual_id = SignId { session, id: [0xaa; 32], attempt: 0 }; let mut keys = HashMap::new(); @@ -65,14 +66,15 @@ pub async fn sign( let mut preprocesses = HashMap::new(); + let mut eventuality = None; for i in 1 ..= signers.len() { let i = Participant::new(u16::try_from(i).unwrap()).unwrap(); - let (tx, eventuality) = txs.remove(&i).unwrap(); + let (tx, this_eventuality) = txs.remove(&i).unwrap(); let mut txn = dbs.get_mut(&i).unwrap().txn(); match signers .get_mut(&i) .unwrap() - .sign_transaction(&mut txn, actual_id.id, tx, &eventuality) + .sign_transaction(&mut txn, actual_id.id, tx, &this_eventuality) .await { // All participants should emit a preprocess @@ -86,6 +88,11 @@ pub async fn sign( _ => panic!("didn't get preprocess back"), } txn.commit(); + + if eventuality.is_none() { + eventuality = Some(this_eventuality.clone()); + } + assert_eq!(eventuality, Some(this_eventuality)); } let mut shares = HashMap::new(); @@ -140,19 +147,25 @@ pub async fn sign( txn.commit(); } - let mut typed_tx_id = >::Id::default(); - typed_tx_id.as_mut().copy_from_slice(tx_id.unwrap().as_ref()); - typed_tx_id + let mut typed_claim = ::Claim::default(); + 
typed_claim.as_mut().copy_from_slice(tx_id.unwrap().as_ref()); + assert!(network.check_eventuality_by_claim(&eventuality.unwrap(), &typed_claim).await); + typed_claim } -pub async fn test_signer(network: N) { +pub async fn test_signer(network: N) +where + >::Addendum: From<()>, +{ let mut keys = key_gen(&mut OsRng); for keys in keys.values_mut() { N::tweak_keys(keys); } let key = keys[&Participant::new(1).unwrap()].group_key(); - let outputs = network.get_outputs(&network.test_send(N::external_address(key)).await, key).await; + let outputs = network + .get_outputs(&network.test_send(N::external_address(&network, key).await).await, key) + .await; let sync_block = network.get_latest_block_number().await.unwrap() - N::CONFIRMATIONS; let amount = 2 * N::DUST; @@ -166,7 +179,7 @@ pub async fn test_signer(network: N) { key, inputs: outputs.clone(), payments: vec![Payment { - address: N::external_address(key), + address: N::external_address(&network, key).await, data: None, balance: Balance { coin: match N::NETWORK { @@ -178,7 +191,8 @@ pub async fn test_signer(network: N) { amount: Amount(amount), }, }], - change: Some(N::change_address(key)), + change: Some(N::change_address(key).unwrap()), + scheduler_addendum: ().into(), }, 0, ) @@ -191,13 +205,12 @@ pub async fn test_signer(network: N) { keys_txs.insert(i, (keys, (signable, eventuality))); } - // The signer may not publish the TX if it has a connection error - // It doesn't fail in this case - let txid = sign(network.clone(), Session(0), keys_txs).await; - let tx = network.get_transaction(&txid).await.unwrap(); - assert_eq!(tx.id(), txid); + let claim = sign(network.clone(), Session(0), keys_txs).await; + // Mine a block, and scan it, to ensure that the TX actually made it on chain network.mine_block().await; + let block_number = network.get_latest_block_number().await.unwrap(); + let tx = network.get_transaction_by_eventuality(block_number, &eventualities[0]).await; let outputs = network .get_outputs( 
&network.get_block(network.get_latest_block_number().await.unwrap()).await.unwrap(), @@ -212,6 +225,7 @@ pub async fn test_signer(network: N) { // Check the eventualities pass for eventuality in eventualities { - assert!(network.confirm_completion(&eventuality, &tx)); + let completion = network.confirm_completion(&eventuality, &claim).await.unwrap().unwrap(); + assert_eq!(N::Eventuality::claim(&completion), claim); } } diff --git a/processor/src/tests/wallet.rs b/processor/src/tests/wallet.rs index c9cc6c66..4600fcbe 100644 --- a/processor/src/tests/wallet.rs +++ b/processor/src/tests/wallet.rs @@ -15,7 +15,7 @@ use serai_client::{ use crate::{ Payment, Plan, - networks::{Output, Transaction, Block, Network}, + networks::{Output, Transaction, Eventuality, Block, UtxoNetwork}, multisigs::{ scanner::{ScannerEvent, Scanner}, scheduler::Scheduler, @@ -24,7 +24,7 @@ use crate::{ }; // Tests the Scanner, Scheduler, and Signer together -pub async fn test_wallet(network: N) { +pub async fn test_wallet(network: N) { // Mine blocks so there's a confirmed block for _ in 0 .. 
N::CONFIRMATIONS { network.mine_block().await; @@ -47,7 +47,7 @@ pub async fn test_wallet(network: N) { network.mine_block().await; } - let block = network.test_send(N::external_address(key)).await; + let block = network.test_send(N::external_address(&network, key).await).await; let block_id = block.id(); match timeout(Duration::from_secs(30), scanner.events.recv()).await.unwrap().unwrap() { @@ -58,7 +58,7 @@ pub async fn test_wallet(network: N) { assert_eq!(outputs.len(), 1); (block_id, outputs) } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } } @@ -69,22 +69,13 @@ pub async fn test_wallet(network: N) { txn.commit(); let mut txn = db.txn(); - let mut scheduler = Scheduler::new::( - &mut txn, - key, - match N::NETWORK { - NetworkId::Serai => panic!("test_wallet called with Serai"), - NetworkId::Bitcoin => Coin::Bitcoin, - NetworkId::Ethereum => Coin::Ether, - NetworkId::Monero => Coin::Monero, - }, - ); + let mut scheduler = N::Scheduler::new::(&mut txn, key, N::NETWORK); let amount = 2 * N::DUST; let plans = scheduler.schedule::( &mut txn, outputs.clone(), vec![Payment { - address: N::external_address(key), + address: N::external_address(&network, key).await, data: None, balance: Balance { coin: match N::NETWORK { @@ -100,27 +91,26 @@ pub async fn test_wallet(network: N) { false, ); txn.commit(); + assert_eq!(plans.len(), 1); + assert_eq!(plans[0].key, key); + assert_eq!(plans[0].inputs, outputs); assert_eq!( - plans, - vec![Plan { - key, - inputs: outputs.clone(), - payments: vec![Payment { - address: N::external_address(key), - data: None, - balance: Balance { - coin: match N::NETWORK { - NetworkId::Serai => panic!("test_wallet called with Serai"), - NetworkId::Bitcoin => Coin::Bitcoin, - NetworkId::Ethereum => Coin::Ether, - NetworkId::Monero => Coin::Monero, - }, - amount: Amount(amount), - } - }], - change: Some(N::change_address(key)), + plans[0].payments, + 
vec![Payment { + address: N::external_address(&network, key).await, + data: None, + balance: Balance { + coin: match N::NETWORK { + NetworkId::Serai => panic!("test_wallet called with Serai"), + NetworkId::Bitcoin => Coin::Bitcoin, + NetworkId::Ethereum => Coin::Ether, + NetworkId::Monero => Coin::Monero, + }, + amount: Amount(amount), + } }] ); + assert_eq!(plans[0].change, Some(N::change_address(key).unwrap())); { let mut buf = vec![]; @@ -143,10 +133,10 @@ pub async fn test_wallet(network: N) { keys_txs.insert(i, (keys, (signable, eventuality))); } - let txid = sign(network.clone(), Session(0), keys_txs).await; - let tx = network.get_transaction(&txid).await.unwrap(); + let claim = sign(network.clone(), Session(0), keys_txs).await; network.mine_block().await; let block_number = network.get_latest_block_number().await.unwrap(); + let tx = network.get_transaction_by_eventuality(block_number, &eventualities[0]).await; let block = network.get_block(block_number).await.unwrap(); let outputs = network.get_outputs(&block, key).await; assert_eq!(outputs.len(), 2); @@ -154,7 +144,8 @@ pub async fn test_wallet(network: N) { assert!((outputs[0].balance().amount.0 == amount) || (outputs[1].balance().amount.0 == amount)); for eventuality in eventualities { - assert!(network.confirm_completion(&eventuality, &tx)); + let completion = network.confirm_completion(&eventuality, &claim).await.unwrap().unwrap(); + assert_eq!(N::Eventuality::claim(&completion), claim); } for _ in 1 .. N::CONFIRMATIONS { @@ -168,7 +159,7 @@ pub async fn test_wallet(network: N) { assert_eq!(block_id, block.id()); assert_eq!(these_outputs, outputs); } - ScannerEvent::Completed(_, _, _, _) => { + ScannerEvent::Completed(_, _, _, _, _) => { panic!("unexpectedly got eventuality completion"); } } diff --git a/spec/integrations/Ethereum.md b/spec/integrations/Ethereum.md index e66a1f5b..bf32f101 100644 --- a/spec/integrations/Ethereum.md +++ b/spec/integrations/Ethereum.md @@ -15,24 +15,11 @@ is the caller. 
`data` is limited to 512 bytes. -If `data` is provided, the Ethereum Router will call a contract-calling child -contract in order to sandbox it. The first byte of `data` designates which child -child contract to call. After this byte is read, `data` is solely considered as -`data`, post its first byte. The child contract is sent the funds before this -call is performed. +If `data` isn't provided or is malformed, ETH transfers will execute with 5,000 +gas and token transfers with 100,000 gas. -##### Child Contract 0 - -This contract is intended to enable connecting with other protocols, and should -be used to convert withdrawn assets to other assets on Ethereum. - - 1) Transfers the asset to `destination`. - 2) Calls `destination` with `data`. - -##### Child Contract 1 - -This contract is intended to enable authenticated calls from Serai. - - 1) Transfers the asset to `destination`. - 2) Calls `destination` with `data[.. 4], serai_address, data[4 ..]`, where -`serai_address` is the address which triggered this Out Instruction. +If `data` is provided and well-formed, `destination` is ignored and the Ethereum +Router will construct and call a new contract to proxy the contained calls. The +transfer executes to the constructed contract as above, before the constructed +contract is called with the calls inside `data`. The sandboxed execution has a +gas limit of 350,000. 
diff --git a/tests/processor/src/lib.rs b/tests/processor/src/lib.rs index 511382ab..e400057a 100644 --- a/tests/processor/src/lib.rs +++ b/tests/processor/src/lib.rs @@ -416,7 +416,11 @@ impl Coordinator { } } - pub async fn get_transaction(&self, ops: &DockerOperations, tx: &[u8]) -> Option> { + pub async fn get_published_transaction( + &self, + ops: &DockerOperations, + tx: &[u8], + ) -> Option> { let rpc_url = network_rpc(self.network, ops, &self.network_handle); match self.network { NetworkId::Bitcoin => { @@ -424,8 +428,15 @@ impl Coordinator { let rpc = Rpc::new(rpc_url).await.expect("couldn't connect to the coordinator's Bitcoin RPC"); + + // Bitcoin publishes a 0-byte TX ID to reduce variables + // Accordingly, read the mempool to find the (presumed relevant) TX + let entries: Vec = + rpc.rpc_call("getrawmempool", serde_json::json!([false])).await.unwrap(); + assert_eq!(entries.len(), 1, "more than one entry in the mempool, so unclear which to get"); + let mut hash = [0; 32]; - hash.copy_from_slice(tx); + hash.copy_from_slice(&hex::decode(&entries[0]).unwrap()); if let Ok(tx) = rpc.get_transaction(&hash).await { let mut buf = vec![]; tx.consensus_encode(&mut buf).unwrap(); diff --git a/tests/processor/src/tests/send.rs b/tests/processor/src/tests/send.rs index 8685af04..4d0d3cd6 100644 --- a/tests/processor/src/tests/send.rs +++ b/tests/processor/src/tests/send.rs @@ -261,12 +261,12 @@ fn send_test() { let participating = participating.iter().map(|p| usize::from(u16::from(*p) - 1)).collect::>(); for participant in &participating { - assert!(coordinators[*participant].get_transaction(&ops, &tx_id).await.is_some()); + assert!(coordinators[*participant].get_published_transaction(&ops, &tx_id).await.is_some()); } // Publish this transaction to the left out nodes let tx = coordinators[*participating.iter().next().unwrap()] - .get_transaction(&ops, &tx_id) + .get_published_transaction(&ops, &tx_id) .await .unwrap(); for (i, coordinator) in 
coordinators.iter_mut().enumerate() {