mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
Compare commits
545 Commits
aggressive
...
coordinato
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
01a2c98018 | ||
|
|
964284e774 | ||
|
|
8d54419ddc | ||
|
|
0ad8ac9520 | ||
|
|
c6c74684c9 | ||
|
|
de14687a0d | ||
|
|
d60e007126 | ||
|
|
b296be8515 | ||
|
|
6b2876351e | ||
|
|
0ea90d054d | ||
|
|
eb1d00aa55 | ||
|
|
372149c2cc | ||
|
|
c6cf33e370 | ||
|
|
f58478ad87 | ||
|
|
88a1726399 | ||
|
|
08e6669403 | ||
|
|
fcfdadc791 | ||
|
|
07c657306b | ||
|
|
9df8c9476e | ||
|
|
9ab2a2cfe0 | ||
|
|
a315040aee | ||
|
|
1822e31142 | ||
|
|
6efc313d76 | ||
|
|
4b85d4b03b | ||
|
|
05d8c32be8 | ||
|
|
8634d90b6b | ||
|
|
aa9daea6b0 | ||
|
|
942873f99d | ||
|
|
7c9b581723 | ||
|
|
b37a0db538 | ||
|
|
797604ad73 | ||
|
|
05b975dff9 | ||
|
|
74a8df4c7b | ||
|
|
bd14db9f76 | ||
|
|
25c02c1311 | ||
|
|
b018fc432c | ||
|
|
6e4ecbc90c | ||
|
|
be48dcc4a4 | ||
|
|
25066437da | ||
|
|
db49a63c2b | ||
|
|
ee50f584aa | ||
|
|
14f3f330db | ||
|
|
30a77d863f | ||
|
|
a03a1edbff | ||
|
|
c03afbe03e | ||
|
|
af611a39bf | ||
|
|
0d080e6d34 | ||
|
|
369af0fab5 | ||
|
|
d25e3d86a2 | ||
|
|
96f1d26f7a | ||
|
|
79e4cce2f6 | ||
|
|
0c341e3546 | ||
|
|
9f0790fb83 | ||
|
|
bb8e034e68 | ||
|
|
3f7bdaa64b | ||
|
|
351436a258 | ||
|
|
c328e5ea68 | ||
|
|
995734c960 | ||
|
|
54f1929078 | ||
|
|
d015ee96a3 | ||
|
|
a43815f101 | ||
|
|
7f1732c8c0 | ||
|
|
ed2445390f | ||
|
|
52a0c56016 | ||
|
|
42e8f2c8d8 | ||
|
|
b51204a4eb | ||
|
|
ec51fa233a | ||
|
|
ce4091695f | ||
|
|
24919cfc54 | ||
|
|
bf41009c5a | ||
|
|
e8e9e212df | ||
|
|
19187d2c30 | ||
|
|
43ae6794db | ||
|
|
978134a9d1 | ||
|
|
2eb155753a | ||
|
|
7d72e224f0 | ||
|
|
ffedba7a05 | ||
|
|
06e627a562 | ||
|
|
11f66c741d | ||
|
|
a0a2ef22e4 | ||
|
|
5e290a29d9 | ||
|
|
a688350f44 | ||
|
|
bc07e14b1e | ||
|
|
e1c07d89e0 | ||
|
|
56fd11ab8d | ||
|
|
c03fb6c71b | ||
|
|
96f94966b7 | ||
|
|
b65ba17007 | ||
|
|
c9003874ad | ||
|
|
205bec36e5 | ||
|
|
df8b455d54 | ||
|
|
84a0bcad51 | ||
|
|
b680bb532b | ||
|
|
b9983bf133 | ||
|
|
cddb44ae3f | ||
|
|
bd3272a9f2 | ||
|
|
de41be6e26 | ||
|
|
b8ac8e697b | ||
|
|
899a9604e1 | ||
|
|
facb5817c4 | ||
|
|
97fedf65d0 | ||
|
|
257323c1e5 | ||
|
|
deecd77aec | ||
|
|
360b264a0f | ||
|
|
e05b77d830 | ||
|
|
5970a455d0 | ||
|
|
4c9e3b085b | ||
|
|
a2089c61fb | ||
|
|
ae449535ff | ||
|
|
05dc474cb3 | ||
|
|
34bcb9eb01 | ||
|
|
2958f196fc | ||
|
|
92cebcd911 | ||
|
|
a3278dfb31 | ||
|
|
f02db19670 | ||
|
|
652c878f54 | ||
|
|
da01de08c9 | ||
|
|
052ef39a25 | ||
|
|
87fdc8ce35 | ||
|
|
86ff0ae71b | ||
|
|
3069138475 | ||
|
|
f22aedc007 | ||
|
|
7be20c451d | ||
|
|
0198d4cc46 | ||
|
|
7c10873cd5 | ||
|
|
08180cc563 | ||
|
|
c4bdbdde11 | ||
|
|
0d23964762 | ||
|
|
fbf51e53ec | ||
|
|
fd1826cca9 | ||
|
|
f561fa9ba1 | ||
|
|
f4fc539e14 | ||
|
|
0fff5391a8 | ||
|
|
a71a789912 | ||
|
|
83c41eccd4 | ||
|
|
e5113c333e | ||
|
|
6068978676 | ||
|
|
e7e30150f0 | ||
|
|
55fe27f41a | ||
|
|
d66a7ee43e | ||
|
|
a702d65c3d | ||
|
|
52eb68677a | ||
|
|
d29d19bdfe | ||
|
|
c3fdb9d9df | ||
|
|
46e1f85085 | ||
|
|
1bff2a0447 | ||
|
|
b66203ae3f | ||
|
|
43a182fc4c | ||
|
|
8ead7a2581 | ||
|
|
3797679755 | ||
|
|
c056b751fe | ||
|
|
5977121c48 | ||
|
|
7b6181ecdb | ||
|
|
f976bc86ac | ||
|
|
441bf62e11 | ||
|
|
4852dcaab7 | ||
|
|
d6bc1c1ea3 | ||
|
|
7b2dec63ce | ||
|
|
d833254b84 | ||
|
|
a1b2bdf0a2 | ||
|
|
43841f95fc | ||
|
|
fdfce9e207 | ||
|
|
3255c0ace5 | ||
|
|
057c3b7cf1 | ||
|
|
a29c410caf | ||
|
|
a6f40978cb | ||
|
|
bdf6afc144 | ||
|
|
a362b0a451 | ||
|
|
041ed46171 | ||
|
|
9accddb2d7 | ||
|
|
e06e4edc8e | ||
|
|
e4d59eeeca | ||
|
|
1f5b1bb514 | ||
|
|
1a3b6005c6 | ||
|
|
7dc1a24bce | ||
|
|
3483f7fa73 | ||
|
|
a300a1029a | ||
|
|
7409d0b3cf | ||
|
|
19e90b28b0 | ||
|
|
584943d1e9 | ||
|
|
62e1d63f47 | ||
|
|
e4adaa8947 | ||
|
|
3b3fdd104b | ||
|
|
5897efd7c7 | ||
|
|
863a7842ca | ||
|
|
f414735be5 | ||
|
|
5c5c097da9 | ||
|
|
7d4e8b59db | ||
|
|
e3e9939eaf | ||
|
|
530fba51dd | ||
|
|
cb61c9052a | ||
|
|
96cc5d0157 | ||
|
|
7275a95907 | ||
|
|
80e5ca9328 | ||
|
|
67951c4971 | ||
|
|
4143fe9f47 | ||
|
|
a73b19e2b8 | ||
|
|
97c328e5fb | ||
|
|
96c397caa0 | ||
|
|
d5c6ed1a03 | ||
|
|
f6e8bc3352 | ||
|
|
7d0d1dc382 | ||
|
|
d50fe87801 | ||
|
|
e6aa9df428 | ||
|
|
02edfd2935 | ||
|
|
9aeece5bf6 | ||
|
|
bb84f7cf1d | ||
|
|
bb25baf3bc | ||
|
|
013a0cddfc | ||
|
|
ed7300b406 | ||
|
|
88b5efda99 | ||
|
|
0712e6f107 | ||
|
|
6a4c57e86f | ||
|
|
b7746aa71d | ||
|
|
8dd41ee798 | ||
|
|
9a1d10f4ea | ||
|
|
6587590986 | ||
|
|
b0fcdd3367 | ||
|
|
15edea1389 | ||
|
|
1d9e2efc33 | ||
|
|
f25f5cd368 | ||
|
|
f847ac7077 | ||
|
|
29fcf6be4d | ||
|
|
108e2b57d9 | ||
|
|
3da5577950 | ||
|
|
f692047b8b | ||
|
|
2401266374 | ||
|
|
ed90d1752a | ||
|
|
3261fde853 | ||
|
|
7492adc473 | ||
|
|
04f9a1fa31 | ||
|
|
13cbc99149 | ||
|
|
1a0b4198ba | ||
|
|
22371a6585 | ||
|
|
b2d6a85ac0 | ||
|
|
44ca5e6520 | ||
|
|
83b7146e1a | ||
|
|
f193b896c1 | ||
|
|
985795e99d | ||
|
|
9bf8c92325 | ||
|
|
ab5af57dae | ||
|
|
98190b7b83 | ||
|
|
2f45bba2d4 | ||
|
|
30d0bad175 | ||
|
|
b8abc1e3cc | ||
|
|
b2ed2e961c | ||
|
|
9cdca1d3d6 | ||
|
|
4ee65ed243 | ||
|
|
aa59f53ead | ||
|
|
bd5491dfd5 | ||
|
|
0eff3d9453 | ||
|
|
0be567ff69 | ||
|
|
83b3a5c31c | ||
|
|
4d1212ec65 | ||
|
|
7e27315207 | ||
|
|
aa1faefe33 | ||
|
|
7d738a3677 | ||
|
|
4a32f22418 | ||
|
|
01a4b9e694 | ||
|
|
3b01d3039b | ||
|
|
40b7bc59d0 | ||
|
|
269db1c4be | ||
|
|
90318d7214 | ||
|
|
64d370ac11 | ||
|
|
db8dc1e864 | ||
|
|
086458d041 | ||
|
|
2e0f8138e2 | ||
|
|
32a9a33226 | ||
|
|
2508633de9 | ||
|
|
7312428a44 | ||
|
|
e1801b57c9 | ||
|
|
60491a091f | ||
|
|
9f3840d1cf | ||
|
|
7120bddc6f | ||
|
|
77f7794452 | ||
|
|
62a1a45135 | ||
|
|
0440e60645 | ||
|
|
4babf898d7 | ||
|
|
ca69f97fef | ||
|
|
fe19e8246e | ||
|
|
c62d9b448f | ||
|
|
98ab6acbd5 | ||
|
|
092f17932a | ||
|
|
e455332e01 | ||
|
|
a9468bf355 | ||
|
|
3d464c4736 | ||
|
|
142552f024 | ||
|
|
e3a7ee4927 | ||
|
|
9eaaa7d2e8 | ||
|
|
8adef705c3 | ||
|
|
3fd6d45b3e | ||
|
|
d263413e36 | ||
|
|
6f8a5d0ede | ||
|
|
24bdd7ed9b | ||
|
|
aa724c06bc | ||
|
|
1e6655408e | ||
|
|
9ab43407e1 | ||
|
|
7ac0de3a8d | ||
|
|
06a6cd29b0 | ||
|
|
2472ec7ba8 | ||
|
|
69c3fad7ce | ||
|
|
bd9a05feef | ||
|
|
7d8e08d5b4 | ||
|
|
f7e49e1f90 | ||
|
|
cd4c3a6c88 | ||
|
|
fddc605c65 | ||
|
|
2ad6b38be9 | ||
|
|
fda90e23c9 | ||
|
|
3f3f6b2d0c | ||
|
|
fa8ff62b09 | ||
|
|
5113ab9ec4 | ||
|
|
9b7cb688ed | ||
|
|
9a5f8fc5dd | ||
|
|
2dc35193c9 | ||
|
|
9bf24480f4 | ||
|
|
3af9dc5d6f | ||
|
|
148bc380fe | ||
|
|
8dad62f300 | ||
|
|
1e79de87e8 | ||
|
|
2f57a69cb6 | ||
|
|
493a222421 | ||
|
|
d5a19eca8c | ||
|
|
e9fca37181 | ||
|
|
83c25eff03 | ||
|
|
285422f71a | ||
|
|
1838c37ecf | ||
|
|
a3649b2062 | ||
|
|
1bd14163a0 | ||
|
|
89a6ee9290 | ||
|
|
a66994aade | ||
|
|
34ffd2fa76 | ||
|
|
775353f8cd | ||
|
|
c9b2490ab9 | ||
|
|
2db53d5434 | ||
|
|
6268bbd7c8 | ||
|
|
bc2f23f72b | ||
|
|
72337b17f5 | ||
|
|
36b193992f | ||
|
|
37b6de9c0c | ||
|
|
22f3c9e58f | ||
|
|
9adefa4c2c | ||
|
|
c245bcdc9b | ||
|
|
f249e20028 | ||
|
|
3745f8b6af | ||
|
|
ba46aa76f0 | ||
|
|
8c1d8a2658 | ||
|
|
2702384c70 | ||
|
|
d3093c92dc | ||
|
|
32df302cc4 | ||
|
|
ea8e26eca3 | ||
|
|
bccdabb53d | ||
|
|
b91bd44476 | ||
|
|
dc2656a538 | ||
|
|
67109c648c | ||
|
|
61418b4e9f | ||
|
|
506ded205a | ||
|
|
9801f9f753 | ||
|
|
8a4ef3ddae | ||
|
|
bfb5401336 | ||
|
|
f2872a2e07 | ||
|
|
c739f00d0b | ||
|
|
310a09b5a4 | ||
|
|
c65bb70741 | ||
|
|
718c44d50e | ||
|
|
1f45c2c6b5 | ||
|
|
76a30fd572 | ||
|
|
a52c86ad81 | ||
|
|
108504d6e2 | ||
|
|
27cd2ee2bb | ||
|
|
dc88b29b92 | ||
|
|
45ea805620 | ||
|
|
1e68cff6dc | ||
|
|
906d3b9a7c | ||
|
|
e319762c69 | ||
|
|
f161135119 | ||
|
|
d2a0ff13f2 | ||
|
|
498aa45619 | ||
|
|
39ce819876 | ||
|
|
8973eb8ac4 | ||
|
|
34397b31b1 | ||
|
|
96583da3b9 | ||
|
|
34c6974311 | ||
|
|
5a4db3efad | ||
|
|
28399b0310 | ||
|
|
426e89d6fb | ||
|
|
4850376664 | ||
|
|
f366d65d4b | ||
|
|
e680eabb62 | ||
|
|
a3441a6871 | ||
|
|
9895ec0f41 | ||
|
|
666bb3e96b | ||
|
|
acc19e2817 | ||
|
|
5e02f936e4 | ||
|
|
6b41c91dc2 | ||
|
|
8992504e69 | ||
|
|
e2901cab06 | ||
|
|
13a8b0afc1 | ||
|
|
7e71450dc4 | ||
|
|
049fefb5fd | ||
|
|
6a89cfcd08 | ||
|
|
4571a8ad85 | ||
|
|
96db784b10 | ||
|
|
dd523b22c2 | ||
|
|
fa406c507f | ||
|
|
ad51c123e3 | ||
|
|
f6f945e747 | ||
|
|
0dd8aed134 | ||
|
|
cee788eac3 | ||
|
|
bebe2fae0e | ||
|
|
adb48cd737 | ||
|
|
363a88c4ec | ||
|
|
5ba1dd2524 | ||
|
|
784c7b47a4 | ||
|
|
376b36974f | ||
|
|
38ad1d4bc4 | ||
|
|
aab8a417db | ||
|
|
d5c787fea2 | ||
|
|
e3a70ef0dc | ||
|
|
044b299cda | ||
|
|
53d86e2a29 | ||
|
|
c338b92067 | ||
|
|
3c38a0ec11 | ||
|
|
88f88b574c | ||
|
|
9f143a9742 | ||
|
|
2815046b21 | ||
|
|
d8033504c8 | ||
|
|
48ad5fe20b | ||
|
|
4c801df4f2 | ||
|
|
9b79c4dc0c | ||
|
|
e010d66c5d | ||
|
|
3d91fd88a3 | ||
|
|
f988c43f8d | ||
|
|
857e3ea72b | ||
|
|
8b14bb54bb | ||
|
|
f78332453b | ||
|
|
22da7aedde | ||
|
|
2641b83b3e | ||
|
|
4980e6b704 | ||
|
|
c091b86919 | ||
|
|
a8c7bb96c8 | ||
|
|
09a95c9bd2 | ||
|
|
101da0a641 | ||
|
|
6d5851a9ee | ||
|
|
64c309f8db | ||
|
|
e00aa3031c | ||
|
|
f8afb040dc | ||
|
|
7823ece4fe | ||
|
|
b205391b28 | ||
|
|
32a937ddb9 | ||
|
|
bdbeedc723 | ||
|
|
f306618e84 | ||
|
|
89865b549c | ||
|
|
39eae2795f | ||
|
|
0eb56406a4 | ||
|
|
afb385fba4 | ||
|
|
821f5d8de4 | ||
|
|
3862731a12 | ||
|
|
42eb674d1a | ||
|
|
1af5f1bcdc | ||
|
|
32435d8a4c | ||
|
|
49ce792b91 | ||
|
|
4949793c3f | ||
|
|
61d46dccd4 | ||
|
|
88a1fce15c | ||
|
|
a2493cfafc | ||
|
|
e3de64d5ff | ||
|
|
ecd0457d5b | ||
|
|
7990ee689a | ||
|
|
f05e909d0e | ||
|
|
5e565fa3ef | ||
|
|
6df1b46313 | ||
|
|
24dba66bad | ||
|
|
fd585d496c | ||
|
|
5703591eb2 | ||
|
|
9ac3b203c8 | ||
|
|
23e1c9769c | ||
|
|
8e6e05ae2d | ||
|
|
713660c79c | ||
|
|
cb8c8031b0 | ||
|
|
d07447fe97 | ||
|
|
c26beae0f9 | ||
|
|
818215b570 | ||
|
|
79943c3a6c | ||
|
|
076a8e4d62 | ||
|
|
ffd1457927 | ||
|
|
523a055b74 | ||
|
|
624fb2781d | ||
|
|
641077a089 | ||
|
|
298d1fd3ba | ||
|
|
92c3403698 | ||
|
|
37af8b51b3 | ||
|
|
900298b94b | ||
|
|
9effd5ccdc | ||
|
|
ceeb57470f | ||
|
|
69454fa9bb | ||
|
|
5121ca7519 | ||
|
|
1eb3b364f4 | ||
|
|
f66fe3c1cb | ||
|
|
6f9d02fdf8 | ||
|
|
28613400b8 | ||
|
|
b6579d5a2a | ||
|
|
c2f32e7882 | ||
|
|
228e36a12d | ||
|
|
38bcedbf11 | ||
|
|
98f9fc2c2f | ||
|
|
d5cfb3fb25 | ||
|
|
b1dbe1f50d | ||
|
|
1b57d655ed | ||
|
|
64402914ba | ||
|
|
a7c9c1ef55 | ||
|
|
a05961974a | ||
|
|
acc9495429 | ||
|
|
344ac9cbfc | ||
|
|
6ccac2d0ab | ||
|
|
56f7037084 | ||
|
|
a0f8214d48 | ||
|
|
5f93140ba5 | ||
|
|
712f11d879 | ||
|
|
808a633e4d | ||
|
|
0a367bfbda | ||
|
|
845c2842b5 | ||
|
|
8543487db2 | ||
|
|
a9072e6b1b | ||
|
|
b0c28a1cf0 | ||
|
|
23b9d57305 | ||
|
|
807ec30762 | ||
|
|
5424886d63 | ||
|
|
2bebe0755d | ||
|
|
f0ce6e6388 | ||
|
|
62504b2622 | ||
|
|
c9bb284570 | ||
|
|
6a4bccba74 | ||
|
|
df67b7d94c | ||
|
|
677b9b681f | ||
|
|
fa1b569b78 | ||
|
|
d75115ce13 | ||
|
|
3d00d405a3 | ||
|
|
3480fc5e16 | ||
|
|
7fa5d291b8 | ||
|
|
b54548b13a | ||
|
|
c878d38c60 | ||
|
|
5d9067b84d | ||
|
|
13f48a406e | ||
|
|
35fcd11096 | ||
|
|
93b1656f86 |
2
.github/actions/bitcoin/action.yml
vendored
2
.github/actions/bitcoin/action.yml
vendored
@@ -12,7 +12,7 @@ runs:
|
||||
steps:
|
||||
- name: Bitcoin Daemon Cache
|
||||
id: cache-bitcoind
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
|
||||
with:
|
||||
path: bitcoin.tar.gz
|
||||
key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||
|
||||
42
.github/actions/build-dependencies/action.yml
vendored
42
.github/actions/build-dependencies/action.yml
vendored
@@ -7,37 +7,35 @@ inputs:
|
||||
require: true
|
||||
default:
|
||||
|
||||
rust-toolchain:
|
||||
description: "Rust toolchain to install"
|
||||
required: false
|
||||
default: stable
|
||||
|
||||
rust-components:
|
||||
description: "Rust components to install"
|
||||
required: false
|
||||
default:
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Remove unused packages
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
|
||||
sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
|
||||
sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
|
||||
sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
|
||||
sudo apt autoremove -y
|
||||
sudo apt clean
|
||||
docker system prune -a --volumes
|
||||
|
||||
- name: Install apt dependencies
|
||||
shell: bash
|
||||
run: sudo apt install -y ca-certificates
|
||||
|
||||
- name: Install Protobuf
|
||||
uses: arduino/setup-protoc@v2.0.0
|
||||
uses: arduino/setup-protoc@a8b67ba40b37d35169e222f3bb352603327985b6
|
||||
with:
|
||||
repo-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Install solc
|
||||
shell: bash
|
||||
run: |
|
||||
pip3 install solc-select==0.2.1
|
||||
solc-select install 0.8.16
|
||||
solc-select use 0.8.16
|
||||
|
||||
- name: Install Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ inputs.rust-toolchain }}
|
||||
components: ${{ inputs.rust-components }}
|
||||
targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
|
||||
cargo install svm-rs
|
||||
svm install 0.8.16
|
||||
svm use 0.8.16
|
||||
|
||||
# - name: Cache Rust
|
||||
# uses: Swatinem/rust-cache@v2
|
||||
# uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43
|
||||
|
||||
2
.github/actions/monero-wallet-rpc/action.yml
vendored
2
.github/actions/monero-wallet-rpc/action.yml
vendored
@@ -12,7 +12,7 @@ runs:
|
||||
steps:
|
||||
- name: Monero Wallet RPC Cache
|
||||
id: cache-monero-wallet-rpc
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
|
||||
with:
|
||||
path: monero-wallet-rpc
|
||||
key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||
|
||||
2
.github/actions/monero/action.yml
vendored
2
.github/actions/monero/action.yml
vendored
@@ -12,7 +12,7 @@ runs:
|
||||
steps:
|
||||
- name: Monero Daemon Cache
|
||||
id: cache-monerod
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
|
||||
with:
|
||||
path: monerod
|
||||
key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
|
||||
|
||||
5
.github/actions/test-dependencies/action.yml
vendored
5
.github/actions/test-dependencies/action.yml
vendored
@@ -26,9 +26,10 @@ runs:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Install Foundry
|
||||
uses: foundry-rs/foundry-toolchain@v1
|
||||
uses: foundry-rs/foundry-toolchain@cb603ca0abb544f301eaed59ac0baf579aa6aecf
|
||||
with:
|
||||
version: nightly
|
||||
version: nightly-09fe3e041369a816365a020f715ad6f94dbce9f2
|
||||
cache: false
|
||||
|
||||
- name: Run a Monero Regtest Node
|
||||
uses: ./.github/actions/monero
|
||||
|
||||
2
.github/nightly-version
vendored
2
.github/nightly-version
vendored
@@ -1 +1 @@
|
||||
nightly-2023-07-01
|
||||
nightly-2023-11-01
|
||||
|
||||
37
.github/workflows/coins-tests.yml
vendored
Normal file
37
.github/workflows/coins-tests.yml
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
name: coins/ Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test-coins:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Test Dependencies
|
||||
uses: ./.github/actions/test-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Tests
|
||||
run: |
|
||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||
-p bitcoin-serai \
|
||||
-p ethereum-serai \
|
||||
-p monero-generators \
|
||||
-p monero-serai
|
||||
33
.github/workflows/common-tests.yml
vendored
Normal file
33
.github/workflows/common-tests.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
name: common/ Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test-common:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Tests
|
||||
run: |
|
||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||
-p std-shims \
|
||||
-p zalloc \
|
||||
-p serai-db \
|
||||
-p serai-env
|
||||
44
.github/workflows/coordinator-tests.yml
vendored
Normal file
44
.github/workflows/coordinator-tests.yml
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
name: Coordinator Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "coordinator/**"
|
||||
- "orchestration/coordinator/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/coordinator/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "coordinator/**"
|
||||
- "orchestration/coordinator/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/coordinator/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Run coordinator Docker tests
|
||||
run: cd tests/coordinator && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||
42
.github/workflows/crypto-tests.yml
vendored
Normal file
42
.github/workflows/crypto-tests.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
name: crypto/ Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test-crypto:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Tests
|
||||
run: |
|
||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||
-p flexible-transcript \
|
||||
-p ff-group-tests \
|
||||
-p dalek-ff-group \
|
||||
-p minimal-ed448 \
|
||||
-p ciphersuite \
|
||||
-p multiexp \
|
||||
-p schnorr-signatures \
|
||||
-p dleq \
|
||||
-p dkg \
|
||||
-p modular-frost \
|
||||
-p frost-schnorrkel
|
||||
7
.github/workflows/daily-deny.yml
vendored
7
.github/workflows/daily-deny.yml
vendored
@@ -9,17 +9,14 @@ jobs:
|
||||
name: Run cargo deny
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Advisory Cache
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
|
||||
with:
|
||||
path: ~/.cargo/advisory-db
|
||||
key: rust-advisory-db
|
||||
|
||||
- name: Install cargo
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Install cargo deny
|
||||
run: cargo install --locked cargo-deny
|
||||
|
||||
|
||||
24
.github/workflows/full-stack-tests.yml
vendored
Normal file
24
.github/workflows/full-stack-tests.yml
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
name: Full Stack Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
|
||||
pull_request:
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Run Full Stack Docker tests
|
||||
run: cd tests/full-stack && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||
69
.github/workflows/lint.yml
vendored
Normal file
69
.github/workflows/lint.yml
vendored
Normal file
@@ -0,0 +1,69 @@
|
||||
name: Lint
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
clippy:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Get nightly version to use
|
||||
id: nightly
|
||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Install nightly rust
|
||||
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy
|
||||
|
||||
- name: Run Clippy
|
||||
run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
|
||||
|
||||
deny:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Advisory Cache
|
||||
uses: actions/cache@704facf57e6136b1bc63b828d79edcd491f0ee84
|
||||
with:
|
||||
path: ~/.cargo/advisory-db
|
||||
key: rust-advisory-db
|
||||
|
||||
- name: Install cargo deny
|
||||
run: cargo install --locked cargo-deny
|
||||
|
||||
- name: Run cargo deny
|
||||
run: cargo deny -L error --all-features check
|
||||
|
||||
fmt:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Get nightly version to use
|
||||
id: nightly
|
||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Install nightly rust
|
||||
run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt
|
||||
|
||||
- name: Run rustfmt
|
||||
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
||||
|
||||
dockerfiles:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
- name: Verify Dockerfiles are up to date
|
||||
# Runs the file which generates them and checks the diff has no lines
|
||||
run: cd orchestration && ./dockerfiles.sh && git diff | wc -l | grep -x "0"
|
||||
38
.github/workflows/message-queue-tests.yml
vendored
Normal file
38
.github/workflows/message-queue-tests.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
name: Message Queue Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/message-queue/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/message-queue/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Run message-queue Docker tests
|
||||
run: cd tests/message-queue && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||
28
.github/workflows/mini-tests.yml
vendored
Normal file
28
.github/workflows/mini-tests.yml
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
name: mini/ Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "mini/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "mini/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
test-common:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Tests
|
||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai
|
||||
14
.github/workflows/monero-tests.yaml
vendored
14
.github/workflows/monero-tests.yaml
vendored
@@ -13,12 +13,14 @@ on:
|
||||
- "coins/monero/**"
|
||||
- "processor/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
# Only run these once since they will be consistent regardless of any node
|
||||
unit-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Test Dependencies
|
||||
uses: ./.github/actions/test-dependencies
|
||||
@@ -26,7 +28,7 @@ jobs:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run Unit Tests Without Features
|
||||
run: cargo test --package monero-serai --lib
|
||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
|
||||
|
||||
# Doesn't run unit tests with features as the tests workflow will
|
||||
|
||||
@@ -38,7 +40,7 @@ jobs:
|
||||
version: [v0.17.3.2, v0.18.2.0]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Test Dependencies
|
||||
uses: ./.github/actions/test-dependencies
|
||||
@@ -49,11 +51,9 @@ jobs:
|
||||
- name: Run Integration Tests Without Features
|
||||
# Runs with the binaries feature so the binaries build
|
||||
# https://github.com/rust-lang/cargo/issues/8396
|
||||
run: cargo test --package monero-serai --features binaries --test '*'
|
||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --features binaries --test '*'
|
||||
|
||||
- name: Run Integration Tests
|
||||
# Don't run if the the tests workflow also will
|
||||
if: ${{ matrix.version != 'v0.18.2.0' }}
|
||||
run: |
|
||||
cargo test --package monero-serai --all-features --test '*'
|
||||
cargo test --package serai-processor --all-features monero
|
||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
|
||||
|
||||
4
.github/workflows/monthly-nightly-update.yml
vendored
4
.github/workflows/monthly-nightly-update.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
name: Update nightly
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
with:
|
||||
submodules: "recursive"
|
||||
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
git push -u origin $(date +"nightly-%Y-%m")
|
||||
|
||||
- name: Pull Request
|
||||
uses: actions/github-script@v6
|
||||
uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410
|
||||
with:
|
||||
script: |
|
||||
const { repo, owner } = context.repo;
|
||||
|
||||
20
.github/workflows/no-std.yml
vendored
20
.github/workflows/no-std.yml
vendored
@@ -4,18 +4,34 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "tests/no-std/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "tests/no-std/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Install RISC-V Toolchain
|
||||
run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf
|
||||
|
||||
- name: Verify no-std builds
|
||||
run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf
|
||||
run: cd tests/no-std && CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf
|
||||
|
||||
44
.github/workflows/processor-tests.yml
vendored
Normal file
44
.github/workflows/processor-tests.yml
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
name: Processor Tests
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "processor/**"
|
||||
- "orchestration/processor/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/processor/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "orchestration/message-queue/**"
|
||||
- "processor/**"
|
||||
- "orchestration/processor/**"
|
||||
- "tests/docker/**"
|
||||
- "tests/processor/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Run processor Docker tests
|
||||
run: cd tests/processor && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||
38
.github/workflows/reproducible-runtime.yml
vendored
Normal file
38
.github/workflows/reproducible-runtime.yml
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
name: Reproducible Runtime
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "Cargo.lock"
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "substrate/**"
|
||||
- "orchestration/runtime/**"
|
||||
- "tests/reproducible-runtime/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "Cargo.lock"
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "substrate/**"
|
||||
- "orchestration/runtime/**"
|
||||
- "tests/reproducible-runtime/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Install Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ inputs.github-token }}
|
||||
|
||||
- name: Run Reproducible Runtime tests
|
||||
run: cd tests/reproducible-runtime && GITHUB_CI=true RUST_BACKTRACE=1 cargo test
|
||||
106
.github/workflows/tests.yml
vendored
106
.github/workflows/tests.yml
vendored
@@ -4,81 +4,83 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "processor/**"
|
||||
- "coordinator/**"
|
||||
- "substrate/**"
|
||||
|
||||
pull_request:
|
||||
paths:
|
||||
- "common/**"
|
||||
- "crypto/**"
|
||||
- "coins/**"
|
||||
- "message-queue/**"
|
||||
- "processor/**"
|
||||
- "coordinator/**"
|
||||
- "substrate/**"
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
clippy:
|
||||
test-infra:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Get nightly version to use
|
||||
id: nightly
|
||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
rust-toolchain: ${{ steps.nightly.outputs.version }}
|
||||
rust-components: clippy
|
||||
|
||||
- name: Run Clippy
|
||||
run: cargo clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
|
||||
- name: Run Tests
|
||||
run: |
|
||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||
-p serai-message-queue \
|
||||
-p serai-processor-messages \
|
||||
-p serai-processor \
|
||||
-p tendermint-machine \
|
||||
-p tributary-chain \
|
||||
-p serai-coordinator \
|
||||
-p serai-docker-tests
|
||||
|
||||
deny:
|
||||
test-substrate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Advisory Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cargo/advisory-db
|
||||
key: rust-advisory-db
|
||||
|
||||
- name: Install cargo
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Install cargo deny
|
||||
run: cargo install --locked cargo-deny
|
||||
|
||||
- name: Run cargo deny
|
||||
run: cargo deny -L error --all-features check
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Test Dependencies
|
||||
uses: ./.github/actions/test-dependencies
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build node
|
||||
run: |
|
||||
cd substrate/node
|
||||
cargo build
|
||||
|
||||
- name: Run Tests
|
||||
run: GITHUB_CI=true cargo test --all-features
|
||||
run: |
|
||||
GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
|
||||
-p serai-primitives \
|
||||
-p serai-coins-primitives \
|
||||
-p serai-coins-pallet \
|
||||
-p serai-dex-pallet \
|
||||
-p serai-validator-sets-primitives \
|
||||
-p serai-validator-sets-pallet \
|
||||
-p serai-in-instructions-primitives \
|
||||
-p serai-in-instructions-pallet \
|
||||
-p serai-signals-pallet \
|
||||
-p serai-runtime \
|
||||
-p serai-node
|
||||
|
||||
fmt:
|
||||
test-serai-client:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||
|
||||
- name: Get nightly version to use
|
||||
id: nightly
|
||||
run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Install rustfmt
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
- name: Build Dependencies
|
||||
uses: ./.github/actions/build-dependencies
|
||||
with:
|
||||
toolchain: ${{ steps.nightly.outputs.version }}
|
||||
components: rustfmt
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Run rustfmt
|
||||
run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
|
||||
- name: Run Tests
|
||||
run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,2 +1,3 @@
|
||||
target
|
||||
.vscode
|
||||
.test-logs
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
edition = "2021"
|
||||
tab_spaces = 2
|
||||
|
||||
max_width = 100
|
||||
|
||||
6639
Cargo.lock
generated
6639
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
27
Cargo.toml
27
Cargo.toml
@@ -1,8 +1,11 @@
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = [
|
||||
"common/std-shims",
|
||||
"common/zalloc",
|
||||
"common/db",
|
||||
"common/env",
|
||||
"common/request",
|
||||
|
||||
"crypto/transcript",
|
||||
|
||||
@@ -19,6 +22,7 @@ members = [
|
||||
"crypto/frost",
|
||||
"crypto/schnorrkel",
|
||||
|
||||
"coins/bitcoin",
|
||||
"coins/ethereum",
|
||||
"coins/monero/generators",
|
||||
"coins/monero",
|
||||
@@ -34,8 +38,8 @@ members = [
|
||||
|
||||
"substrate/primitives",
|
||||
|
||||
"substrate/tokens/primitives",
|
||||
"substrate/tokens/pallet",
|
||||
"substrate/coins/primitives",
|
||||
"substrate/coins/pallet",
|
||||
|
||||
"substrate/in-instructions/primitives",
|
||||
"substrate/in-instructions/pallet",
|
||||
@@ -43,12 +47,23 @@ members = [
|
||||
"substrate/validator-sets/primitives",
|
||||
"substrate/validator-sets/pallet",
|
||||
|
||||
"substrate/signals/pallet",
|
||||
|
||||
"substrate/runtime",
|
||||
"substrate/node",
|
||||
|
||||
"substrate/client",
|
||||
|
||||
"mini",
|
||||
|
||||
"tests/no-std",
|
||||
|
||||
"tests/docker",
|
||||
"tests/message-queue",
|
||||
"tests/processor",
|
||||
"tests/coordinator",
|
||||
"tests/full-stack",
|
||||
"tests/reproducible-runtime",
|
||||
]
|
||||
|
||||
# Always compile Monero (and a variety of dependencies) with optimizations due
|
||||
@@ -70,3 +85,11 @@ monero-serai = { opt-level = 3 }
|
||||
|
||||
[profile.release]
|
||||
panic = "unwind"
|
||||
|
||||
[patch.crates-io]
|
||||
# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
|
||||
lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
|
||||
|
||||
# subxt *can* pull these off crates.io yet there's no benefit to this
|
||||
sp-core-hashing = { git = "https://github.com/serai-dex/substrate" }
|
||||
sp-std = { git = "https://github.com/serai-dex/substrate" }
|
||||
|
||||
33
README.md
33
README.md
@@ -17,7 +17,7 @@ wallet.
|
||||
Serai, none neatly fitting under another category.
|
||||
|
||||
- `crypto`: A series of composable cryptographic libraries built around the
|
||||
`ff`/`group` APIs achieving a variety of tasks. These range from generic
|
||||
`ff`/`group` APIs, achieving a variety of tasks. These range from generic
|
||||
infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
|
||||
needed for Bitcoin-Monero atomic swaps.
|
||||
|
||||
@@ -25,6 +25,9 @@ wallet.
|
||||
wider community. This means they will always support the functionality Serai
|
||||
needs, yet won't disadvantage other use cases when possible.
|
||||
|
||||
- `message-queue`: An ordered message server so services can talk to each other,
|
||||
even when the other is offline.
|
||||
|
||||
- `processor`: A generic chain processor to process data for Serai and process
|
||||
events from Serai, executing transactions as expected and needed.
|
||||
|
||||
@@ -33,12 +36,28 @@ wallet.
|
||||
|
||||
- `substrate`: Substrate crates used to instantiate the Serai network.
|
||||
|
||||
- `deploy`: Scripts to deploy a Serai node/test environment.
|
||||
- `orchestration`: Dockerfiles and scripts to deploy a Serai node/test
|
||||
environment.
|
||||
|
||||
- `tests`: Tests for various crates. Generally, `crate/src/tests` is used, or
|
||||
`crate/tests`, yet any tests requiring crates' binaries are placed here.
|
||||
|
||||
### Security
|
||||
|
||||
Serai hosts a bug bounty program via
|
||||
[Immunefi](https://immunefi.com/bounty/serai/). For in-scope critical
|
||||
vulnerabilities, we will reward whitehats with up to $30,000.
|
||||
|
||||
Anything not in-scope should still be submitted through Immunefi, with rewards
|
||||
issued at the discretion of the Immunefi program managers.
|
||||
|
||||
### Links
|
||||
|
||||
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
|
||||
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
|
||||
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
|
||||
- [Matrix](https://matrix.to/#/#serai:matrix.org):
|
||||
https://matrix.to/#/#serai:matrix.org
|
||||
- [Website](https://serai.exchange/): https://serai.exchange/
|
||||
- [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/
|
||||
- [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
|
||||
- [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
|
||||
- [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
|
||||
- [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org
|
||||
- [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/
|
||||
- [Telegram](https://t.me/SeraiDEX): https://t.me/SeraiDEX
|
||||
|
||||
BIN
audits/Cypher Stack coins bitcoin August 2023/Audit.pdf
Normal file
BIN
audits/Cypher Stack coins bitcoin August 2023/Audit.pdf
Normal file
Binary file not shown.
21
audits/Cypher Stack coins bitcoin August 2023/LICENSE
Normal file
21
audits/Cypher Stack coins bitcoin August 2023/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023 Cypher Stack
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
6
audits/Cypher Stack coins bitcoin August 2023/README.md
Normal file
6
audits/Cypher Stack coins bitcoin August 2023/README.md
Normal file
@@ -0,0 +1,6 @@
|
||||
# Cypher Stack /coins/bitcoin Audit, August 2023
|
||||
|
||||
This audit was over the /coins/bitcoin folder. It is encompassing up to commit
|
||||
5121ca75199dff7bd34230880a1fdd793012068c.
|
||||
|
||||
Please see https://github.com/cypherstack/serai-btc-audit for provenance.
|
||||
@@ -1,37 +1,61 @@
|
||||
[package]
|
||||
name = "bitcoin-serai"
|
||||
version = "0.2.0"
|
||||
version = "0.3.0"
|
||||
description = "A Bitcoin library for FROST-signing transactions"
|
||||
license = "MIT"
|
||||
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
|
||||
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
|
||||
edition = "2021"
|
||||
rust-version = "1.74"
|
||||
|
||||
[dependencies]
|
||||
lazy_static = "1"
|
||||
thiserror = "1"
|
||||
std-shims = { version = "0.1.1", path = "../../common/std-shims", default-features = false }
|
||||
|
||||
zeroize = "^1.5"
|
||||
rand_core = "0.6"
|
||||
thiserror = { version = "1", default-features = false, optional = true }
|
||||
|
||||
sha2 = "0.10"
|
||||
zeroize = { version = "^1.5", default-features = false }
|
||||
rand_core = { version = "0.6", default-features = false }
|
||||
|
||||
secp256k1 = { version = "0.27", features = ["global-context"] }
|
||||
bitcoin = { version = "0.30", features = ["serde"] }
|
||||
bitcoin = { version = "0.31", default-features = false, features = ["no-std"] }
|
||||
|
||||
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits"] }
|
||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"] }
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["secp256k1"] }
|
||||
k256 = { version = "^0.13.1", default-features = false, features = ["arithmetic", "bits"] }
|
||||
|
||||
hex = "0.4"
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
reqwest = { version = "0.11", features = ["json"] }
|
||||
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["secp256k1"], optional = true }
|
||||
|
||||
hex = { version = "0.4", default-features = false, optional = true }
|
||||
serde = { version = "1", default-features = false, features = ["derive"], optional = true }
|
||||
serde_json = { version = "1", default-features = false, optional = true }
|
||||
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls", "basic-auth"], optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
|
||||
secp256k1 = { version = "0.28", default-features = false, features = ["std"] }
|
||||
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }
|
||||
|
||||
tokio = { version = "1", features = ["macros"] }
|
||||
|
||||
[features]
|
||||
std = [
|
||||
"std-shims/std",
|
||||
|
||||
"thiserror",
|
||||
|
||||
"zeroize/std",
|
||||
"rand_core/std",
|
||||
|
||||
"bitcoin/std",
|
||||
"bitcoin/serde",
|
||||
|
||||
"k256/std",
|
||||
|
||||
"transcript/std",
|
||||
"frost",
|
||||
|
||||
"hex/std",
|
||||
"serde/std",
|
||||
"serde_json/std",
|
||||
"simple-request",
|
||||
]
|
||||
hazmat = []
|
||||
default = ["std"]
|
||||
|
||||
@@ -1,26 +1,6 @@
|
||||
use core::fmt::Debug;
|
||||
use std::io;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
use zeroize::Zeroizing;
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
use sha2::{Digest, Sha256};
|
||||
use transcript::Transcript;
|
||||
|
||||
use secp256k1::schnorr::Signature;
|
||||
use k256::{
|
||||
elliptic_curve::{
|
||||
ops::Reduce,
|
||||
sec1::{Tag, ToEncodedPoint},
|
||||
},
|
||||
U256, Scalar, ProjectivePoint,
|
||||
};
|
||||
use frost::{
|
||||
curve::{Ciphersuite, Secp256k1},
|
||||
Participant, ThresholdKeys, ThresholdView, FrostError,
|
||||
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
||||
elliptic_curve::sec1::{Tag, ToEncodedPoint},
|
||||
ProjectivePoint,
|
||||
};
|
||||
|
||||
use bitcoin::key::XOnlyPublicKey;
|
||||
@@ -32,13 +12,15 @@ pub fn x(key: &ProjectivePoint) -> [u8; 32] {
|
||||
(*encoded.x().expect("point at infinity")).into()
|
||||
}
|
||||
|
||||
/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
|
||||
/// Convert a non-infinity even point to a XOnlyPublicKey. Panics on invalid input.
|
||||
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
|
||||
XOnlyPublicKey::from_slice(&x(key)).unwrap()
|
||||
XOnlyPublicKey::from_slice(&x(key)).expect("x_only was passed a point which was infinity or odd")
|
||||
}
|
||||
|
||||
/// Make a point even by adding the generator until it is even. Returns the even point and the
|
||||
/// amount of additions required.
|
||||
/// Make a point even by adding the generator until it is even.
|
||||
///
|
||||
/// Returns the even point and the amount of additions required.
|
||||
#[cfg(any(feature = "std", feature = "hazmat"))]
|
||||
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
||||
let mut c = 0;
|
||||
while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
|
||||
@@ -48,113 +30,137 @@ pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
|
||||
(key, c)
|
||||
}
|
||||
|
||||
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
|
||||
///
|
||||
/// If passed an odd nonce, it will have the generator added until it is even.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct Hram {}
|
||||
#[cfg(feature = "std")]
|
||||
mod frost_crypto {
|
||||
use core::fmt::Debug;
|
||||
use std_shims::{vec::Vec, io};
|
||||
|
||||
lazy_static! {
|
||||
static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
|
||||
}
|
||||
use zeroize::Zeroizing;
|
||||
use rand_core::{RngCore, CryptoRng};
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
impl HramTrait<Secp256k1> for Hram {
|
||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
||||
// Convert the nonce to be even
|
||||
let (R, _) = make_even(*R);
|
||||
use bitcoin::hashes::{HashEngine, Hash, sha256::Hash as Sha256};
|
||||
|
||||
let mut data = Sha256::new();
|
||||
data.update(*TAG_HASH);
|
||||
data.update(*TAG_HASH);
|
||||
data.update(x(&R));
|
||||
data.update(x(A));
|
||||
data.update(m);
|
||||
use transcript::Transcript;
|
||||
|
||||
Scalar::reduce(U256::from_be_slice(&data.finalize()))
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP-340 Schnorr signature algorithm.
|
||||
///
|
||||
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
||||
#[derive(Clone)]
|
||||
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
||||
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
||||
/// Construct a Schnorr algorithm continuing the specified transcript.
|
||||
pub fn new(transcript: T) -> Schnorr<T> {
|
||||
Schnorr(FrostSchnorr::new(transcript))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
||||
type Transcript = T;
|
||||
type Addendum = ();
|
||||
type Signature = Signature;
|
||||
|
||||
fn transcript(&mut self) -> &mut Self::Transcript {
|
||||
self.0.transcript()
|
||||
}
|
||||
|
||||
fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
|
||||
self.0.nonces()
|
||||
}
|
||||
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
keys: &ThresholdKeys<Secp256k1>,
|
||||
) {
|
||||
self.0.preprocess_addendum(rng, keys)
|
||||
}
|
||||
|
||||
fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
|
||||
self.0.read_addendum(reader)
|
||||
}
|
||||
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
view: &ThresholdView<Secp256k1>,
|
||||
i: Participant,
|
||||
addendum: (),
|
||||
) -> Result<(), FrostError> {
|
||||
self.0.process_addendum(view, i, addendum)
|
||||
}
|
||||
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
params: &ThresholdView<Secp256k1>,
|
||||
nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
|
||||
nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
|
||||
msg: &[u8],
|
||||
) -> <Secp256k1 as Ciphersuite>::F {
|
||||
self.0.sign_share(params, nonce_sums, nonces, msg)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn verify(
|
||||
&self,
|
||||
group_key: ProjectivePoint,
|
||||
nonces: &[Vec<ProjectivePoint>],
|
||||
sum: Scalar,
|
||||
) -> Option<Self::Signature> {
|
||||
self.0.verify(group_key, nonces, sum).map(|mut sig| {
|
||||
// Make the R of the final signature even
|
||||
let offset;
|
||||
(sig.R, offset) = make_even(sig.R);
|
||||
// s = r + cx. Since we added to the r, add to s
|
||||
sig.s += Scalar::from(offset);
|
||||
// Convert to a secp256k1 signature
|
||||
Signature::from_slice(&sig.serialize()[1 ..]).unwrap()
|
||||
})
|
||||
}
|
||||
|
||||
fn verify_share(
|
||||
&self,
|
||||
verification_share: ProjectivePoint,
|
||||
nonces: &[Vec<ProjectivePoint>],
|
||||
share: Scalar,
|
||||
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
|
||||
self.0.verify_share(verification_share, nonces, share)
|
||||
use k256::{elliptic_curve::ops::Reduce, U256, Scalar};
|
||||
|
||||
use frost::{
|
||||
curve::{Ciphersuite, Secp256k1},
|
||||
Participant, ThresholdKeys, ThresholdView, FrostError,
|
||||
algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
|
||||
};
|
||||
|
||||
use super::*;
|
||||
|
||||
/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
|
||||
///
|
||||
/// If passed an odd nonce, it will have the generator added until it is even.
|
||||
///
|
||||
/// If the key is odd, this will panic.
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub struct Hram;
|
||||
#[allow(non_snake_case)]
|
||||
impl HramTrait<Secp256k1> for Hram {
|
||||
fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
|
||||
// Convert the nonce to be even
|
||||
let (R, _) = make_even(*R);
|
||||
|
||||
const TAG_HASH: Sha256 = Sha256::const_hash(b"BIP0340/challenge");
|
||||
|
||||
let mut data = Sha256::engine();
|
||||
data.input(TAG_HASH.as_ref());
|
||||
data.input(TAG_HASH.as_ref());
|
||||
data.input(&x(&R));
|
||||
data.input(&x(A));
|
||||
data.input(m);
|
||||
|
||||
Scalar::reduce(U256::from_be_slice(Sha256::from_engine(data).as_ref()))
|
||||
}
|
||||
}
|
||||
|
||||
/// BIP-340 Schnorr signature algorithm.
|
||||
///
|
||||
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
|
||||
#[derive(Clone)]
|
||||
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
|
||||
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
|
||||
/// Construct a Schnorr algorithm continuing the specified transcript.
|
||||
pub fn new(transcript: T) -> Schnorr<T> {
|
||||
Schnorr(FrostSchnorr::new(transcript))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
|
||||
type Transcript = T;
|
||||
type Addendum = ();
|
||||
type Signature = [u8; 64];
|
||||
|
||||
fn transcript(&mut self) -> &mut Self::Transcript {
|
||||
self.0.transcript()
|
||||
}
|
||||
|
||||
fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
|
||||
self.0.nonces()
|
||||
}
|
||||
|
||||
fn preprocess_addendum<R: RngCore + CryptoRng>(
|
||||
&mut self,
|
||||
rng: &mut R,
|
||||
keys: &ThresholdKeys<Secp256k1>,
|
||||
) {
|
||||
self.0.preprocess_addendum(rng, keys)
|
||||
}
|
||||
|
||||
fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
|
||||
self.0.read_addendum(reader)
|
||||
}
|
||||
|
||||
fn process_addendum(
|
||||
&mut self,
|
||||
view: &ThresholdView<Secp256k1>,
|
||||
i: Participant,
|
||||
addendum: (),
|
||||
) -> Result<(), FrostError> {
|
||||
self.0.process_addendum(view, i, addendum)
|
||||
}
|
||||
|
||||
fn sign_share(
|
||||
&mut self,
|
||||
params: &ThresholdView<Secp256k1>,
|
||||
nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
|
||||
nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
|
||||
msg: &[u8],
|
||||
) -> <Secp256k1 as Ciphersuite>::F {
|
||||
self.0.sign_share(params, nonce_sums, nonces, msg)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn verify(
|
||||
&self,
|
||||
group_key: ProjectivePoint,
|
||||
nonces: &[Vec<ProjectivePoint>],
|
||||
sum: Scalar,
|
||||
) -> Option<Self::Signature> {
|
||||
self.0.verify(group_key, nonces, sum).map(|mut sig| {
|
||||
// Make the R of the final signature even
|
||||
let offset;
|
||||
(sig.R, offset) = make_even(sig.R);
|
||||
// s = r + cx. Since we added to the r, add to s
|
||||
sig.s += Scalar::from(offset);
|
||||
// Convert to a Bitcoin signature by dropping the byte for the point's sign bit
|
||||
sig.serialize()[1 ..].try_into().unwrap()
|
||||
})
|
||||
}
|
||||
|
||||
fn verify_share(
|
||||
&self,
|
||||
verification_share: ProjectivePoint,
|
||||
nonces: &[Vec<ProjectivePoint>],
|
||||
share: Scalar,
|
||||
) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
|
||||
self.0.verify_share(verification_share, nonces, share)
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "std")]
|
||||
pub use frost_crypto::*;

@@ -1,5 +1,9 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
extern crate alloc;

/// The bitcoin Rust library.
pub use bitcoin;
@@ -13,6 +17,7 @@ pub(crate) mod crypto;
/// Wallet functionality to create transactions.
pub mod wallet;
/// A minimal asynchronous Bitcoin RPC client.
#[cfg(feature = "std")]
pub mod rpc;

#[cfg(test)]

@@ -1,10 +1,13 @@
use core::fmt::Debug;
use std::collections::HashSet;

use thiserror::Error;

use serde::{Deserialize, de::DeserializeOwned};
use serde_json::json;

use simple_request::{hyper, Request, Client};

use bitcoin::{
  hashes::{Hash, hex::FromHex},
  consensus::encode,
@@ -26,7 +29,10 @@ enum RpcResponse<T> {

/// A minimal asynchronous Bitcoin RPC client.
#[derive(Clone, Debug)]
pub struct Rpc(String);
pub struct Rpc {
  client: Client,
  url: String,
}

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
@@ -34,15 +40,64 @@ pub enum RpcError {
  ConnectionError,
  #[error("request had an error: {0:?}")]
  RequestError(Error),
  #[error("node sent an invalid response")]
  InvalidResponse,
  #[error("node replied with invalid JSON")]
  InvalidJson(serde_json::error::Category),
  #[error("node sent an invalid response ({0})")]
  InvalidResponse(&'static str),
  #[error("node was missing expected methods")]
  MissingMethods(HashSet<&'static str>),
}

impl Rpc {
  /// Create a new connection to a Bitcoin RPC.
  ///
  /// An RPC call is performed to ensure the node is reachable (and that an invalid URL wasn't
  /// provided).
  ///
  /// Additionally, a set of expected methods is checked to be offered by the Bitcoin RPC. If these
  /// methods aren't provided, an error with the missing methods is returned. This ensures all RPC
  /// routes explicitly provided by this library are at least possible.
  ///
  /// Each individual RPC route may still fail at time-of-call, regardless of the arguments
  /// provided to this library, if the RPC has an incompatible argument layout. That is not checked
  /// at time of RPC creation.
  pub async fn new(url: String) -> Result<Rpc, RpcError> {
    let rpc = Rpc(url);
    let rpc = Rpc { client: Client::with_connection_pool(), url };

    // Make an RPC request to verify the node is reachable and sane
    rpc.get_latest_block_number().await?;
    let res: String = rpc.rpc_call("help", json!([])).await?;

    // Verify all methods we expect are present
    // If we had a more expanded RPC, due to differences in RPC versions, it wouldn't make sense to
    // error if all methods weren't present
    // We only provide a very minimal set of methods which have been largely consistent, hence why
    // this is sane
    let mut expected_methods = HashSet::from([
      "help",
      "getblockcount",
      "getblockhash",
      "getblockheader",
      "getblock",
      "sendrawtransaction",
      "getrawtransaction",
    ]);
    for line in res.split('\n') {
      // This doesn't check if the arguments are as expected
      // This is due to Bitcoin supporting a large number of optional arguments, which
      // occasionally change, with their own mechanism of text documentation, making matching off
      // of it quite an involved task
      // Instead, once we've confirmed the methods are present, we assume our arguments are aligned
      // Else we'll error at time of call
      if expected_methods.remove(line.split(' ').next().unwrap_or("")) &&
        expected_methods.is_empty()
      {
        break;
      }
    }
    if !expected_methods.is_empty() {
      Err(RpcError::MissingMethods(expected_methods))?;
    };

    Ok(rpc)
  }
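
  // Usage sketch (hypothetical URL; any credentials are carried in the URL itself):
  //   let rpc = Rpc::new("http://user:pass@127.0.0.1:8332".to_string()).await?;
  //   let height: usize = rpc.rpc_call("getblockcount", json!([])).await?;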

@@ -52,19 +107,28 @@ impl Rpc {
    method: &str,
    params: serde_json::Value,
  ) -> Result<Response, RpcError> {
    let client = reqwest::Client::new();
    let res = client
      .post(&self.0)
      .json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
      .send()
    let mut request = Request::from(
      hyper::Request::post(&self.url)
        .header("Content-Type", "application/json")
        .body(
          serde_json::to_vec(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
            .unwrap()
            .into(),
        )
        .unwrap(),
    );
    request.with_basic_auth();
    let mut res = self
      .client
      .request(request)
      .await
      .map_err(|_| RpcError::ConnectionError)?
      .text()
      .body()
      .await
      .map_err(|_| RpcError::ConnectionError)?;

    let res: RpcResponse<Response> =
      serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
      serde_json::from_reader(&mut res).map_err(|e| RpcError::InvalidJson(e.classify()))?;
    match res {
      RpcResponse::Ok { result } => Ok(result),
      RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
@@ -107,13 +171,15 @@ impl Rpc {
  /// Get a block by its hash.
  pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
    let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
    let block: Block = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex)
      .map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the block"))?;
    let block: Block = encode::deserialize(&bytes)
      .map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized block"))?;

    let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
    block_hash.reverse();
    if hash != &block_hash {
      Err(RpcError::InvalidResponse)?;
      Err(RpcError::InvalidResponse("node replied with a different block"))?;
    }

    Ok(block)
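    // Note on the reversal above: Bitcoin RPCs display hashes in reversed byte order
    // relative to the in-memory bytes, e.g. (illustrative):
    //   fn displayed_hash(mut raw: [u8; 32]) -> String { raw.reverse(); hex::encode(raw) }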
@@ -123,7 +189,7 @@ impl Rpc {
  pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
    let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
    if txid != tx.txid() {
      Err(RpcError::InvalidResponse)?;
      Err(RpcError::InvalidResponse("returned TX ID doesn't equal calculated TX ID"))?;
    }
    Ok(txid)
  }
@@ -131,13 +197,15 @@ impl Rpc {
  /// Get a transaction by its hash.
  pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
    let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
    let tx: Transaction = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex)
      .map_err(|_| RpcError::InvalidResponse("node didn't use hex to encode the transaction"))?;
    let tx: Transaction = encode::deserialize(&bytes)
      .map_err(|_| RpcError::InvalidResponse("node sent an improperly serialized transaction"))?;

    let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
    tx_hash.reverse();
    if hash != &tx_hash {
      Err(RpcError::InvalidResponse)?;
      Err(RpcError::InvalidResponse("node replied with a different transaction"))?;
    }

    Ok(tx)

@@ -1,8 +1,6 @@
use rand_core::OsRng;

use sha2::{Digest, Sha256};

use secp256k1::{SECP256K1, Message};
use secp256k1::{Secp256k1 as BContext, Message, schnorr::Signature};

use k256::Scalar;
use transcript::{Transcript, RecommendedTranscript};
@@ -34,12 +32,13 @@ fn test_algorithm() {
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    &Sha256::digest(MESSAGE),
    Hash::hash(MESSAGE).as_ref(),
  );

  SECP256K1
  BContext::new()
    .verify_schnorr(
      &sig,
      &Signature::from_slice(&sig)
        .expect("couldn't convert produced signature to secp256k1::Signature"),
      &Message::from(Hash::hash(MESSAGE)),
      &x_only(&keys[&Participant::new(1).unwrap()].group_key()),
    )

@@ -1,41 +1,57 @@
use std::{
  io::{self, Read, Write},
use std_shims::{
  vec::Vec,
  collections::HashMap,
  io::{self, Write},
};
#[cfg(feature = "std")]
use std_shims::io::Read;

use k256::{
  elliptic_curve::sec1::{Tag, ToEncodedPoint},
  Scalar, ProjectivePoint,
};

#[cfg(feature = "std")]
use frost::{
  curve::{Ciphersuite, Secp256k1},
  ThresholdKeys,
};

use bitcoin::{
  consensus::encode::{Decodable, serialize},
  key::TweakedPublicKey,
  OutPoint, ScriptBuf, TxOut, Transaction, Block, Network, Address,
  consensus::encode::serialize, key::TweakedPublicKey, address::Payload, OutPoint, ScriptBuf,
  TxOut, Transaction, Block,
};
#[cfg(feature = "std")]
use bitcoin::consensus::encode::Decodable;

use crate::crypto::{x_only, make_even};
use crate::crypto::x_only;
#[cfg(feature = "std")]
use crate::crypto::make_even;

#[cfg(feature = "std")]
mod send;
#[cfg(feature = "std")]
pub use send::*;

/// Tweak keys to ensure they're usable with Bitcoin.
///
/// Taproot keys, which these keys are used as, must be even. This offsets the keys until they're
/// even.
#[cfg(feature = "std")]
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
  let (_, offset) = make_even(keys.group_key());
  keys.offset(Scalar::from(offset))
}
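// Usage sketch: tweak the keys before any signing or address derivation, so the
// group key is even and address_payload (below) returns Some:
//   let keys = tweak_keys(&keys);
//   let payload = address_payload(keys.group_key()).expect("tweaked key is even");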

/// Return the Taproot address for a public key.
pub fn address(network: Network, key: ProjectivePoint) -> Option<Address> {
/// Return the Taproot address payload for a public key.
///
/// If the key is odd, this will return None.
pub fn address_payload(key: ProjectivePoint) -> Option<Payload> {
  if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
    return None;
  }

  Some(Address::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)), network))
  Some(Payload::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key))))
}

/// A spendable output.
@@ -55,6 +71,11 @@ impl ReceivedOutput {
    self.offset
  }

  /// The Bitcoin output for this output.
  pub fn output(&self) -> &TxOut {
    &self.output
  }

  /// The outpoint for this output.
  pub fn outpoint(&self) -> &OutPoint {
    &self.outpoint
@@ -62,17 +83,16 @@ impl ReceivedOutput {

  /// The value of this output.
  pub fn value(&self) -> u64 {
    self.output.value
    self.output.value.to_sat()
  }

  /// Read a ReceivedOutput from a generic satisfying Read.
  #[cfg(feature = "std")]
  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
    Ok(ReceivedOutput {
      offset: Secp256k1::read_F(r)?,
      output: TxOut::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
      outpoint: OutPoint::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
      output: TxOut::consensus_decode(r).map_err(|_| io::Error::other("invalid TxOut"))?,
      outpoint: OutPoint::consensus_decode(r).map_err(|_| io::Error::other("invalid OutPoint"))?,
    })
  }

@@ -83,9 +103,9 @@ impl ReceivedOutput {
    w.write_all(&serialize(&self.outpoint))
  }

  /// Serialize a ReceivedOutput to a Vec<u8>.
  /// Serialize a ReceivedOutput to a `Vec<u8>`.
  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    let mut res = Vec::new();
    self.write(&mut res).unwrap();
    res
  }
@@ -104,8 +124,7 @@ impl Scanner {
  /// Returns None if this key can't be scanned for.
  pub fn new(key: ProjectivePoint) -> Option<Scanner> {
    let mut scripts = HashMap::new();
    // Uses Network::Bitcoin since network is irrelevant here
    scripts.insert(address(Network::Bitcoin, key)?.script_pubkey(), Scalar::ZERO);
    scripts.insert(address_payload(key)?.script_pubkey(), Scalar::ZERO);
    Some(Scanner { key, scripts })
  }
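
  // Usage sketch (hypothetical flow): build a Scanner and scan each new block.
  //   let scanner = Scanner::new(tweak_keys(&keys).group_key()).unwrap();
  //   let received = scanner.scan_block(&rpc.get_block(&hash).await?);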

@@ -114,9 +133,15 @@ impl Scanner {
  /// Due to Bitcoin's requirement that points are even, not every offset may be used.
  /// If an offset isn't usable, it will be incremented until it is. If this offset is already
  /// present, None is returned. Else, Some is returned with the offset actually used.
  ///
  /// This means offsets are surjective, not bijective, and the order offsets are registered in
  /// may determine the validity of future offsets.
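  ///
  /// For example (hypothetical values): if offset `o` derives an odd key, it is bumped to
  /// `o + 1`; a later registration of `o + 1` itself then returns None, as its script is
  /// already present.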
  pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
    // This loop will terminate as soon as an even point is found, with any point having a ~50%
    // chance of being even
    // That means this should terminate within a very small amount of iterations
    loop {
      match address(Network::Bitcoin, self.key + (ProjectivePoint::GENERATOR * offset)) {
      match address_payload(self.key + (ProjectivePoint::GENERATOR * offset)) {
        Some(address) => {
          let script = address.script_pubkey();
          if self.scripts.contains_key(&script) {
@@ -132,13 +157,16 @@ impl Scanner {

  /// Scan a transaction.
  pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
    let mut res = vec![];
    let mut res = Vec::new();
    for (vout, output) in tx.output.iter().enumerate() {
      // If the vout index exceeds 2**32, stop scanning outputs
      let Ok(vout) = u32::try_from(vout) else { break };

      if let Some(offset) = self.scripts.get(&output.script_pubkey) {
        res.push(ReceivedOutput {
          offset: *offset,
          output: output.clone(),
          outpoint: OutPoint::new(tx.txid(), u32::try_from(vout).unwrap()),
          outpoint: OutPoint::new(tx.txid(), vout),
        });
      }
    }
@@ -151,7 +179,7 @@ impl Scanner {
  /// must be immediately spendable, a post-processing pass is needed to remove those outputs.
  /// Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
  pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
    let mut res = vec![];
    let mut res = Vec::new();
    for tx in &block.txdata {
      res.extend(self.scan_transaction(tx));
    }

@@ -1,4 +1,4 @@
use std::{
use std_shims::{
  io::{self, Read},
  collections::HashMap,
};
@@ -16,21 +16,21 @@ use bitcoin::{
  sighash::{TapSighashType, SighashCache, Prevouts},
  absolute::LockTime,
  script::{PushBytesBuf, ScriptBuf},
  OutPoint, Sequence, Witness, TxIn, TxOut, Transaction, Network, Address,
  transaction::{Version, Transaction},
  OutPoint, Sequence, Witness, TxIn, Amount, TxOut, Address,
};

use crate::{
  crypto::Schnorr,
  wallet::{address, ReceivedOutput},
  wallet::{ReceivedOutput, address_payload},
};

#[rustfmt::skip]
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.h#L27
const MAX_STANDARD_TX_WEIGHT: u64 = 400_000;

#[rustfmt::skip]
// https://github.com/bitcoin/bitcoin/blob/a245429d680eb95cf4c0c78e58e63e3f0f5d979a/src/test/transaction_tests.cpp#L815-L816
const DUST: u64 = 674;
// https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/src/policy/policy.cpp#L26-L63
// As the above notes, a lower amount may not be considered dust if contained in a SegWit output
// This doesn't bother with delineation due to how marginal these values are, and because it isn't
// worth the complexity to implement differentiation
pub const DUST: u64 = 546;
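// For reference (values from Bitcoin Core's defaults): at the 3 sat/vB dust relay fee,
// the threshold works out to 546 sats for P2PKH outputs while SegWit outputs allow less
// (330 for P2TR), hence 546 as the conservative, delineation-free bound.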

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum TransactionError {
@@ -42,6 +42,8 @@ pub enum TransactionError {
  DustPayment,
  #[error("too much data was specified")]
  TooMuchData,
  #[error("fee was too low to pass the default minimum fee rate")]
  TooLowFee,
  #[error("not enough funds for these payments")]
  NotEnoughFunds,
  #[error("transaction was too large")]
@@ -61,7 +63,7 @@ impl SignableTransaction {
  fn calculate_weight(inputs: usize, payments: &[(Address, u64)], change: Option<&Address>) -> u64 {
    // Expand this to a full transaction in order to use the bitcoin library's weight function
    let mut tx = Transaction {
      version: 2,
      version: Version(2),
      lock_time: LockTime::ZERO,
      input: vec![
        TxIn {
@@ -81,15 +83,18 @@ impl SignableTransaction {
        .iter()
        // The payment is a fixed size so we don't have to use it here
        // The script pub key is not of a fixed size and does have to be used here
        .map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
        .map(|payment| TxOut {
          value: Amount::from_sat(payment.1),
          script_pubkey: payment.0.script_pubkey(),
        })
        .collect(),
    };
    if let Some(change) = change {
      // Use a 0 value since we're currently unsure what the change amount will be, and since
      // the value is fixed size (so any value could be used here)
      tx.output.push(TxOut { value: 0, script_pubkey: change.script_pubkey() });
      tx.output.push(TxOut { value: Amount::ZERO, script_pubkey: change.script_pubkey() });
    }
    u64::try_from(tx.weight()).unwrap()
    u64::from(tx.weight())
  }

  /// Returns the fee necessary for this transaction to achieve the fee rate specified at
@@ -100,6 +105,12 @@ impl SignableTransaction {
    self.needed_fee
  }

  /// Returns the fee this transaction will use.
  pub fn fee(&self) -> u64 {
    self.prevouts.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>() -
      self.tx.output.iter().map(|prevout| prevout.value.to_sat()).sum::<u64>()
  }

  /// Create a new SignableTransaction.
  ///
  /// If a change address is specified, any leftover funds will be sent to it if the leftover funds
@@ -132,7 +143,7 @@ impl SignableTransaction {
      Err(TransactionError::TooMuchData)?;
    }

    let input_sat = inputs.iter().map(|input| input.output.value).sum::<u64>();
    let input_sat = inputs.iter().map(|input| input.output.value.to_sat()).sum::<u64>();
    let offsets = inputs.iter().map(|input| input.offset).collect();
    let tx_ins = inputs
      .iter()
@@ -147,15 +158,18 @@ impl SignableTransaction {
    let payment_sat = payments.iter().map(|payment| payment.1).sum::<u64>();
    let mut tx_outs = payments
      .iter()
      .map(|payment| TxOut { value: payment.1, script_pubkey: payment.0.script_pubkey() })
      .map(|payment| TxOut {
        value: Amount::from_sat(payment.1),
        script_pubkey: payment.0.script_pubkey(),
      })
      .collect::<Vec<_>>();

    // Add the OP_RETURN output
    if let Some(data) = data {
      tx_outs.push(TxOut {
        value: 0,
        value: Amount::ZERO,
        script_pubkey: ScriptBuf::new_op_return(
          &PushBytesBuf::try_from(data)
          PushBytesBuf::try_from(data)
            .expect("data didn't fit into PushBytes despite being checked"),
        ),
      })
@@ -163,6 +177,35 @@ impl SignableTransaction {

    let mut weight = Self::calculate_weight(tx_ins.len(), payments, None);
    let mut needed_fee = fee_per_weight * weight;

    // "Virtual transaction size" is weight ceildiv 4 per
    // https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki

    // https://github.com/bitcoin/bitcoin/blob/306ccd4927a2efe325c8d84be1bdb79edeb29b04/
    // src/policy/policy.cpp#L295-L298
    // implements this as expected

    // Technically, it takes whatever's greater, the weight or the number of signature operations
    // multiplied by DEFAULT_BYTES_PER_SIGOP (20)
    // We only use 1 signature per input, and our inputs have a weight exceeding 20
    // Accordingly, our inputs' weight will always be greater than the cost of the signature ops
    let vsize = weight.div_ceil(4);
    debug_assert_eq!(
      u64::try_from(bitcoin::policy::get_virtual_tx_size(
        weight.try_into().unwrap(),
        tx_ins.len().try_into().unwrap()
      ))
      .unwrap(),
      vsize
    );
    // Technically, if there isn't change, this TX may still pay enough of a fee to pass the
    // minimum fee. Such edge cases aren't worth programming when they go against intent, as the
    // specified fee rate is too low to be valid
    // bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE is in sats/kilo-vbyte
    if needed_fee < ((u64::from(bitcoin::policy::DEFAULT_MIN_RELAY_TX_FEE) * vsize) / 1000) {
      Err(TransactionError::TooLowFee)?;
    }
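    // Worked example (illustrative numbers): a TX with weight 561 has
    // vsize = ceil(561 / 4) = 141; with DEFAULT_MIN_RELAY_TX_FEE = 1000 sat/kvB,
    // the minimum acceptable fee is (1000 * 141) / 1000 = 141 sats.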

    if input_sat < (payment_sat + needed_fee) {
      Err(TransactionError::NotEnoughFunds)?;
    }
@@ -173,7 +216,8 @@ impl SignableTransaction {
      let fee_with_change = fee_per_weight * weight_with_change;
      if let Some(value) = input_sat.checked_sub(payment_sat + fee_with_change) {
        if value >= DUST {
          tx_outs.push(TxOut { value, script_pubkey: change.script_pubkey() });
          tx_outs
            .push(TxOut { value: Amount::from_sat(value), script_pubkey: change.script_pubkey() });
          weight = weight_with_change;
          needed_fee = fee_with_change;
        }
@@ -184,18 +228,28 @@ impl SignableTransaction {
      Err(TransactionError::NoOutputs)?;
    }

    if weight > MAX_STANDARD_TX_WEIGHT {
    if weight > u64::from(bitcoin::policy::MAX_STANDARD_TX_WEIGHT) {
      Err(TransactionError::TooLargeTransaction)?;
    }

    Ok(SignableTransaction {
      tx: Transaction { version: 2, lock_time: LockTime::ZERO, input: tx_ins, output: tx_outs },
      tx: Transaction {
        version: Version(2),
        lock_time: LockTime::ZERO,
        input: tx_ins,
        output: tx_outs,
      },
      offsets,
      prevouts: inputs.drain(..).map(|input| input.output).collect(),
      needed_fee,
    })
  }

  /// Returns the outputs this transaction will create.
  pub fn outputs(&self) -> &[TxOut] {
    &self.tx.output
  }

  /// Create a multisig machine for this transaction.
  ///
  /// Returns None if the wrong keys are used.
@@ -215,18 +269,18 @@ impl SignableTransaction {
    }
    for payment in &tx.output {
      transcript.append_message(b"output_script", payment.script_pubkey.as_bytes());
      transcript.append_message(b"output_amount", payment.value.to_le_bytes());
      transcript.append_message(b"output_amount", payment.value.to_sat().to_le_bytes());
    }

    let mut sigs = vec![];
    for i in 0 .. tx.input.len() {
      let mut transcript = transcript.clone();
      // This unwrap is safe since any transaction with this many inputs violates the maximum
      // size allowed under standards, which this lib will error on creation of
      transcript.append_message(b"signing_input", u32::try_from(i).unwrap().to_le_bytes());

      let offset = keys.clone().offset(self.offsets[i]);
      if address(Network::Bitcoin, offset.group_key())?.script_pubkey() !=
        self.prevouts[i].script_pubkey
      {
      if address_payload(offset.group_key())?.script_pubkey() != self.prevouts[i].script_pubkey {
        None?;
      }

@@ -243,7 +297,7 @@ impl SignableTransaction {
/// A FROST signing machine to produce a Bitcoin transaction.
///
/// This does not support caching its preprocess. When sign is called, the message must be empty.
/// This will panic if it isn't.
/// This will panic if either `cache` is called or the message isn't empty.
pub struct TransactionMachine {
  tx: SignableTransaction,
  sigs: Vec<AlgorithmMachine<Secp256k1, Schnorr<RecommendedTranscript>>>,
@@ -339,7 +393,9 @@ impl SignMachine<Transaction> for TransactionSignMachine {
        commitments[i].clone(),
        cache
          .taproot_key_spend_signature_hash(i, &prevouts, TapSighashType::Default)
          .unwrap()
          // This should never happen since the inputs align with the TX the cache was
          // constructed with, and because i is always < prevouts.len()
          .expect("taproot_key_spend_signature_hash failed to return a hash")
          .as_ref(),
      )?;
      shares.push(share);
@@ -373,7 +429,7 @@ impl SignatureMachine<Transaction> for TransactionSignatureMachine {
    )?;

    let mut witness = Witness::new();
    witness.push(sig.as_ref());
    witness.push(sig);
    input.witness = witness;
  }

@@ -1,9 +1,13 @@
use std::sync::OnceLock;

use bitcoin_serai::rpc::Rpc;

use tokio::sync::Mutex;

lazy_static::lazy_static! {
  pub static ref SEQUENTIAL: Mutex<()> = Mutex::new(());
static SEQUENTIAL_CELL: OnceLock<Mutex<()>> = OnceLock::new();
#[allow(non_snake_case)]
pub fn SEQUENTIAL() -> &'static Mutex<()> {
  SEQUENTIAL_CELL.get_or_init(|| Mutex::new(()))
}

#[allow(dead_code)]
@@ -30,7 +34,7 @@ macro_rules! async_sequential {
    $(
      #[tokio::test]
      async fn $name() {
        let guard = runner::SEQUENTIAL.lock().await;
        let guard = runner::SEQUENTIAL().lock().await;
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {

@@ -22,9 +22,12 @@ use bitcoin_serai::{
    hashes::Hash as HashTrait,
    blockdata::opcodes::all::OP_RETURN,
    script::{PushBytesBuf, Instruction, Instructions, Script},
    OutPoint, TxOut, Transaction, Network, Address,
    address::NetworkChecked,
    OutPoint, Amount, TxOut, Transaction, Network, Address,
  },
  wallet::{
    tweak_keys, address_payload, ReceivedOutput, Scanner, TransactionError, SignableTransaction,
  },
  wallet::{tweak_keys, address, ReceivedOutput, Scanner, TransactionError, SignableTransaction},
  rpc::Rpc,
};

@@ -43,7 +46,10 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
  rpc
    .rpc_call::<Vec<String>>(
      "generatetoaddress",
      serde_json::json!([1, address(Network::Regtest, key).unwrap()]),
      serde_json::json!([
        1,
        Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())
      ]),
    )
    .await
    .unwrap();
@@ -52,7 +58,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)
  rpc
    .rpc_call::<Vec<String>>(
      "generatetoaddress",
      serde_json::json!([100, Address::p2sh(Script::empty(), Network::Regtest).unwrap()]),
      serde_json::json!([100, Address::p2sh(Script::new(), Network::Regtest).unwrap()]),
    )
    .await
    .unwrap();
@@ -64,7 +70,7 @@ async fn send_and_get_output(rpc: &Rpc, scanner: &Scanner, key: ProjectivePoint)

  assert_eq!(outputs.len(), 1);
  assert_eq!(outputs[0].outpoint(), &OutPoint::new(block.txdata[0].txid(), 0));
  assert_eq!(outputs[0].value(), block.txdata[0].output[0].value);
  assert_eq!(outputs[0].value(), block.txdata[0].output[0].value.to_sat());

  assert_eq!(
    ReceivedOutput::read::<&[u8]>(&mut outputs[0].serialize().as_ref()).unwrap(),
@@ -187,7 +193,7 @@
    assert_eq!(output.offset(), Scalar::ZERO);

    let inputs = vec![output];
    let addr = || address(Network::Regtest, key).unwrap();
    let addr = || Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap());
    let payments = vec![(addr(), 1000)];

    assert!(SignableTransaction::new(inputs.clone(), &payments, None, None, FEE).is_ok());
@@ -222,13 +228,18 @@
      Err(TransactionError::TooMuchData),
    );

    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[], Some(addr()), None, 0),
      Err(TransactionError::TooLowFee),
    );

    assert_eq!(
      SignableTransaction::new(inputs.clone(), &[(addr(), inputs[0].value() * 2)], None, None, FEE),
      Err(TransactionError::NotEnoughFunds),
    );

    assert_eq!(
      SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, 0),
      SignableTransaction::new(inputs, &vec![(addr(), 1000); 10000], None, None, FEE),
      Err(TransactionError::TooLargeTransaction),
    );
  }
@@ -250,13 +261,14 @@

    // Declare payments, change, fee
    let payments = [
      (address(Network::Regtest, key).unwrap(), 1005),
      (address(Network::Regtest, offset_key).unwrap(), 1007)
      (Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap()), 1005),
      (Address::<NetworkChecked>::new(Network::Regtest, address_payload(offset_key).unwrap()), 1007)
    ];

    let change_offset = scanner.register_offset(Scalar::random(&mut OsRng)).unwrap();
    let change_key = key + (ProjectivePoint::GENERATOR * change_offset);
    let change_addr = address(Network::Regtest, change_key).unwrap();
    let change_addr =
      Address::<NetworkChecked>::new(Network::Regtest, address_payload(change_key).unwrap());

    // Create and sign the TX
    let tx = SignableTransaction::new(
@@ -284,21 +296,24 @@

    // Make sure the payments were properly created
    for ((output, scanned), payment) in tx.output.iter().zip(outputs.iter()).zip(payments.iter()) {
      assert_eq!(output, &TxOut { script_pubkey: payment.0.script_pubkey(), value: payment.1 });
      assert_eq!(
        output,
        &TxOut { script_pubkey: payment.0.script_pubkey(), value: Amount::from_sat(payment.1) },
      );
      assert_eq!(scanned.value(), payment.1);
    }

    // Make sure the change is correct
    assert_eq!(needed_fee, u64::try_from(tx.weight()).unwrap() * FEE);
    assert_eq!(needed_fee, u64::from(tx.weight()) * FEE);
    let input_value = output.value() + offset_output.value();
    let output_value = tx.output.iter().map(|output| output.value).sum::<u64>();
    let output_value = tx.output.iter().map(|output| output.value.to_sat()).sum::<u64>();
    assert_eq!(input_value - output_value, needed_fee);

    let change_amount =
      input_value - payments.iter().map(|payment| payment.1).sum::<u64>() - needed_fee;
    assert_eq!(
      tx.output[2],
      TxOut { script_pubkey: change_addr.script_pubkey(), value: change_amount },
      TxOut { script_pubkey: change_addr.script_pubkey(), value: Amount::from_sat(change_amount) },
    );

    // This also tests send_raw_transaction and get_transaction, which the RPC test can't
@@ -327,7 +342,7 @@
      SignableTransaction::new(
        vec![output],
        &[],
        address(Network::Regtest, key),
        Some(Address::<NetworkChecked>::new(Network::Regtest, address_payload(key).unwrap())),
        Some(data.clone()),
        FEE
      ).unwrap()

@@ -7,31 +7,33 @@ repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false
rust-version = "1.74"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
thiserror = "1"
rand_core = "0.6"
thiserror = { version = "1", default-features = false }
eyre = { version = "0.6", default-features = false }

serde_json = "1"
serde = "1"
sha3 = { version = "0.10", default-features = false, features = ["std"] }

sha2 = "0.10"
sha3 = "0.10"

group = "0.13"
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits", "ecdsa"] }
group = { version = "0.13", default-features = false }
k256 = { version = "^0.13.1", default-features = false, features = ["std", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }

eyre = "0.6"

ethers = { version = "2", default-features = false, features = ["abigen", "ethers-solc"] }

[build-dependencies]
ethers-solc = "2"
ethers-core = { version = "2", default-features = false }
ethers-providers = { version = "2", default-features = false }
ethers-contract = { version = "2", default-features = false, features = ["abigen", "providers"] }

[dev-dependencies]
rand_core = { version = "0.6", default-features = false, features = ["std"] }

hex = { version = "0.4", default-features = false, features = ["std"] }
serde = { version = "1", default-features = false, features = ["std"] }
serde_json = { version = "1", default-features = false, features = ["std"] }

sha2 = { version = "0.10", default-features = false, features = ["std"] }

tokio = { version = "1", features = ["macros"] }

@@ -1,16 +1,15 @@
use ethers_solc::{Project, ProjectPathsConfig};

fn main() {
  println!("cargo:rerun-if-changed=contracts");
  println!("cargo:rerun-if-changed=artifacts");

  // configure the project with all its paths, solc, cache etc.
  let project = Project::builder()
    .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
    .build()
    .unwrap();
  project.compile().unwrap();
  #[rustfmt::skip]
  let args = [
    "--base-path", ".",
    "-o", "./artifacts", "--overwrite",
    "--bin", "--abi",
    "--optimize",
    "./contracts/Schnorr.sol"
  ];

  // Tell Cargo that if a source file changes, to rerun this build script.
  project.rerun_if_sources_changed();
  assert!(std::process::Command::new("solc").args(args).status().unwrap().success());
}

@@ -1,9 +1,10 @@
use crate::crypto::ProcessedSignature;
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::{eyre, Result};
use std::fs::File;
use std::sync::Arc;
use thiserror::Error;
use eyre::{eyre, Result};

use ethers_providers::{Provider, Http};
use ethers_contract::abigen;

use crate::crypto::ProcessedSignature;

#[derive(Error, Debug)]
pub enum EthereumError {
@@ -11,27 +12,10 @@ pub enum EthereumError {
  VerificationError,
}

abigen!(
  Schnorr,
  "./artifacts/Schnorr.sol/Schnorr.json",
  event_derives(serde::Deserialize, serde::Serialize),
);

pub async fn deploy_schnorr_verifier_contract(
  client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
  let path = "./artifacts/Schnorr.sol/Schnorr.json";
  let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
  let abi = artifact.abi.unwrap();
  let bin = artifact.bytecode.unwrap().object;
  let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
  let contract = factory.deploy(())?.send().await?;
  let contract = Schnorr::new(contract.address(), client);
  Ok(contract)
}
abigen!(Schnorr, "./artifacts/Schnorr.abi");

pub async fn call_verify(
  contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
  contract: &Schnorr<Provider<Http>>,
  params: &ProcessedSignature,
) -> Result<()> {
  if contract

@@ -11,7 +11,7 @@ use k256::{
use frost::{algorithm::Hram, curve::Secp256k1};

pub fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).try_into().unwrap()
  Keccak256::digest(data).into()
}

pub fn hash_to_scalar(data: &[u8]) -> Scalar {

@@ -1,13 +1,19 @@
use std::{convert::TryFrom, sync::Arc, time::Duration};
use std::{convert::TryFrom, sync::Arc, time::Duration, fs::File};

use rand_core::OsRng;

use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256};
use ::k256::{
  elliptic_curve::{bigint::ArrayEncoding, PrimeField},
  U256,
};

use ethers::{
  prelude::*,
use ethers_core::{
  types::Signature,
  abi::Abi,
  utils::{keccak256, Anvil, AnvilInstance},
};
use ethers_contract::ContractFactory;
use ethers_providers::{Middleware, Provider, Http};

use frost::{
  curve::Secp256k1,
@@ -18,20 +24,71 @@ use frost::{

use ethereum_serai::{
  crypto,
  contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
  contract::{Schnorr, call_verify},
};

async fn deploy_test_contract(
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
// TODO: Replace with a contract deployment from an unknown account, so the environment solely has
// to fund the deployer, not create/pass a wallet
pub async fn deploy_schnorr_verifier_contract(
  chain_id: u32,
  client: Arc<Provider<Http>>,
  wallet: &k256::ecdsa::SigningKey,
) -> eyre::Result<Schnorr<Provider<Http>>> {
  let abi: Abi = serde_json::from_reader(File::open("./artifacts/Schnorr.abi").unwrap()).unwrap();

  let hex_bin_buf = std::fs::read_to_string("./artifacts/Schnorr.bin").unwrap();
  let hex_bin =
    if let Some(stripped) = hex_bin_buf.strip_prefix("0x") { stripped } else { &hex_bin_buf };
  let bin = hex::decode(hex_bin).unwrap();
  let factory = ContractFactory::new(abi, bin.into(), client.clone());

  let mut deployment_tx = factory.deploy(())?.tx;
  deployment_tx.set_chain_id(chain_id);
  deployment_tx.set_gas(500_000);
  let (max_fee_per_gas, max_priority_fee_per_gas) = client.estimate_eip1559_fees(None).await?;
  deployment_tx.as_eip1559_mut().unwrap().max_fee_per_gas = Some(max_fee_per_gas);
  deployment_tx.as_eip1559_mut().unwrap().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);

  let sig_hash = deployment_tx.sighash();
  let (sig, rid) = wallet.sign_prehash_recoverable(sig_hash.as_ref()).unwrap();

  // EIP-155 v
  let mut v = u64::from(rid.to_byte());
  assert!((v == 0) || (v == 1));
  v += u64::from((chain_id * 2) + 35);
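  // e.g. (illustrative): Anvil's default chain ID is 31337, so v becomes
  // recovery_id + (31337 * 2) + 35 = recovery_id + 62709.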

  let r = sig.r().to_repr();
  let r_ref: &[u8] = r.as_ref();
  let s = sig.s().to_repr();
  let s_ref: &[u8] = s.as_ref();
  let deployment_tx = deployment_tx.rlp_signed(&Signature { r: r_ref.into(), s: s_ref.into(), v });

  let pending_tx = client.send_raw_transaction(deployment_tx).await?;

  // Poll until the receipt exists (the `while { body; condition } {}` form is Rust's do-while)
  let mut receipt;
  while {
    receipt = client.get_transaction_receipt(pending_tx.tx_hash()).await?;
    receipt.is_none()
  } {
    tokio::time::sleep(Duration::from_secs(6)).await;
  }
  let receipt = receipt.unwrap();
  assert!(receipt.status == Some(1.into()));

  let contract = Schnorr::new(receipt.contract_address.unwrap(), client.clone());
  Ok(contract)
}

async fn deploy_test_contract() -> (u32, AnvilInstance, Schnorr<Provider<Http>>) {
  let anvil = Anvil::new().spawn();

  let wallet: LocalWallet = anvil.keys()[0].clone().into();
  let provider =
    Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
  let chain_id = provider.get_chainid().await.unwrap().as_u32();
  let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());
  let wallet = anvil.keys()[0].clone().into();
  let client = Arc::new(provider);

  (chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
  (chain_id, anvil, deploy_schnorr_verifier_contract(chain_id, client, &wallet).await.unwrap())
}

#[tokio::test]

@@ -6,16 +6,17 @@ license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"
rust-version = "1.74"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }
std-shims = { path = "../../common/std-shims", version = "^0.1.1", default-features = false }

async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", optional = true }
thiserror = { version = "1", default-features = false, optional = true }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }
@@ -27,48 +28,45 @@ rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }

crc = { version = "3", default-features = false }
sha3 = { version = "0.10", default-features = false }
pbkdf2 = { version = "0.12", features = ["simple"], default-features = false }

curve25519-dalek = { version = "^3.2", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }

# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.4", default-features = false, features = ["batch"] }

# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.4", default-features = false, features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.8", default-features = false, features = ["ed25519"], optional = true }

monero-generators = { path = "generators", version = "0.3", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }

futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }

hex-literal = "0.4"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive"] }
serde = { version = "1", default-features = false, features = ["derive", "alloc"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }

base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }
base58-monero = { version = "2", default-features = false, features = ["check"] }

# Used for the provided RPC
digest_auth = { version = "0.3", optional = true }
reqwest = { version = "0.11", features = ["json"], optional = true }

# Used for the binaries
tokio = { version = "1", features = ["full"], optional = true }
# Used for the provided HTTP RPC
digest_auth = { version = "0.3", default-features = false, optional = true }
simple-request = { path = "../../common/request", version = "0.1", default-features = false, features = ["tls"], optional = true }
tokio = { version = "1", default-features = false, optional = true }

[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
monero-generators = { path = "generators", version = "0.3", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.4", default-features = false }
monero-generators = { path = "generators", version = "0.4", default-features = false }

[dev-dependencies]
tokio = { version = "1", features = ["full"] }
monero-rpc = "0.3"
tokio = { version = "1", features = ["sync", "macros"] }

frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["tests"] }

[features]
std = [
@@ -80,28 +78,33 @@ std = [
  "subtle/std",

  "rand_core/std",
  "rand_chacha/std",
  "rand/std",
  "rand_chacha/std",
  "rand_distr/std",

  "sha3/std",

  "curve25519-dalek/std",
  "pbkdf2/std",

  "multiexp/std",

  "transcript/std",
  "dleq/std",

  "monero-generators/std",

  "futures/std",
  "futures?/std",

  "hex/std",
  "serde/std",
  "serde_json/std",

  "base58-monero/std",
]

http_rpc = ["digest_auth", "reqwest"]
cache-distribution = ["futures"]
http-rpc = ["digest_auth", "simple-request", "tokio"]
multisig = ["transcript", "frost", "dleq", "std"]
binaries = ["tokio"]
binaries = ["tokio/rt-multi-thread", "tokio/macros", "http-rpc"]
experimental = []

default = ["std", "http_rpc"]
default = ["std", "http-rpc"]

@@ -41,13 +41,13 @@ fn generators(prefix: &'static str, path: &str) {
    .write_all(
      format!(
        "
pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub(crate) static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
pub fn GENERATORS() -> &'static Generators {{
  GENERATORS_CELL.get_or_init(|| Generators {{
    G: [
    G: vec![
      {G_str}
    ],
    H: [
    H: vec![
      {H_str}
    ],
  }})

@@ -1,6 +1,6 @@
[package]
name = "monero-generators"
version = "0.3.0"
version = "0.4.0"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
@@ -12,17 +12,17 @@ all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }
std-shims = { path = "../../../common/std-shims", version = "^0.1.1", default-features = false }

subtle = { version = "^2.4", default-features = false }

sha3 = { version = "0.10", default-features = false }

curve25519-dalek = { version = "3", default-features = false }
curve25519-dalek = { version = "4", default-features = false, features = ["alloc", "zeroize", "precomputed-tables"] }

group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.4", default-features = false }

[features]
std = ["std-shims/std"]
std = ["std-shims/std", "subtle/std", "sha3/std", "dalek-ff-group/std"]
default = ["std"]

@@ -5,7 +5,7 @@

#![cfg_attr(not(feature = "std"), no_std)]

use std_shims::sync::OnceLock;
use std_shims::{sync::OnceLock, vec::Vec};

use sha3::{Digest, Keccak256};

@@ -56,14 +56,13 @@ const MAX_MN: usize = MAX_M * N;
/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
  pub G: [EdwardsPoint; MAX_MN],
  pub H: [EdwardsPoint; MAX_MN],
  pub G: Vec<EdwardsPoint>,
  pub H: Vec<EdwardsPoint>,
}

/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
  let mut res =
    Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
  let mut res = Generators { G: Vec::with_capacity(MAX_MN), H: Vec::with_capacity(MAX_MN) };
  for i in 0 .. MAX_MN {
    let i = 2 * i;

@@ -73,8 +72,8 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {

    write_varint(&i.try_into().unwrap(), &mut even).unwrap();
    write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
    res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
    res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
    res.H.push(EdwardsPoint(hash_to_point(hash(&even))));
    res.G.push(EdwardsPoint(hash_to_point(hash(&odd))));
  }
  res
}

@@ -1,91 +1,253 @@
use std::sync::Arc;
#[cfg(feature = "binaries")]
mod binaries {
  pub(crate) use std::sync::Arc;

use serde::Deserialize;
use serde_json::json;
  pub(crate) use curve25519_dalek::{
    scalar::Scalar,
    edwards::{CompressedEdwardsY, EdwardsPoint},
  };

use monero_serai::{
  transaction::Transaction,
  block::Block,
  rpc::{Rpc, HttpRpc},
};
  pub(crate) use multiexp::BatchVerifier;

use tokio::task::JoinHandle;
  pub(crate) use serde::Deserialize;
  pub(crate) use serde_json::json;

async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
  let hash = rpc.get_block_hash(block_i).await.expect("couldn't get block {block_i}'s hash");
  pub(crate) use monero_serai::{
    Commitment,
    ringct::RctPrunable,
    transaction::{Input, Transaction},
    block::Block,
    rpc::{RpcError, Rpc, HttpRpc},
  };

  // TODO: Grab the JSON to also check it was deserialized correctly
  #[derive(Deserialize, Debug)]
  struct BlockResponse {
    blob: String,
  }
  let res: BlockResponse = rpc
    .json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) })))
    .await
    .expect("couldn't get block {block} via block.hash()");
  pub(crate) use tokio::task::JoinHandle;

  let blob = hex::decode(res.blob).expect("node returned non-hex block");
  let block = Block::read(&mut blob.as_slice()).expect("couldn't deserialize block {block_i}");
  assert_eq!(block.hash(), hash, "hash differs");
  assert_eq!(block.serialize(), blob, "serialization differs");
  pub(crate) async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
    let hash = loop {
      match rpc.get_block_hash(block_i).await {
        Ok(hash) => break hash,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block_hash ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i}'s hash: {e:?}"),
      }
    };

  let txs_len = 1 + block.txs.len();

  if !block.txs.is_empty() {
    // TODO: Grab the JSON to also check it was deserialized correctly
    #[derive(Deserialize, Debug)]
    struct TransactionResponse {
      tx_hash: String,
      as_hex: String,
    }
    #[derive(Deserialize, Debug)]
    struct TransactionsResponse {
      #[serde(default)]
      missed_tx: Vec<String>,
      txs: Vec<TransactionResponse>,
    struct BlockResponse {
      blob: String,
    }
    let res: BlockResponse = loop {
      match rpc.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await {
        Ok(res) => break res,
        Err(RpcError::ConnectionError(e)) => {
          println!("get_block ConnectionError: {e}");
          continue;
        }
        Err(e) => panic!("couldn't get block {block_i} via block.hash(): {e:?}"),
      }
    };

    let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
    let mut all_txs = vec![];
    while !hashes_hex.is_empty() {
      let txs: TransactionsResponse = rpc
        .rpc_call(
          "get_transactions",
          Some(json!({
            "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
          })),
    let blob = hex::decode(res.blob).expect("node returned non-hex block");
    let block = Block::read(&mut blob.as_slice())
      .unwrap_or_else(|e| panic!("couldn't deserialize block {block_i}: {e}"));
    assert_eq!(block.hash(), hash, "hash differs");
    assert_eq!(block.serialize(), blob, "serialization differs");

    let txs_len = 1 + block.txs.len();

    if !block.txs.is_empty() {
      #[derive(Deserialize, Debug)]
      struct TransactionResponse {
        tx_hash: String,
        as_hex: String,
      }
      #[derive(Deserialize, Debug)]
      struct TransactionsResponse {
        #[serde(default)]
        missed_tx: Vec<String>,
        txs: Vec<TransactionResponse>,
      }

      let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
      let mut all_txs = vec![];
      while !hashes_hex.is_empty() {
        let txs: TransactionsResponse = loop {
          match rpc
            .rpc_call(
              "get_transactions",
              Some(json!({
                "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
              })),
            )
            .await
          {
            Ok(txs) => break txs,
            Err(RpcError::ConnectionError(e)) => {
              println!("get_transactions ConnectionError: {e}");
              continue;
            }
            Err(e) => panic!("couldn't call get_transactions: {e:?}"),
          }
        };
        assert!(txs.missed_tx.is_empty());
        all_txs.extend(txs.txs);
      }
let mut batch = BatchVerifier::new(block.txs.len());
|
||||
for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs) {
|
||||
assert_eq!(
|
||||
tx_res.tx_hash,
|
||||
hex::encode(tx_hash),
|
||||
"node returned a transaction with different hash"
|
||||
);
|
||||
|
||||
let tx = Transaction::read(
|
||||
&mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
|
||||
)
|
||||
.await
|
||||
.expect("couldn't call get_transactions");
|
||||
assert!(txs.missed_tx.is_empty());
|
||||
all_txs.extend(txs.txs);
|
||||
.expect("couldn't deserialize transaction");
|
||||
|
||||
assert_eq!(
|
||||
hex::encode(tx.serialize()),
|
||||
tx_res.as_hex,
|
||||
"Transaction serialization was different"
|
||||
);
|
||||
assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
|
||||
|
||||
if matches!(tx.rct_signatures.prunable, RctPrunable::Null) {
|
||||
assert_eq!(tx.prefix.version, 1);
|
||||
assert!(!tx.signatures.is_empty());
|
||||
continue;
|
||||
}
|
||||
|
||||
      let sig_hash = tx.signature_hash();
      // Verify all proofs we support proving for
      // This matters because our debug_asserts call verify within their proving, and CLSAG
      // multisig explicitly calls verify as part of its signing process
      // Accordingly, this confirms both that our signature_hash algorithm is correct and that
      // the verification functions themselves are valid
      match tx.rct_signatures.prunable {
        RctPrunable::Null |
        RctPrunable::AggregateMlsagBorromean { .. } |
        RctPrunable::MlsagBorromean { .. } => {}
        RctPrunable::MlsagBulletproofs { bulletproofs, .. } => {
          assert!(bulletproofs.batch_verify(
            &mut rand_core::OsRng,
            &mut batch,
            (),
            &tx.rct_signatures.base.commitments
          ));
        }
        RctPrunable::Clsag { bulletproofs, clsags, pseudo_outs } => {
          assert!(bulletproofs.batch_verify(
            &mut rand_core::OsRng,
            &mut batch,
            (),
            &tx.rct_signatures.base.commitments
          ));

          for (i, clsag) in clsags.into_iter().enumerate() {
            let (amount, key_offsets, image) = match &tx.prefix.inputs[i] {
              Input::Gen(_) => panic!("Input::Gen"),
              Input::ToKey { amount, key_offsets, key_image } => (amount, key_offsets, key_image),
            };

            let mut running_sum = 0;
            let mut actual_indexes = vec![];
            for offset in key_offsets {
              running_sum += offset;
              actual_indexes.push(running_sum);
            }

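            // Context for the running sum above (my summary, not the source's): Monero
            // serializes ring member positions as deltas from the prior member, so the absolute
            // global output indexes are recovered by accumulating the offsets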
            async fn get_outs(
              rpc: &Rpc<HttpRpc>,
              amount: u64,
              indexes: &[u64],
            ) -> Vec<[EdwardsPoint; 2]> {
              #[derive(Deserialize, Debug)]
              struct Out {
                key: String,
                mask: String,
              }

              #[derive(Deserialize, Debug)]
              struct Outs {
                outs: Vec<Out>,
              }

              let outs: Outs = loop {
                match rpc
                  .rpc_call(
                    "get_outs",
                    Some(json!({
                      "get_txid": true,
                      "outputs": indexes.iter().map(|o| json!({
                        "amount": amount,
                        "index": o
                      })).collect::<Vec<_>>()
                    })),
                  )
                  .await
                {
                  Ok(outs) => break outs,
                  Err(RpcError::ConnectionError(e)) => {
                    println!("get_outs ConnectionError: {e}");
                    continue;
                  }
                  Err(e) => panic!("couldn't connect to RPC to get outs: {e:?}"),
                }
              };

              let rpc_point = |point: &str| {
                CompressedEdwardsY(
                  hex::decode(point)
                    .expect("invalid hex for ring member")
                    .try_into()
                    .expect("invalid point len for ring member"),
                )
                .decompress()
                .expect("invalid point for ring member")
              };

              outs
                .outs
                .iter()
                .map(|out| {
                  let mask = rpc_point(&out.mask);
                  if amount != 0 {
                    assert_eq!(mask, Commitment::new(Scalar::from(1u8), amount).calculate());
                  }
                  [rpc_point(&out.key), mask]
                })
                .collect()
            }
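            // Note on the assert in get_outs (my summary): outputs with a transparent amount
            // have no explicit commitment, so their effective commitment is 1 * G + amount * H,
            // i.e. a mask of 1 (not 0, which would make the commitment the identity)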
            clsag
              .verify(
                &get_outs(&rpc, amount.unwrap_or(0), &actual_indexes).await,
                image,
                &pseudo_outs[i],
                &sig_hash,
              )
              .unwrap();
          }
        }
      }
    }
    assert!(batch.verify_vartime());
  }

  println!("Deserialized, hashed, and reserialized {block_i} with {} TXs", txs_len);
}

#[cfg(feature = "binaries")]
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
use binaries::*;
|
||||
|
||||
let args = std::env::args().collect::<Vec<String>>();
|
||||
|
||||
// Read start block as the first arg
|
||||
@@ -111,14 +273,15 @@ async fn main() {
  }
  let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };

-  let rpc = |url: String| {
+  let rpc = |url: String| async move {
    HttpRpc::new(url.clone())
+      .await
      .unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
  };
-  let main_rpc = rpc(nodes[0].clone());
+  let main_rpc = rpc(nodes[0].clone()).await;
  let mut rpcs = vec![];
  for i in 0 .. async_parallelism {
-    rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone())));
+    rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone()).await));
  }

  let mut rpc_i = 0;
@@ -153,3 +316,8 @@ async fn main() {
    }
  }
}
+
+#[cfg(not(feature = "binaries"))]
+fn main() {
+  panic!("To run binaries, please build with `--feature binaries`.");
+}

@@ -17,8 +17,8 @@ const EXISTING_BLOCK_HASH_202612: [u8; 32] =

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BlockHeader {
-  pub major_version: u64,
-  pub minor_version: u64,
+  pub major_version: u8,
+  pub minor_version: u8,
  pub timestamp: u64,
  pub previous: [u8; 32],
  pub nonce: u32,
@@ -59,7 +59,7 @@ pub struct Block {

impl Block {
  pub fn number(&self) -> usize {
-    match self.miner_tx.prefix.inputs.get(0) {
+    match self.miner_tx.prefix.inputs.first() {
      Some(Input::Gen(number)) => (*number).try_into().unwrap(),
      _ => panic!("invalid block, miner TX didn't have a Input::Gen"),
    }
@@ -68,7 +68,7 @@ impl Block {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.header.write(w)?;
    self.miner_tx.write(w)?;
-    write_varint(&self.txs.len().try_into().unwrap(), w)?;
+    write_varint(&self.txs.len(), w)?;
    for tx in &self.txs {
      w.write_all(tx)?;
    }
@@ -79,20 +79,27 @@ impl Block {
    merkle_root(self.miner_tx.hash(), &self.txs)
  }

-  fn serialize_hashable(&self) -> Vec<u8> {
+  /// Serialize the block as required for the proof of work hash.
+  ///
+  /// This is distinct from the serialization required for the block hash. To get the block hash,
+  /// use the [`Block::hash`] function.
+  pub fn serialize_hashable(&self) -> Vec<u8> {
    let mut blob = self.header.serialize();
    blob.extend_from_slice(&self.tx_merkle_root());
    write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();
-
-    let mut out = Vec::with_capacity(8 + blob.len());
-    write_varint(&u64::try_from(blob.len()).unwrap(), &mut out).unwrap();
-    out.append(&mut blob);
-
-    out
+    blob
  }

  pub fn hash(&self) -> [u8; 32] {
-    let hash = hash(&self.serialize_hashable());
+    let mut hashable = self.serialize_hashable();
+    // Monero pre-appends a VarInt of the block hashing blob's length before getting the block
+    // hash, but doesn't do this when getting the proof of work hash :)
+    let mut hashing_blob = Vec::with_capacity(8 + hashable.len());
+    write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
+    hashing_blob.append(&mut hashable);
+
+    let hash = hash(&hashing_blob);
    if hash == CORRECT_BLOCK_HASH_202612 {
      return EXISTING_BLOCK_HASH_202612;
    };
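A minimal sketch of the distinction encoded above (the names come from this file; the sketch itself is mine, not part of the commit): the block hash commits to the hashing blob prefixed with its length as a VarInt, while the proof of work hash consumes the blob directly.

  let blob = block.serialize_hashable();
  // Input to the proof of work hash: the blob itself
  let pow_input = blob.clone();
  // Input to the block hash: varint(blob.len()) || blob
  let mut block_hash_input = Vec::with_capacity(8 + blob.len());
  write_varint(&u64::try_from(blob.len()).unwrap(), &mut block_hash_input).unwrap();
  block_hash_input.extend_from_slice(&blob);
  let block_hash = hash(&block_hash_input);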
@@ -110,7 +117,7 @@ impl Block {
    Ok(Block {
      header: BlockHeader::read(r)?,
      miner_tx: Transaction::read(r)?,
-      txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
+      txs: (0_usize .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
    })
  }
}

@@ -23,6 +23,12 @@ mod merkle;
mod serialize;
use serialize::{read_byte, read_u16};

+/// UnreducedScalar struct with functionality for recovering incorrectly reduced scalars.
+mod unreduced_scalar;
+
+/// Ring Signature structs and functionality.
+pub mod ring_signatures;
+
/// RingCT structs and functionality.
pub mod ringct;
use ringct::RctType;
@@ -55,7 +61,13 @@ pub(crate) fn INV_EIGHT() -> Scalar {
|
||||
pub enum Protocol {
|
||||
v14,
|
||||
v16,
|
||||
Custom { ring_len: usize, bp_plus: bool, optimal_rct_type: RctType },
|
||||
Custom {
|
||||
ring_len: usize,
|
||||
bp_plus: bool,
|
||||
optimal_rct_type: RctType,
|
||||
view_tags: bool,
|
||||
v16_fee: bool,
|
||||
},
|
||||
}
|
||||
|
||||
impl Protocol {
|
||||
@@ -88,16 +100,37 @@ impl Protocol {
    }
  }

+  /// Whether or not the specified version uses view tags.
+  pub fn view_tags(&self) -> bool {
+    match self {
+      Protocol::v14 => false,
+      Protocol::v16 => true,
+      Protocol::Custom { view_tags, .. } => *view_tags,
+    }
+  }
+
+  /// Whether or not the specified version uses the fee algorithm from Monero
+  /// hard fork version 16 (released in v18 binaries).
+  pub fn v16_fee(&self) -> bool {
+    match self {
+      Protocol::v14 => false,
+      Protocol::v16 => true,
+      Protocol::Custom { v16_fee, .. } => *v16_fee,
+    }
+  }
+
pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
match self {
|
||||
Protocol::v14 => w.write_all(&[0, 14]),
|
||||
Protocol::v16 => w.write_all(&[0, 16]),
|
||||
Protocol::Custom { ring_len, bp_plus, optimal_rct_type } => {
|
||||
Protocol::Custom { ring_len, bp_plus, optimal_rct_type, view_tags, v16_fee } => {
|
||||
// Custom, version 0
|
||||
w.write_all(&[1, 0])?;
|
||||
w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
|
||||
w.write_all(&[u8::from(*bp_plus)])?;
|
||||
w.write_all(&[optimal_rct_type.to_byte()])
|
||||
w.write_all(&[optimal_rct_type.to_byte()])?;
|
||||
w.write_all(&[u8::from(*view_tags)])?;
|
||||
w.write_all(&[u8::from(*v16_fee)])
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -108,7 +141,7 @@ impl Protocol {
      0 => match read_byte(r)? {
        14 => Protocol::v14,
        16 => Protocol::v16,
-        _ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized monero protocol"))?,
+        _ => Err(io::Error::other("unrecognized monero protocol"))?,
      },
      // Custom
      1 => match read_byte(r)? {
@@ -117,32 +150,46 @@ impl Protocol {
        bp_plus: match read_byte(r)? {
          0 => false,
          1 => true,
-          _ => Err(io::Error::new(io::ErrorKind::Other, "invalid bool serialization"))?,
+          _ => Err(io::Error::other("invalid bool serialization"))?,
        },
        optimal_rct_type: RctType::from_byte(read_byte(r)?)
-          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RctType serialization"))?,
+          .ok_or_else(|| io::Error::other("invalid RctType serialization"))?,
+        view_tags: match read_byte(r)? {
+          0 => false,
+          1 => true,
+          _ => Err(io::Error::other("invalid bool serialization"))?,
+        },
+        v16_fee: match read_byte(r)? {
+          0 => false,
+          1 => true,
+          _ => Err(io::Error::other("invalid bool serialization"))?,
+        },
      },
-      _ => {
-        Err(io::Error::new(io::ErrorKind::Other, "unrecognized custom protocol serialization"))?
-      }
+      _ => Err(io::Error::other("unrecognized custom protocol serialization"))?,
    },
-    _ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized protocol serialization"))?,
+    _ => Err(io::Error::other("unrecognized protocol serialization"))?,
    })
  }
}

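The three identical bool matches above could be folded into a helper; a sketch under the same error convention (`read_bool` is hypothetical, not part of this crate):

  fn read_bool<R: io::Read>(r: &mut R) -> io::Result<bool> {
    match read_byte(r)? {
      0 => Ok(false),
      1 => Ok(true),
      _ => Err(io::Error::other("invalid bool serialization")),
    }
  }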
/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
-#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
+#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
  pub mask: Scalar,
  pub amount: u64,
}

+impl core::fmt::Debug for Commitment {
+  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
+    fmt.debug_struct("Commitment").field("amount", &self.amount).finish_non_exhaustive()
+  }
+}

impl Commitment {
-  /// The zero commitment, defined as a mask of 1 (as to not be the identity) and a 0 amount.
+  /// A commitment to zero, defined with a mask of 1 (as to not be the identity).
  pub fn zero() -> Commitment {
-    Commitment { mask: Scalar::one(), amount: 0 }
+    Commitment { mask: Scalar::ONE, amount: 0 }
  }

  pub fn new(mask: Scalar, amount: u64) -> Commitment {
@@ -151,7 +198,7 @@ impl Commitment {

  /// Calculate a Pedersen commitment, as a point, from the transparent structure.
  pub fn calculate(&self) -> EdwardsPoint {
-    (&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
+    (&self.mask * ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
  }
}

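In math terms (my notation): with G the Ed25519 basepoint and H Monero's second generator, the line above computes the Pedersen commitment

  C = x G + a H

for mask x and amount a; hiding comes from the random mask, binding from the unknown discrete log relation between G and H.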
@@ -173,6 +220,6 @@ pub fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||
// This library acknowledges its practical impossibility of it occurring, and doesn't bother to
|
||||
// code in logic to handle it. That said, if it ever occurs, something must happen in order to
|
||||
// not generate/verify a proof we believe to be valid when it isn't
|
||||
assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
|
||||
assert!(scalar != Scalar::ZERO, "ZERO HASH: {data:?}");
|
||||
scalar
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ use std_shims::vec::Vec;

use crate::hash;

-pub fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
+pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {
  match leafs.len() {
    0 => root,
    1 => hash(&[root, leafs[0]].concat()),
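// As used from Block::tx_merkle_root above (my note, not the source's): `root` is the miner
// transaction's hash and `leafs` are the remaining transaction hashes, so a block with no other
// transactions has the miner transaction's hash as its merkle root.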
coins/monero/src/ring_signatures.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
use std_shims::{
  io::{self, *},
  vec::Vec,
};

use zeroize::Zeroize;

use curve25519_dalek::{EdwardsPoint, Scalar};

use monero_generators::hash_to_point;

use crate::{serialize::*, hash_to_scalar};

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Signature {
  c: Scalar,
  r: Scalar,
}

impl Signature {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_scalar(&self.c, w)?;
    write_scalar(&self.r, w)?;
    Ok(())
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Signature> {
    Ok(Signature { c: read_scalar(r)?, r: read_scalar(r)? })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct RingSignature {
  sigs: Vec<Signature>,
}

impl RingSignature {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for sig in &self.sigs {
      sig.write(w)?;
    }
    Ok(())
  }

  pub fn read<R: Read>(members: usize, r: &mut R) -> io::Result<RingSignature> {
    Ok(RingSignature { sigs: read_raw_vec(Signature::read, members, r)? })
  }

  pub fn verify(&self, msg: &[u8; 32], ring: &[EdwardsPoint], key_image: &EdwardsPoint) -> bool {
    if ring.len() != self.sigs.len() {
      return false;
    }

    let mut buf = Vec::with_capacity(32 + (32 * 2 * ring.len()));
    buf.extend_from_slice(msg);

    let mut sum = Scalar::ZERO;

    for (ring_member, sig) in ring.iter().zip(&self.sigs) {
      #[allow(non_snake_case)]
      let Li = EdwardsPoint::vartime_double_scalar_mul_basepoint(&sig.c, ring_member, &sig.r);
      buf.extend_from_slice(Li.compress().as_bytes());
      #[allow(non_snake_case)]
      let Ri = (sig.r * hash_to_point(ring_member.compress().to_bytes())) + (sig.c * key_image);
      buf.extend_from_slice(Ri.compress().as_bytes());

      sum += sig.c;
    }

    sum == hash_to_scalar(&buf)
  }
}
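The check above is the classic CryptoNote ring signature equation. As a sketch (notation mine, not the source's): with basepoint G, ring members P_i, and key image I, each scalar pair (c_i, r_i) reconstructs

  L_i = r_i G + c_i P_i,    R_i = r_i H_p(P_i) + c_i I

and the signature is valid iff \sum_i c_i = H_s(m || L_1 || R_1 || ... || L_n || R_n), which is exactly what the buffer accumulation and final comparison compute.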
@@ -1,73 +1,63 @@
use core::fmt::Debug;
use std_shims::io::{self, Read, Write};

-use curve25519_dalek::edwards::EdwardsPoint;
-#[cfg(feature = "experimental")]
-use curve25519_dalek::{traits::Identity, scalar::Scalar};
+use curve25519_dalek::{traits::Identity, Scalar, EdwardsPoint};

-#[cfg(feature = "experimental")]
use monero_generators::H_pow_2;
-#[cfg(feature = "experimental")]
-use crate::hash_to_scalar;
-use crate::serialize::*;
+
+use crate::{hash_to_scalar, unreduced_scalar::UnreducedScalar, serialize::*};

/// 64 Borromean ring signatures.
///
-/// This type keeps the data as raw bytes as Monero has some transactions with unreduced scalars in
-/// this field. While we could use `from_bytes_mod_order`, we'd then not be able to encode this
-/// back into it's original form.
-///
-/// Those scalars also have a custom reduction algorithm...
+/// s0 and s1 are stored as `UnreducedScalar`s due to Monero not requiring they were reduced.
+/// `UnreducedScalar` preserves their original byte encoding and implements a custom reduction
+/// algorithm which was in use.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanSignatures {
-  pub s0: [[u8; 32]; 64],
-  pub s1: [[u8; 32]; 64],
-  pub ee: [u8; 32],
+  pub s0: [UnreducedScalar; 64],
+  pub s1: [UnreducedScalar; 64],
+  pub ee: Scalar,
}

impl BorromeanSignatures {
  pub fn read<R: Read>(r: &mut R) -> io::Result<BorromeanSignatures> {
    Ok(BorromeanSignatures {
-      s0: read_array(read_bytes, r)?,
-      s1: read_array(read_bytes, r)?,
-      ee: read_bytes(r)?,
+      s0: read_array(UnreducedScalar::read, r)?,
+      s1: read_array(UnreducedScalar::read, r)?,
+      ee: read_scalar(r)?,
    })
  }

pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
|
||||
for s0 in self.s0.iter() {
|
||||
w.write_all(s0)?;
|
||||
for s0 in &self.s0 {
|
||||
s0.write(w)?;
|
||||
}
|
||||
for s1 in self.s1.iter() {
|
||||
w.write_all(s1)?;
|
||||
for s1 in &self.s1 {
|
||||
s1.write(w)?;
|
||||
}
|
||||
w.write_all(&self.ee)
|
||||
write_scalar(&self.ee, w)
|
||||
}
|
||||
|
||||
#[cfg(feature = "experimental")]
|
||||
fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
|
||||
let mut transcript = [0; 2048];
|
||||
|
||||
for i in 0 .. 64 {
|
||||
// TODO: These aren't the correct reduction
|
||||
// TODO: Can either of these be tightened?
|
||||
#[allow(non_snake_case)]
|
||||
let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||
&Scalar::from_bytes_mod_order(self.ee),
|
||||
&self.ee,
|
||||
&keys_a[i],
|
||||
&Scalar::from_bytes_mod_order(self.s0[i]),
|
||||
&self.s0[i].recover_monero_slide_scalar(),
|
||||
);
|
||||
#[allow(non_snake_case)]
|
||||
let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
|
||||
&hash_to_scalar(LL.compress().as_bytes()),
|
||||
&keys_b[i],
|
||||
&Scalar::from_bytes_mod_order(self.s1[i]),
|
||||
&self.s1[i].recover_monero_slide_scalar(),
|
||||
);
|
||||
transcript[i .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
||||
transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
|
||||
}
|
||||
|
||||
// TODO: This isn't the correct reduction
|
||||
// TODO: Can this be tightened to from_canonical_bytes?
|
||||
hash_to_scalar(&transcript) == Scalar::from_bytes_mod_order(self.ee)
|
||||
hash_to_scalar(&transcript) == self.ee
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,7 +80,6 @@ impl BorromeanRange {
    write_raw_vec(write_point, &self.bit_commitments, w)
  }

-  #[cfg(feature = "experimental")]
  pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
    if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
      return false;

@@ -50,7 +50,7 @@ pub(crate) fn vector_exponent(

pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
  let slice =
-    &[cache.to_bytes().as_ref(), mash.iter().cloned().flatten().collect::<Vec<_>>().as_ref()]
+    &[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
      .concat();
  *cache = hash_to_scalar(slice);
  *cache
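// As context (my phrasing, not the source's): hash_cache chains the Fiat-Shamir transcript.
// Each call sets the running cache to H(previous challenge || new transcript data) and returns
// it, so every challenge binds all prior ones.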
@@ -118,9 +118,9 @@ pub(crate) fn LR_statements(
  let mut res = a
    .0
    .iter()
-    .cloned()
-    .zip(G_i.iter().cloned())
-    .chain(b.0.iter().cloned().zip(H_i.iter().cloned()))
+    .copied()
+    .zip(G_i.iter().copied())
+    .chain(b.0.iter().copied().zip(H_i.iter().copied()))
    .collect::<Vec<_>>();
  res.push((cL, U));
  res

@@ -19,12 +19,10 @@ pub(crate) mod core;
use self::core::LOG_N;

pub(crate) mod original;
-pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
-pub(crate) mod plus;
-pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;
-
-pub(crate) use self::original::OriginalStruct;
-pub(crate) use self::plus::PlusStruct;
+use self::original::OriginalStruct;
+
+pub(crate) mod plus;
+use self::plus::*;

pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;

@@ -33,28 +31,45 @@ pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
  Original(OriginalStruct),
-  Plus(PlusStruct),
+  Plus(AggregateRangeProof),
}

impl Bulletproofs {
-  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
-    let fields = if plus { 6 } else { 9 };
+  fn bp_fields(plus: bool) -> usize {
+    if plus {
+      6
+    } else {
+      9
+    }
+  }

+  // TODO: Shouldn't this use u32/u64?
+  // https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
+  // src/cryptonote_basic/cryptonote_format_utils.cpp#L106-L124
+  pub(crate) fn calculate_bp_clawback(plus: bool, n_outputs: usize) -> (usize, usize) {
    #[allow(non_snake_case)]
-    let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
-    let padded_outputs = 1 << LR_len;
+    let mut LR_len = 0;
+    let mut n_padded_outputs = 1;
+    while n_padded_outputs < n_outputs {
+      LR_len += 1;
+      n_padded_outputs = 1 << LR_len;
+    }
    LR_len += LOG_N;

-    let len = (fields + (2 * LR_len)) * 32;
-    len +
-      if padded_outputs <= 2 {
-        0
-      } else {
-        let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
-        let size = (fields + (2 * LR_len)) * 32;
-        ((base * padded_outputs) - size) * 4 / 5
-      }
+    let mut bp_clawback = 0;
+    if n_padded_outputs > 2 {
+      let fields = Bulletproofs::bp_fields(plus);
+      let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
+      let size = (fields + (2 * LR_len)) * 32;
+      bp_clawback = ((base * n_padded_outputs) - size) * 4 / 5;
+    }
+
+    (bp_clawback, LR_len)
  }
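  // Worked example for the clawback above (my numbers, derived from the code, assuming LOG_N = 6
  // for 64-bit ranges): plus = true gives 6 fields; n_outputs = 3 pads to n_padded_outputs = 4,
  // so LR_len = 2 + 6 = 8. Then size = (6 + 16) * 32 = 704, base = ((6 + 14) * 32) / 2 = 320,
  // and bp_clawback = ((320 * 4) - 704) * 4 / 5 = 460.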
+  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
+    #[allow(non_snake_case)]
+    let (bp_clawback, LR_len) = Bulletproofs::calculate_bp_clawback(plus, outputs);
+    32 * (Bulletproofs::bp_fields(plus) + (2 * LR_len)) + 2 + bp_clawback
+  }

  /// Prove the list of commitments are within [0 .. 2^64).
@@ -63,13 +78,22 @@ impl Bulletproofs {
outputs: &[Commitment],
|
||||
plus: bool,
|
||||
) -> Result<Bulletproofs, TransactionError> {
|
||||
if outputs.is_empty() {
|
||||
Err(TransactionError::NoOutputs)?;
|
||||
}
|
||||
if outputs.len() > MAX_OUTPUTS {
|
||||
return Err(TransactionError::TooManyOutputs)?;
|
||||
Err(TransactionError::TooManyOutputs)?;
|
||||
}
|
||||
Ok(if !plus {
|
||||
Bulletproofs::Original(OriginalStruct::prove(rng, outputs))
|
||||
} else {
|
||||
Bulletproofs::Plus(PlusStruct::prove(rng, outputs))
|
||||
use dalek_ff_group::EdwardsPoint as DfgPoint;
|
||||
Bulletproofs::Plus(
|
||||
AggregateRangeStatement::new(outputs.iter().map(|com| DfgPoint(com.calculate())).collect())
|
||||
.unwrap()
|
||||
.prove(rng, AggregateRangeWitness::new(outputs).unwrap())
|
||||
.unwrap(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -78,7 +102,22 @@ impl Bulletproofs {
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.verify(rng, commitments),
-      Bulletproofs::Plus(bp) => bp.verify(rng, commitments),
+      Bulletproofs::Plus(bp) => {
+        let mut verifier = BatchVerifier::new(1);
+        // If this commitment is torsioned (which is allowed), this won't be a well-formed
+        // dfg::EdwardsPoint (expected to be of prime-order)
+        // The actual BP+ impl will perform a torsion clear though, making this safe
+        // TODO: Have AggregateRangeStatement take in dalek EdwardsPoint for clarity on this
+        let Some(statement) = AggregateRangeStatement::new(
+          commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
+        ) else {
+          return false;
+        };
+        if !statement.verify(rng, &mut verifier, (), bp.clone()) {
+          return false;
+        }
+        verifier.verify_vartime()
+      }
    }
  }

@@ -95,7 +134,14 @@ impl Bulletproofs {
  ) -> bool {
    match self {
      Bulletproofs::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
-      Bulletproofs::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
+      Bulletproofs::Plus(bp) => {
+        let Some(statement) = AggregateRangeStatement::new(
+          commitments.iter().map(|c| dalek_ff_group::EdwardsPoint(*c)).collect(),
+        ) else {
+          return false;
+        };
+        statement.verify(rng, verifier, id, bp.clone())
+      }
    }
  }

@@ -120,14 +166,14 @@ impl Bulletproofs {
      }

      Bulletproofs::Plus(bp) => {
-        write_point(&bp.A, w)?;
-        write_point(&bp.A1, w)?;
-        write_point(&bp.B, w)?;
-        write_scalar(&bp.r1, w)?;
-        write_scalar(&bp.s1, w)?;
-        write_scalar(&bp.d1, w)?;
-        specific_write_vec(&bp.L, w)?;
-        specific_write_vec(&bp.R, w)
+        write_point(&bp.A.0, w)?;
+        write_point(&bp.wip.A.0, w)?;
+        write_point(&bp.wip.B.0, w)?;
+        write_scalar(&bp.wip.r_answer.0, w)?;
+        write_scalar(&bp.wip.s_answer.0, w)?;
+        write_scalar(&bp.wip.delta_answer.0, w)?;
+        specific_write_vec(&bp.wip.L.iter().cloned().map(|L| L.0).collect::<Vec<_>>(), w)?;
+        specific_write_vec(&bp.wip.R.iter().cloned().map(|R| R.0).collect::<Vec<_>>(), w)
      }
    }
  }
@@ -165,15 +211,19 @@ impl Bulletproofs {

  /// Read Bulletproofs+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Bulletproofs> {
-    Ok(Bulletproofs::Plus(PlusStruct {
-      A: read_point(r)?,
-      A1: read_point(r)?,
-      B: read_point(r)?,
-      r1: read_scalar(r)?,
-      s1: read_scalar(r)?,
-      d1: read_scalar(r)?,
-      L: read_vec(read_point, r)?,
-      R: read_vec(read_point, r)?,
-    }))
+    use dalek_ff_group::{Scalar as DfgScalar, EdwardsPoint as DfgPoint};
+
+    Ok(Bulletproofs::Plus(AggregateRangeProof {
+      A: DfgPoint(read_point(r)?),
+      wip: WipProof {
+        A: DfgPoint(read_point(r)?),
+        B: DfgPoint(read_point(r)?),
+        r_answer: DfgScalar(read_scalar(r)?),
+        s_answer: DfgScalar(read_scalar(r)?),
+        delta_answer: DfgScalar(read_scalar(r)?),
+        L: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
+        R: read_vec(read_point, r)?.into_iter().map(DfgPoint).collect(),
+      },
+    }))
  }
}

@@ -190,7 +190,7 @@ impl OriginalStruct {
    }

    // Rebuild all challenges
-    let (mut cache, commitments) = hash_commitments(commitments.iter().cloned());
+    let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);

    let z = hash_to_scalar(&y.to_bytes());

@@ -1,312 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{
  Commitment, hash,
  ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
};

include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));

static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
  *TRANSCRIPT_CELL.get_or_init(|| {
    EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
  })
}

// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
  let (cache, commitments) = hash_commitments(commitments);
  (hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
}

// d[j*N+i] = z**(2*(j+1)) * 2**i
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
  let zpow = ScalarVector::even_powers(z, 2 * M);
  let mut d = vec![Scalar::ZERO; MN];
  for j in 0 .. M {
    for i in 0 .. N {
      d[(j * N) + i] = zpow[j] * TWO_N()[i];
    }
  }
  (zpow, ScalarVector(d))
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PlusStruct {
  pub(crate) A: DalekPoint,
  pub(crate) A1: DalekPoint,
  pub(crate) B: DalekPoint,
  pub(crate) r1: DalekScalar,
  pub(crate) s1: DalekScalar,
  pub(crate) d1: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
}

impl PlusStruct {
  pub(crate) fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    commitments: &[Commitment],
  ) -> PlusStruct {
    let generators = GENERATORS();

    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_plus(commitments_points.clone());
    let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let (zpow, d) = d(z, M, MN);

    let aL1 = aL - z;

    let ypow = ScalarVector::powers(y, MN + 2);
    let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
    y_for_d.0.reverse();
    let aR1 = (aR + z) + (y_for_d * d);

    for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      alpha1 += zpow[j] * ypow[MN + 1] * gamma;
    }

    let mut a = aL1;
    let mut b = aR1;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = generators.G[.. a.len()].to_vec();
    let mut H_proof = generators.H[.. a.len()].to_vec();

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = weighted_inner_product(&aL, &bR, y);
      let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);

      let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
      L_i.push((dL, G));
      let L_i = prove_multiexp(&L_i);
      L.push(L_i);

      let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
      R_i.push((dR, G));
      let R_i = prove_multiexp(&R_i);
      R.push(R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
      H_proof = hadamard_fold(H_L, H_R, w, winv);

      a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
      b = (bL * winv) + (bR * w);

      alpha1 += (dL * (w * w)) + (dR * (winv * winv));

      dL.zeroize();
      dR.zeroize();
    }

    let mut r = Scalar::random(&mut *rng);
    let mut s = Scalar::random(&mut *rng);
    let mut d = Scalar::random(&mut *rng);
    let mut eta = Scalar::random(&mut *rng);

    let A1 = prove_multiexp(&[
      (r, G_proof[0]),
      (s, H_proof[0]),
      (d, G),
      ((r * y * b[0]) + (s * y * a[0]), H()),
    ]);
    let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
    let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);

    let r1 = (a[0] * e) + r;
    r.zeroize();
    let s1 = (b[0] * e) + s;
    s.zeroize();
    let d1 = ((d * e) + eta) + (alpha1 * (e * e));
    d.zeroize();
    eta.zeroize();
    alpha1.zeroize();

    let res = PlusStruct {
      A: *A,
      A1: *A1,
      B: *B,
      r1: *r1,
      s1: *s1,
      d1: *d1,
      L: L.drain(..).map(|L| *L).collect(),
      R: R.drain(..).map(|R| *R).collect(),
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_plus(commitments.iter().cloned());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
    let yinv = y.invert().unwrap();
    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let A = normalize(&self.A);
    let A1 = normalize(&self.A1);
    let B = normalize(&self.B);

    let mut commitments = commitments.iter().map(|c| c.mul_by_cofactor()).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));

    let mut yMN = y;
    for _ in 0 .. logMN {
      yMN *= yMN;
    }
    let yMNy = yMN * y;

    let (zpow, d) = d(z, M, MN);
    let zsq = zpow[0];

    let esq = e * e;
    let minus_esq = -esq;
    let commitment_weight = minus_esq * yMNy;
    for (i, commitment) in commitments.drain(..).enumerate() {
      proof.push((commitment_weight * zpow[i], commitment));
    }

    // Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
    // to a single addition under vartime for the first BP verified in the batch, which is expected
    // to be much more significant
    proof.push((Scalar::ONE, -B));
    proof.push((-e, A1));
    proof.push((minus_esq, A));
    proof.push((Scalar(self.d1), G));

    let d_sum = zpow.sum() * Scalar::from(u64::MAX);
    let y_sum = weighted_powers(y, MN).sum();
    proof.push((
      Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
      H(),
    ));

    let w_cache = challenge_products(&w, &winv);

    let mut e_r1_y = e * Scalar(self.r1);
    let e_s1 = e * Scalar(self.s1);
    let esq_z = esq * z;
    let minus_esq_z = -esq_z;
    let mut minus_esq_y = minus_esq * yMN;

    let generators = GENERATORS();
    for i in 0 .. MN {
      proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
      proof.push((
        (e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
        generators.H[i],
      ));

      e_r1_y *= yinv;
      minus_esq_y *= yinv;
    }

    for i in 0 .. logMN {
      proof.push((minus_esq * w[i] * w[i], L[i]));
      proof.push((minus_esq * winv[i] * winv[i], R[i]));
    }

    verifier.queue(rng, id, proof);
    true
  }

  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
@@ -0,0 +1,249 @@
use std_shims::vec::Vec;

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
use group::{
  ff::{Field, PrimeField},
  Group, GroupEncoding,
};
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::{
  Commitment,
  ringct::{
    bulletproofs::core::{MAX_M, N},
    bulletproofs::plus::{
      ScalarVector, PointVector, GeneratorsList, Generators,
      transcript::*,
      weighted_inner_product::{WipStatement, WipWitness, WipProof},
      padded_pow_of_2, u64_decompose,
    },
  },
};

// Figure 3
#[derive(Clone, Debug)]
pub(crate) struct AggregateRangeStatement {
  generators: Generators,
  V: Vec<EdwardsPoint>,
}

impl Zeroize for AggregateRangeStatement {
  fn zeroize(&mut self) {
    self.V.zeroize();
  }
}

#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AggregateRangeWitness {
  values: Vec<u64>,
  gammas: Vec<Scalar>,
}

impl AggregateRangeWitness {
  pub(crate) fn new(commitments: &[Commitment]) -> Option<Self> {
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return None;
    }

    let mut values = Vec::with_capacity(commitments.len());
    let mut gammas = Vec::with_capacity(commitments.len());
    for commitment in commitments {
      values.push(commitment.amount);
      gammas.push(Scalar(commitment.mask));
    }
    Some(AggregateRangeWitness { values, gammas })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct AggregateRangeProof {
  pub(crate) A: EdwardsPoint,
  pub(crate) wip: WipProof,
}

impl AggregateRangeStatement {
  pub(crate) fn new(V: Vec<EdwardsPoint>) -> Option<Self> {
    if V.is_empty() || (V.len() > MAX_M) {
      return None;
    }

    Some(Self { generators: Generators::new(), V })
  }

  fn transcript_A(transcript: &mut Scalar, A: EdwardsPoint) -> (Scalar, Scalar) {
    let y = hash_to_scalar(&[transcript.to_repr().as_ref(), A.to_bytes().as_ref()].concat());
    let z = hash_to_scalar(y.to_bytes().as_ref());
    *transcript = z;
    (y, z)
  }

  fn d_j(j: usize, m: usize) -> ScalarVector {
    let mut d_j = Vec::with_capacity(m * N);
    for _ in 0 .. (j - 1) * N {
      d_j.push(Scalar::ZERO);
    }
    d_j.append(&mut ScalarVector::powers(Scalar::from(2u8), N).0);
    for _ in 0 .. (m - j) * N {
      d_j.push(Scalar::ZERO);
    }
    ScalarVector(d_j)
  }
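  // As a note (my summary, not the source's): d_j drops the powers-of-two vector
  // (1, 2, ..., 2^(N - 1)) into the j-th of m length-N slots, with zeroes elsewhere. Summing
  // z^(2j) * d_j over j, as compute_A_hat does below, yields the aggregated d vector.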
  fn compute_A_hat(
    mut V: PointVector,
    generators: &Generators,
    transcript: &mut Scalar,
    mut A: EdwardsPoint,
  ) -> (Scalar, ScalarVector, Scalar, Scalar, ScalarVector, EdwardsPoint) {
    let (y, z) = Self::transcript_A(transcript, A);
    A = A.mul_by_cofactor();

    while V.len() < padded_pow_of_2(V.len()) {
      V.0.push(EdwardsPoint::identity());
    }
    let mn = V.len() * N;

    let mut z_pow = Vec::with_capacity(V.len());

    let mut d = ScalarVector::new(mn);
    for j in 1 ..= V.len() {
      z_pow.push(z.pow(Scalar::from(2 * u64::try_from(j).unwrap()))); // TODO: Optimize this
      d = d.add_vec(&Self::d_j(j, V.len()).mul(z_pow[j - 1]));
    }

    let mut ascending_y = ScalarVector(vec![y]);
    for i in 1 .. d.len() {
      ascending_y.0.push(ascending_y[i - 1] * y);
    }
    let y_pows = ascending_y.clone().sum();

    let mut descending_y = ascending_y.clone();
    descending_y.0.reverse();

    let d_descending_y = d.mul_vec(&descending_y);

    let y_mn_plus_one = descending_y[0] * y;

    let mut commitment_accum = EdwardsPoint::identity();
    for (j, commitment) in V.0.iter().enumerate() {
      commitment_accum += *commitment * z_pow[j];
    }

    let neg_z = -z;
    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 2);
    for (i, d_y_z) in d_descending_y.add(z).0.drain(..).enumerate() {
      A_terms.push((neg_z, generators.generator(GeneratorsList::GBold1, i)));
      A_terms.push((d_y_z, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((y_mn_plus_one, commitment_accum));
    A_terms.push((
      ((y_pows * z) - (d.sum() * y_mn_plus_one * z) - (y_pows * z.square())),
      generators.g(),
    ));

    (y, d_descending_y, y_mn_plus_one, z, ScalarVector(z_pow), A + multiexp_vartime(&A_terms))
  }

  pub(crate) fn prove<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    witness: AggregateRangeWitness,
  ) -> Option<AggregateRangeProof> {
    // Check for consistency with the witness
    if self.V.len() != witness.values.len() {
      return None;
    }
    for (commitment, (value, gamma)) in
      self.V.iter().zip(witness.values.iter().zip(witness.gammas.iter()))
    {
      if Commitment::new(**gamma, *value).calculate() != **commitment {
        return None;
      }
    }

    let Self { generators, V } = self;
    // Monero expects all of these points to be torsion-free
    // Generally, for Bulletproofs, it sends points * INV_EIGHT and then performs a torsion clear
    // by multiplying by 8
    // This also restores the original value due to the preprocessing
    // Commitments aren't transmitted INV_EIGHT though, so this multiplies by INV_EIGHT to enable
    // clearing its cofactor without mutating the value
    // For some reason, these values are transcripted * INV_EIGHT, not as transmitted
    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    // Pad V
    while V.len() < padded_pow_of_2(V.len()) {
      V.push(EdwardsPoint::identity());
    }

    let generators = generators.reduce(V.len() * N);

    let mut d_js = Vec::with_capacity(V.len());
    let mut a_l = ScalarVector(Vec::with_capacity(V.len() * N));
    for j in 1 ..= V.len() {
      d_js.push(Self::d_j(j, V.len()));
      a_l.0.append(&mut u64_decompose(*witness.values.get(j - 1).unwrap_or(&0)).0);
    }

    let a_r = a_l.sub(Scalar::ONE);

    let alpha = Scalar::random(&mut *rng);

    let mut A_terms = Vec::with_capacity((generators.len() * 2) + 1);
    for (i, a_l) in a_l.0.iter().enumerate() {
      A_terms.push((*a_l, generators.generator(GeneratorsList::GBold1, i)));
    }
    for (i, a_r) in a_r.0.iter().enumerate() {
      A_terms.push((*a_r, generators.generator(GeneratorsList::HBold1, i)));
    }
    A_terms.push((alpha, generators.h()));
    let mut A = multiexp(&A_terms);
    A_terms.zeroize();

    // Multiply by INV_EIGHT per earlier commentary
    A.0 *= crate::INV_EIGHT();

    let (y, d_descending_y, y_mn_plus_one, z, z_pow, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, A);

    let a_l = a_l.sub(z);
    let a_r = a_r.add_vec(&d_descending_y).add(z);
    let mut alpha = alpha;
    for j in 1 ..= witness.gammas.len() {
      alpha += z_pow[j - 1] * witness.gammas[j - 1] * y_mn_plus_one;
    }

    Some(AggregateRangeProof {
      A,
      wip: WipStatement::new(generators, A_hat, y)
        .prove(rng, transcript, WipWitness::new(a_l, a_r, alpha).unwrap())
        .unwrap(),
    })
  }

  pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    verifier: &mut BatchVerifier<Id, EdwardsPoint>,
    id: Id,
    proof: AggregateRangeProof,
  ) -> bool {
    let Self { generators, V } = self;

    let mut V = V.into_iter().map(|V| EdwardsPoint(V.0 * crate::INV_EIGHT())).collect::<Vec<_>>();
    let mut transcript = initial_transcript(V.iter());
    V.iter_mut().for_each(|V| *V = V.mul_by_cofactor());

    let generators = generators.reduce(V.len() * N);

    let (y, _, _, _, _, A_hat) =
      Self::compute_A_hat(PointVector(V), &generators, &mut transcript, proof.A);
    WipStatement::new(generators, A_hat, y).verify(rng, verifier, id, transcript, proof.wip)
  }
}
coins/monero/src/ringct/bulletproofs/plus/mod.rs (new file, 92 lines)
@@ -0,0 +1,92 @@
#![allow(non_snake_case)]

use group::Group;
use dalek_ff_group::{Scalar, EdwardsPoint};

mod scalar_vector;
pub(crate) use scalar_vector::{ScalarVector, weighted_inner_product};
mod point_vector;
pub(crate) use point_vector::PointVector;

pub(crate) mod transcript;
pub(crate) mod weighted_inner_product;
pub(crate) use weighted_inner_product::*;
pub(crate) mod aggregate_range_proof;
pub(crate) use aggregate_range_proof::*;

pub(crate) fn padded_pow_of_2(i: usize) -> usize {
  let mut next_pow_of_2 = 1;
  while next_pow_of_2 < i {
    next_pow_of_2 <<= 1;
  }
  next_pow_of_2
}
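// Examples, derived from the loop above: padded_pow_of_2(0) == 1, padded_pow_of_2(1) == 1,
// padded_pow_of_2(3) == 4, and padded_pow_of_2(8) == 8 (exact powers of two are unchanged).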
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub(crate) enum GeneratorsList {
  GBold1,
  HBold1,
}

// TODO: Table these
#[derive(Clone, Debug)]
pub(crate) struct Generators {
  g: EdwardsPoint,

  g_bold1: &'static [EdwardsPoint],
  h_bold1: &'static [EdwardsPoint],
}

mod generators {
  use std_shims::sync::OnceLock;
  use monero_generators::Generators;
  include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));
}

impl Generators {
  #[allow(clippy::new_without_default)]
  pub(crate) fn new() -> Self {
    let gens = generators::GENERATORS();
    Generators { g: dalek_ff_group::EdwardsPoint(crate::H()), g_bold1: &gens.G, h_bold1: &gens.H }
  }

  pub(crate) fn len(&self) -> usize {
    self.g_bold1.len()
  }

  pub(crate) fn g(&self) -> EdwardsPoint {
    self.g
  }

  pub(crate) fn h(&self) -> EdwardsPoint {
    EdwardsPoint::generator()
  }

  pub(crate) fn generator(&self, list: GeneratorsList, i: usize) -> EdwardsPoint {
    match list {
      GeneratorsList::GBold1 => self.g_bold1[i],
      GeneratorsList::HBold1 => self.h_bold1[i],
    }
  }

  pub(crate) fn reduce(&self, generators: usize) -> Self {
    // Round to the nearest power of 2
    let generators = padded_pow_of_2(generators);
    assert!(generators <= self.g_bold1.len());

    Generators {
      g: self.g,
      g_bold1: &self.g_bold1[.. generators],
      h_bold1: &self.h_bold1[.. generators],
    }
  }
}

// Returns the little-endian decomposition.
fn u64_decompose(value: u64) -> ScalarVector {
  let mut bits = ScalarVector::new(64);
  for bit in 0 .. 64 {
    bits[bit] = Scalar::from((value >> bit) & 1);
  }
  bits
}
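// For instance, u64_decompose(5) yields [1, 0, 1, 0, ..., 0] (little-endian), as 5 = 2^0 + 2^2.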
coins/monero/src/ringct/bulletproofs/plus/point_vector.rs (new file, 50 lines)
@@ -0,0 +1,50 @@
use core::ops::{Index, IndexMut};
use std_shims::vec::Vec;

use zeroize::{Zeroize, ZeroizeOnDrop};

use dalek_ff_group::EdwardsPoint;

#[cfg(test)]
use multiexp::multiexp;
#[cfg(test)]
use crate::ringct::bulletproofs::plus::ScalarVector;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct PointVector(pub(crate) Vec<EdwardsPoint>);

impl Index<usize> for PointVector {
  type Output = EdwardsPoint;
  fn index(&self, index: usize) -> &EdwardsPoint {
    &self.0[index]
  }
}

impl IndexMut<usize> for PointVector {
  fn index_mut(&mut self, index: usize) -> &mut EdwardsPoint {
    &mut self.0[index]
  }
}

impl PointVector {
  #[cfg(test)]
  pub(crate) fn multiexp(&self, vector: &ScalarVector) -> EdwardsPoint {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = Vec::with_capacity(self.len());
    for (point, scalar) in self.0.iter().copied().zip(vector.0.iter().copied()) {
      res.push((scalar, point));
    }
    multiexp(&res)
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(mut self) -> (Self, Self) {
    debug_assert!(self.len() > 1);
    let r = self.0.split_off(self.0.len() / 2);
    debug_assert_eq!(self.len(), r.len());
    (self, PointVector(r))
  }
}
coins/monero/src/ringct/bulletproofs/plus/scalar_vector.rs (new file, 114 lines)
@@ -0,0 +1,114 @@
use core::{
  borrow::Borrow,
  ops::{Index, IndexMut},
};
use std_shims::vec::Vec;

use zeroize::Zeroize;

use group::ff::Field;
use dalek_ff_group::Scalar;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);

impl Index<usize> for ScalarVector {
  type Output = Scalar;
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}

impl IndexMut<usize> for ScalarVector {
  fn index_mut(&mut self, index: usize) -> &mut Scalar {
    &mut self.0[index]
  }
}

impl ScalarVector {
  pub(crate) fn new(len: usize) -> Self {
    ScalarVector(vec![Scalar::ZERO; len])
  }

  pub(crate) fn add(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val += scalar.borrow();
    }
    res
  }

  pub(crate) fn sub(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val -= scalar.borrow();
    }
    res
  }

  pub(crate) fn mul(&self, scalar: impl Borrow<Scalar>) -> Self {
    let mut res = self.clone();
    for val in res.0.iter_mut() {
      *val *= scalar.borrow();
    }
    res
  }

  pub(crate) fn add_vec(&self, vector: &Self) -> Self {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = self.clone();
    for (i, val) in res.0.iter_mut().enumerate() {
      *val += vector.0[i];
    }
    res
  }

  pub(crate) fn mul_vec(&self, vector: &Self) -> Self {
    debug_assert_eq!(self.len(), vector.len());
    let mut res = self.clone();
    for (i, val) in res.0.iter_mut().enumerate() {
      *val *= vector.0[i];
    }
    res
  }

  pub(crate) fn inner_product(&self, vector: &Self) -> Scalar {
    self.mul_vec(vector).sum()
  }

  pub(crate) fn powers(x: Scalar, len: usize) -> Self {
    debug_assert!(len != 0);

    let mut res = Vec::with_capacity(len);
    res.push(Scalar::ONE);
    res.push(x);
    for i in 2 .. len {
      res.push(res[i - 1] * x);
    }
    res.truncate(len);
    ScalarVector(res)
  }

  pub(crate) fn sum(mut self) -> Scalar {
    self.0.drain(..).sum()
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(mut self) -> (Self, Self) {
    debug_assert!(self.len() > 1);
    let r = self.0.split_off(self.0.len() / 2);
    debug_assert_eq!(self.len(), r.len());
    (self, ScalarVector(r))
  }
}

pub(crate) fn weighted_inner_product(
  a: &ScalarVector,
  b: &ScalarVector,
  y: &ScalarVector,
) -> Scalar {
  a.inner_product(&b.mul_vec(y))
}
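In math terms (my notation, not the source's): with the `y` argument holding the powers (y, y^2, ..., y^n), as WipStatement::new later constructs it, this computes the Bulletproofs+ weighted inner product

  \langle \vec{a}, \vec{b} \rangle_y = \sum_{i=1}^{n} a_i b_i y^i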
coins/monero/src/ringct/bulletproofs/plus/transcript.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
use std_shims::{sync::OnceLock, vec::Vec};
|
||||
|
||||
use dalek_ff_group::{Scalar, EdwardsPoint};
|
||||
|
||||
use monero_generators::{hash_to_point as raw_hash_to_point};
|
||||
use crate::{hash, hash_to_scalar as dalek_hash};
|
||||
|
||||
// Monero starts BP+ transcripts with the following constant.
|
||||
static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
|
||||
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
|
||||
// Why this uses a hash_to_point is completely unknown.
|
||||
*TRANSCRIPT_CELL
|
||||
.get_or_init(|| raw_hash_to_point(hash(b"bulletproof_plus_transcript")).compress().to_bytes())
|
||||
}
|
||||
|
||||
pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
|
||||
Scalar(dalek_hash(data))
|
||||
}
|
||||
|
||||
pub(crate) fn initial_transcript(commitments: core::slice::Iter<'_, EdwardsPoint>) -> Scalar {
|
||||
let commitments_hash =
|
||||
hash_to_scalar(&commitments.flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>());
|
||||
hash_to_scalar(&[TRANSCRIPT().as_ref(), &commitments_hash.to_bytes()].concat())
|
||||
}
|
||||
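
// Note the transcript is just a single running Scalar: initial_transcript binds the domain
// constant and the commitments, and each later round folds new elements in by hashing the
// prior scalar alongside them (see transcript_L_R / transcript_A_B below).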
@@ -0,0 +1,447 @@
use std_shims::vec::Vec;

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use multiexp::{multiexp, multiexp_vartime, BatchVerifier};
use group::{
  ff::{Field, PrimeField},
  GroupEncoding,
};
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::ringct::bulletproofs::plus::{
  ScalarVector, PointVector, GeneratorsList, Generators, padded_pow_of_2, weighted_inner_product,
  transcript::*,
};

// Figure 1
#[derive(Clone, Debug)]
pub(crate) struct WipStatement {
  generators: Generators,
  P: EdwardsPoint,
  y: ScalarVector,
}

impl Zeroize for WipStatement {
  fn zeroize(&mut self) {
    self.P.zeroize();
    self.y.zeroize();
  }
}

#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct WipWitness {
  a: ScalarVector,
  b: ScalarVector,
  alpha: Scalar,
}

impl WipWitness {
  pub(crate) fn new(mut a: ScalarVector, mut b: ScalarVector, alpha: Scalar) -> Option<Self> {
    if a.0.is_empty() || (a.len() != b.len()) {
      return None;
    }

    // Pad to the nearest power of 2
    let missing = padded_pow_of_2(a.len()) - a.len();
    a.0.reserve(missing);
    b.0.reserve(missing);
    for _ in 0 .. missing {
      a.0.push(Scalar::ZERO);
      b.0.push(Scalar::ZERO);
    }

    Some(Self { a, b, alpha })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) struct WipProof {
  pub(crate) L: Vec<EdwardsPoint>,
  pub(crate) R: Vec<EdwardsPoint>,
  pub(crate) A: EdwardsPoint,
  pub(crate) B: EdwardsPoint,
  pub(crate) r_answer: Scalar,
  pub(crate) s_answer: Scalar,
  pub(crate) delta_answer: Scalar,
}

impl WipStatement {
  pub(crate) fn new(generators: Generators, P: EdwardsPoint, y: Scalar) -> Self {
    debug_assert_eq!(generators.len(), padded_pow_of_2(generators.len()));

    // y ** n
    let mut y_vec = ScalarVector::new(generators.len());
    y_vec[0] = y;
    for i in 1 .. y_vec.len() {
      y_vec[i] = y_vec[i - 1] * y;
    }

    Self { generators, P, y: y_vec }
  }

  fn transcript_L_R(transcript: &mut Scalar, L: EdwardsPoint, R: EdwardsPoint) -> Scalar {
    let e = hash_to_scalar(
      &[transcript.to_repr().as_ref(), L.to_bytes().as_ref(), R.to_bytes().as_ref()].concat(),
    );
    *transcript = e;
    e
  }

  fn transcript_A_B(transcript: &mut Scalar, A: EdwardsPoint, B: EdwardsPoint) -> Scalar {
    let e = hash_to_scalar(
      &[transcript.to_repr().as_ref(), A.to_bytes().as_ref(), B.to_bytes().as_ref()].concat(),
    );
    *transcript = e;
    e
  }

  // Prover's variant of the shared code block to calculate G/H/P when n > 1
  // Returns each permutation of G/H since the prover needs to do operations on each permutation
  // P is dropped as it's unused in the prover's path
  // TODO: It'd still probably be faster to keep in terms of the original generators, both between
  // the reduced amount of group operations and the potential tabling of the generators under
  // multiexp
  #[allow(clippy::too_many_arguments)]
  fn next_G_H(
    transcript: &mut Scalar,
    mut g_bold1: PointVector,
    mut g_bold2: PointVector,
    mut h_bold1: PointVector,
    mut h_bold2: PointVector,
    L: EdwardsPoint,
    R: EdwardsPoint,
    y_inv_n_hat: Scalar,
  ) -> (Scalar, Scalar, Scalar, Scalar, PointVector, PointVector) {
    debug_assert_eq!(g_bold1.len(), g_bold2.len());
    debug_assert_eq!(h_bold1.len(), h_bold2.len());
    debug_assert_eq!(g_bold1.len(), h_bold1.len());

    let e = Self::transcript_L_R(transcript, L, R);
    let inv_e = e.invert().unwrap();

    // This vartime is safe as all of these arguments are public
    let mut new_g_bold = Vec::with_capacity(g_bold1.len());
    let e_y_inv = e * y_inv_n_hat;
    for g_bold in g_bold1.0.drain(..).zip(g_bold2.0.drain(..)) {
      new_g_bold.push(multiexp_vartime(&[(inv_e, g_bold.0), (e_y_inv, g_bold.1)]));
    }

    let mut new_h_bold = Vec::with_capacity(h_bold1.len());
    for h_bold in h_bold1.0.drain(..).zip(h_bold2.0.drain(..)) {
      new_h_bold.push(multiexp_vartime(&[(e, h_bold.0), (inv_e, h_bold.1)]));
    }

    let e_square = e.square();
    let inv_e_square = inv_e.square();

    (e, inv_e, e_square, inv_e_square, PointVector(new_g_bold), PointVector(new_h_bold))
  }

  /*
    This has room for optimization worth investigating further. It currently takes
    an iterative approach. It can be optimized further via divide and conquer.

    Assume there are 4 challenges.

    Iterative approach (current):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across that result and column 2.
      3. Do the optimal multiplications across that result and column 3.

    Divide and conquer (worth investigating further):
      1. Do the optimal multiplications across challenge column 0 and 1.
      2. Do the optimal multiplications across challenge column 2 and 3.
      3. Multiply both results together.

    When there are 4 challenges (n=16), the iterative approach does 28 multiplications
    versus divide and conquer's 24.
  */
  fn challenge_products(challenges: &[(Scalar, Scalar)]) -> Vec<Scalar> {
    let mut products = vec![Scalar::ONE; 1 << challenges.len()];

    if !challenges.is_empty() {
      products[0] = challenges[0].1;
      products[1] = challenges[0].0;

      for (j, challenge) in challenges.iter().enumerate().skip(1) {
        let mut slots = (1 << (j + 1)) - 1;
        while slots > 0 {
          products[slots] = products[slots / 2] * challenge.0;
          products[slots - 1] = products[slots / 2] * challenge.1;

          slots = slots.saturating_sub(2);
        }
      }

      // Sanity check since if the above failed to populate, it'd be critical
      for product in &products {
        debug_assert!(!bool::from(product.is_zero()));
      }
    }

    products
  }
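
  // As a concrete check of the indexing above: with two challenges (e1, inv_e1), (e2, inv_e2),
  // the cache works out to
  //   products[0b00] = inv_e1 * inv_e2
  //   products[0b01] = inv_e1 * e2
  //   products[0b10] = e1 * inv_e2
  //   products[0b11] = e1 * e2
  // so bit (challenges.len() - 1 - j) of the index picks e (set) or inv_e (clear) for
  // challenge j.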

  pub(crate) fn prove<R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    mut transcript: Scalar,
    witness: WipWitness,
  ) -> Option<WipProof> {
    let WipStatement { generators, P, mut y } = self;
    #[cfg(not(debug_assertions))]
    let _ = P;

    if generators.len() != witness.a.len() {
      return None;
    }
    let (g, h) = (generators.g(), generators.h());
    let mut g_bold = vec![];
    let mut h_bold = vec![];
    for i in 0 .. generators.len() {
      g_bold.push(generators.generator(GeneratorsList::GBold1, i));
      h_bold.push(generators.generator(GeneratorsList::HBold1, i));
    }
    let mut g_bold = PointVector(g_bold);
    let mut h_bold = PointVector(h_bold);

    // Check P has the expected relationship
    #[cfg(debug_assertions)]
    {
      let mut P_terms = witness
        .a
        .0
        .iter()
        .copied()
        .zip(g_bold.0.iter().copied())
        .chain(witness.b.0.iter().copied().zip(h_bold.0.iter().copied()))
        .collect::<Vec<_>>();
      P_terms.push((weighted_inner_product(&witness.a, &witness.b, &y), g));
      P_terms.push((witness.alpha, h));
      debug_assert_eq!(multiexp(&P_terms), P);
      P_terms.zeroize();
    }

    let mut a = witness.a.clone();
    let mut b = witness.b.clone();
    let mut alpha = witness.alpha;

    // From here on, g_bold.len() is used as n
    debug_assert_eq!(g_bold.len(), a.len());

    let mut L_vec = vec![];
    let mut R_vec = vec![];

    // else n > 1 case from figure 1
    while g_bold.len() > 1 {
      let (a1, a2) = a.clone().split();
      let (b1, b2) = b.clone().split();
      let (g_bold1, g_bold2) = g_bold.split();
      let (h_bold1, h_bold2) = h_bold.split();

      let n_hat = g_bold1.len();
      debug_assert_eq!(a1.len(), n_hat);
      debug_assert_eq!(a2.len(), n_hat);
      debug_assert_eq!(b1.len(), n_hat);
      debug_assert_eq!(b2.len(), n_hat);
      debug_assert_eq!(g_bold1.len(), n_hat);
      debug_assert_eq!(g_bold2.len(), n_hat);
      debug_assert_eq!(h_bold1.len(), n_hat);
      debug_assert_eq!(h_bold2.len(), n_hat);

      let y_n_hat = y[n_hat - 1];
      y.0.truncate(n_hat);

      let d_l = Scalar::random(&mut *rng);
      let d_r = Scalar::random(&mut *rng);

      let c_l = weighted_inner_product(&a1, &b2, &y);
      let c_r = weighted_inner_product(&(a2.mul(y_n_hat)), &b1, &y);

      // TODO: Calculate these with a batch inversion
      let y_inv_n_hat = y_n_hat.invert().unwrap();

      let mut L_terms = a1
        .mul(y_inv_n_hat)
        .0
        .drain(..)
        .zip(g_bold2.0.iter().copied())
        .chain(b2.0.iter().copied().zip(h_bold1.0.iter().copied()))
        .collect::<Vec<_>>();
      L_terms.push((c_l, g));
      L_terms.push((d_l, h));
      let L = multiexp(&L_terms) * Scalar(crate::INV_EIGHT());
      L_vec.push(L);
      L_terms.zeroize();

      let mut R_terms = a2
        .mul(y_n_hat)
        .0
        .drain(..)
        .zip(g_bold1.0.iter().copied())
        .chain(b1.0.iter().copied().zip(h_bold2.0.iter().copied()))
        .collect::<Vec<_>>();
      R_terms.push((c_r, g));
      R_terms.push((d_r, h));
      let R = multiexp(&R_terms) * Scalar(crate::INV_EIGHT());
      R_vec.push(R);
      R_terms.zeroize();

      let (e, inv_e, e_square, inv_e_square);
      (e, inv_e, e_square, inv_e_square, g_bold, h_bold) =
        Self::next_G_H(&mut transcript, g_bold1, g_bold2, h_bold1, h_bold2, L, R, y_inv_n_hat);

      a = a1.mul(e).add_vec(&a2.mul(y_n_hat * inv_e));
      b = b1.mul(inv_e).add_vec(&b2.mul(e));
      alpha += (d_l * e_square) + (d_r * inv_e_square);

      debug_assert_eq!(g_bold.len(), a.len());
      debug_assert_eq!(g_bold.len(), h_bold.len());
      debug_assert_eq!(g_bold.len(), b.len());
    }

    // n == 1 case from figure 1
    debug_assert_eq!(g_bold.len(), 1);
    debug_assert_eq!(h_bold.len(), 1);

    debug_assert_eq!(a.len(), 1);
    debug_assert_eq!(b.len(), 1);

    let r = Scalar::random(&mut *rng);
    let s = Scalar::random(&mut *rng);
    let delta = Scalar::random(&mut *rng);
    let eta = Scalar::random(&mut *rng);

    let ry = r * y[0];

    let mut A_terms =
      vec![(r, g_bold[0]), (s, h_bold[0]), ((ry * b[0]) + (s * y[0] * a[0]), g), (delta, h)];
    let A = multiexp(&A_terms) * Scalar(crate::INV_EIGHT());
    A_terms.zeroize();

    let mut B_terms = vec![(ry * s, g), (eta, h)];
    let B = multiexp(&B_terms) * Scalar(crate::INV_EIGHT());
    B_terms.zeroize();

    let e = Self::transcript_A_B(&mut transcript, A, B);

    let r_answer = r + (a[0] * e);
    let s_answer = s + (b[0] * e);
    let delta_answer = eta + (delta * e) + (alpha * e.square());

    Some(WipProof { L: L_vec, R: R_vec, A, B, r_answer, s_answer, delta_answer })
  }

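  // Note the prover multiplies L, R, A, and B by INV_EIGHT before publishing them; the
  // verifier undoes this via mul_by_cofactor below, per Monero's convention of clearing
  // torsion by transmitting points divided by eight.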
  pub(crate) fn verify<Id: Copy + Zeroize, R: RngCore + CryptoRng>(
    self,
    rng: &mut R,
    verifier: &mut BatchVerifier<Id, EdwardsPoint>,
    id: Id,
    mut transcript: Scalar,
    mut proof: WipProof,
  ) -> bool {
    let WipStatement { generators, P, y } = self;

    let (g, h) = (generators.g(), generators.h());

    // Verify the L/R lengths
    {
      let mut lr_len = 0;
      while (1 << lr_len) < generators.len() {
        lr_len += 1;
      }
      if (proof.L.len() != lr_len) ||
        (proof.R.len() != lr_len) ||
        (generators.len() != (1 << lr_len))
      {
        return false;
      }
    }

    let inv_y = {
      let inv_y = y[0].invert().unwrap();
      let mut res = Vec::with_capacity(y.len());
      res.push(inv_y);
      while res.len() < y.len() {
        res.push(inv_y * res.last().unwrap());
      }
      res
    };

    let mut P_terms = vec![(Scalar::ONE, P)];
    P_terms.reserve(6 + (2 * generators.len()) + proof.L.len());

    let mut challenges = Vec::with_capacity(proof.L.len());
    let product_cache = {
      let mut es = Vec::with_capacity(proof.L.len());
      for (L, R) in proof.L.iter_mut().zip(proof.R.iter_mut()) {
        es.push(Self::transcript_L_R(&mut transcript, *L, *R));
        *L = L.mul_by_cofactor();
        *R = R.mul_by_cofactor();
      }

      let mut inv_es = es.clone();
      let mut scratch = vec![Scalar::ZERO; es.len()];
      group::ff::BatchInverter::invert_with_external_scratch(&mut inv_es, &mut scratch);
      drop(scratch);

      debug_assert_eq!(es.len(), inv_es.len());
      debug_assert_eq!(es.len(), proof.L.len());
      debug_assert_eq!(es.len(), proof.R.len());
      for ((e, inv_e), (L, R)) in
        es.drain(..).zip(inv_es.drain(..)).zip(proof.L.iter().zip(proof.R.iter()))
      {
        debug_assert_eq!(e.invert().unwrap(), inv_e);

        challenges.push((e, inv_e));

        let e_square = e.square();
        let inv_e_square = inv_e.square();
        P_terms.push((e_square, *L));
        P_terms.push((inv_e_square, *R));
      }

      Self::challenge_products(&challenges)
    };

    let e = Self::transcript_A_B(&mut transcript, proof.A, proof.B);
    proof.A = proof.A.mul_by_cofactor();
    proof.B = proof.B.mul_by_cofactor();
    let neg_e_square = -e.square();

    let mut multiexp = P_terms;
    multiexp.reserve(4 + (2 * generators.len()));
    for (scalar, _) in multiexp.iter_mut() {
      *scalar *= neg_e_square;
    }

    let re = proof.r_answer * e;
    for i in 0 .. generators.len() {
      let mut scalar = product_cache[i] * re;
      if i > 0 {
        scalar *= inv_y[i - 1];
      }
      multiexp.push((scalar, generators.generator(GeneratorsList::GBold1, i)));
    }

    let se = proof.s_answer * e;
    for i in 0 .. generators.len() {
      multiexp.push((
        se * product_cache[product_cache.len() - 1 - i],
        generators.generator(GeneratorsList::HBold1, i),
      ));
    }

    multiexp.push((-e, proof.A));
    multiexp.push((proof.r_answer * y[0] * proof.s_answer, g));
    multiexp.push((proof.delta_answer, h));
    multiexp.push((-Scalar::ONE, proof.B));

    verifier.queue(rng, id, multiexp);

    true
  }
}
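
// Expected round trip, as a sketch only (generator setup elided; it's internal to this module,
// and the BatchVerifier calls assume serai's multiexp API):
//   let statement = WipStatement::new(generators, P, y);
//   let proof = statement.clone().prove(&mut rng, transcript, witness).unwrap();
//   let mut verifier = BatchVerifier::new(1);
//   assert!(statement.verify(&mut rng, &mut verifier, 0, transcript, proof));
//   assert!(verifier.verify_vartime());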
@@ -12,6 +12,7 @@ use multiexp::multiexp;
 pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
 macro_rules! math_op {
   ($Op: ident, $op: ident, $f: expr) => {
+    #[allow(clippy::redundant_closure_call)]
     impl $Op<Scalar> for ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: Scalar) -> ScalarVector {
@@ -19,6 +20,7 @@ macro_rules! math_op {
       }
     }
+    #[allow(clippy::redundant_closure_call)]
     impl $Op<Scalar> for &ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: Scalar) -> ScalarVector {
@@ -26,6 +28,7 @@ macro_rules! math_op {
       }
     }
+    #[allow(clippy::redundant_closure_call)]
     impl $Op<ScalarVector> for ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: ScalarVector) -> ScalarVector {
@@ -34,6 +37,7 @@ macro_rules! math_op {
       }
     }
+    #[allow(clippy::redundant_closure_call)]
     impl $Op<&ScalarVector> for &ScalarVector {
       type Output = ScalarVector;
       fn $op(self, b: &ScalarVector) -> ScalarVector {
@@ -63,24 +67,6 @@ impl ScalarVector {
     ScalarVector(res)
   }
 
-  pub(crate) fn even_powers(x: Scalar, pow: usize) -> ScalarVector {
-    debug_assert!(pow != 0);
-    // Verify pow is a power of two
-    debug_assert_eq!(((pow - 1) & pow), 0);
-
-    let xsq = x * x;
-    let mut res = ScalarVector(Vec::with_capacity(pow / 2));
-    res.0.push(xsq);
-
-    let mut prev = 2;
-    while prev < pow {
-      res.0.push(res[res.len() - 1] * xsq);
-      prev += 2;
-    }
-
-    res
-  }
-
   pub(crate) fn sum(mut self) -> Scalar {
     self.0.drain(..).sum()
   }
@@ -106,20 +92,11 @@ pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
   (a * b).sum()
 }
 
-pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
-  ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
-}
-
-pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
-  // y ** 0 is not used as a power
-  (a * b * weighted_powers(y, a.len())).sum()
-}
-
 impl Mul<&[EdwardsPoint]> for &ScalarVector {
   type Output = EdwardsPoint;
   fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
     debug_assert_eq!(self.len(), b.len());
-    multiexp(&self.0.iter().cloned().zip(b.iter().cloned()).collect::<Vec<_>>())
+    multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
   }
 }

@@ -169,7 +169,7 @@ fn core(
   }
 
   // Perform the core loop
-  let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
+  let mut c1 = CtOption::new(Scalar::ZERO, Choice::from(0));
   for i in (start .. end).map(|i| i % n) {
     // This will only execute once and shouldn't need to be constant time. Making it constant time
     // removes the risk of branch prediction creating timing differences depending on ring index
@@ -179,8 +179,8 @@ fn core(
     let c_p = mu_P * c;
     let c_c = mu_C * c;
 
-    let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
-    let PH = hash_to_point(P[i]);
+    let L = (&s[i] * ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
+    let PH = hash_to_point(&P[i]);
     // Shouldn't be an issue as all of the variables in this vartime statement are public
     let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);
 
@@ -219,7 +219,7 @@ impl Clsag {
     let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
     let z = input.commitment.mask - mask;
 
-    let H = hash_to_point(input.decoys.ring[r][0]);
+    let H = hash_to_point(&input.decoys.ring[r][0]);
     let D = H * z;
     let mut s = Vec::with_capacity(input.decoys.ring.len());
     for _ in 0 .. input.decoys.ring.len() {
@@ -241,7 +241,7 @@ impl Clsag {
     msg: [u8; 32],
   ) -> Vec<(Clsag, EdwardsPoint)> {
     let mut res = Vec::with_capacity(inputs.len());
-    let mut sum_pseudo_outs = Scalar::zero();
+    let mut sum_pseudo_outs = Scalar::ZERO;
     for i in 0 .. inputs.len() {
       let mut mask = random_scalar(rng);
       if i == (inputs.len() - 1) {
@@ -257,9 +257,9 @@ impl Clsag {
         &inputs[i].2,
         mask,
         &msg,
-        nonce.deref() * &ED25519_BASEPOINT_TABLE,
+        nonce.deref() * ED25519_BASEPOINT_TABLE,
         nonce.deref() *
-          hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
+          hash_to_point(&inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
       );
       clsag.s[usize::from(inputs[i].2.decoys.i)] =
         (-((p * inputs[i].0.deref()) + c)) + nonce.deref();

@@ -7,11 +7,7 @@ use rand_chacha::ChaCha20Rng;
 
 use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
 
-use curve25519_dalek::{
-  traits::{Identity, IsIdentity},
-  scalar::Scalar,
-  edwards::EdwardsPoint,
-};
+use curve25519_dalek::{scalar::Scalar, edwards::EdwardsPoint};
 
 use group::{ff::Field, Group, GroupEncoding};
 
@@ -120,7 +116,7 @@ impl ClsagMultisig {
     ClsagMultisig {
       transcript,
 
-      H: hash_to_point(output_key),
+      H: hash_to_point(&output_key),
       image: EdwardsPoint::identity(),
 
       details,
@@ -147,7 +143,7 @@ pub(crate) fn add_key_image_share(
   participant: Participant,
   share: EdwardsPoint,
 ) {
-  if image.is_identity() {
+  if image.is_identity().into() {
     *image = generator * offset;
   }
   *image += share * lagrange::<dfg::Scalar>(participant, included).0;
@@ -188,10 +184,10 @@ impl Algorithm<Ed25519> for ClsagMultisig {
     reader.read_exact(&mut bytes)?;
     // dfg ensures the point is torsion free
     let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
-      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
+      .ok_or_else(|| io::Error::other("invalid key image"))?;
     // Ensure this is a canonical point
     if xH.to_bytes() != bytes {
-      Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
+      Err(io::Error::other("non-canonical key image"))?;
     }
 
     Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
@@ -203,7 +199,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
     l: Participant,
     addendum: ClsagAddendum,
   ) -> Result<(), FrostError> {
-    if self.image.is_identity() {
+    if self.image.is_identity().into() {
       self.transcript.domain_separate(b"CLSAG");
       self.input().transcript(&mut self.transcript);
       self.transcript.append_message(b"mask", self.mask().to_bytes());

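// The added `.into()` calls above stem from replacing curve25519-dalek's IsIdentity trait
// (which returns a bool) with group's Group::is_identity (which returns a subtle::Choice),
// matching the import change at the top of this hunk.
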
@@ -3,6 +3,6 @@ use curve25519_dalek::edwards::EdwardsPoint;
 pub use monero_generators::{hash_to_point as raw_hash_to_point};
 
 /// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
-pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
+pub fn hash_to_point(key: &EdwardsPoint) -> EdwardsPoint {
   raw_hash_to_point(key.compress().to_bytes())
 }

@@ -3,69 +3,211 @@ use std_shims::{
   io::{self, Read, Write},
 };
 
-use curve25519_dalek::scalar::Scalar;
-#[cfg(feature = "experimental")]
-use curve25519_dalek::edwards::EdwardsPoint;
 use zeroize::Zeroize;
 
-use crate::serialize::*;
-#[cfg(feature = "experimental")]
-use crate::{hash_to_scalar, ringct::hash_to_point};
+use curve25519_dalek::{traits::IsIdentity, Scalar, EdwardsPoint};
+
+use monero_generators::H;
+
+use crate::{hash_to_scalar, ringct::hash_to_point, serialize::*};
+
+#[derive(Clone, Copy, PartialEq, Eq, Debug)]
+#[cfg_attr(feature = "std", derive(thiserror::Error))]
+pub enum MlsagError {
+  #[cfg_attr(feature = "std", error("invalid ring"))]
+  InvalidRing,
+  #[cfg_attr(feature = "std", error("invalid amount of key images"))]
+  InvalidAmountOfKeyImages,
+  #[cfg_attr(feature = "std", error("invalid ss"))]
+  InvalidSs,
+  #[cfg_attr(feature = "std", error("key image was identity"))]
+  IdentityKeyImage,
+  #[cfg_attr(feature = "std", error("invalid ci"))]
+  InvalidCi,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
+pub struct RingMatrix {
+  matrix: Vec<Vec<EdwardsPoint>>,
+}
+
+impl RingMatrix {
+  pub fn new(matrix: Vec<Vec<EdwardsPoint>>) -> Result<Self, MlsagError> {
+    if matrix.is_empty() {
+      Err(MlsagError::InvalidRing)?;
+    }
+    for member in &matrix {
+      if member.is_empty() || (member.len() != matrix[0].len()) {
+        Err(MlsagError::InvalidRing)?;
+      }
+    }
+
+    Ok(RingMatrix { matrix })
+  }
+
+  /// Construct a ring matrix for an individual output.
+  pub fn individual(
+    ring: &[[EdwardsPoint; 2]],
+    pseudo_out: EdwardsPoint,
+  ) -> Result<Self, MlsagError> {
+    let mut matrix = Vec::with_capacity(ring.len());
+    for ring_member in ring {
+      matrix.push(vec![ring_member[0], ring_member[1] - pseudo_out]);
+    }
+    RingMatrix::new(matrix)
+  }
+
+  pub fn iter(&self) -> impl Iterator<Item = &[EdwardsPoint]> {
+    self.matrix.iter().map(AsRef::as_ref)
+  }
+
+  /// Return the amount of members in the ring.
+  pub fn members(&self) -> usize {
+    self.matrix.len()
+  }
+
+  /// Returns the length of a ring member.
+  ///
+  /// A ring member is a vector of points for which the signer knows all of the discrete
+  /// logarithms.
+  pub fn member_len(&self) -> usize {
+    // this is safe to do as the constructors don't allow empty rings
+    self.matrix[0].len()
+  }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
 pub struct Mlsag {
-  pub ss: Vec<[Scalar; 2]>,
+  pub ss: Vec<Vec<Scalar>>,
   pub cc: Scalar,
 }
 
 impl Mlsag {
   pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
-    for ss in self.ss.iter() {
+    for ss in &self.ss {
       write_raw_vec(write_scalar, ss, w)?;
     }
     write_scalar(&self.cc, w)
   }
 
-  pub fn read<R: Read>(mixins: usize, r: &mut R) -> io::Result<Mlsag> {
+  pub fn read<R: Read>(mixins: usize, ss_2_elements: usize, r: &mut R) -> io::Result<Mlsag> {
     Ok(Mlsag {
-      ss: (0 .. mixins).map(|_| read_array(read_scalar, r)).collect::<Result<_, _>>()?,
+      ss: (0 .. mixins)
+        .map(|_| read_raw_vec(read_scalar, ss_2_elements, r))
+        .collect::<Result<_, _>>()?,
       cc: read_scalar(r)?,
     })
   }
 
-  #[cfg(feature = "experimental")]
   pub fn verify(
     &self,
     msg: &[u8; 32],
-    ring: &[[EdwardsPoint; 2]],
-    key_image: &EdwardsPoint,
-  ) -> bool {
-    if ring.is_empty() {
-      return false;
-    }
+    ring: &RingMatrix,
+    key_images: &[EdwardsPoint],
+  ) -> Result<(), MlsagError> {
+    // Mlsag allows for layers to not need linkability, hence they don't need key images
+    // Monero requires that there is always only 1 non-linkable layer - the amount commitments.
+    if ring.member_len() != (key_images.len() + 1) {
+      Err(MlsagError::InvalidAmountOfKeyImages)?;
+    }
 
     let mut buf = Vec::with_capacity(6 * 32);
+    buf.extend_from_slice(msg);
+
     let mut ci = self.cc;
-    for (i, ring_member) in ring.iter().enumerate() {
-      buf.extend_from_slice(msg);
 
-      #[allow(non_snake_case)]
-      let L =
-        |r| EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, &ring_member[r], &self.ss[i][r]);
+    // This is an iterator over the key images as options with an added entry of `None` at the
+    // end for the non-linkable layer
+    let key_images_iter = key_images.iter().map(|ki| Some(*ki)).chain(core::iter::once(None));
 
-      buf.extend_from_slice(ring_member[0].compress().as_bytes());
-      buf.extend_from_slice(L(0).compress().as_bytes());
+    if ring.matrix.len() != self.ss.len() {
+      Err(MlsagError::InvalidSs)?;
+    }
 
-      #[allow(non_snake_case)]
-      let R = (self.ss[i][0] * hash_to_point(ring_member[0])) + (ci * key_image);
-      buf.extend_from_slice(R.compress().as_bytes());
+    for (ring_member, ss) in ring.iter().zip(&self.ss) {
+      if ring_member.len() != ss.len() {
+        Err(MlsagError::InvalidSs)?;
+      }
 
-      buf.extend_from_slice(ring_member[1].compress().as_bytes());
-      buf.extend_from_slice(L(1).compress().as_bytes());
+      for ((ring_member_entry, s), ki) in ring_member.iter().zip(ss).zip(key_images_iter.clone()) {
+        #[allow(non_snake_case)]
+        let L = EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, ring_member_entry, s);
+
+        buf.extend_from_slice(ring_member_entry.compress().as_bytes());
+        buf.extend_from_slice(L.compress().as_bytes());
+
+        // Not all dimensions need to be linkable, e.g. commitments, and only linkable layers need
+        // to have key images.
+        if let Some(ki) = ki {
+          if ki.is_identity() {
+            Err(MlsagError::IdentityKeyImage)?;
+          }
+
+          #[allow(non_snake_case)]
+          let R = (s * hash_to_point(ring_member_entry)) + (ci * ki);
+          buf.extend_from_slice(R.compress().as_bytes());
+        }
+      }
 
       ci = hash_to_scalar(&buf);
-      buf.clear();
+      // keep the msg in the buffer.
+      buf.drain(msg.len() ..);
     }
 
-    ci == self.cc
+    if ci != self.cc {
+      Err(MlsagError::InvalidCi)?
+    }
+    Ok(())
   }
 }
+
+/// An aggregate ring matrix builder, usable to set up the ring matrix to prove/verify an aggregate
+/// MLSAG signature.
+#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
+pub struct AggregateRingMatrixBuilder {
+  key_ring: Vec<Vec<EdwardsPoint>>,
+  amounts_ring: Vec<EdwardsPoint>,
+  sum_out: EdwardsPoint,
+}
+
+impl AggregateRingMatrixBuilder {
+  /// Create a new AggregateRingMatrixBuilder.
+  ///
+  /// Takes in the transaction's outputs; commitments and fee.
+  pub fn new(commitments: &[EdwardsPoint], fee: u64) -> Self {
+    AggregateRingMatrixBuilder {
+      key_ring: vec![],
+      amounts_ring: vec![],
+      sum_out: commitments.iter().sum::<EdwardsPoint>() + (H() * Scalar::from(fee)),
+    }
+  }
+
+  /// Push a ring of [output key, commitment] to the matrix.
+  pub fn push_ring(&mut self, ring: &[[EdwardsPoint; 2]]) -> Result<(), MlsagError> {
+    if self.key_ring.is_empty() {
+      self.key_ring = vec![vec![]; ring.len()];
+      // Now that we know the length of the ring, fill the `amounts_ring`.
+      self.amounts_ring = vec![-self.sum_out; ring.len()];
+    }
+
+    if (self.amounts_ring.len() != ring.len()) || ring.is_empty() {
+      // All the rings in an aggregate matrix must be the same length.
+      return Err(MlsagError::InvalidRing);
+    }
+
+    for (i, ring_member) in ring.iter().enumerate() {
+      self.key_ring[i].push(ring_member[0]);
+      self.amounts_ring[i] += ring_member[1]
+    }
+
+    Ok(())
+  }
+
+  /// Build and return the [`RingMatrix`]
+  pub fn build(mut self) -> Result<RingMatrix, MlsagError> {
+    for (i, amount_commitment) in self.amounts_ring.drain(..).enumerate() {
+      self.key_ring[i].push(amount_commitment);
+    }
+    RingMatrix::new(self.key_ring)
+  }
+}

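// Layout sketch for the aggregate case: after push_ring, row i holds the i-th member's output
// key from each pushed ring, while amounts_ring[i] accumulates that member's commitments minus
// sum_out; build() then appends that aggregate commitment as the final, non-linkable column of
// each row, satisfying verify's "key images + 1" member length check.
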
@@ -28,7 +28,7 @@ use crate::{
 
 /// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
 pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
-  hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
+  hash_to_point(&(ED25519_BASEPOINT_TABLE * secret.deref())) * secret.deref()
 }
 
 #[derive(Clone, PartialEq, Eq, Debug)]
@@ -61,7 +61,7 @@ impl EncryptedAmount {
 pub enum RctType {
   /// No RCT proofs.
   Null,
-  /// One MLSAG for a single input and a Borromean range proof (RCTTypeFull).
+  /// One MLSAG for multiple inputs and Borromean range proofs (RCTTypeFull).
   MlsagAggregate,
   // One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
   MlsagIndividual,
@@ -124,8 +124,9 @@ pub struct RctBase {
 }
 
 impl RctBase {
-  pub(crate) fn fee_weight(outputs: usize) -> usize {
-    1 + 8 + (outputs * (8 + 32))
+  pub(crate) fn fee_weight(outputs: usize, fee: u64) -> usize {
+    // 1 byte for the RCT signature type
+    1 + (outputs * (8 + 32)) + varint_len(fee)
   }
 
   pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
@@ -146,8 +147,8 @@ impl RctBase {
   }
 
   pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(RctBase, RctType)> {
-    let rct_type = RctType::from_byte(read_byte(r)?)
-      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RCT type"))?;
+    let rct_type =
+      RctType::from_byte(read_byte(r)?).ok_or_else(|| io::Error::other("invalid RCT type"))?;
 
     match rct_type {
       RctType::Null => {}
@@ -163,7 +164,7 @@ impl RctBase {
           // If there are Bulletproofs, there must be a matching amount of outputs, implicitly
           // banning 0 outputs
           // Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
-          Err(io::Error::new(io::ErrorKind::Other, "RCT with Bulletproofs(+) had 0 outputs"))?;
+          Err(io::Error::other("RCT with Bulletproofs(+) had 0 outputs"))?;
         }
       }
     }
@@ -193,6 +194,10 @@ impl RctBase {
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub enum RctPrunable {
   Null,
+  AggregateMlsagBorromean {
+    borromean: Vec<BorromeanRange>,
+    mlsag: Mlsag,
+  },
   MlsagBorromean {
     borromean: Vec<BorromeanRange>,
     mlsags: Vec<Mlsag>,
@@ -211,6 +216,7 @@ pub enum RctPrunable {
 
 impl RctPrunable {
   pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
+    // 1 byte for number of BPs (technically a VarInt, yet there's always just zero or one)
     1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
       (inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
   }
@@ -218,6 +224,10 @@ impl RctPrunable {
   pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
     match self {
       RctPrunable::Null => Ok(()),
+      RctPrunable::AggregateMlsagBorromean { borromean, mlsag } => {
+        write_raw_vec(BorromeanRange::write, borromean, w)?;
+        mlsag.write(w)
+      }
       RctPrunable::MlsagBorromean { borromean, mlsags } => {
         write_raw_vec(BorromeanRange::write, borromean, w)?;
         write_raw_vec(Mlsag::write, mlsags, w)
@@ -255,11 +265,26 @@ impl RctPrunable {
     outputs: usize,
     r: &mut R,
   ) -> io::Result<RctPrunable> {
+    // While we generally don't bother with misc consensus checks, this affects the safety of
+    // the below defined rct_type function
+    // The exact line preventing zero-input transactions is:
+    // https://github.com/monero-project/monero/blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/
+    //   src/ringct/rctSigs.cpp#L609
+    // And then for RctNull, that's only allowed for miner TXs which require one input of
+    // Input::Gen
+    if decoys.is_empty() {
+      Err(io::Error::other("transaction had no inputs"))?;
+    }
+
     Ok(match rct_type {
       RctType::Null => RctPrunable::Null,
-      RctType::MlsagAggregate | RctType::MlsagIndividual => RctPrunable::MlsagBorromean {
+      RctType::MlsagAggregate => RctPrunable::AggregateMlsagBorromean {
         borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
-        mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
+        mlsag: Mlsag::read(decoys[0], decoys.len() + 1, r)?,
+      },
+      RctType::MlsagIndividual => RctPrunable::MlsagBorromean {
+        borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
+        mlsags: decoys.iter().map(|d| Mlsag::read(*d, 2, r)).collect::<Result<_, _>>()?,
       },
       RctType::Bulletproofs | RctType::BulletproofsCompactAmount => {
         RctPrunable::MlsagBulletproofs {
@@ -270,18 +295,18 @@ impl RctPrunable {
             read_varint(r)?
           }) != 1
           {
-            Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
+            Err(io::Error::other("n bulletproofs instead of one"))?;
           }
           Bulletproofs::read(r)?
         },
-        mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
+        mlsags: decoys.iter().map(|d| Mlsag::read(*d, 2, r)).collect::<Result<_, _>>()?,
         pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
       }
     }
       RctType::Clsag | RctType::BulletproofsPlus => RctPrunable::Clsag {
        bulletproofs: {
-          if read_varint(r)? != 1 {
-            Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
+          if read_varint::<_, u64>(r)? != 1 {
+            Err(io::Error::other("n bulletproofs instead of one"))?;
          }
          (if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
            r,
@@ -296,6 +321,7 @@ impl RctPrunable {
   pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
     match self {
       RctPrunable::Null => panic!("Serializing RctPrunable::Null for a signature"),
+      RctPrunable::AggregateMlsagBorromean { borromean, .. } |
       RctPrunable::MlsagBorromean { borromean, .. } => {
        borromean.iter().try_for_each(|rs| rs.write(w))
       }
@@ -316,30 +342,8 @@ impl RctSignatures {
   pub fn rct_type(&self) -> RctType {
     match &self.prunable {
       RctPrunable::Null => RctType::Null,
-      RctPrunable::MlsagBorromean { .. } => {
-        /*
-          This type of RctPrunable may have no outputs, yet pseudo_outs are per input
-          This will only be a valid RctSignatures if it's for a TX with inputs
-          That makes this valid for any valid RctSignatures
-
-          While it will be invalid for any invalid RctSignatures, potentially letting an invalid
-          MlsagAggregate be interpreted as a valid MlsagIndividual (or vice versa), they have
-          incompatible deserializations
-
-          This means it's impossible to receive a MlsagAggregate over the wire and interpret it
-          as a MlsagIndividual (or vice versa)
-
-          That only makes manual manipulation unsafe, which will always be true since these fields
-          are all pub
-
-          TODO: Consider making them private with read-only accessors?
-        */
-        if self.base.pseudo_outs.is_empty() {
-          RctType::MlsagAggregate
-        } else {
-          RctType::MlsagIndividual
-        }
-      }
+      RctPrunable::AggregateMlsagBorromean { .. } => RctType::MlsagAggregate,
+      RctPrunable::MlsagBorromean { .. } => RctType::MlsagIndividual,
       // RctBase ensures there's at least one output, making the following
       // inferences guaranteed/expects impossible on any valid RctSignatures
       RctPrunable::MlsagBulletproofs { .. } => {
@@ -347,7 +351,7 @@ impl RctSignatures {
         self
           .base
           .encrypted_amounts
-          .get(0)
+          .first()
           .expect("MLSAG with Bulletproofs didn't have any outputs"),
          EncryptedAmount::Original { .. }
        ) {
@@ -366,8 +370,8 @@ impl RctSignatures {
     }
   }
 
-  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
-    RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
+  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize, fee: u64) -> usize {
+    RctBase::fee_weight(outputs, fee) + RctPrunable::fee_weight(protocol, inputs, outputs)
   }
 
   pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {

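// Worked example for the new RctBase::fee_weight above, assuming varint_len counts the
// standard 7-bits-per-byte Monero varint encoding: with 2 outputs and a fee of 30_000_000
// atomic units (a 4-byte varint), the weight is 1 + (2 * (8 + 32)) + 4 = 85 bytes.
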
@@ -1,29 +1,71 @@
+use std::{sync::Arc, io::Read};
+
 use async_trait::async_trait;
 
-use digest_auth::AuthContext;
-use reqwest::Client;
+use tokio::sync::Mutex;
+
+use digest_auth::{WwwAuthenticateHeader, AuthContext};
+use simple_request::{
+  hyper::{StatusCode, header::HeaderValue, Request},
+  Response, Client,
+};
 
 use crate::rpc::{RpcError, RpcConnection, Rpc};
 
+#[derive(Clone, Debug)]
+enum Authentication {
+  // If unauthenticated, use a single client
+  Unauthenticated(Client),
+  // If authenticated, use a single client which supports being locked and tracks its nonce
+  // This ensures that if a nonce is requested, another caller doesn't make a request invalidating
+  // it
+  Authenticated {
+    username: String,
+    password: String,
+    #[allow(clippy::type_complexity)]
+    connection: Arc<Mutex<(Option<(WwwAuthenticateHeader, u64)>, Client)>>,
+  },
+}
+
 /// An HTTP(S) transport for the RPC.
 ///
 /// Requires tokio.
 #[derive(Clone, Debug)]
 pub struct HttpRpc {
-  client: Client,
-  userpass: Option<(String, String)>,
+  authentication: Authentication,
   url: String,
 }
 
 impl HttpRpc {
+  fn digest_auth_challenge(
+    response: &Response,
+  ) -> Result<Option<(WwwAuthenticateHeader, u64)>, RpcError> {
+    Ok(if let Some(header) = response.headers().get("www-authenticate") {
+      Some((
+        digest_auth::parse(
+          header
+            .to_str()
+            .map_err(|_| RpcError::InvalidNode("www-authenticate header wasn't a string"))?,
+        )
+        .map_err(|_| RpcError::InvalidNode("invalid digest-auth response"))?,
+        0,
+      ))
+    } else {
+      None
+    })
+  }
+
   /// Create a new HTTP(S) RPC connection.
   ///
   /// A daemon requiring authentication can be used via including the username and password in the
   /// URL.
-  pub fn new(mut url: String) -> Result<Rpc<HttpRpc>, RpcError> {
-    // Parse out the username and password
-    let userpass = if url.contains('@') {
+  pub async fn new(mut url: String) -> Result<Rpc<HttpRpc>, RpcError> {
+    let authentication = if url.contains('@') {
+      // Parse out the username and password
       let url_clone = url;
       let split_url = url_clone.split('@').collect::<Vec<_>>();
       if split_url.len() != 2 {
-        Err(RpcError::InvalidNode)?;
+        Err(RpcError::ConnectionError("invalid amount of login specifications".to_string()))?;
       }
       let mut userpass = split_url[0];
       url = split_url[1].to_string();
@@ -32,60 +74,186 @@ impl HttpRpc {
       if userpass.contains("://") {
         let split_userpass = userpass.split("://").collect::<Vec<_>>();
         if split_userpass.len() != 2 {
-          Err(RpcError::InvalidNode)?;
+          Err(RpcError::ConnectionError("invalid amount of protocol specifications".to_string()))?;
         }
         url = split_userpass[0].to_string() + "://" + &url;
         userpass = split_userpass[1];
       }
 
       let split_userpass = userpass.split(':').collect::<Vec<_>>();
-      if split_userpass.len() != 2 {
-        Err(RpcError::InvalidNode)?;
+      if split_userpass.len() > 2 {
+        Err(RpcError::ConnectionError("invalid amount of passwords".to_string()))?;
       }
-      Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
+
+      let client = Client::without_connection_pool(url.clone())
+        .map_err(|_| RpcError::ConnectionError("invalid URL".to_string()))?;
+      // Obtain the initial challenge, which also somewhat validates this connection
+      let challenge = Self::digest_auth_challenge(
+        &client
+          .request(
+            Request::post(url.clone())
+              .body(vec![].into())
+              .map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}")))?,
+          )
+          .await
+          .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
+      )?;
+      Authentication::Authenticated {
+        username: split_userpass[0].to_string(),
+        password: split_userpass.get(1).unwrap_or(&"").to_string(),
+        connection: Arc::new(Mutex::new((challenge, client))),
+      }
     } else {
-      None
+      Authentication::Unauthenticated(Client::with_connection_pool())
     };
 
-    Ok(Rpc(HttpRpc { client: Client::new(), userpass, url }))
+    Ok(Rpc(HttpRpc { authentication, url }))
   }
 }
 
+impl HttpRpc {
+  async fn inner_post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
+    let request_fn = |uri| {
+      Request::post(uri)
+        .body(body.clone().into())
+        .map_err(|e| RpcError::ConnectionError(format!("couldn't make request: {e:?}")))
+    };
+
+    for attempt in 0 .. 2 {
+      let response = match &self.authentication {
+        Authentication::Unauthenticated(client) => client
+          .request(request_fn(self.url.clone() + "/" + route)?)
+          .await
+          .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
+        Authentication::Authenticated { username, password, connection } => {
+          let mut connection_lock = connection.lock().await;
+
+          let mut request = request_fn("/".to_string() + route)?;
+
+          // If we don't have an auth challenge, obtain one
+          if connection_lock.0.is_none() {
+            connection_lock.0 = Self::digest_auth_challenge(
+              &connection_lock
+                .1
+                .request(request)
+                .await
+                .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?,
+            )?;
+            request = request_fn("/".to_string() + route)?;
+          }
+
+          // Insert the challenge response, if we have a challenge
+          if let Some((challenge, cnonce)) = connection_lock.0.as_mut() {
+            // Update the cnonce
+            // Overflow isn't a concern as this is a u64
+            *cnonce += 1;
+
+            let mut context = AuthContext::new_post::<_, _, _, &[u8]>(
+              username,
+              password,
+              "/".to_string() + route,
+              None,
+            );
+            context.set_custom_cnonce(hex::encode(cnonce.to_le_bytes()));
+
+            request.headers_mut().insert(
+              "Authorization",
+              HeaderValue::from_str(
+                &challenge
+                  .respond(&context)
+                  .map_err(|_| RpcError::InvalidNode("couldn't respond to digest-auth challenge"))?
+                  .to_header_string(),
+              )
+              .unwrap(),
+            );
+          }
+
+          let response_result = connection_lock
+            .1
+            .request(request)
+            .await
+            .map_err(|e| RpcError::ConnectionError(format!("{e:?}")));
+
+          // If the connection entered an error state, drop the cached challenge as challenges are
+          // per-connection
+          // We don't need to create a new connection as simple-request will for us
+          if response_result.is_err() {
+            connection_lock.0 = None;
+          }
+
+          // If we're not already on our second attempt and:
+          // A) We had a connection error
+          // B) We need to re-auth due to this token being stale
+          // Move to the next loop iteration (retrying all of this)
+          if (attempt == 0) &&
+            (response_result.is_err() || {
+              let response = response_result.as_ref().unwrap();
+              if response.status() == StatusCode::UNAUTHORIZED {
+                if let Some(header) = response.headers().get("www-authenticate") {
+                  header
+                    .to_str()
+                    .map_err(|_| RpcError::InvalidNode("www-authenticate header wasn't a string"))?
+                    .contains("stale")
+                } else {
+                  false
+                }
+              } else {
+                false
+              }
+            })
+          {
+            // Drop the cached authentication before we do
+            connection_lock.0 = None;
+            continue;
+          }
+
+          response_result?
+        }
+      };
+
+      /*
+        let length = usize::try_from(
+          response
+            .headers()
+            .get("content-length")
+            .ok_or(RpcError::InvalidNode("no content-length header"))?
+            .to_str()
+            .map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))?
+            .parse::<u32>()
+            .map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?,
+        )
+        .unwrap();
+        // Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually
+        // has to send 1 GB of data to cause a 1 GB allocation
+        let mut res = Vec::with_capacity(length.max(1024 * 1024));
+        let mut body = response.into_body();
+        while res.len() < length {
+          let Some(data) = body.data().await else { break };
+          res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref());
+        }
+      */
+
+      let mut res = Vec::with_capacity(128);
+      response
+        .body()
+        .await
+        .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
+        .read_to_end(&mut res)
+        .unwrap();
+
+      return Ok(res);
+    }
+
+    unreachable!()
+  }
+}
+
 #[async_trait]
 impl RpcConnection for HttpRpc {
   async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
-    let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);
-
-    if let Some((user, pass)) = &self.userpass {
-      let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
-      // Only provide authentication if this daemon actually expects it
-      if let Some(header) = req.headers().get("www-authenticate") {
-        builder = builder.header(
-          "Authorization",
-          digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
-            .map_err(|_| RpcError::InvalidNode)?
-            .respond(&AuthContext::new_post::<_, _, _, &[u8]>(
-              user,
-              pass,
-              "/".to_string() + route,
-              None,
-            ))
-            .map_err(|_| RpcError::InvalidNode)?
-            .to_header_string(),
-        );
-      }
-    }
-
-    Ok(
-      builder
-        .send()
-        .await
-        .map_err(|_| RpcError::ConnectionError)?
-        .bytes()
-        .await
-        .map_err(|_| RpcError::ConnectionError)?
-        .slice(..)
-        .to_vec(),
-    )
+    // TODO: Make this timeout configurable
+    tokio::time::timeout(core::time::Duration::from_secs(30), self.inner_post(route, body))
+      .await
+      .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
   }
 }

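// Usage sketch (hypothetical URL): HttpRpc::new("http://user:pass@node:18081".to_string())
//   .await? yields an authenticated RPC which locks its connection and tracks its digest-auth
//   cnonce; omitting the user:pass@ prefix yields the pooled, unauthenticated client instead.
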
@@ -19,14 +19,19 @@ use crate::{
   serialize::*,
   transaction::{Input, Timelock, Transaction},
   block::Block,
-  wallet::Fee,
+  wallet::{FeePriority, Fee},
 };
 
-#[cfg(feature = "http_rpc")]
+#[cfg(feature = "http-rpc")]
 mod http;
-#[cfg(feature = "http_rpc")]
+#[cfg(feature = "http-rpc")]
 pub use http::*;
 
+// Number of blocks the fee estimate will be valid for
+// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
+//   src/wallet/wallet2.cpp#L121
+const GRACE_BLOCKS_FOR_FEE_ESTIMATE: u64 = 10;
+
 #[derive(Deserialize, Debug)]
 pub struct EmptyResponse {}
 #[derive(Deserialize, Debug)]
@@ -52,10 +57,10 @@ struct TransactionsResponse {
 pub enum RpcError {
   #[cfg_attr(feature = "std", error("internal error ({0})"))]
   InternalError(&'static str),
-  #[cfg_attr(feature = "std", error("connection error"))]
-  ConnectionError,
-  #[cfg_attr(feature = "std", error("invalid node"))]
-  InvalidNode,
+  #[cfg_attr(feature = "std", error("connection error ({0})"))]
+  ConnectionError(String),
+  #[cfg_attr(feature = "std", error("invalid node ({0})"))]
+  InvalidNode(&'static str),
   #[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
   UnsupportedProtocol(usize),
   #[cfg_attr(feature = "std", error("transactions not found"))]
@@ -66,14 +71,18 @@ pub enum RpcError {
   PrunedTransaction,
   #[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
   InvalidTransaction([u8; 32]),
+  #[cfg_attr(feature = "std", error("unexpected fee response"))]
+  InvalidFee,
+  #[cfg_attr(feature = "std", error("invalid priority"))]
+  InvalidPriority,
 }
 
 fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
-  hex::decode(value).map_err(|_| RpcError::InvalidNode)
+  hex::decode(value).map_err(|_| RpcError::InvalidNode("expected hex wasn't hex"))
 }
 
 fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
-  rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
+  rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode("hash wasn't 32-bytes"))
 }
 
 fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
@@ -122,23 +131,21 @@ impl<R: RpcConnection> Rpc<R> {
     route: &str,
     params: Option<Params>,
   ) -> Result<Response, RpcError> {
-    serde_json::from_str(
-      std_shims::str::from_utf8(
-        &self
-          .0
-          .post(
-            route,
-            if let Some(params) = params {
-              serde_json::to_string(&params).unwrap().into_bytes()
-            } else {
-              vec![]
-            },
-          )
-          .await?,
-      )
-      .map_err(|_| RpcError::InvalidNode)?,
-    )
-    .map_err(|_| RpcError::InvalidNode)
+    let res = self
+      .0
+      .post(
+        route,
+        if let Some(params) = params {
+          serde_json::to_string(&params).unwrap().into_bytes()
+        } else {
+          vec![]
+        },
+      )
+      .await?;
+    let res_str = std_shims::str::from_utf8(&res)
+      .map_err(|_| RpcError::InvalidNode("response wasn't utf-8"))?;
+    serde_json::from_str(res_str)
+      .map_err(|_| RpcError::InvalidNode("response wasn't json: {res_str}"))
   }
 
 /// Perform a JSON-RPC call with the specified method with the provided parameters
@@ -238,7 +245,7 @@ impl<R: RpcConnection> Rpc<R> {
 
       // https://github.com/monero-project/monero/issues/8311
       if res.as_hex.is_empty() {
-        match tx.prefix.inputs.get(0) {
+        match tx.prefix.inputs.first() {
          Some(Input::Gen { .. }) => (),
          _ => Err(RpcError::PrunedTransaction)?,
        }
@@ -247,7 +254,7 @@ impl<R: RpcConnection> Rpc<R> {
       // This does run a few keccak256 hashes, which is pointless if the node is trusted
       // In exchange, this provides resilience against invalid/malicious nodes
       if tx.hash() != hashes[i] {
-        Err(RpcError::InvalidNode)?;
+        Err(RpcError::InvalidNode("replied with transaction wasn't the requested transaction"))?;
       }
 
       Ok(tx)
@@ -273,7 +280,7 @@ impl<R: RpcConnection> Rpc<R> {
 
     let header: BlockHeaderByHeightResponse =
       self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
-    rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
+    hash_hex(&header.block_header.hash)
   }
 
   /// Get a block from the node by its hash.
@@ -287,30 +294,36 @@ impl<R: RpcConnection> Rpc<R> {
     let res: BlockResponse =
       self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;
 
-    let block =
-      Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)?;
+    let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
+      .map_err(|_| RpcError::InvalidNode("invalid block"))?;
     if block.hash() != hash {
-      Err(RpcError::InvalidNode)?;
+      Err(RpcError::InvalidNode("different block than requested (hash)"))?;
     }
     Ok(block)
   }
 
   pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
-    match self.get_block(self.get_block_hash(number).await?).await {
-      Ok(block) => {
-        // Make sure this is actually the block for this number
-        match block.miner_tx.prefix.inputs[0] {
-          Input::Gen(actual) => {
-            if usize::try_from(actual).unwrap() == number {
-              Ok(block)
-            } else {
-              Err(RpcError::InvalidNode)
-            }
-          }
-          _ => Err(RpcError::InvalidNode),
-        }
-      }
-      e => e,
-    }
+    #[derive(Deserialize, Debug)]
+    struct BlockResponse {
+      blob: String,
+    }
+
+    let res: BlockResponse =
+      self.json_rpc_call("get_block", Some(json!({ "height": number }))).await?;
+
+    let block = Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref())
+      .map_err(|_| RpcError::InvalidNode("invalid block"))?;
+
+    // Make sure this is actually the block for this number
+    match block.miner_tx.prefix.inputs.first() {
+      Some(Input::Gen(actual)) => {
+        if usize::try_from(*actual).unwrap() == number {
+          Ok(block)
+        } else {
+          Err(RpcError::InvalidNode("different block than requested (number)"))
+        }
+      }
+      _ => Err(RpcError::InvalidNode("block's miner_tx didn't have an input of kind Input::Gen")),
+    }
   }

@@ -338,7 +351,6 @@ impl<R: RpcConnection> Rpc<R> {
txid: [u8; 32],
}

#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct OIndexes {
o_indexes: Vec<u64>,
@@ -369,11 +381,14 @@ impl<R: RpcConnection> Rpc<R> {
let mut indexes: &[u8] = indexes_buf.as_ref();

(|| {
let mut res = None;
let mut is_okay = false;

if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
Err(io::Error::other("invalid header"))?;
}

let read_object = |reader: &mut &[u8]| {
let read_object = |reader: &mut &[u8]| -> io::Result<Vec<u64>> {
let fields = read_byte(reader)? >> 2;

for _ in 0 .. fields {
@@ -386,7 +401,7 @@ impl<R: RpcConnection> Rpc<R> {
let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };

if (&name == b"o_indexes") && (kind != 5) {
Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
Err(io::Error::other("o_indexes weren't u64s"))?;
}

let f = match kind {
@@ -413,54 +428,72 @@ impl<R: RpcConnection> Rpc<R> {
let len = read_epee_vi(reader)?;
read_raw_vec(
read_byte,
len
.try_into()
.map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
len.try_into().map_err(|_| io::Error::other("u64 length exceeded usize"))?,
reader,
)
},
// bool
11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
// object, errors here as it shouldn't be used on this call
12 => |_: &mut &[u8]| {
Err(io::Error::new(
io::ErrorKind::Other,
"node used object in reply to get_o_indexes",
))
},
// array, so far unused
13 => |_: &mut &[u8]| {
Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
},
_ => {
|_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
12 => {
|_: &mut &[u8]| Err(io::Error::other("node used object in reply to get_o_indexes"))
}
// array, so far unused
13 => |_: &mut &[u8]| Err(io::Error::other("node used the unused array type")),
_ => |_: &mut &[u8]| Err(io::Error::other("node used an invalid type")),
};

let mut res = vec![];
let mut bytes_res = vec![];
for _ in 0 .. iters {
res.push(f(reader)?);
bytes_res.push(f(reader)?);
}

let mut actual_res = Vec::with_capacity(res.len());
if &name == b"o_indexes" {
for o_index in res {
actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
})?));
let mut actual_res = Vec::with_capacity(bytes_res.len());
match name.as_slice() {
b"o_indexes" => {
for o_index in bytes_res {
actual_res.push(u64::from_le_bytes(
o_index
.try_into()
.map_err(|_| io::Error::other("node didn't provide 8 bytes for a u64"))?,
));
}
res = Some(actual_res);
}
return Ok(actual_res);
b"status" => {
if bytes_res
.first()
.ok_or_else(|| io::Error::other("status wasn't a string"))?
.as_slice() !=
b"OK"
{
// TODO: Better handle non-OK responses
Err(io::Error::other("response wasn't OK"))?;
}
is_okay = true;
}
_ => continue,
}

if is_okay && res.is_some() {
break;
}
}

// Didn't return a response with o_indexes
// TODO: Check if this didn't have o_indexes because it's an error response
Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
// Didn't return a response with a status
// (if the status wasn't okay, we would've already errored)
if !is_okay {
Err(io::Error::other("response didn't contain a status"))?;
}

// If the Vec was empty, it would've been omitted, hence the unwrap_or
// TODO: Test against a 0-output TX, such as the ones found in block 202612
Ok(res.unwrap_or(vec![]))
};

read_object(&mut indexes)
})()
.map_err(|_| RpcError::InvalidNode)
.map_err(|_| RpcError::InvalidNode("invalid binary response"))
}

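For context, `read_epee_vi` above parses epee's variable-length integers. A minimal sketch of that encoding, under the assumption it matches Monero's standard epee format (this is not the library's code): the two low bits of the first byte give the total width (0 means 1 byte, 1 means 2, 2 means 4, 3 means 8), and the value is the little-endian integer shifted right by two.

use std::io::{self, Read};

// Hedged sketch of an epee varint reader; assumes the standard epee encoding
fn read_epee_vi_sketch<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut bytes = [0u8; 8];
  r.read_exact(&mut bytes[.. 1])?;
  // The two low bits of the first byte mark the total width
  let width = 1usize << (bytes[0] & 0b11);
  r.read_exact(&mut bytes[1 .. width])?;
  // The remaining bits, read little-endian, are the value
  Ok(u64::from_le_bytes(bytes) >> 2)
}
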
/// Get the output distribution, from the specified height to the specified height (both
@@ -470,13 +503,11 @@ impl<R: RpcConnection> Rpc<R> {
from: usize,
to: usize,
) -> Result<Vec<u64>, RpcError> {
#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distribution {
distribution: Vec<u64>,
}

#[allow(dead_code)]
#[derive(Deserialize, Debug)]
struct Distributions {
distributions: Vec<Distribution>,
@@ -499,8 +530,10 @@ impl<R: RpcConnection> Rpc<R> {
}

/// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
/// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
/// all Monero transactions.
/// timelock has been satisfied.
///
/// The timelock being satisfied is distinct from being free of the 10-block lock applied to all
/// Monero transactions.
pub async fn get_unlocked_outputs(
&self,
indexes: &[u64],
@@ -533,11 +566,7 @@ impl<R: RpcConnection> Rpc<R> {

let txs = self
.get_transactions(
&outs
.outs
.iter()
.map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
.collect::<Result<Vec<_>, _>>()?,
&outs.outs.iter().map(|out| hash_hex(&out.txid)).collect::<Result<Vec<_>, _>>()?,
)
.await?;

@@ -547,27 +576,102 @@ impl<R: RpcConnection> Rpc<R> {
.iter()
.enumerate()
.map(|(i, out)| {
// Allow keys to be invalid, though if they are, return None to trigger selection of a new
// decoy
// Only valid keys can be used in CLSAG proofs, hence the need for re-selection, yet
// invalid keys may honestly exist on the blockchain
// Only a recent hard fork checked output keys were valid points
let Some(key) = CompressedEdwardsY(
rpc_hex(&out.key)?.try_into().map_err(|_| RpcError::InvalidNode("non-32-byte point"))?,
)
.decompress() else {
return Ok(None);
};
Ok(
Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
Some([key, rpc_point(&out.mask)?])
.filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
)
})
.collect()
}

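The `Timelock::Block(height) >= txs[i].prefix.timelock` filter above relies on timelocks forming a partial order. The following is a simplified, self-contained model of that ordering (an illustration, not the crate's actual implementation): no lock means unlocked, locks of the same kind compare numerically, and a height is incomparable with a timestamp, so `>=` evaluates to false and the output stays filtered out.

use core::cmp::Ordering;

// Simplified model of a Monero timelock, for illustration purposes only
#[derive(Clone, Copy, PartialEq, Eq)]
enum Timelock {
  None,         // no lock
  Block(usize), // unlocked once the chain reaches this height
  Time(u64),    // unlocked once this UNIX time passes
}

impl PartialOrd for Timelock {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    match (self, other) {
      (Timelock::None, Timelock::None) => Some(Ordering::Equal),
      (Timelock::None, _) => Some(Ordering::Less),
      (_, Timelock::None) => Some(Ordering::Greater),
      (Timelock::Block(a), Timelock::Block(b)) => a.partial_cmp(b),
      (Timelock::Time(a), Timelock::Time(b)) => a.partial_cmp(b),
      // A height and a timestamp can't be compared; `>=` then returns false,
      // conservatively treating the output as still locked
      _ => None,
    }
  }
}

fn main() {
  let height = 3_000_000;
  assert!(Timelock::Block(height) >= Timelock::None);
  assert!(Timelock::Block(height) >= Timelock::Block(height - 1));
  assert!(!(Timelock::Block(height) >= Timelock::Time(1_700_000_000)));
}
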
/// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
/// MUST be sanity checked.
// TODO: Take a sanity check argument
pub async fn get_fee(&self) -> Result<Fee, RpcError> {
#[allow(dead_code)]
async fn get_fee_v14(&self, priority: FeePriority) -> Result<Fee, RpcError> {
#[derive(Deserialize, Debug)]
struct FeeResponse {
struct FeeResponseV14 {
status: String,
fee: u64,
quantization_mask: u64,
}

let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/wallet/wallet2.cpp#L7569-L7584
// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/wallet/wallet2.cpp#L7660-L7661
let priority_idx =
usize::try_from(if priority.fee_priority() == 0 { 1 } else { priority.fee_priority() - 1 })
.map_err(|_| RpcError::InvalidPriority)?;
let multipliers = [1, 5, 25, 1000];
if priority_idx >= multipliers.len() {
// though not an RPC error, it seems sensible to treat as such
Err(RpcError::InvalidPriority)?;
}
let fee_multiplier = multipliers[priority_idx];

let res: FeeResponseV14 = self
.json_rpc_call(
"get_fee_estimate",
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
)
.await?;

if res.status != "OK" {
Err(RpcError::InvalidFee)?;
}

Ok(Fee { per_weight: res.fee * fee_multiplier, mask: res.quantization_mask })
}

/// Get the currently estimated fee from the node.
///
/// This may be manipulated to unsafe levels and MUST be sanity checked.
// TODO: Take a sanity check argument
pub async fn get_fee(&self, protocol: Protocol, priority: FeePriority) -> Result<Fee, RpcError> {
// TODO: Implement wallet2's adjust_priority which by default automatically uses a lower
// priority than provided depending on the backlog in the pool
if protocol.v16_fee() {
#[derive(Deserialize, Debug)]
struct FeeResponse {
status: String,
fees: Vec<u64>,
quantization_mask: u64,
}

let res: FeeResponse = self
.json_rpc_call(
"get_fee_estimate",
Some(json!({ "grace_blocks": GRACE_BLOCKS_FOR_FEE_ESTIMATE })),
)
.await?;

// https://github.com/monero-project/monero/blob/94e67bf96bbc010241f29ada6abc89f49a81759c/
// src/wallet/wallet2.cpp#L7615-L7620
let priority_idx = usize::try_from(if priority.fee_priority() >= 4 {
3
} else {
priority.fee_priority().saturating_sub(1)
})
.map_err(|_| RpcError::InvalidPriority)?;

if res.status != "OK" {
Err(RpcError::InvalidFee)
} else if priority_idx >= res.fees.len() {
Err(RpcError::InvalidPriority)
} else {
Ok(Fee { per_weight: res.fees[priority_idx], mask: res.quantization_mask })
}
} else {
self.get_fee_v14(priority).await
}
}

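A worked example of the two priority mappings above, assuming `fee_priority()` yields the raw 0-4 wallet2-style priority. Under the v14 rules, priority 0 (the default) and priority 2 both select index 1, multiplier 5, so a reported fee of 8 per weight becomes 40; under the v16 rules, priorities 0 and 1 both select `fees[0]`, and anything 4 or above clamps to `fees[3]`.

// Standalone arithmetic mirroring the index mappings above; illustrative only
fn v14_index(fee_priority: u64) -> usize {
  usize::try_from(if fee_priority == 0 { 1 } else { fee_priority - 1 }).unwrap()
}

fn v16_index(fee_priority: u64) -> usize {
  usize::try_from(if fee_priority >= 4 { 3 } else { fee_priority.saturating_sub(1) }).unwrap()
}

fn main() {
  let multipliers = [1, 5, 25, 1000];
  assert_eq!(multipliers[v14_index(0)], 5);
  assert_eq!(multipliers[v14_index(2)], 5);
  assert_eq!(8 * multipliers[v14_index(0)], 40); // fee of 8 per weight, default priority
  assert_eq!(v16_index(0), 0);
  assert_eq!(v16_index(1), 0);
  assert_eq!(v16_index(7), 3);
}
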
pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
@@ -598,20 +702,32 @@ impl<R: RpcConnection> Rpc<R> {
Ok(())
}

pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
self
.rpc_call::<_, EmptyResponse>(
"json_rpc",
// TODO: Take &Address, not &str?
pub async fn generate_blocks(
&self,
address: &str,
block_count: usize,
) -> Result<Vec<[u8; 32]>, RpcError> {
#[derive(Debug, Deserialize)]
struct BlocksResponse {
blocks: Vec<String>,
}

let block_strs = self
.json_rpc_call::<BlocksResponse>(
"generateblocks",
Some(json!({
"method": "generateblocks",
"params": {
"wallet_address": address,
"amount_of_blocks": block_count
},
"wallet_address": address,
"amount_of_blocks": block_count
})),
)
.await?;
.await?
.blocks;

Ok(())
let mut blocks = Vec::with_capacity(block_strs.len());
for block in block_strs {
blocks.push(hash_hex(&block)?);
}
Ok(blocks)
}
}

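A hypothetical usage sketch of the new `generate_blocks` signature against a local regtest daemon. The `HttpRpc` constructor, URL, and address below are placeholder assumptions for illustration, not a guaranteed API:

use monero_serai::rpc::{HttpRpc, RpcError};

async fn mine_ten_blocks() -> Result<(), RpcError> {
  let rpc = HttpRpc::new("http://127.0.0.1:18081".to_string())?;
  // The new signature returns the hashes of the mined blocks instead of ()
  let hashes: Vec<[u8; 32]> = rpc.generate_blocks("4...placeholder-address...", 10).await?;
  assert_eq!(hashes.len(), 10);
  Ok(())
}
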
@@ -11,16 +11,27 @@ use curve25519_dalek::{

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

pub(crate) fn varint_len(varint: usize) -> usize {
((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
mod sealed {
pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {}
impl VarInt for u8 {}
impl VarInt for u32 {}
impl VarInt for u64 {}
impl VarInt for usize {}
}

// This will panic if the VarInt exceeds u64::MAX
pub(crate) fn varint_len<U: sealed::VarInt>(varint: U) -> usize {
let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}

pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
w.write_all(&[*byte])
}

pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
let mut varint = *varint;
// This will panic if the VarInt exceeds u64::MAX
pub(crate) fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
while {
let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
varint >>= 7;
@@ -57,7 +68,7 @@ pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
values: &[T],
w: &mut W,
) -> io::Result<()> {
write_varint(&values.len().try_into().unwrap(), w)?;
write_varint(&values.len(), w)?;
write_raw_vec(f, values, w)
}

@@ -83,23 +94,23 @@ pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
read_bytes(r).map(u64::from_le_bytes)
}

pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
pub(crate) fn read_varint<R: Read, U: sealed::VarInt>(r: &mut R) -> io::Result<U> {
let mut bits = 0;
let mut res = 0;
while {
let b = read_byte(r)?;
if (bits != 0) && (b == 0) {
Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
Err(io::Error::other("non-canonical varint"))?;
}
if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
Err(io::Error::other("varint overflow"))?;
}

res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
bits += 7;
b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
} {}
Ok(res)
res.try_into().map_err(|_| io::Error::other("VarInt does not fit into integer type"))
}

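A worked example of the seven-bit little-endian varint scheme implemented above: 300 is 0b1_0010_1100, so its low seven bits (0x2C) are emitted with the continuation bit set (0xAC), followed by 0b10 (0x02). `varint_len` agrees, since 300 occupies 9 bits and ((9 - 1) / 7) + 1 = 2.

// Self-contained encoder mirroring write_varint above (not the crate's internals)
fn encode_varint(mut v: u64) -> Vec<u8> {
  let mut out = vec![];
  loop {
    let mut b = (v & 0x7f) as u8;
    v >>= 7;
    if v != 0 {
      b |= 0x80; // set the continuation bit
    }
    out.push(b);
    if v == 0 {
      return out;
    }
  }
}

fn main() {
  assert_eq!(encode_varint(300), vec![0xac, 0x02]);
  // The varint_len formula: ((bits - 1) / 7) + 1
  let bits = u64::BITS - 300u64.leading_zeros();
  assert_eq!(((bits - 1) / 7) + 1, 2);
}
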
// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
@@ -109,8 +120,8 @@ pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
// reduction applied
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
Scalar::from_canonical_bytes(read_bytes(r)?)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
Option::from(Scalar::from_canonical_bytes(read_bytes(r)?))
.ok_or_else(|| io::Error::other("unreduced scalar"))
}

pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
@@ -119,14 +130,14 @@ pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
.decompress()
// Ban points which are either unreduced or -0
.filter(|point| point.compress().to_bytes() == bytes)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
.ok_or_else(|| io::Error::other("invalid point"))
}

pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
read_point(r)
.ok()
.filter(|point| point.is_torsion_free())
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
.filter(EdwardsPoint::is_torsion_free)
.ok_or_else(|| io::Error::other("invalid point"))
}

pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
@@ -152,5 +163,5 @@ pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
f: F,
r: &mut R,
) -> io::Result<Vec<T>> {
read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
read_raw_vec(f, read_varint(r)?, r)
}

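To make the canonicality rule above concrete, the following standalone check (written directly against curve25519-dalek; the constant is the little-endian encoding of the group order l) shows `from_canonical_bytes` rejecting an unreduced encoding while accepting a reduced one:

use curve25519_dalek::scalar::Scalar;

fn main() {
  // l = 2^252 + 27742317777372353535851937790883648493, little-endian
  let l: [u8; 32] = [
    0xed, 0xd3, 0xf5, 0x5c, 0x1a, 0x63, 0x12, 0x58, 0xd6, 0x9c, 0xf7, 0xa2, 0xde, 0xf9, 0xde,
    0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x10,
  ];
  // l mod l == 0, whose canonical encoding is all zeroes, so the encoding of l
  // itself is unreduced and a reader following the above must reject it
  assert!(Option::<Scalar>::from(Scalar::from_canonical_bytes(l)).is_none());
  // The canonical encoding of zero is accepted
  assert!(Option::<Scalar>::from(Scalar::from_canonical_bytes([0; 32])).is_some());
}
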
@@ -73,8 +73,8 @@ fn featured() {
[(Network::Mainnet, 'C'), (Network::Testnet, 'K'), (Network::Stagenet, 'F')]
{
for _ in 0 .. 100 {
let spend = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
let view = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
let spend = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
let view = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;

for features in 0 .. (1 << 3) {
const SUBADDRESS_FEATURE_BIT: u8 = 1;
@@ -142,10 +142,14 @@ fn featured_vectors() {
}
_ => panic!("Unknown network"),
};
let spend =
CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap()).decompress().unwrap();
let view =
CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap()).decompress().unwrap();
let spend = CompressedEdwardsY::from_slice(&hex::decode(vector.spend).unwrap())
.unwrap()
.decompress()
.unwrap();
let view = CompressedEdwardsY::from_slice(&hex::decode(vector.view).unwrap())
.unwrap()
.decompress()
.unwrap();

let addr = MoneroAddress::from_str(network, &vector.address).unwrap();
assert_eq!(addr.spend, spend);

@@ -9,6 +9,8 @@ use crate::{
ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
};

mod plus;

#[test]
fn bulletproofs_vector() {
let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
@@ -62,7 +64,7 @@ macro_rules! bulletproofs_tests {
fn $name() {
// Create Bulletproofs for all possible output quantities
let mut verifier = BatchVerifier::new(16);
for i in 1 .. 17 {
for i in 1 ..= 16 {
let commitments = (1 ..= i)
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
.collect::<Vec<_>>();
@@ -81,7 +83,7 @@ macro_rules! bulletproofs_tests {
// Check Bulletproofs errors if we try to prove for too many outputs
let mut commitments = vec![];
for _ in 0 .. 17 {
commitments.push(Commitment::new(Scalar::zero(), 0));
commitments.push(Commitment::new(Scalar::ZERO, 0));
}
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
}
@@ -0,0 +1,30 @@
use rand_core::{RngCore, OsRng};

use multiexp::BatchVerifier;
use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::{
Commitment,
ringct::bulletproofs::plus::aggregate_range_proof::{
AggregateRangeStatement, AggregateRangeWitness,
},
};

#[test]
fn test_aggregate_range_proof() {
let mut verifier = BatchVerifier::new(16);
for m in 1 ..= 16 {
let mut commitments = vec![];
for _ in 0 .. m {
commitments.push(Commitment::new(*Scalar::random(&mut OsRng), OsRng.next_u64()));
}
let commitment_points = commitments.iter().map(|com| EdwardsPoint(com.calculate())).collect();
let statement = AggregateRangeStatement::new(commitment_points).unwrap();
let witness = AggregateRangeWitness::new(&commitments).unwrap();

let proof = statement.clone().prove(&mut OsRng, witness).unwrap();
statement.verify(&mut OsRng, &mut verifier, (), proof);
}
assert!(verifier.verify_vartime());
}
coins/monero/src/tests/bulletproofs/plus/mod.rs (new file, +4)
@@ -0,0 +1,4 @@
#[cfg(test)]
mod weighted_inner_product;
#[cfg(test)]
mod aggregate_range_proof;
@@ -0,0 +1,82 @@
// The inner product relation is P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)

use rand_core::OsRng;

use multiexp::BatchVerifier;
use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};

use crate::ringct::bulletproofs::plus::{
ScalarVector, PointVector, GeneratorsList, Generators,
weighted_inner_product::{WipStatement, WipWitness},
weighted_inner_product,
};

#[test]
fn test_zero_weighted_inner_product() {
#[allow(non_snake_case)]
let P = EdwardsPoint::identity();
let y = Scalar::random(&mut OsRng);

let generators = Generators::new().reduce(1);
let statement = WipStatement::new(generators, P, y);
let witness = WipWitness::new(ScalarVector::new(1), ScalarVector::new(1), Scalar::ZERO).unwrap();

let transcript = Scalar::random(&mut OsRng);
let proof = statement.clone().prove(&mut OsRng, transcript, witness).unwrap();

let mut verifier = BatchVerifier::new(1);
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
assert!(verifier.verify_vartime());
}

#[test]
fn test_weighted_inner_product() {
// P = sum(g_bold * a, h_bold * b, g * (a * y * b), h * alpha)
let mut verifier = BatchVerifier::new(6);
let generators = Generators::new();
for i in [1, 2, 4, 8, 16, 32] {
let generators = generators.reduce(i);
let g = generators.g();
let h = generators.h();
assert_eq!(generators.len(), i);
let mut g_bold = vec![];
let mut h_bold = vec![];
for i in 0 .. i {
g_bold.push(generators.generator(GeneratorsList::GBold1, i));
h_bold.push(generators.generator(GeneratorsList::HBold1, i));
}
let g_bold = PointVector(g_bold);
let h_bold = PointVector(h_bold);

let mut a = ScalarVector::new(i);
let mut b = ScalarVector::new(i);
let alpha = Scalar::random(&mut OsRng);

let y = Scalar::random(&mut OsRng);
let mut y_vec = ScalarVector::new(g_bold.len());
y_vec[0] = y;
for i in 1 .. y_vec.len() {
y_vec[i] = y_vec[i - 1] * y;
}

for i in 0 .. i {
a[i] = Scalar::random(&mut OsRng);
b[i] = Scalar::random(&mut OsRng);
}

#[allow(non_snake_case)]
let P = g_bold.multiexp(&a) +
h_bold.multiexp(&b) +
(g * weighted_inner_product(&a, &b, &y_vec)) +
(h * alpha);

let statement = WipStatement::new(generators, P, y);
let witness = WipWitness::new(a, b, alpha).unwrap();

let transcript = Scalar::random(&mut OsRng);
let proof = statement.clone().prove(&mut OsRng, transcript, witness).unwrap();
statement.verify(&mut OsRng, &mut verifier, (), transcript, proof);
}
assert!(verifier.verify_vartime());
}
@@ -40,7 +40,7 @@ fn clsag() {
for real in 0 .. RING_LEN {
let msg = [1; 32];

let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
let mut secrets = (Zeroizing::new(Scalar::ZERO), Scalar::ZERO);
let mut ring = vec![];
for i in 0 .. RING_LEN {
let dest = Zeroizing::new(random_scalar(&mut OsRng));
@@ -53,7 +53,7 @@ fn clsag() {
amount = OsRng.next_u64();
}
ring
.push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
.push([dest.deref() * ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
}

let image = generate_key_image(&secrets.0);
@@ -92,7 +92,7 @@ fn clsag_multisig() {
let mask;
let amount;
if i != u64::from(RING_INDEX) {
dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
dest = &random_scalar(&mut OsRng) * ED25519_BASEPOINT_TABLE;
mask = random_scalar(&mut OsRng);
amount = OsRng.next_u64();
} else {

@@ -1,3 +1,4 @@
mod unreduced_scalar;
mod clsag;
mod bulletproofs;
mod address;

@@ -6,13 +6,17 @@ use curve25519_dalek::scalar::Scalar;

use crate::{
hash,
wallet::seed::{Seed, Language, classic::trim_by_lang},
wallet::seed::{
Seed, SeedType,
classic::{self, trim_by_lang},
polyseed,
},
};

#[test]
fn test_classic_seed() {
struct Vector {
language: Language,
language: classic::Language,
seed: String,
spend: String,
view: String,
@@ -20,13 +24,13 @@ fn test_classic_seed() {

let vectors = [
Vector {
language: Language::Chinese,
language: classic::Language::Chinese,
seed: "摇 曲 艺 武 滴 然 效 似 赏 式 祥 歌 买 疑 小 碧 堆 博 键 房 鲜 悲 付 喷 武".into(),
spend: "a5e4fff1706ef9212993a69f246f5c95ad6d84371692d63e9bb0ea112a58340d".into(),
view: "1176c43ce541477ea2f3ef0b49b25112b084e26b8a843e1304ac4677b74cdf02".into(),
},
Vector {
language: Language::English,
language: classic::Language::English,
seed: "washing thirsty occur lectures tuesday fainted toxic adapt \
abnormal memoir nylon mostly building shrugged online ember northern \
ruby woes dauntless boil family illness inroads northern"
@@ -35,7 +39,7 @@ fn test_classic_seed() {
view: "513ba91c538a5a9069e0094de90e927c0cd147fa10428ce3ac1afd49f63e3b01".into(),
},
Vector {
language: Language::Dutch,
language: classic::Language::Dutch,
seed: "setwinst riphagen vimmetje extase blief tuitelig fuiven meifeest \
ponywagen zesmaal ripdeal matverf codetaal leut ivoor rotten \
wisgerhof winzucht typograaf atrium rein zilt traktaat verzaagd setwinst"
@@ -44,7 +48,7 @@ fn test_classic_seed() {
view: "eac30b69477e3f68093d131c7fd961564458401b07f8c87ff8f6030c1a0c7301".into(),
},
Vector {
language: Language::French,
language: classic::Language::French,
seed: "poids vaseux tarte bazar poivre effet entier nuance \
sensuel ennui pacte osselet poudre battre alibi mouton \
stade paquet pliage gibier type question position projet pliage"
@@ -53,7 +57,7 @@ fn test_classic_seed() {
view: "6725b32230400a1032f31d622b44c3a227f88258939b14a7c72e00939e7bdf0e".into(),
},
Vector {
language: Language::Spanish,
language: classic::Language::Spanish,
seed: "minero ocupar mirar evadir octubre cal logro miope \
opaco disco ancla litio clase cuello nasal clase \
fiar avance deseo mente grumo negro cordón croqueta clase"
@@ -62,7 +66,7 @@ fn test_classic_seed() {
view: "18deafb34d55b7a43cae2c1c1c206a3c80c12cc9d1f84640b484b95b7fec3e05".into(),
},
Vector {
language: Language::German,
language: classic::Language::German,
seed: "Kaliber Gabelung Tapir Liveband Favorit Specht Enklave Nabel \
Jupiter Foliant Chronik nisten löten Vase Aussage Rekord \
Yeti Gesetz Eleganz Alraune Künstler Almweide Jahr Kastanie Almweide"
@@ -71,7 +75,7 @@ fn test_classic_seed() {
view: "99f0ec556643bd9c038a4ed86edcb9c6c16032c4622ed2e000299d527a792701".into(),
},
Vector {
language: Language::Italian,
language: classic::Language::Italian,
seed: "cavo pancetta auto fulmine alleanza filmato diavolo prato \
forzare meritare litigare lezione segreto evasione votare buio \
licenza cliente dorso natale crescere vento tutelare vetta evasione"
@@ -80,7 +84,7 @@ fn test_classic_seed() {
view: "698a1dce6018aef5516e82ca0cb3e3ec7778d17dfb41a137567bfa2e55e63a03".into(),
},
Vector {
language: Language::Portuguese,
language: classic::Language::Portuguese,
seed: "agito eventualidade onus itrio holograma sodomizar objetos dobro \
iugoslavo bcrepuscular odalisca abjeto iuane darwinista eczema acetona \
cibernetico hoquei gleba driver buffer azoto megera nogueira agito"
@@ -89,7 +93,7 @@ fn test_classic_seed() {
view: "ad1b4fd35270f5f36c4da7166672b347e75c3f4d41346ec2a06d1d0193632801".into(),
},
Vector {
language: Language::Japanese,
language: classic::Language::Japanese,
seed: "ぜんぶ どうぐ おたがい せんきょ おうじ そんちょう じゅしん いろえんぴつ \
かほう つかれる えらぶ にちじょう くのう にちようび ぬまえび さんきゃく \
おおや ちぬき うすめる いがく せつでん さうな すいえい せつだん おおや"
@@ -98,7 +102,7 @@ fn test_classic_seed() {
view: "6c3634a313ec2ee979d565c33888fd7c3502d696ce0134a8bc1a2698c7f2c508".into(),
},
Vector {
language: Language::Russian,
language: classic::Language::Russian,
seed: "шатер икра нация ехать получать инерция доза реальный \
рыжий таможня лопата душа веселый клетка атлас лекция \
обгонять паек наивный лыжный дурак стать ежик задача паек"
@@ -107,7 +111,7 @@ fn test_classic_seed() {
view: "fcd53e41ec0df995ab43927f7c44bc3359c93523d5009fb3f5ba87431d545a03".into(),
},
Vector {
language: Language::Esperanto,
language: classic::Language::Esperanto,
seed: "ukazo klini peco etikedo fabriko imitado onklino urino \
pudro incidento kumuluso ikono smirgi hirundo uretro krii \
sparkado super speciala pupo alpinisto cvana vokegi zombio fabriko"
@@ -116,7 +120,7 @@ fn test_classic_seed() {
view: "cd4d120e1ea34360af528f6a3e6156063312d9cefc9aa6b5218d366c0ed6a201".into(),
},
Vector {
language: Language::Lojban,
language: classic::Language::Lojban,
seed: "jetnu vensa julne xrotu xamsi julne cutci dakli \
mlatu xedja muvgau palpi xindo sfubu ciste cinri \
blabi darno dembi janli blabi fenki bukpu burcu blabi"
@@ -125,7 +129,7 @@ fn test_classic_seed() {
view: "c806ce62bafaa7b2d597f1a1e2dbe4a2f96bfd804bf6f8420fc7f4a6bd700c00".into(),
},
Vector {
language: Language::EnglishOld,
language: classic::Language::EnglishOld,
seed: "glorious especially puff son moment add youth nowhere \
throw glide grip wrong rhythm consume very swear \
bitter heavy eventually begin reason flirt type unable"
@@ -152,8 +156,8 @@ fn test_classic_seed() {
let spend: [u8; 32] = hex::decode(vector.spend).unwrap().try_into().unwrap();
// For classical seeds, Monero directly uses the entropy as a spend key
assert_eq!(
Scalar::from_canonical_bytes(*seed.entropy()),
Scalar::from_canonical_bytes(spend)
Option::<Scalar>::from(Scalar::from_canonical_bytes(*seed.entropy())),
Option::<Scalar>::from(Scalar::from_canonical_bytes(spend)),
);

let view: [u8; 32] = hex::decode(vector.view).unwrap().try_into().unwrap();
@@ -163,15 +167,216 @@ fn test_classic_seed() {
Scalar::from_canonical_bytes(view).unwrap()
);

assert_eq!(Seed::from_entropy(vector.language, Zeroizing::new(spend)).unwrap(), seed);
assert_eq!(
Seed::from_entropy(SeedType::Classic(vector.language), Zeroizing::new(spend), None)
.unwrap(),
seed
);
}

// Test against ourself
// Test against ourselves
{
let seed = Seed::new(&mut OsRng, vector.language);
let seed = Seed::new(&mut OsRng, SeedType::Classic(vector.language));
assert_eq!(seed, Seed::from_string(Zeroizing::new(trim_seed(&seed.to_string()))).unwrap());
assert_eq!(seed, Seed::from_entropy(vector.language, seed.entropy()).unwrap());
assert_eq!(
seed,
Seed::from_entropy(SeedType::Classic(vector.language), seed.entropy(), None).unwrap()
);
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
}
}
}

#[test]
fn test_polyseed() {
struct Vector {
language: polyseed::Language,
seed: String,
entropy: String,
birthday: u64,
has_prefix: bool,
has_accent: bool,
}

let vectors = [
Vector {
language: polyseed::Language::English,
seed: "raven tail swear infant grief assist regular lamp \
duck valid someone little harsh puppy airport language"
.into(),
entropy: "dd76e7359a0ded37cd0ff0f3c829a5ae01673300000000000000000000000000".into(),
birthday: 1638446400,
has_prefix: true,
has_accent: false,
},
Vector {
language: polyseed::Language::Spanish,
seed: "eje fin parte célebre tabú pestaña lienzo puma \
prisión hora regalo lengua existir lápiz lote sonoro"
.into(),
entropy: "5a2b02df7db21fcbe6ec6df137d54c7b20fd2b00000000000000000000000000".into(),
birthday: 3118651200,
has_prefix: true,
has_accent: true,
},
Vector {
language: polyseed::Language::French,
seed: "valable arracher décaler jeudi amusant dresser mener épaissir risible \
prouesse réserve ampleur ajuster muter caméra enchère"
.into(),
entropy: "11cfd870324b26657342c37360c424a14a050b00000000000000000000000000".into(),
birthday: 1679314966,
has_prefix: true,
has_accent: true,
},
Vector {
language: polyseed::Language::Italian,
seed: "caduco midollo copione meninge isotopo illogico riflesso tartaruga fermento \
olandese normale tristezza episodio voragine forbito achille"
.into(),
entropy: "7ecc57c9b4652d4e31428f62bec91cfd55500600000000000000000000000000".into(),
birthday: 1679316358,
has_prefix: true,
has_accent: false,
},
Vector {
language: polyseed::Language::Portuguese,
seed: "caverna custear azedo adeus senador apertada sedoso omitir \
sujeito aurora videira molho cartaz gesso dentista tapar"
.into(),
entropy: "45473063711376cae38f1b3eba18c874124e1d00000000000000000000000000".into(),
birthday: 1679316657,
has_prefix: true,
has_accent: false,
},
Vector {
language: polyseed::Language::Czech,
seed: "usmrtit nora dotaz komunita zavalit funkce mzda sotva akce \
vesta kabel herna stodola uvolnit ustrnout email"
.into(),
entropy: "7ac8a4efd62d9c3c4c02e350d32326df37821c00000000000000000000000000".into(),
birthday: 1679316898,
has_prefix: true,
has_accent: false,
},
Vector {
language: polyseed::Language::Korean,
seed: "전망 선풍기 국제 무궁화 설사 기름 이론적 해안 절망 예선 \
지우개 보관 절망 말기 시각 귀신"
.into(),
entropy: "684663fda420298f42ed94b2c512ed38ddf12b00000000000000000000000000".into(),
birthday: 1679317073,
has_prefix: false,
has_accent: false,
},
Vector {
language: polyseed::Language::Japanese,
seed: "うちあわせ ちつじょ つごう しはい けんこう とおる てみやげ はんとし たんとう \
といれ おさない おさえる むかう ぬぐう なふだ せまる"
.into(),
entropy: "94e6665518a6286c6e3ba508a2279eb62b771f00000000000000000000000000".into(),
birthday: 1679318722,
has_prefix: false,
has_accent: false,
},
Vector {
language: polyseed::Language::ChineseTraditional,
seed: "亂 挖 斤 柄 代 圈 枝 轄 魯 論 函 開 勘 番 榮 壁".into(),
entropy: "b1594f585987ab0fd5a31da1f0d377dae5283f00000000000000000000000000".into(),
birthday: 1679426433,
has_prefix: false,
has_accent: false,
},
Vector {
language: polyseed::Language::ChineseSimplified,
seed: "啊 百 族 府 票 划 伪 仓 叶 虾 借 溜 晨 左 等 鬼".into(),
entropy: "21cdd366f337b89b8d1bc1df9fe73047c22b0300000000000000000000000000".into(),
birthday: 1679426817,
has_prefix: false,
has_accent: false,
},
];

for vector in vectors {
let add_whitespace = |mut seed: String| {
seed.push(' ');
seed
};

let seed_without_accents = |seed: &str| {
seed
.split_whitespace()
.map(|w| w.chars().filter(|c| c.is_ascii()).collect::<String>())
.collect::<Vec<_>>()
.join(" ")
};

let trim_seed = |seed: &str| {
let seed_to_trim =
if vector.has_accent { seed_without_accents(seed) } else { seed.to_string() };
seed_to_trim
.split_whitespace()
.map(|w| {
let mut ascii = 0;
let mut to_take = w.len();
for (i, char) in w.chars().enumerate() {
if char.is_ascii() {
ascii += 1;
}
if ascii == polyseed::PREFIX_LEN {
// +1 to include this character, which put us at the prefix length
to_take = i + 1;
break;
}
}
w.chars().take(to_take).collect::<String>()
})
.collect::<Vec<_>>()
.join(" ")
};

// String -> Seed
let seed = Seed::from_string(Zeroizing::new(vector.seed.clone())).unwrap();

// Make sure a version with added whitespace still works
let whitespaced_seed =
Seed::from_string(Zeroizing::new(add_whitespace(vector.seed.clone()))).unwrap();
assert_eq!(seed, whitespaced_seed);
// Check trimmed versions works
if vector.has_prefix {
let trimmed_seed = Seed::from_string(Zeroizing::new(trim_seed(&vector.seed))).unwrap();
assert_eq!(seed, trimmed_seed);
}
// Check versions without accents work
if vector.has_accent {
let seed_without_accents =
Seed::from_string(Zeroizing::new(seed_without_accents(&vector.seed))).unwrap();
assert_eq!(seed, seed_without_accents);
}

let entropy = Zeroizing::new(hex::decode(vector.entropy).unwrap().try_into().unwrap());
assert_eq!(seed.entropy(), entropy);
assert!(seed.birthday().abs_diff(vector.birthday) < polyseed::TIME_STEP);

// Entropy -> Seed
let from_entropy =
Seed::from_entropy(SeedType::Polyseed(vector.language), entropy, Some(seed.birthday()))
.unwrap();
assert_eq!(seed.to_string(), from_entropy.to_string());

// Check against ourselves
{
let seed = Seed::new(&mut OsRng, SeedType::Polyseed(vector.language));
assert_eq!(seed, Seed::from_string(seed.to_string()).unwrap());
assert_eq!(
seed,
Seed::from_entropy(
SeedType::Polyseed(vector.language),
seed.entropy(),
Some(seed.birthday())
)
.unwrap()
);
}
}
}

coins/monero/src/tests/unreduced_scalar.rs (new file, +32)
@@ -0,0 +1,32 @@
use curve25519_dalek::scalar::Scalar;

use crate::unreduced_scalar::*;

#[test]
fn recover_scalars() {
let test_recover = |stored: &str, recovered: &str| {
let stored = UnreducedScalar(hex::decode(stored).unwrap().try_into().unwrap());
let recovered =
Scalar::from_canonical_bytes(hex::decode(recovered).unwrap().try_into().unwrap()).unwrap();
assert_eq!(stored.recover_monero_slide_scalar(), recovered);
};

// https://www.moneroinflation.com/static/data_py/report_scalars_df.pdf
// Table 4.
test_recover(
"cb2be144948166d0a9edb831ea586da0c376efa217871505ad77f6ff80f203f8",
"b8ffd6a1aee47828808ab0d4c8524cb5c376efa217871505ad77f6ff80f20308",
);
test_recover(
"343d3df8a1051c15a400649c423dc4ed58bef49c50caef6ca4a618b80dee22f4",
"21113355bc682e6d7a9d5b3f2137a30259bef49c50caef6ca4a618b80dee2204",
);
test_recover(
"c14f75d612800ca2c1dcfa387a42c9cc086c005bc94b18d204dd61342418eba7",
"4f473804b1d27ab2c789c80ab21d034a096c005bc94b18d204dd61342418eb07",
);
test_recover(
"000102030405060708090a0b0c0d0e0f826c4f6e2329a31bc5bc320af0b2bcbb",
"a124cfd387f461bf3719e03965ee6877826c4f6e2329a31bc5bc320af0b2bc0b",
);
}
@@ -6,15 +6,13 @@ use std_shims::{

use zeroize::Zeroize;

use curve25519_dalek::{
scalar::Scalar,
edwards::{EdwardsPoint, CompressedEdwardsY},
};
use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use crate::{
Protocol, hash,
serialize::*,
ringct::{RctBase, RctPrunable, RctSignatures},
ring_signatures::RingSignature,
ringct::{bulletproofs::Bulletproofs, RctType, RctBase, RctPrunable, RctSignatures},
};

#[derive(Clone, PartialEq, Eq, Debug)]
@@ -24,11 +22,10 @@ pub enum Input {
}

impl Input {
// Worst-case predictive len
pub(crate) fn fee_weight(ring_len: usize) -> usize {
pub(crate) fn fee_weight(offsets_weight: usize) -> usize {
// Uses 1 byte for the input type
// Uses 1 byte for the VarInt amount due to amount being 0
// Uses 1 byte for the VarInt encoding of the length of the ring as well
1 + 1 + 1 + (8 * ring_len) + 32
1 + 1 + offsets_weight + 32
}

pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
@@ -53,21 +50,24 @@ impl Input {
res
}

pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Input> {
pub fn read<R: Read>(r: &mut R) -> io::Result<Input> {
Ok(match read_byte(r)? {
255 => Input::Gen(read_varint(r)?),
2 => {
let amount = read_varint(r)?;
let amount = if (amount == 0) && interpret_as_rct { None } else { Some(amount) };
// https://github.com/monero-project/monero/
// blob/00fd416a99686f0956361d1cd0337fe56e58d4a7/
// src/cryptonote_basic/cryptonote_format_utils.cpp#L860-L863
// A non-RCT 0-amount input can't exist because only RCT TXs can have a 0-amount output
// That's why collapsing to None if the amount is 0 is safe, even without knowing if RCT
let amount = if amount == 0 { None } else { Some(amount) };
Input::ToKey {
amount,
key_offsets: read_vec(read_varint, r)?,
key_image: read_torsion_free_point(r)?,
}
}
_ => {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
}
_ => Err(io::Error::other("Tried to deserialize unknown/unused input type"))?,
})
}
}
@@ -81,8 +81,10 @@ pub struct Output {
}

impl Output {
pub(crate) fn fee_weight() -> usize {
1 + 1 + 32 + 1
pub(crate) fn fee_weight(view_tags: bool) -> usize {
// Uses 1 byte for the output type
// Uses 1 byte for the VarInt amount due to amount being 0
1 + 1 + 32 + if view_tags { 1 } else { 0 }
}

pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
@@ -101,11 +103,11 @@ impl Output {
res
}

pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Output> {
pub fn read<R: Read>(rct: bool, r: &mut R) -> io::Result<Output> {
let amount = read_varint(r)?;
let amount = if interpret_as_rct {
let amount = if rct {
if amount != 0 {
Err(io::Error::new(io::ErrorKind::Other, "RCT TX output wasn't 0"))?;
Err(io::Error::other("RCT TX output wasn't 0"))?;
}
None
} else {
@@ -115,10 +117,7 @@ impl Output {
let view_tag = match read_byte(r)? {
2 => false,
3 => true,
_ => Err(io::Error::new(
io::ErrorKind::Other,
"Tried to deserialize unknown/unused output type",
))?,
_ => Err(io::Error::other("Tried to deserialize unknown/unused output type"))?,
};

Ok(Output {
@@ -180,13 +179,19 @@ pub struct TransactionPrefix {
}

impl TransactionPrefix {
pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
pub(crate) fn fee_weight(
decoy_weights: &[usize],
outputs: usize,
view_tags: bool,
extra: usize,
) -> usize {
// Assumes Timelock::None since this library won't let you create a TX with a timelock
// 1 input for every decoy weight
1 + 1 +
varint_len(inputs) +
(inputs * Input::fee_weight(ring_len)) +
1 +
(outputs * Output::fee_weight()) +
varint_len(decoy_weights.len()) +
decoy_weights.iter().map(|&offsets_weight| Input::fee_weight(offsets_weight)).sum::<usize>() +
varint_len(outputs) +
(outputs * Output::fee_weight(view_tags)) +
varint_len(extra) +
extra
}
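
A worked example of the predicted weights above, under stated assumptions (view tags enabled, two inputs whose decoy-offset encodings weigh 20 bytes each, 44 bytes of extra): each output costs 1 + 1 + 32 + 1 = 35 bytes, each input 1 + 1 + 20 + 32 = 54, and the prefix adds version, timelock, counts, and extra on top.

// Standalone arithmetic mirroring the fee_weight formulas above; the figures
// are illustrative assumptions, not measurements
fn varint_len(v: usize) -> usize {
  ((usize::try_from(usize::BITS - v.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}

fn main() {
  let offsets_weight = 20; // assumed weight of one input's varint-encoded ring offsets
  let input = 1 + 1 + offsets_weight + 32; // type + 0-amount varint + offsets + key image
  assert_eq!(input, 54);

  let view_tags = true;
  let output = 1 + 1 + 32 + usize::from(view_tags); // type + amount + key + view tag
  assert_eq!(output, 35);

  let (inputs, outputs, extra) = (2, 2, 44);
  let prefix = 1 + 1 + // version + timelock, single-byte varints here
    varint_len(inputs) + (inputs * input) +
    varint_len(outputs) + (outputs * output) +
    varint_len(extra) + extra;
  assert_eq!(prefix, 227);
}
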
@@ -196,7 +201,7 @@ impl TransactionPrefix {
self.timelock.write(w)?;
write_vec(Input::write, &self.inputs, w)?;
write_vec(Output::write, &self.outputs, w)?;
write_varint(&self.extra.len().try_into().unwrap(), w)?;
write_varint(&self.extra.len(), w)?;
w.write_all(&self.extra)
}

@@ -210,14 +215,14 @@ impl TransactionPrefix {
let version = read_varint(r)?;
// TODO: Create an enum out of version
if (version == 0) || (version > 2) {
Err(io::Error::new(io::ErrorKind::Other, "unrecognized transaction version"))?;
Err(io::Error::other("unrecognized transaction version"))?;
}

let timelock = Timelock::from_raw(read_varint(r)?);

let inputs = read_vec(|r| Input::read(version == 2, r), r)?;
let inputs = read_vec(|r| Input::read(r), r)?;
if inputs.is_empty() {
Err(io::Error::new(io::ErrorKind::Other, "transaction had no inputs"))?;
Err(io::Error::other("transaction had no inputs"))?;
}
let is_miner_tx = matches!(inputs[0], Input::Gen { .. });

@@ -241,29 +246,27 @@ impl TransactionPrefix {
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Transaction {
pub prefix: TransactionPrefix,
pub signatures: Vec<Vec<(Scalar, Scalar)>>,
pub signatures: Vec<RingSignature>,
pub rct_signatures: RctSignatures,
}

impl Transaction {
pub(crate) fn fee_weight(
protocol: Protocol,
inputs: usize,
decoy_weights: &[usize],
outputs: usize,
extra: usize,
fee: u64,
) -> usize {
TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
RctSignatures::fee_weight(protocol, inputs, outputs)
TransactionPrefix::fee_weight(decoy_weights, outputs, protocol.view_tags(), extra) +
RctSignatures::fee_weight(protocol, decoy_weights.len(), outputs, fee)
}

pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
self.prefix.write(w)?;
if self.prefix.version == 1 {
for sigs in &self.signatures {
for sig in sigs {
write_scalar(&sig.0, w)?;
write_scalar(&sig.1, w)?;
}
for ring_sig in &self.signatures {
ring_sig.write(w)?;
}
Ok(())
} else if self.prefix.version == 2 {
@@ -292,25 +295,40 @@ impl Transaction {
.inputs
.iter()
.filter_map(|input| match input {
Input::ToKey { key_offsets, .. } => Some(
key_offsets
.iter()
.map(|_| Ok((read_scalar(r)?, read_scalar(r)?)))
.collect::<Result<_, io::Error>>(),
),
Input::ToKey { key_offsets, .. } => Some(RingSignature::read(key_offsets.len(), r)),
_ => None,
})
.collect::<Result<_, _>>()?;

rct_signatures.base.fee = prefix
.inputs
.iter()
.map(|input| match input {
Input::Gen(..) => 0,
Input::ToKey { amount, .. } => amount.unwrap(),
})
.sum::<u64>()
.saturating_sub(prefix.outputs.iter().map(|output| output.amount.unwrap()).sum());
if !matches!(prefix.inputs[0], Input::Gen(..)) {
let in_amount = prefix
.inputs
.iter()
.map(|input| match input {
Input::Gen(..) => Err(io::Error::other("Input::Gen present in non-coinbase v1 TX"))?,
// v1 TXs can burn v2 outputs
// dcff3fe4f914d6b6bd4a5b800cc4cca8f2fdd1bd73352f0700d463d36812f328 is one such TX
// It includes a pre-RCT signature for a RCT output, yet if you interpret the RCT
// output as being worth 0, it passes a sum check (guaranteed since no outputs are RCT)
Input::ToKey { amount, .. } => Ok(amount.unwrap_or(0)),
})
.collect::<io::Result<Vec<_>>>()?
.into_iter()
.sum::<u64>();

let mut out = 0;
for output in &prefix.outputs {
if output.amount.is_none() {
Err(io::Error::other("v1 transaction had a 0-amount output"))?;
}
out += output.amount.unwrap();
}

if in_amount < out {
Err(io::Error::other("transaction spent more than it had as inputs"))?;
}
rct_signatures.base.fee = in_amount - out;
}
} else if prefix.version == 2 {
rct_signatures = RctSignatures::read(
prefix
@@ -325,7 +343,7 @@ impl Transaction {
r,
)?;
} else {
Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
Err(io::Error::other("Tried to deserialize unknown version"))?;
}

Ok(Transaction { prefix, signatures, rct_signatures })
@@ -345,14 +363,13 @@ impl Transaction {
hashes.extend(hash(&buf));
buf.clear();

match self.rct_signatures.prunable {
RctPrunable::Null => buf.resize(32, 0),
hashes.extend(&match self.rct_signatures.prunable {
RctPrunable::Null => [0; 32],
_ => {
self.rct_signatures.prunable.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
buf = hash(&buf).to_vec();
hash(&buf)
}
}
hashes.extend(&buf);
});

hash(&hashes)
}
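
The loop above builds the v2 transaction hash out of three Keccak-256 hashes. As a sketch (assuming `hash` is Keccak-256, as elsewhere in this crate, with serialization elided), the structure is hash(hash(prefix) || hash(rct base) || hash(rct prunable)), where a transaction with `RctPrunable::Null` contributes 32 zero bytes as the third component:

use tiny_keccak::{Hasher, Keccak};

fn hash(data: &[u8]) -> [u8; 32] {
  let mut keccak = Keccak::v256();
  keccak.update(data);
  let mut out = [0; 32];
  keccak.finalize(&mut out);
  out
}

// Sketch of the three-part hash; the serialized byte slices are assumed inputs
fn tx_hash_sketch(prefix: &[u8], rct_base: &[u8], rct_prunable: Option<&[u8]>) -> [u8; 32] {
  let mut hashes = Vec::with_capacity(96);
  hashes.extend(hash(prefix));
  hashes.extend(hash(rct_base));
  match rct_prunable {
    Some(prunable) => hashes.extend(hash(prunable)),
    // RctPrunable::Null contributes 32 zero bytes, per the diff above
    None => hashes.extend([0; 32]),
  }
  hash(&hashes)
}
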
@@ -360,6 +377,10 @@ impl Transaction {

/// Calculate the hash of this transaction as needed for signing it.
pub fn signature_hash(&self) -> [u8; 32] {
if self.prefix.version == 1 {
return self.prefix.hash();
}

let mut buf = Vec::with_capacity(2048);
let mut sig_hash = Vec::with_capacity(96);

@@ -374,4 +395,39 @@ impl Transaction {

hash(&sig_hash)
}

fn is_rct_bulletproof(&self) -> bool {
match &self.rct_signatures.rct_type() {
RctType::Bulletproofs | RctType::BulletproofsCompactAmount | RctType::Clsag => true,
RctType::Null |
RctType::MlsagAggregate |
RctType::MlsagIndividual |
RctType::BulletproofsPlus => false,
}
}

fn is_rct_bulletproof_plus(&self) -> bool {
match &self.rct_signatures.rct_type() {
RctType::BulletproofsPlus => true,
RctType::Null |
RctType::MlsagAggregate |
RctType::MlsagIndividual |
RctType::Bulletproofs |
RctType::BulletproofsCompactAmount |
RctType::Clsag => false,
}
}

/// Calculate the transaction's weight.
pub fn weight(&self) -> usize {
let blob_size = self.serialize().len();

let bp = self.is_rct_bulletproof();
let bp_plus = self.is_rct_bulletproof_plus();
if !(bp || bp_plus) {
blob_size
} else {
blob_size + Bulletproofs::calculate_bp_clawback(bp_plus, self.prefix.outputs.len()).0
}
}
}

coins/monero/src/unreduced_scalar.rs (new file, +137)
@@ -0,0 +1,137 @@
use core::cmp::Ordering;

use std_shims::{
sync::OnceLock,
io::{self, *},
};

use curve25519_dalek::scalar::Scalar;

use crate::serialize::*;

static PRECOMPUTED_SCALARS_CELL: OnceLock<[Scalar; 8]> = OnceLock::new();
/// Precomputed scalars used to recover an incorrectly reduced scalar.
#[allow(non_snake_case)]
pub(crate) fn PRECOMPUTED_SCALARS() -> [Scalar; 8] {
*PRECOMPUTED_SCALARS_CELL.get_or_init(|| {
let mut precomputed_scalars = [Scalar::ONE; 8];
for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
*scalar = Scalar::from(((i * 2) + 1) as u8);
}
precomputed_scalars
})
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct UnreducedScalar(pub [u8; 32]);

impl UnreducedScalar {
pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
w.write_all(&self.0)
}

pub fn read<R: Read>(r: &mut R) -> io::Result<UnreducedScalar> {
Ok(UnreducedScalar(read_bytes(r)?))
}

pub fn as_bytes(&self) -> &[u8; 32] {
&self.0
}

fn as_bits(&self) -> [u8; 256] {
let mut bits = [0; 256];
for (i, bit) in bits.iter_mut().enumerate() {
*bit = core::hint::black_box(1 & (self.0[i / 8] >> (i % 8)))
}

bits
}

/// Computes the non-adjacent form of this scalar with width 5.
///
/// This matches Monero's `slide` function and intentionally gives incorrect outputs under
/// certain conditions in order to match Monero.
///
/// This function does not execute in constant time.
fn non_adjacent_form(&self) -> [i8; 256] {
let bits = self.as_bits();
let mut naf = [0i8; 256];
for (b, bit) in bits.into_iter().enumerate() {
naf[b] = bit as i8;
}

for i in 0 .. 256 {
if naf[i] != 0 {
// if the bit is a one, work our way up through the window
// combining the bits with this bit.
for b in 1 .. 6 {
if (i + b) >= 256 {
// if we are at the length of the array then break out
// the loop.
break;
}
// potential_carry - the value of the bit at i+b compared to the bit at i
let potential_carry = naf[i + b] << b;

if potential_carry != 0 {
if (naf[i] + potential_carry) <= 15 {
// if our current "bit" plus the potential carry is less than 16
// add it to our current "bit" and set the potential carry bit to 0.
naf[i] += potential_carry;
naf[i + b] = 0;
} else if (naf[i] - potential_carry) >= -15 {
// else if our current "bit" minus the potential carry is more than -16
// take it away from our current "bit".
// we then work our way up through the bits setting ones to zero, when
// we hit the first zero we change it to one then stop, this is to factor
// in the minus.
naf[i] -= potential_carry;
#[allow(clippy::needless_range_loop)]
for k in (i + b) .. 256 {
if naf[k] == 0 {
naf[k] = 1;
break;
}
naf[k] = 0;
}
} else {
break;
}
}
}
}
}

naf
}
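
The recovery function below folds the signed digits produced above back into a scalar, Horner-style. With plain integers the same fold looks like this (digits least-significant first; each non-zero digit is odd, as the width-5 NAF guarantees):

// Plain-integer analogue of the digit fold used below; illustrative only
fn fold(digits: &[i8]) -> i64 {
  let mut acc = 0i64;
  for &d in digits.iter().rev() {
    acc += acc; // double
    acc += i64::from(d); // add the signed digit
  }
  acc
}

fn main() {
  // 15 = 7 * 2^0 + 1 * 2^3
  assert_eq!(fold(&[7, 0, 0, 1]), 15);
  // 31 = -1 * 2^0 + 1 * 2^5, a typical w-NAF carry
  assert_eq!(fold(&[-1, 0, 0, 0, 0, 1]), 31);
}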
|
||||

  /// Recover the scalar that an array of bytes was incorrectly interpreted as by Monero's `slide`
  /// function.
  ///
  /// In Borromean range proofs, Monero was not checking that the scalars used were
  /// reduced. This led to the scalar stored being interpreted as a different scalar;
  /// this function recovers that scalar.
  ///
  /// See: https://github.com/monero-project/monero/issues/8438
  pub fn recover_monero_slide_scalar(&self) -> Scalar {
    if self.0[31] & 128 == 0 {
      // Computing the w-NAF of a number can only give an output with 1 more bit than
      // the number, so even if the number isn't reduced, the `slide` function will be
      // correct when the last bit isn't set.
      return Scalar::from_bytes_mod_order(self.0);
    }

    let precomputed_scalars = PRECOMPUTED_SCALARS();

    let mut recovered = Scalar::ZERO;
    for &numb in self.non_adjacent_form().iter().rev() {
      recovered += recovered;
      match numb.cmp(&0) {
        Ordering::Greater => recovered += precomputed_scalars[(numb as usize) / 2],
        Ordering::Less => recovered -= precomputed_scalars[((-numb) as usize) / 2],
        Ordering::Equal => (),
      }
    }
    recovered
  }
}
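// Hedged usage sketch (not part of the diff): recovering the scalar Monero's `slide`
// effectively used for an unreduced encoding. This assumes `read` accepts any io::Read,
// per the deserializer shown above; the two interpretations only diverge when the top
// bit of byte 31 is set.
#[test]
fn recover_slide_interpretation() {
  let bytes = [0xff; 32]; // an unreduced encoding with the top bit set
  let unreduced = UnreducedScalar::read(&mut bytes.as_slice()).unwrap();
  let as_slide_saw_it = unreduced.recover_monero_slide_scalar();
  // A naive modular reduction may differ from the scalar `slide` actually operated on
  let naive = Scalar::from_bytes_mod_order(bytes);
  let _ = (as_slide_saw_it, naive);
}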
@@ -182,13 +182,26 @@ impl<B: AddressBytes> AddressMeta<B> {
}

/// A Monero address, composed of metadata and a spend/view key.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Address<B: AddressBytes> {
  pub meta: AddressMeta<B>,
  pub spend: EdwardsPoint,
  pub view: EdwardsPoint,
}

impl<B: AddressBytes> core::fmt::Debug for Address<B> {
  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt
      .debug_struct("Address")
      .field("meta", &self.meta)
      .field("spend", &hex::encode(self.spend.compress().0))
      .field("view", &hex::encode(self.view.compress().0))
      // This is not a real field, yet is the most valuable thing to know when debugging
      .field("(address)", &self.to_string())
      .finish()
  }
}

impl<B: AddressBytes> Zeroize for Address<B> {
  fn zeroize(&mut self) {
    self.meta.zeroize();
@@ -1,8 +1,11 @@
use std_shims::{sync::OnceLock, vec::Vec, collections::HashSet};
use std_shims::{vec::Vec, collections::HashSet};

#[cfg(not(feature = "std"))]
#[cfg(feature = "cache-distribution")]
use std_shims::sync::OnceLock;

#[cfg(all(feature = "cache-distribution", not(feature = "std")))]
use std_shims::sync::Mutex;
#[cfg(feature = "std")]
#[cfg(all(feature = "cache-distribution", feature = "std"))]
use futures::lock::Mutex;

use zeroize::{Zeroize, ZeroizeOnDrop};

@@ -15,6 +18,7 @@ use rand_distr::num_traits::Float;
use curve25519_dalek::edwards::EdwardsPoint;

use crate::{
  serialize::varint_len,
  wallet::SpendableOutput,
  rpc::{RpcError, RpcConnection, Rpc},
};

@@ -26,9 +30,11 @@ const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;

// TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
// TODO: Resolve safety of this in case a reorg occurs/the network changes
// TODO: Update this when scanning a block, as possible
#[cfg(feature = "cache-distribution")]
static DISTRIBUTION_CELL: OnceLock<Mutex<Vec<u64>>> = OnceLock::new();
#[cfg(feature = "cache-distribution")]
#[allow(non_snake_case)]
fn DISTRIBUTION() -> &'static Mutex<Vec<u64>> {
  DISTRIBUTION_CELL.get_or_init(|| Mutex::new(Vec::with_capacity(3000000)))
@@ -135,12 +141,17 @@ fn offset(ring: &[u64]) -> Vec<u64> {
/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
  pub i: u8,
  pub offsets: Vec<u64>,
  pub ring: Vec<[EdwardsPoint; 2]>,
  pub(crate) i: u8,
  pub(crate) offsets: Vec<u64>,
  pub(crate) ring: Vec<[EdwardsPoint; 2]>,
}

#[allow(clippy::len_without_is_empty)]
impl Decoys {
  pub fn fee_weight(offsets: &[u64]) -> usize {
    varint_len(offsets.len()) + offsets.iter().map(|offset| varint_len(*offset)).sum::<usize>()
  }

  pub fn len(&self) -> usize {
    self.offsets.len()
  }
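// For context on fee_weight above: a hedged sketch of what a Monero-style varint length
// (LEB128, 7 payload bits per byte) computes. The real helper lives in crate::serialize
// and its exact signature isn't shown in this diff.
fn varint_len_sketch(mut n: u64) -> usize {
  let mut len = 1;
  // Every additional 7 bits of magnitude costs one more byte
  while n >= 0x80 {
    n >>= 7;
    len += 1;
  }
  len
}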
@@ -153,11 +164,16 @@ impl Decoys {
    height: usize,
    inputs: &[SpendableOutput],
  ) -> Result<Vec<Decoys>, RpcError> {
    #[cfg(feature = "cache-distribution")]
    #[cfg(not(feature = "std"))]
    let mut distribution = DISTRIBUTION().lock();
    #[cfg(feature = "cache-distribution")]
    #[cfg(feature = "std")]
    let mut distribution = DISTRIBUTION().lock().await;

    #[cfg(not(feature = "cache-distribution"))]
    let mut distribution = vec![];

    let decoy_count = ring_len - 1;

    // Convert the inputs in question to the raw output data
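// A note on the stacked #[cfg] attributes above: multiple cfg attributes AND together.
// The first distribution binding therefore only compiles with `cache-distribution` and
// without `std` (a sync Mutex with a blocking lock()), the second with both features (a
// futures Mutex requiring lock().await), and the plain Vec only when caching is disabled.
// A minimal sketch of the pattern (the function name is illustrative, not from the diff):
#[cfg(feature = "cache-distribution")]
#[cfg(not(feature = "std"))]
fn only_with_caching_and_without_std() {}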
@@ -62,7 +62,7 @@ impl PaymentId {
    Ok(match read_byte(r)? {
      0 => PaymentId::Unencrypted(read_bytes(r)?),
      1 => PaymentId::Encrypted(read_bytes(r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown payment ID type"))?,
      _ => Err(io::Error::other("unknown payment ID type"))?,
    })
  }
}

@@ -106,17 +106,13 @@ impl ExtraField {
      2 => ExtraField::Nonce({
        let nonce = read_vec(read_byte, r)?;
        if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
          Err(io::Error::new(io::ErrorKind::Other, "too long nonce"))?;
          Err(io::Error::other("too long nonce"))?;
        }
        nonce
      }),
      3 => ExtraField::MergeMining(
        usize::try_from(read_varint(r)?)
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "varint for height exceeds usize"))?,
        read_bytes(r)?,
      ),
      3 => ExtraField::MergeMining(read_varint(r)?, read_bytes(r)?),
      4 => ExtraField::PublicKeys(read_vec(read_point, r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown extra field"))?,
      _ => Err(io::Error::other("unknown extra field"))?,
    })
  }
}
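// The replacements above rely on io::Error::other, stable since Rust 1.74, which is
// shorthand for the ErrorKind::Other constructor. A minimal equivalence sketch:
fn io_error_other_equivalence() {
  let old_style = std::io::Error::new(std::io::ErrorKind::Other, "unknown extra field");
  let new_style = std::io::Error::other("unknown extra field");
  assert_eq!(old_style.kind(), new_style.kind());
}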
@@ -26,11 +26,11 @@ use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, M
mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};

pub(crate) mod decoys;
pub(crate) use decoys::Decoys;
pub mod decoys;
pub use decoys::Decoys;

mod send;
pub use send::{Fee, TransactionError, Change, SignableTransaction, Eventuality};
pub use send::{FeePriority, Fee, TransactionError, Change, SignableTransaction, Eventuality};
#[cfg(feature = "std")]
pub use send::SignableTransactionBuilder;
#[cfg(feature = "multisig")]

@@ -74,7 +74,7 @@ pub(crate) fn shared_key(
    .copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);

  // || o
  write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();
  write_varint(&o, &mut output_derivation).unwrap();

  let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];

@@ -150,7 +150,7 @@ impl ViewPair {
  }

  pub fn view(&self) -> EdwardsPoint {
    self.view.deref() * &ED25519_BASEPOINT_TABLE
    self.view.deref() * ED25519_BASEPOINT_TABLE
  }

  fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
@@ -167,7 +167,7 @@ impl ViewPair {

  fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
    let scalar = self.subaddress_derivation(index);
    let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
    let spend = self.spend + (&scalar * ED25519_BASEPOINT_TABLE);
    let view = self.view.deref() * spend;
    (spend, view)
  }

@@ -175,7 +175,7 @@ impl ViewPair {
  /// Returns an address with the provided specification.
  pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
    let mut spend = self.spend;
    let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;
    let mut view: EdwardsPoint = self.view.deref() * ED25519_BASEPOINT_TABLE;

    // construct the address meta
    let meta = match spec {

@@ -241,9 +241,13 @@ impl ZeroizeOnDrop for Scanner {}

impl Scanner {
  /// Create a Scanner from a ViewPair.
  ///
  /// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
  ///
  /// When an output is successfully scanned, the output key MUST be saved to disk.
  ///
  /// When a new scanner is created, ALL saved output keys must be passed in to be secure.
  ///
  /// If None is passed, a modified shared key derivation is used which is immune to the burning
  /// bug (specifically the Guaranteed feature from Featured Addresses).
  pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Scanner {
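// Hedged usage sketch of the contract documented above; the ViewPair and the persistence
// layer are assumptions, not part of this diff.
fn build_scanner(pair: ViewPair) -> Scanner {
  // In a real wallet, this set is loaded from disk: every output key ever scanned
  let saved_keys: HashSet<CompressedEdwardsY> = HashSet::new();
  // Some(...) enables burning-bug detection against the saved keys; None instead opts
  // into the Guaranteed (Featured Addresses) derivation, which is immune to the bug
  Scanner::from_view(pair, Some(saved_keys))
}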
@@ -20,12 +20,18 @@ use crate::{
};

/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
  pub tx: [u8; 32],
  pub o: u8,
}

impl core::fmt::Debug for AbsoluteId {
  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt.debug_struct("AbsoluteId").field("tx", &hex::encode(self.tx)).field("o", &self.o).finish()
  }
}

impl AbsoluteId {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.tx)?;

@@ -44,7 +50,7 @@ impl AbsoluteId {
}

/// The data contained within an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
  pub key: EdwardsPoint,
  /// Absolute difference between the spend key and the key in this output
@@ -52,6 +58,17 @@ pub struct OutputData {
  pub commitment: Commitment,
}

impl core::fmt::Debug for OutputData {
  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt
      .debug_struct("OutputData")
      .field("key", &hex::encode(self.key.compress().0))
      .field("key_offset", &hex::encode(self.key_offset.to_bytes()))
      .field("commitment", &self.commitment)
      .finish()
  }
}

impl OutputData {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.key.compress().to_bytes())?;

@@ -76,7 +93,7 @@ impl OutputData {
}

/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
  /// The subaddress this output was sent to.
  pub subaddress: Option<SubaddressIndex>,

@@ -89,6 +106,17 @@ pub struct Metadata {
  pub arbitrary_data: Vec<Vec<u8>>,
}

impl core::fmt::Debug for Metadata {
  fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
    fmt
      .debug_struct("Metadata")
      .field("subaddress", &self.subaddress)
      .field("payment_id", &hex::encode(self.payment_id))
      .field("arbitrary_data", &self.arbitrary_data.iter().map(hex::encode).collect::<Vec<_>>())
      .finish()
  }
}

impl Metadata {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    if let Some(subaddress) = self.subaddress {
@@ -118,7 +146,7 @@ impl Metadata {
    let subaddress = if read_byte(r)? == 1 {
      Some(
        SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
          .ok_or_else(|| io::Error::other("invalid subaddress in metadata"))?,
      )
    } else {
      None

@@ -201,8 +229,13 @@ impl SpendableOutput {
    &mut self,
    rpc: &Rpc<RPC>,
  ) -> Result<(), RpcError> {
    self.global_index =
      rpc.get_o_indexes(self.output.absolute.tx).await?[usize::from(self.output.absolute.o)];
    self.global_index = *rpc
      .get_o_indexes(self.output.absolute.tx)
      .await?
      .get(usize::from(self.output.absolute.o))
      .ok_or(RpcError::InvalidNode(
        "node returned output indexes didn't include an index for this output",
      ))?;
    Ok(())
  }
@@ -263,6 +296,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
  }

  /// Return the outputs if they're not timelocked, or an empty vector if they are.
  #[must_use]
  pub fn not_locked(&self) -> Vec<O> {
    if self.0 == Timelock::None {
      return self.1.clone();

@@ -271,6 +305,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
  }

  /// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
  #[must_use]
  pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
    // If the Timelocks are comparable, return the outputs if they're now unlocked
    if self.0 <= timelock {

@@ -280,6 +315,7 @@ impl<O: Clone + Zeroize> Timelocked<O> {
    }
  }

  #[must_use]
  pub fn ignore_timelock(&self) -> Vec<O> {
    self.1.clone()
  }
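// Hedged sketch of the accessors above; `scanned` is assumed to be the Timelocked result
// of scanning a transaction.
fn spendable_now(scanned: &Timelocked<SpendableOutput>, current: Timelock) -> Vec<SpendableOutput> {
  // unlocked() is None when the locks aren't comparable (block-based vs. time-based);
  // treat that case as still locked
  scanned.unlocked(current).unwrap_or_default()
}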
@@ -293,16 +329,11 @@ impl Scanner {
      return Timelocked(tx.prefix.timelock, vec![]);
    }

    let extra = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref());
    let extra = if let Ok(extra) = extra {
      extra
    } else {
    let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()) else {
      return Timelocked(tx.prefix.timelock, vec![]);
    };

    let (tx_key, additional) = if let Some((tx_key, additional)) = extra.keys() {
      (tx_key, additional)
    } else {
    let Some((tx_key, additional)) = extra.keys() else {
      return Timelocked(tx.prefix.timelock, vec![]);
    };

@@ -356,9 +387,8 @@ impl Scanner {
      }

      // P - shared == spend
      let subaddress = self
        .subaddresses
        .get(&(output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
      let subaddress =
        self.subaddresses.get(&(output_key - (&shared_key * ED25519_BASEPOINT_TABLE)).compress());
      if subaddress.is_none() {
        continue;
      }

@@ -453,7 +483,7 @@ impl Scanner {
    };

    let mut res = vec![];
    for tx in txs.drain(..) {
    for tx in txs {
      if let Some(timelock) = map(self.scan_transaction(&tx), index) {
        res.push(timelock);
      }

@@ -463,8 +493,9 @@ impl Scanner {
      .iter()
      // Filter to v2 miner TX outputs/RCT outputs since we're tracking the RCT output index
      .filter(|output| {
        ((tx.prefix.version == 2) && matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..)))) ||
          output.amount.is_none()
        let is_v2_miner_tx =
          (tx.prefix.version == 2) && matches!(tx.prefix.inputs.first(), Some(Input::Gen(..)));
        is_v2_miner_tx || output.amount.is_none()
      })
      .count(),
    )
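// The scan refactors above apply let-else (stable since Rust 1.65); a minimal sketch of
// the rewrite pattern:
fn first_even(values: &[u32]) -> u32 {
  // Before: let v = if let Some(v) = ... { v } else { return 0 };
  let Some(v) = values.iter().copied().find(|v| v % 2 == 0) else { return 0 };
  v
}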
@@ -9,18 +9,30 @@ use std_shims::{
use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use crc::{Crc, CRC_32_ISO_HDLC};

use curve25519_dalek::scalar::Scalar;

use crate::{
  random_scalar,
  wallet::seed::{SeedError, Language},
};
use crate::{random_scalar, wallet::seed::SeedError};

pub(crate) const CLASSIC_SEED_LENGTH: usize = 24;
pub(crate) const CLASSIC_SEED_LENGTH_WITH_CHECKSUM: usize = 25;

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Language {
  Chinese,
  English,
  Dutch,
  French,
  Spanish,
  German,
  Italian,
  Portuguese,
  Japanese,
  Russian,
  Esperanto,
  Lojban,
  EnglishOld,
}

fn trim(word: &str, len: usize) -> Zeroizing<String> {
  Zeroizing::new(word.chars().take(len).collect())
}
@@ -87,11 +99,38 @@ fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
    *trimmed_words += &trim(w, lang.unique_prefix_length);
  }

  let crc = Crc::<u32>::new(&CRC_32_ISO_HDLC);
  let mut digest = crc.digest();
  digest.update(trimmed_words.as_bytes());

  usize::try_from(digest.finalize()).unwrap() % words.len()

  const fn crc32_table() -> [u32; 256] {
    let poly = 0xedb88320u32;

    let mut res = [0; 256];
    let mut i = 0;
    while i < 256 {
      let mut entry = i;
      let mut b = 0;
      while b < 8 {
        let trigger = entry & 1;
        entry >>= 1;
        if trigger == 1 {
          entry ^= poly;
        }
        b += 1;
      }
      res[i as usize] = entry;
      i += 1;
    }

    res
  }
  const CRC32_TABLE: [u32; 256] = crc32_table();

  let trimmed_words = trimmed_words.as_bytes();
  let mut checksum = u32::MAX;
  for i in 0 .. trimmed_words.len() {
    checksum = CRC32_TABLE[usize::from(u8::try_from(checksum % 256).unwrap() ^ trimmed_words[i])] ^
      (checksum >> 8);
  }

  usize::try_from(!checksum).unwrap() % words.len()
}

// Convert a private key to a seed
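// Hedged test sketch (not part of the diff): the const table should reproduce
// CRC-32/ISO-HDLC exactly as the removed `crc` crate code computed it. This assumes
// crc32_table is hoisted somewhere a test can call it and `crc` remains a dev-dependency.
#[test]
fn crc32_table_matches_crc_crate() {
  let table = crc32_table();
  let data = b"monero seed checksum";
  let reference = crc::Crc::<u32>::new(&crc::CRC_32_ISO_HDLC).checksum(data);
  let mut checksum = u32::MAX;
  for &byte in data {
    checksum = table[usize::from(u8::try_from(checksum % 256).unwrap() ^ byte)] ^ (checksum >> 8);
  }
  assert_eq!(!checksum, reference);
}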
@@ -247,10 +286,11 @@ impl ClassicSeed {
    let (lang, entropy) = seed_to_bytes(&words)?;

    // Make sure this is a valid scalar
    let mut scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none() {
    let scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none().into() {
      Err(SeedError::InvalidSeed)?;
    }
    let mut scalar = scalar.unwrap();
    scalar.zeroize();

    // Call from_entropy so a trimmed seed becomes a full seed

@@ -258,7 +298,8 @@ impl ClassicSeed {
  }

  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<ClassicSeed> {
    Scalar::from_canonical_bytes(*entropy).map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
    Option::from(Scalar::from_canonical_bytes(*entropy))
      .map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
  }

  pub(crate) fn to_string(&self) -> Zeroizing<String> {
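// The changes above track curve25519-dalek 4, where Scalar::from_canonical_bytes returns
// subtle::CtOption<Scalar> rather than Option<Scalar>. A sketch of the two bridges used:
fn canonical_scalar(bytes: [u8; 32]) -> Option<Scalar> {
  let ct = Scalar::from_canonical_bytes(bytes);
  // Choice -> bool via From, as in the new `if scalar.is_none().into()` check
  if bool::from(ct.is_none()) {
    return None;
  }
  // CtOption -> Option via Option::from, as in the new from_entropy
  Option::from(ct)
}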
@@ -5,7 +5,9 @@ use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};

pub(crate) mod classic;
pub(crate) mod polyseed;
use classic::{CLASSIC_SEED_LENGTH, CLASSIC_SEED_LENGTH_WITH_CHECKSUM, ClassicSeed};
use polyseed::{POLYSEED_LENGTH, Polyseed};

/// Error when decoding a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
@@ -19,74 +21,106 @@ pub enum SeedError {
  InvalidChecksum,
  #[cfg_attr(feature = "std", error("english old seeds don't support checksums"))]
  EnglishOldWithChecksum,
  #[cfg_attr(feature = "std", error("provided entropy is not valid"))]
  InvalidEntropy,
  #[cfg_attr(feature = "std", error("invalid seed"))]
  InvalidSeed,
  #[cfg_attr(feature = "std", error("provided features are not supported"))]
  UnsupportedFeatures,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Language {
  Chinese,
  English,
  Dutch,
  French,
  Spanish,
  German,
  Italian,
  Portuguese,
  Japanese,
  Russian,
  Esperanto,
  Lojban,
  EnglishOld,
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SeedType {
  Classic(classic::Language),
  Polyseed(polyseed::Language),
}

/// A Monero seed.
// TODO: Add polyseed to enum
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub enum Seed {
  Classic(ClassicSeed),
  Polyseed(Polyseed),
}

impl fmt::Debug for Seed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      Seed::Classic(_) => f.debug_struct("Seed::Classic").finish_non_exhaustive(),
      Seed::Polyseed(_) => f.debug_struct("Seed::Polyseed").finish_non_exhaustive(),
    }
  }
}

impl Seed {
  /// Create a new seed.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Seed {
    Seed::Classic(ClassicSeed::new(rng, lang))
  /// Creates a new `Seed`.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, seed_type: SeedType) -> Seed {
    match seed_type {
      SeedType::Classic(lang) => Seed::Classic(ClassicSeed::new(rng, lang)),
      SeedType::Polyseed(lang) => Seed::Polyseed(Polyseed::new(rng, lang)),
    }
  }
  /// Parse a seed from a String.
  /// Parse a seed from a `String`.
  pub fn from_string(words: Zeroizing<String>) -> Result<Seed, SeedError> {
    match words.split_whitespace().count() {
      CLASSIC_SEED_LENGTH | CLASSIC_SEED_LENGTH_WITH_CHECKSUM => {
        ClassicSeed::from_string(words).map(Seed::Classic)
      }
      POLYSEED_LENGTH => Polyseed::from_string(words).map(Seed::Polyseed),
      _ => Err(SeedError::InvalidSeedLength)?,
    }
  }

  /// Create a Seed from entropy.
  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Seed> {
    ClassicSeed::from_entropy(lang, entropy).map(Seed::Classic)
  /// Creates a `Seed` from entropy and an optional birthday (denoted in seconds since the
  /// epoch).
  ///
  /// For `SeedType::Classic`, the birthday is ignored.
  ///
  /// For `SeedType::Polyseed`, the last 13 bytes of `entropy` must be `0`.
  // TODO: Return Result, not Option
  pub fn from_entropy(
    seed_type: SeedType,
    entropy: Zeroizing<[u8; 32]>,
    birthday: Option<u64>,
  ) -> Option<Seed> {
    match seed_type {
      SeedType::Classic(lang) => ClassicSeed::from_entropy(lang, entropy).map(Seed::Classic),
      SeedType::Polyseed(lang) => {
        Polyseed::from(lang, 0, birthday.unwrap_or(0), entropy).map(Seed::Polyseed).ok()
      }
    }
  }
  /// Convert a seed to a String.
  /// Returns the seed as a `String`.
  pub fn to_string(&self) -> Zeroizing<String> {
    match self {
      Seed::Classic(seed) => seed.to_string(),
      Seed::Polyseed(seed) => seed.to_string(),
    }
  }

  /// Return the entropy for this seed.
  /// Returns the entropy for this seed.
  pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
    match self {
      Seed::Classic(seed) => seed.entropy(),
      Seed::Polyseed(seed) => seed.entropy().clone(),
    }
  }

  /// Returns the key derived from this seed.
  pub fn key(&self) -> Zeroizing<[u8; 32]> {
    match self {
      // Classic does not differentiate between its entropy and its key
      Seed::Classic(seed) => seed.entropy(),
      Seed::Polyseed(seed) => seed.key(),
    }
  }

  /// Returns the birthday of this seed.
  pub fn birthday(&self) -> u64 {
    match self {
      Seed::Classic(_) => 0,
      Seed::Polyseed(seed) => seed.birthday(),
    }
  }
}
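// Hedged usage sketch of the SeedType-driven API above (OsRng assumed available via
// rand_core's getrandom feature):
fn seed_round_trip() {
  use rand_core::OsRng;
  let seed = Seed::new(&mut OsRng, SeedType::Polyseed(polyseed::Language::English));
  // The mnemonic encodes entropy, features, and birthday, so parsing it reproduces the seed
  let recovered = Seed::from_string(seed.to_string()).unwrap();
  assert_eq!(seed, recovered);
}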
coins/monero/src/wallet/seed/polyseed.rs (new file, 430 lines)
@@ -0,0 +1,430 @@
use core::fmt;
use std_shims::{sync::OnceLock, vec::Vec, string::String, collections::HashMap};
#[cfg(feature = "std")]
use std::time::{SystemTime, UNIX_EPOCH};

use subtle::ConstantTimeEq;
use zeroize::{Zeroize, Zeroizing, ZeroizeOnDrop};
use rand_core::{RngCore, CryptoRng};

use sha3::Sha3_256;
use pbkdf2::pbkdf2_hmac;

use super::SeedError;

// Features
const FEATURE_BITS: u8 = 5;
#[allow(dead_code)]
const INTERNAL_FEATURES: u8 = 2;
const USER_FEATURES: u8 = 3;

const USER_FEATURES_MASK: u8 = (1 << USER_FEATURES) - 1;
const ENCRYPTED_MASK: u8 = 1 << 4;
const RESERVED_FEATURES_MASK: u8 = ((1 << FEATURE_BITS) - 1) ^ ENCRYPTED_MASK;

fn user_features(features: u8) -> u8 {
  features & USER_FEATURES_MASK
}

fn polyseed_features_supported(features: u8) -> bool {
  (features & RESERVED_FEATURES_MASK) == 0
}

// Dates
const DATE_BITS: u8 = 10;
const DATE_MASK: u16 = (1u16 << DATE_BITS) - 1;
const POLYSEED_EPOCH: u64 = 1635768000; // 1st November 2021 12:00 UTC
pub(crate) const TIME_STEP: u64 = 2629746; // 30.436875 days = 1/12 of the Gregorian year

// After ~85 years, this will roll over.
fn birthday_encode(time: u64) -> u16 {
  u16::try_from((time.saturating_sub(POLYSEED_EPOCH) / TIME_STEP) & u64::from(DATE_MASK))
    .expect("value masked by 2**10 - 1 didn't fit into a u16")
}

fn birthday_decode(birthday: u16) -> u64 {
  POLYSEED_EPOCH + (u64::from(birthday) * TIME_STEP)
}
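// A worked example of the window arithmetic above, offered as a hedged test sketch:
// POLYSEED_EPOCH + 3 * TIME_STEP + 1 encodes to floor((3 * TIME_STEP + 1) / TIME_STEP) = 3,
// and decoding 3 returns the start of that ~30.44-day window, so up to one TIME_STEP of
// precision is intentionally lost.
#[test]
fn birthday_round_trips_to_window_start() {
  let t = POLYSEED_EPOCH + (3 * TIME_STEP) + 1;
  assert_eq!(birthday_encode(t), 3);
  assert_eq!(birthday_decode(3), POLYSEED_EPOCH + (3 * TIME_STEP));
}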
// Polyseed parameters
const SECRET_BITS: usize = 150;

const BITS_PER_BYTE: usize = 8;
const SECRET_SIZE: usize = SECRET_BITS.div_ceil(BITS_PER_BYTE); // 19
const CLEAR_BITS: usize = (SECRET_SIZE * BITS_PER_BYTE) - SECRET_BITS; // 2

// Polyseed calls this CLEAR_MASK and has a very complicated formula for this fundamental
// equivalency
const LAST_BYTE_SECRET_BITS_MASK: u8 = ((1 << (BITS_PER_BYTE - CLEAR_BITS)) - 1) as u8;

const SECRET_BITS_PER_WORD: usize = 10;

// Amount of words in a seed
pub(crate) const POLYSEED_LENGTH: usize = 16;
// Amount of characters each word must have if trimmed
pub(crate) const PREFIX_LEN: usize = 4;

const POLY_NUM_CHECK_DIGITS: usize = 1;
const DATA_WORDS: usize = POLYSEED_LENGTH - POLY_NUM_CHECK_DIGITS;

// Polynomial
const GF_BITS: usize = 11;
const POLYSEED_MUL2_TABLE: [u16; 8] = [5, 7, 1, 3, 13, 15, 9, 11];

type Poly = [u16; POLYSEED_LENGTH];

fn elem_mul2(x: u16) -> u16 {
  if x < 1024 {
    return 2 * x;
  }
  POLYSEED_MUL2_TABLE[usize::from(x % 8)] + (16 * ((x - 1024) / 8))
}

fn poly_eval(poly: &Poly) -> u16 {
  // Horner's method at x = 2
  let mut result = poly[POLYSEED_LENGTH - 1];
  for i in (0 .. (POLYSEED_LENGTH - 1)).rev() {
    result = elem_mul2(result) ^ poly[i];
  }
  result
}

// Key gen parameters
const POLYSEED_SALT: &[u8] = b"POLYSEED key";
const POLYSEED_KEYGEN_ITERATIONS: u32 = 10000;

// Polyseed technically supports multiple coins, and the value for Monero is 0
// See: https://github.com/tevador/polyseed/blob/master/include/polyseed.h#L58
const COIN: u16 = 0;
/// Language options for Polyseed.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Zeroize)]
pub enum Language {
  English,
  Spanish,
  French,
  Italian,
  Japanese,
  Korean,
  Czech,
  Portuguese,
  ChineseSimplified,
  ChineseTraditional,
}

struct WordList {
  words: Vec<String>,
  has_prefix: bool,
  has_accent: bool,
}

impl WordList {
  fn new(words: &str, has_prefix: bool, has_accent: bool) -> WordList {
    let res = WordList { words: serde_json::from_str(words).unwrap(), has_prefix, has_accent };
    // This is needed for a later unwrap to not fail
    assert!(words.len() < usize::from(u16::MAX));
    res
  }
}
static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
  LANGUAGES_CELL.get_or_init(|| {
    HashMap::from([
      (Language::Czech, WordList::new(include_str!("./polyseed/cs.json"), true, false)),
      (Language::French, WordList::new(include_str!("./polyseed/fr.json"), true, true)),
      (Language::Korean, WordList::new(include_str!("./polyseed/ko.json"), false, false)),
      (Language::English, WordList::new(include_str!("./polyseed/en.json"), true, false)),
      (Language::Italian, WordList::new(include_str!("./polyseed/it.json"), true, false)),
      (Language::Spanish, WordList::new(include_str!("./polyseed/es.json"), true, true)),
      (Language::Japanese, WordList::new(include_str!("./polyseed/ja.json"), false, false)),
      (Language::Portuguese, WordList::new(include_str!("./polyseed/pt.json"), true, false)),
      (
        Language::ChineseSimplified,
        WordList::new(include_str!("./polyseed/zh_simplified.json"), false, false),
      ),
      (
        Language::ChineseTraditional,
        WordList::new(include_str!("./polyseed/zh_traditional.json"), false, false),
      ),
    ])
  })
}
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub struct Polyseed {
  language: Language,
  features: u8,
  birthday: u16,
  entropy: Zeroizing<[u8; 32]>,
  checksum: u16,
}

impl fmt::Debug for Polyseed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("Polyseed").finish_non_exhaustive()
  }
}

fn valid_entropy(entropy: &Zeroizing<[u8; 32]>) -> bool {
  // Last byte of the entropy should only use certain bits
  let mut res =
    entropy[SECRET_SIZE - 1].ct_eq(&(entropy[SECRET_SIZE - 1] & LAST_BYTE_SECRET_BITS_MASK));
  // Last 13 bytes of the buffer should be unused
  for b in SECRET_SIZE .. entropy.len() {
    res &= entropy[b].ct_eq(&0);
  }
  res.into()
}
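// With the parameters above: SECRET_SIZE = ceil(150 / 8) = 19, CLEAR_BITS = 152 - 150 = 2,
// and LAST_BYTE_SECRET_BITS_MASK = (1 << 6) - 1 = 0x3f. valid_entropy thus demands byte 18
// use only its low 6 bits and bytes 19 .. 32 be zero: 18 * 8 + 6 = 150 usable bits.
// A hedged sanity-check sketch of those derivations:
#[test]
fn entropy_layout_constants() {
  assert_eq!(SECRET_SIZE, 19);
  assert_eq!(CLEAR_BITS, 2);
  assert_eq!(LAST_BYTE_SECRET_BITS_MASK, 0x3f);
}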
impl Polyseed {
  // TODO: Clean this
  fn to_poly(&self) -> Poly {
    let mut extra_bits = u32::from(FEATURE_BITS + DATE_BITS);
    let extra_val = (u16::from(self.features) << DATE_BITS) | self.birthday;

    let mut entropy_idx = 0;
    let mut secret_bits = BITS_PER_BYTE;
    let mut seed_rem_bits = SECRET_BITS - BITS_PER_BYTE;

    let mut poly = [0; POLYSEED_LENGTH];
    for i in 0 .. DATA_WORDS {
      extra_bits -= 1;

      let mut word_bits = 0;
      let mut word_val = 0;
      while word_bits < SECRET_BITS_PER_WORD {
        if secret_bits == 0 {
          entropy_idx += 1;
          secret_bits = seed_rem_bits.min(BITS_PER_BYTE);
          seed_rem_bits -= secret_bits;
        }
        let chunk_bits = secret_bits.min(SECRET_BITS_PER_WORD - word_bits);
        secret_bits -= chunk_bits;
        word_bits += chunk_bits;
        word_val <<= chunk_bits;
        word_val |=
          (u16::from(self.entropy[entropy_idx]) >> secret_bits) & ((1u16 << chunk_bits) - 1);
      }

      word_val <<= 1;
      word_val |= (extra_val >> extra_bits) & 1;
      poly[POLY_NUM_CHECK_DIGITS + i] = word_val;
    }

    poly
  }
  /// Create a new `Polyseed` with specific internals.
  ///
  /// `birthday` is defined in seconds since the Unix epoch.
  pub fn from(
    language: Language,
    features: u8,
    birthday: u64,
    entropy: Zeroizing<[u8; 32]>,
  ) -> Result<Polyseed, SeedError> {
    let features = user_features(features);
    if !polyseed_features_supported(features) {
      Err(SeedError::UnsupportedFeatures)?;
    }

    let birthday = birthday_encode(birthday);

    if !valid_entropy(&entropy) {
      Err(SeedError::InvalidEntropy)?;
    }

    let mut res = Polyseed { language, birthday, features, entropy, checksum: 0 };
    res.checksum = poly_eval(&res.to_poly());
    Ok(res)
  }
  /// Create a new `Polyseed`.
  ///
  /// This uses the system's time for the birthday, if available.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, language: Language) -> Polyseed {
    // Get the birthday
    #[cfg(feature = "std")]
    let birthday = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
    #[cfg(not(feature = "std"))]
    let birthday = 0;

    // Derive entropy
    let mut entropy = Zeroizing::new([0; 32]);
    rng.fill_bytes(entropy.as_mut());
    entropy[SECRET_SIZE ..].fill(0);
    entropy[SECRET_SIZE - 1] &= LAST_BYTE_SECRET_BITS_MASK;

    Self::from(language, 0, birthday, entropy).unwrap()
  }
  /// Create a new `Polyseed` from a String.
  pub fn from_string(seed: Zeroizing<String>) -> Result<Polyseed, SeedError> {
    // Decode the seed into its polynomial coefficients
    let mut poly = [0; POLYSEED_LENGTH];
    let lang = (|| {
      'language: for (name, lang) in LANGUAGES().iter() {
        for (i, word) in seed.split_whitespace().enumerate() {
          // Find the word's index
          fn check_if_matches<S: AsRef<str>, I: Iterator<Item = S>>(
            has_prefix: bool,
            mut lang_words: I,
            word: &str,
          ) -> Option<usize> {
            if has_prefix {
              // Get the position of the word within the iterator
              // This doesn't use starts_with, as some words are substrings of others,
              // which would lead to false positives
              let mut get_position = || {
                lang_words.position(|lang_word| {
                  let mut lang_word = lang_word.as_ref().chars();
                  let mut word = word.chars();

                  let mut res = true;
                  for _ in 0 .. PREFIX_LEN {
                    res &= lang_word.next() == word.next();
                  }
                  res
                })
              };
              let res = get_position();
              // If another word has this prefix, don't call it a match
              if get_position().is_some() {
                return None;
              }
              res
            } else {
              lang_words.position(|lang_word| lang_word.as_ref() == word)
            }
          }

          let Some(coeff) = (if lang.has_accent {
            let ascii = |word: &str| word.chars().filter(|c| c.is_ascii()).collect::<String>();
            check_if_matches(
              lang.has_prefix,
              lang.words.iter().map(|lang_word| ascii(lang_word)),
              &ascii(word),
            )
          } else {
            check_if_matches(lang.has_prefix, lang.words.iter(), word)
          }) else {
            continue 'language;
          };

          // WordList asserts the word list length is less than u16::MAX
          poly[i] = u16::try_from(coeff).expect("coeff exceeded u16");
        }

        return Ok(*name);
      }

      Err(SeedError::UnknownLanguage)
    })()?;

    // XOR out the coin
    poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

    // Validate the checksum
    if poly_eval(&poly) != 0 {
      Err(SeedError::InvalidChecksum)?;
    }

    // Convert the polynomial into entropy
    let mut entropy = Zeroizing::new([0; 32]);

    let mut extra = 0;

    let mut entropy_idx = 0;
    let mut entropy_bits = 0;

    let checksum = poly[0];
    for mut word_val in poly.into_iter().skip(POLY_NUM_CHECK_DIGITS) {
      // Parse the bottom bit, which is one of the bits of extra
      // This iterates for less than 16 iters, meaning this won't drop any bits
      extra <<= 1;
      extra |= word_val & 1;
      word_val >>= 1;

      // 10 bits per word creates a [8, 2], [6, 4], [4, 6], [2, 8] cycle
      // 15 % 4 is 3, leaving 2 bits off, and 152 (19 * 8) - 2 is 150, the amount of bits in the
      // secret
      let mut word_bits = GF_BITS - 1;
      while word_bits > 0 {
        if entropy_bits == BITS_PER_BYTE {
          entropy_idx += 1;
          entropy_bits = 0;
        }
        let chunk_bits = word_bits.min(BITS_PER_BYTE - entropy_bits);
        word_bits -= chunk_bits;
        let chunk_mask = (1u16 << chunk_bits) - 1;
        if chunk_bits < BITS_PER_BYTE {
          entropy[entropy_idx] <<= chunk_bits;
        }
        entropy[entropy_idx] |=
          u8::try_from((word_val >> word_bits) & chunk_mask).expect("chunk exceeded u8");
        entropy_bits += chunk_bits;
      }
    }

    let birthday = extra & DATE_MASK;
    // extra is contained to u16, and DATE_BITS > 8
    let features =
      u8::try_from(extra >> DATE_BITS).expect("couldn't convert extra >> DATE_BITS to u8");

    let res = Polyseed::from(lang, features, birthday_decode(birthday), entropy);
    if let Ok(res) = res.as_ref() {
      debug_assert_eq!(res.checksum, checksum);
    }
    res
  }
  /// When this seed was created, defined in seconds since the epoch.
  pub fn birthday(&self) -> u64 {
    birthday_decode(self.birthday)
  }

  /// This seed's features.
  pub fn features(&self) -> u8 {
    self.features
  }

  /// This seed's entropy.
  pub fn entropy(&self) -> &Zeroizing<[u8; 32]> {
    &self.entropy
  }

  /// The key derived from this seed.
  pub fn key(&self) -> Zeroizing<[u8; 32]> {
    let mut key = Zeroizing::new([0; 32]);
    pbkdf2_hmac::<Sha3_256>(
      self.entropy.as_slice(),
      POLYSEED_SALT,
      POLYSEED_KEYGEN_ITERATIONS,
      key.as_mut(),
    );
    key
  }

  pub fn to_string(&self) -> Zeroizing<String> {
    // Encode the polynomial with the existing checksum
    let mut poly = self.to_poly();
    poly[0] = self.checksum;

    // Embed the coin
    poly[POLY_NUM_CHECK_DIGITS] ^= COIN;

    // Output words
    let mut seed = Zeroizing::new(String::new());
    let words = &LANGUAGES()[&self.language].words;
    for i in 0 .. poly.len() {
      seed.push_str(&words[usize::from(poly[i])]);
      if i < poly.len() - 1 {
        seed.push(' ');
      }
    }

    seed
  }
}
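// Hedged end-to-end sketch of the Polyseed API above (OsRng assumed available via
// rand_core's getrandom feature):
#[test]
fn polyseed_round_trip() {
  use rand_core::OsRng;
  let seed = Polyseed::new(&mut OsRng, Language::English);
  // 16 space-separated words, the first being the checksum word
  let words = seed.to_string();
  let recovered = Polyseed::from_string(words).unwrap();
  assert_eq!(seed, recovered);
  // The wallet key is a PBKDF2-SHA3-256 stretch of the entropy, stable across the trip
  assert_eq!(seed.key(), recovered.key());
}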
New wordlist files, 2050 lines each (diff suppressed because it is too large):
coins/monero/src/wallet/seed/polyseed/cs.json
coins/monero/src/wallet/seed/polyseed/en.json
coins/monero/src/wallet/seed/polyseed/es.json
coins/monero/src/wallet/seed/polyseed/fr.json
coins/monero/src/wallet/seed/polyseed/it.json
coins/monero/src/wallet/seed/polyseed/ja.json
coins/monero/src/wallet/seed/polyseed/ko.json
coins/monero/src/wallet/seed/polyseed/pt.json
coins/monero/src/wallet/seed/polyseed/zh_simplified.json
Some files were not shown because too many files have changed in this diff.