Mirror of https://github.com/serai-dex/serai.git, synced 2025-12-08 12:19:24 +00:00

Compare commits: aggressive...undroppabl (1165 commits)
(Commit list: ce3b90541e through 93b1656f86. The mirror view preserves only the SHA1 column, with no author, message, or date metadata, so the per-commit table is omitted.)
.github/actions/bitcoin/action.yml (13 changes, vendored)

@@ -5,14 +5,14 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: 24.0.1
+    default: "27.0"

 runs:
   using: "composite"
   steps:
     - name: Bitcoin Daemon Cache
       id: cache-bitcoind
-      uses: actions/cache@v3
+      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
       with:
         path: bitcoin.tar.gz
         key: bitcoind-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
@@ -37,11 +37,4 @@ runs:

     - name: Bitcoin Regtest Daemon
       shell: bash
-      run: |
-        RPC_USER=serai
-        RPC_PASS=seraidex
-
-        bitcoind -txindex -regtest \
-          -rpcuser=$RPC_USER -rpcpassword=$RPC_PASS \
-          -rpcbind=127.0.0.1 -rpcbind=$(hostname) -rpcallowip=0.0.0.0/0 \
-          -daemon
+      run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/bitcoin/run.sh -txindex -daemon

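The updated step starts bitcoind through `orchestration/dev/networks/bitcoin/run.sh` instead of the inline invocation the diff removes. A minimal smoke test against the resulting regtest daemon, assuming the orchestration script keeps the `serai`/`seraidex` RPC credentials from the deleted lines and bitcoind's default regtest RPC port:

```sh
# Hypothetical check that the detached regtest daemon answers RPC.
# serai/seraidex comes from the removed inline invocation; 18443 is
# bitcoind's default regtest RPC port, not stated in the diff.
bitcoin-cli -regtest -rpcconnect=127.0.0.1 -rpcport=18443 \
  -rpcuser=serai -rpcpassword=seraidex getblockchaininfo
```
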
.github/actions/build-dependencies/action.yml (68 changes, vendored)

@@ -1,43 +1,49 @@
 name: build-dependencies
 description: Installs build dependencies for Serai

-inputs:
-  github-token:
-    description: "GitHub token to install Protobuf with"
-    require: true
-    default:
-
-  rust-toolchain:
-    description: "Rust toolchain to install"
-    required: false
-    default: stable
-
-  rust-components:
-    description: "Rust components to install"
-    required: false
-    default:
-
 runs:
   using: "composite"
   steps:
-    - name: Install Protobuf
-      uses: arduino/setup-protoc@v2.0.0
-      with:
-        repo-token: ${{ inputs.github-token }}
+    - name: Remove unused packages
+      shell: bash
+      run: |
+        sudo apt remove -y "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
+        sudo apt remove -y "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
+        sudo apt remove -y "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
+        sudo apt remove -y "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
+        sudo apt autoremove -y
+        sudo apt clean
+        docker system prune -a --volumes
+      if: runner.os == 'Linux'
+
+    - name: Remove unused packages
+      shell: bash
+      run: |
+        (gem uninstall -aIx) || (exit 0)
+        brew uninstall --force "*msbuild*" "*powershell*" "*nuget*" "*bazel*" "*ansible*" "*terraform*" "*heroku*" "*aws*" azure-cli
+        brew uninstall --force "*nodejs*" "*npm*" "*yarn*" "*java*" "*kotlin*" "*golang*" "*swift*" "*julia*" "*fortran*" "*android*"
+        brew uninstall --force "*apache2*" "*nginx*" "*firefox*" "*chromium*" "*chrome*" "*edge*"
+        brew uninstall --force "*qemu*" "*sql*" "*texinfo*" "*imagemagick*"
+        brew cleanup
+      if: runner.os == 'macOS'
+
+    - name: Install dependencies
+      shell: bash
+      run: |
+        if [ "$RUNNER_OS" == "Linux" ]; then
+          sudo apt install -y ca-certificates protobuf-compiler
+        elif [ "$RUNNER_OS" == "Windows" ]; then
+          choco install protoc
+        elif [ "$RUNNER_OS" == "macOS" ]; then
+          brew install protobuf
+        fi

     - name: Install solc
       shell: bash
       run: |
-        pip3 install solc-select==0.2.1
-        solc-select install 0.8.16
-        solc-select use 0.8.16
-
-    - name: Install Rust
-      uses: dtolnay/rust-toolchain@master
-      with:
-        toolchain: ${{ inputs.rust-toolchain }}
-        components: ${{ inputs.rust-components }}
-        targets: wasm32-unknown-unknown, riscv32imac-unknown-none-elf
+        cargo install svm-rs
+        svm install 0.8.26
+        svm use 0.8.26

     # - name: Cache Rust
-    #   uses: Swatinem/rust-cache@v2
+    #   uses: Swatinem/rust-cache@a95ba195448af2da9b00fb742d14ffaaf3c21f43

.github/actions/monero-wallet-rpc/action.yml (11 changes, vendored)

@@ -5,14 +5,14 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: v0.18.2.0
+    default: v0.18.3.4

 runs:
   using: "composite"
   steps:
     - name: Monero Wallet RPC Cache
       id: cache-monero-wallet-rpc
-      uses: actions/cache@v3
+      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
       with:
         path: monero-wallet-rpc
         key: monero-wallet-rpc-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}
@@ -41,4 +41,9 @@ runs:

     - name: Monero Wallet RPC
       shell: bash
-      run: ./monero-wallet-rpc --disable-rpc-login --rpc-bind-port 6061 --allow-mismatched-daemon-version --wallet-dir ./ --detach
+      run: |
+        ./monero-wallet-rpc --allow-mismatched-daemon-version \
+          --daemon-address 0.0.0.0:18081 --daemon-login serai:seraidex \
+          --disable-rpc-login --rpc-bind-port 18082 \
+          --wallet-dir ./ \
+          --detach

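The rewritten step points the wallet RPC at a daemon on `0.0.0.0:18081` with the `serai:seraidex` login, while leaving its own RPC unauthenticated on port 18082. A minimal liveness check against the detached process, with only the port taken from the diff (`get_version` is a standard monero-wallet-rpc JSON-RPC method):

```sh
# Unauthenticated JSON-RPC call to the wallet RPC this step detaches.
curl -s http://127.0.0.1:18082/json_rpc \
  -H 'Content-Type: application/json' \
  -d '{"jsonrpc":"2.0","id":"0","method":"get_version"}'
```
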
.github/actions/monero/action.yml (12 changes, vendored)

@@ -5,16 +5,16 @@ inputs:
   version:
     description: "Version to download and run"
     required: false
-    default: v0.18.2.0
+    default: v0.18.3.4

 runs:
   using: "composite"
   steps:
     - name: Monero Daemon Cache
       id: cache-monerod
-      uses: actions/cache@v3
+      uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
       with:
-        path: monerod
+        path: /usr/bin/monerod
         key: monerod-${{ runner.os }}-${{ runner.arch }}-${{ inputs.version }}

     - name: Download the Monero Daemon
@@ -37,8 +37,10 @@ runs:
         wget https://downloads.getmonero.org/cli/$FILE
         tar -xvf $FILE

-        mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod monerod
+        sudo mv monero-x86_64-linux-gnu-${{ inputs.version }}/monerod /usr/bin/monerod
+        sudo chmod 777 /usr/bin/monerod
+        sudo chmod +x /usr/bin/monerod

     - name: Monero Regtest Daemon
       shell: bash
-      run: ./monerod --regtest --offline --fixed-difficulty=1 --detach
+      run: PATH=$PATH:/usr/bin ./orchestration/dev/networks/monero/run.sh --detach

.github/actions/test-dependencies/action.yml (16 changes, vendored)

@@ -2,33 +2,27 @@ name: test-dependencies
 description: Installs test dependencies for Serai

 inputs:
-  github-token:
-    description: "GitHub token to install Protobuf with"
-    require: true
-    default:
-
   monero-version:
     description: "Monero version to download and run as a regtest node"
     required: false
-    default: v0.18.2.0
+    default: v0.18.3.4

   bitcoin-version:
     description: "Bitcoin version to download and run as a regtest node"
     required: false
-    default: 24.0.1
+    default: "27.1"

 runs:
   using: "composite"
   steps:
     - name: Install Build Dependencies
       uses: ./.github/actions/build-dependencies
-      with:
-        github-token: ${{ inputs.github-token }}

     - name: Install Foundry
-      uses: foundry-rs/foundry-toolchain@v1
+      uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
       with:
-        version: nightly
+        version: nightly-f625d0fa7c51e65b4bf1e8f7931cd1c6e2e285e9
+        cache: false

     - name: Run a Monero Regtest Node
       uses: ./.github/actions/monero

.github/nightly-version (2 changes, vendored)

@@ -1 +1 @@
-nightly-2023-07-01
+nightly-2024-07-01

.github/workflows/common-tests.yml (new file, 34 lines, vendored)

@@ -0,0 +1,34 @@
+name: common/ Tests
+
+on:
+  push:
+    branches:
+      - develop
+    paths:
+      - "common/**"
+
+  pull_request:
+    paths:
+      - "common/**"
+
+  workflow_dispatch:
+
+jobs:
+  test-common:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Tests
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
+            -p std-shims \
+            -p zalloc \
+            -p patchable-async-sleep \
+            -p serai-db \
+            -p serai-env \
+            -p serai-task \
+            -p simple-request

.github/workflows/coordinator-tests.yml (new file, 40 lines, vendored)

@@ -0,0 +1,40 @@
+name: Coordinator Tests
+
+on:
+  push:
+    branches:
+      - develop
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "message-queue/**"
+      - "coordinator/**"
+      - "orchestration/**"
+      - "tests/docker/**"
+      - "tests/coordinator/**"
+
+  pull_request:
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "message-queue/**"
+      - "coordinator/**"
+      - "orchestration/**"
+      - "tests/docker/**"
+      - "tests/coordinator/**"
+
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Install Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run coordinator Docker tests
+        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-coordinator-tests

.github/workflows/crypto-tests.yml (new file, 44 lines, vendored)

@@ -0,0 +1,44 @@
+name: crypto/ Tests
+
+on:
+  push:
+    branches:
+      - develop
+    paths:
+      - "common/**"
+      - "crypto/**"
+
+  pull_request:
+    paths:
+      - "common/**"
+      - "crypto/**"
+
+  workflow_dispatch:
+
+jobs:
+  test-crypto:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Tests
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
+            -p flexible-transcript \
+            -p ff-group-tests \
+            -p dalek-ff-group \
+            -p minimal-ed448 \
+            -p ciphersuite \
+            -p multiexp \
+            -p schnorr-signatures \
+            -p dleq \
+            -p generalized-bulletproofs \
+            -p generalized-bulletproofs-circuit-abstraction \
+            -p ec-divisors \
+            -p generalized-bulletproofs-ec-gadgets \
+            -p dkg \
+            -p modular-frost \
+            -p frost-schnorrkel

.github/workflows/daily-deny.yml (7 changes, vendored)

@@ -9,17 +9,14 @@ jobs:
     name: Run cargo deny
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Advisory Cache
-        uses: actions/cache@v3
+        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
         with:
           path: ~/.cargo/advisory-db
           key: rust-advisory-db

-      - name: Install cargo
-        uses: dtolnay/rust-toolchain@stable
-
       - name: Install cargo deny
         run: cargo install --locked cargo-deny

.github/workflows/full-stack-tests.yml (new file, 22 lines, vendored)

@@ -0,0 +1,22 @@
+name: Full Stack Tests
+
+on:
+  push:
+    branches:
+      - develop
+
+  pull_request:
+
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Install Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Full Stack Docker tests
+        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-full-stack-tests

.github/workflows/lint.yml (new file, 114 lines, vendored)

@@ -0,0 +1,114 @@
+name: Lint
+
+on:
+  push:
+    branches:
+      - develop
+  pull_request:
+  workflow_dispatch:
+
+jobs:
+  clippy:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-13, macos-14, windows-latest]
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Get nightly version to use
+        id: nightly
+        shell: bash
+        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
+
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Install nightly rust
+        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -t wasm32-unknown-unknown -c clippy
+
+      - name: Run Clippy
+        run: cargo +${{ steps.nightly.outputs.version }} clippy --all-features --all-targets -- -D warnings -A clippy::items_after_test_module
+
+      # Also verify the lockfile isn't dirty
+      # This happens when someone edits a Cargo.toml yet doesn't do anything
+      # which causes the lockfile to be updated
+      # The above clippy run will cause it to be updated, so checking there's
+      # no differences present now performs the desired check
+      - name: Verify lockfile
+        shell: bash
+        run: git diff | wc -l | LC_ALL="en_US.utf8" grep -x -e "^[ ]*0"
+
+  deny:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Advisory Cache
+        uses: actions/cache@13aacd865c20de90d75de3b17ebe84f7a17d57d2
+        with:
+          path: ~/.cargo/advisory-db
+          key: rust-advisory-db
+
+      - name: Install cargo deny
+        run: cargo install --locked cargo-deny
+
+      - name: Run cargo deny
+        run: cargo deny -L error --all-features check
+
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Get nightly version to use
+        id: nightly
+        shell: bash
+        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
+
+      - name: Install nightly rust
+        run: rustup toolchain install ${{ steps.nightly.outputs.version }} --profile minimal -c rustfmt
+
+      - name: Run rustfmt
+        run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
+
+      - name: Install foundry
+        uses: foundry-rs/foundry-toolchain@8f1998e9878d786675189ef566a2e4bf24869773
+        with:
+          version: nightly-41d4e5437107f6f42c7711123890147bc736a609
+          cache: false
+
+      - name: Run forge fmt
+        run: FOUNDRY_FMT_SORT_INPUTS=false FOUNDRY_FMT_LINE_LENGTH=100 FOUNDRY_FMT_TAB_WIDTH=2 FOUNDRY_FMT_BRACKET_SPACING=true FOUNDRY_FMT_INT_TYPES=preserve forge fmt --check $(find . -iname "*.sol")
+
+  machete:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - name: Verify all dependencies are in use
+        run: |
+          cargo install cargo-machete
+          cargo machete
+
+  slither:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+      - name: Slither
+        run: |
+          python3 -m pip install solc-select
+          solc-select install 0.8.26
+          solc-select use 0.8.26
+
+          python3 -m pip install slither-analyzer
+
+          slither --include-paths ./networks/ethereum/schnorr/contracts/Schnorr.sol
+          slither --include-paths ./networks/ethereum/schnorr/contracts ./networks/ethereum/schnorr/contracts/tests/Schnorr.sol
+          slither processor/ethereum/deployer/contracts/Deployer.sol
+          slither processor/ethereum/erc20/contracts/IERC20.sol
+
+          cp networks/ethereum/schnorr/contracts/Schnorr.sol processor/ethereum/router/contracts/
+          cp processor/ethereum/erc20/contracts/IERC20.sol processor/ethereum/router/contracts/
+          cd processor/ethereum/router/contracts
+          slither Router.sol

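The `Verify lockfile` step in the clippy job above leans on a side effect: the preceding Clippy run resolves dependencies and rewrites a stale `Cargo.lock`, so a `Cargo.toml` edit made without updating the lockfile leaves the tree dirty, `git diff` produces output, and the `grep` for a lone `0` line count fails the job. A sketch of the same check run locally, with `cargo check` standing in for the Clippy invocation (an assumption; any command that resolves the workspace's dependencies would do):

```sh
# A dependency-resolving build rewrites a stale Cargo.lock; a clean tree
# afterwards shows the committed lockfile was already up to date.
cargo check --all-features
git diff | wc -l | LC_ALL="en_US.utf8" grep -x -e "^[ ]*0"
```
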
.github/workflows/message-queue-tests.yml (new file, 36 lines, vendored)

@@ -0,0 +1,36 @@
+name: Message Queue Tests
+
+on:
+  push:
+    branches:
+      - develop
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "message-queue/**"
+      - "orchestration/**"
+      - "tests/docker/**"
+      - "tests/message-queue/**"
+
+  pull_request:
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "message-queue/**"
+      - "orchestration/**"
+      - "tests/docker/**"
+      - "tests/message-queue/**"
+
+  workflow_dispatch:
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Install Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run message-queue Docker tests
+        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-message-queue-tests

.github/workflows/mini-tests.yml (new file, 26 lines, vendored)

@@ -0,0 +1,26 @@
+name: mini/ Tests
+
+on:
+  push:
+    branches:
+      - develop
+    paths:
+      - "mini/**"
+
+  pull_request:
+    paths:
+      - "mini/**"
+
+  workflow_dispatch:
+
+jobs:
+  test-common:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Tests
+        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p mini-serai

.github/workflows/monero-tests.yaml (48 changes, vendored)

@@ -5,28 +5,43 @@ on:
     branches:
       - develop
     paths:
-      - "coins/monero/**"
+      - "networks/monero/**"
       - "processor/**"

   pull_request:
     paths:
-      - "coins/monero/**"
+      - "networks/monero/**"
       - "processor/**"

+  workflow_dispatch:
+
 jobs:
   # Only run these once since they will be consistent regardless of any node
   unit-tests:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Test Dependencies
         uses: ./.github/actions/test-dependencies
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}

       - name: Run Unit Tests Without Features
-        run: cargo test --package monero-serai --lib
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-io --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-generators --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-primitives --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-mlsag --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-clsag --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-borromean --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-bulletproofs --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-rpc --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-address --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-seed --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package polyseed --lib
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --lib

   # Doesn't run unit tests with features as the tests workflow will

@@ -35,25 +50,28 @@ jobs:
     # Test against all supported protocol versions
     strategy:
       matrix:
-        version: [v0.17.3.2, v0.18.2.0]
+        version: [v0.17.3.2, v0.18.3.4]

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Test Dependencies
         uses: ./.github/actions/test-dependencies
         with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
           monero-version: ${{ matrix.version }}

       - name: Run Integration Tests Without Features
-        # Runs with the binaries feature so the binaries build
-        # https://github.com/rust-lang/cargo/issues/8396
-        run: cargo test --package monero-serai --features binaries --test '*'
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --test '*'

       - name: Run Integration Tests
         # Don't run if the the tests workflow also will
-        if: ${{ matrix.version != 'v0.18.2.0' }}
+        if: ${{ matrix.version != 'v0.18.3.4' }}
         run: |
-          cargo test --package monero-serai --all-features --test '*'
-          cargo test --package serai-processor --all-features monero
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-serai --all-features --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-simple-request-rpc --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet --all-features --test '*'
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --package monero-wallet-util --all-features --test '*'

.github/workflows/monthly-nightly-update.yml (4 changes, vendored)

@@ -9,7 +9,7 @@ jobs:
     name: Update nightly
     runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
        with:
          submodules: "recursive"

@@ -28,7 +28,7 @@ jobs:
         git push -u origin $(date +"nightly-%Y-%m")

       - name: Pull Request
-        uses: actions/github-script@v6
+        uses: actions/github-script@d7906e4ad0b1822421a7e6a35d5ca353c962f410
         with:
           script: |
             const { repo, owner } = context.repo;

259
.github/workflows/msrv.yml
vendored
Normal file
259
.github/workflows/msrv.yml
vendored
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
name: Weekly MSRV Check
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 0 * * 0"
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
msrv-common:
|
||||||
|
name: Run cargo msrv on common
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||||
|
|
||||||
|
- name: Install Build Dependencies
|
||||||
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
|
- name: Install cargo msrv
|
||||||
|
run: cargo install --locked cargo-msrv
|
||||||
|
|
||||||
|
- name: Run cargo msrv on common
|
||||||
|
run: |
|
||||||
|
cargo msrv verify --manifest-path common/zalloc/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/std-shims/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/env/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/db/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/task/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/request/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path common/patchable-async-sleep/Cargo.toml
|
||||||
|
|
||||||
|
msrv-crypto:
|
||||||
|
name: Run cargo msrv on crypto
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
|
||||||
|
|
||||||
|
- name: Install Build Dependencies
|
||||||
|
uses: ./.github/actions/build-dependencies
|
||||||
|
|
||||||
|
- name: Install cargo msrv
|
||||||
|
run: cargo install --locked cargo-msrv
|
||||||
|
|
||||||
|
- name: Run cargo msrv on crypto
|
||||||
|
run: |
|
||||||
|
cargo msrv verify --manifest-path crypto/transcript/Cargo.toml
|
||||||
|
|
||||||
|
cargo msrv verify --manifest-path crypto/ff-group-tests/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/dalek-ff-group/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/ed448/Cargo.toml
|
||||||
|
|
||||||
|
cargo msrv verify --manifest-path crypto/multiexp/Cargo.toml
|
||||||
|
|
||||||
|
cargo msrv verify --manifest-path crypto/dleq/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/ciphersuite/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/schnorr/Cargo.toml
|
||||||
|
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/generalized-bulletproofs/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/circuit-abstraction/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/divisors/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/ec-gadgets/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/embedwards25519/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/evrf/secq256k1/Cargo.toml
|
||||||
|
|
||||||
|
cargo msrv verify --manifest-path crypto/dkg/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/frost/Cargo.toml
|
||||||
|
cargo msrv verify --manifest-path crypto/schnorrkel/Cargo.toml
|
||||||
|
|
||||||
|
  msrv-networks:
    name: Run cargo msrv on networks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on networks
        run: |
          cargo msrv verify --manifest-path networks/bitcoin/Cargo.toml

          cargo msrv verify --manifest-path networks/ethereum/build-contracts/Cargo.toml
          cargo msrv verify --manifest-path networks/ethereum/schnorr/Cargo.toml
          cargo msrv verify --manifest-path networks/ethereum/alloy-simple-request-transport/Cargo.toml
          cargo msrv verify --manifest-path networks/ethereum/relayer/Cargo.toml --features parity-db

          cargo msrv verify --manifest-path networks/monero/io/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/generators/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/primitives/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/ringct/mlsag/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/ringct/clsag/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/ringct/borromean/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/ringct/bulletproofs/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/rpc/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/rpc/simple-request/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/wallet/address/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/wallet/Cargo.toml
          cargo msrv verify --manifest-path networks/monero/verify-chain/Cargo.toml
  msrv-message-queue:
    name: Run cargo msrv on message-queue
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on message-queue
        run: |
          cargo msrv verify --manifest-path message-queue/Cargo.toml --features parity-db
  msrv-processor:
    name: Run cargo msrv on processor
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on processor
        run: |
          cargo msrv verify --manifest-path processor/view-keys/Cargo.toml

          cargo msrv verify --manifest-path processor/primitives/Cargo.toml
          cargo msrv verify --manifest-path processor/messages/Cargo.toml

          cargo msrv verify --manifest-path processor/scanner/Cargo.toml

          cargo msrv verify --manifest-path processor/scheduler/primitives/Cargo.toml
          cargo msrv verify --manifest-path processor/scheduler/smart-contract/Cargo.toml
          cargo msrv verify --manifest-path processor/scheduler/utxo/primitives/Cargo.toml
          cargo msrv verify --manifest-path processor/scheduler/utxo/standard/Cargo.toml
          cargo msrv verify --manifest-path processor/scheduler/utxo/transaction-chaining/Cargo.toml

          cargo msrv verify --manifest-path processor/key-gen/Cargo.toml
          cargo msrv verify --manifest-path processor/frost-attempt-manager/Cargo.toml
          cargo msrv verify --manifest-path processor/signers/Cargo.toml
          cargo msrv verify --manifest-path processor/bin/Cargo.toml --features parity-db

          cargo msrv verify --manifest-path processor/bitcoin/Cargo.toml

          cargo msrv verify --manifest-path processor/ethereum/primitives/Cargo.toml
          cargo msrv verify --manifest-path processor/ethereum/test-primitives/Cargo.toml
          cargo msrv verify --manifest-path processor/ethereum/erc20/Cargo.toml
          cargo msrv verify --manifest-path processor/ethereum/deployer/Cargo.toml
          cargo msrv verify --manifest-path processor/ethereum/router/Cargo.toml
          cargo msrv verify --manifest-path processor/ethereum/Cargo.toml

          cargo msrv verify --manifest-path processor/monero/Cargo.toml
  msrv-coordinator:
    name: Run cargo msrv on coordinator
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on coordinator
        run: |
          cargo msrv verify --manifest-path coordinator/tributary-sdk/tendermint/Cargo.toml
          cargo msrv verify --manifest-path coordinator/tributary-sdk/Cargo.toml
          cargo msrv verify --manifest-path coordinator/cosign/Cargo.toml
          cargo msrv verify --manifest-path coordinator/substrate/Cargo.toml
          cargo msrv verify --manifest-path coordinator/tributary/Cargo.toml
          cargo msrv verify --manifest-path coordinator/p2p/Cargo.toml
          cargo msrv verify --manifest-path coordinator/p2p/libp2p/Cargo.toml
          cargo msrv verify --manifest-path coordinator/Cargo.toml
  msrv-substrate:
    name: Run cargo msrv on substrate
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on substrate
        run: |
          cargo msrv verify --manifest-path substrate/primitives/Cargo.toml

          cargo msrv verify --manifest-path substrate/coins/primitives/Cargo.toml
          cargo msrv verify --manifest-path substrate/coins/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/dex/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/economic-security/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/genesis-liquidity/primitives/Cargo.toml
          cargo msrv verify --manifest-path substrate/genesis-liquidity/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/in-instructions/primitives/Cargo.toml
          cargo msrv verify --manifest-path substrate/in-instructions/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/validator-sets/pallet/Cargo.toml
          cargo msrv verify --manifest-path substrate/validator-sets/primitives/Cargo.toml

          cargo msrv verify --manifest-path substrate/emissions/primitives/Cargo.toml
          cargo msrv verify --manifest-path substrate/emissions/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/signals/primitives/Cargo.toml
          cargo msrv verify --manifest-path substrate/signals/pallet/Cargo.toml

          cargo msrv verify --manifest-path substrate/abi/Cargo.toml
          cargo msrv verify --manifest-path substrate/client/Cargo.toml

          cargo msrv verify --manifest-path substrate/runtime/Cargo.toml
          cargo msrv verify --manifest-path substrate/node/Cargo.toml
  msrv-orchestration:
    name: Run cargo msrv on orchestration
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on orchestration
        run: |
          cargo msrv verify --manifest-path orchestration/Cargo.toml
  msrv-mini:
    name: Run cargo msrv on mini
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Install cargo msrv
        run: cargo install --locked cargo-msrv

      - name: Run cargo msrv on mini
        run: |
          cargo msrv verify --manifest-path mini/Cargo.toml
52  .github/workflows/networks-tests.yml  vendored  Normal file
@@ -0,0 +1,52 @@
name: networks/ Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"

  workflow_dispatch:

jobs:
  test-networks:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Test Dependencies
        uses: ./.github/actions/test-dependencies

      - name: Run Tests
        run: |
          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
            -p bitcoin-serai \
            -p build-solidity-contracts \
            -p ethereum-schnorr-contract \
            -p alloy-simple-request-transport \
            -p serai-ethereum-relayer \
            -p monero-io \
            -p monero-generators \
            -p monero-primitives \
            -p monero-mlsag \
            -p monero-clsag \
            -p monero-borromean \
            -p monero-bulletproofs \
            -p monero-serai \
            -p monero-rpc \
            -p monero-simple-request-rpc \
            -p monero-address \
            -p monero-wallet \
            -p monero-seed \
            -p polyseed \
            -p monero-wallet-util \
            -p monero-serai-verify-chain
22  .github/workflows/no-std.yml  vendored
@@ -4,18 +4,32 @@ on:
   push:
     branches:
       - develop
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "tests/no-std/**"

   pull_request:
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "tests/no-std/**"
+
+  workflow_dispatch:

 jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

       - name: Install Build Dependencies
         uses: ./.github/actions/build-dependencies
-        with:
-          github-token: ${{ inputs.github-token }}
+
+      - name: Install RISC-V Toolchain
+        run: sudo apt update && sudo apt install -y gcc-riscv64-unknown-elf gcc-multilib && rustup target add riscv32imac-unknown-none-elf

       - name: Verify no-std builds
-        run: cd tests/no-std && cargo build --target riscv32imac-unknown-none-elf
+        run: CFLAGS=-I/usr/include cargo build --target riscv32imac-unknown-none-elf -p serai-no-std-tests
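For context on the step above: the job cross-compiles for riscv32imac-unknown-none-elf, a bare-metal target with no operating system, so the build only succeeds if every crate in the dependency graph is genuinely no_std. A minimal sketch of the shape of such a crate root (hypothetical, not the actual contents of tests/no-std):

#![no_std]

use core::panic::PanicInfo;

// Without std there is no default panic runtime; a bare-metal target must
// supply its own panic handler, here an infinite loop.
#[panic_handler]
fn panic(_info: &PanicInfo) -> ! {
  loop {}
}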
90  .github/workflows/pages.yml  vendored  Normal file
@@ -0,0 +1,90 @@
# MIT License
#
# Copyright (c) 2022 just-the-docs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

# Sample workflow for building and deploying a Jekyll site to GitHub Pages
name: Deploy Jekyll site to Pages

on:
  push:
    branches:
      - "develop"
    paths:
      - "docs/**"

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow one concurrent deployment
concurrency:
  group: "pages"
  cancel-in-progress: true

jobs:
  # Build job
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: docs
    steps:
      - name: Checkout
        uses: actions/checkout@v3
      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          bundler-cache: true
          cache-version: 0
          working-directory: "${{ github.workspace }}/docs"
      - name: Setup Pages
        id: pages
        uses: actions/configure-pages@v3
      - name: Build with Jekyll
        run: bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}"
        env:
          JEKYLL_ENV: production
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v1
        with:
          path: "docs/_site/"

  # Deployment job
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
40  .github/workflows/processor-tests.yml  vendored  Normal file
@@ -0,0 +1,40 @@
name: Processor Tests

on:
  push:
    branches:
      - develop
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/processor/**"

  pull_request:
    paths:
      - "common/**"
      - "crypto/**"
      - "networks/**"
      - "message-queue/**"
      - "processor/**"
      - "orchestration/**"
      - "tests/docker/**"
      - "tests/processor/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run processor Docker tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-processor-tests
36  .github/workflows/reproducible-runtime.yml  vendored  Normal file
@@ -0,0 +1,36 @@
name: Reproducible Runtime

on:
  push:
    branches:
      - develop
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  pull_request:
    paths:
      - "Cargo.lock"
      - "common/**"
      - "crypto/**"
      - "substrate/**"
      - "orchestration/runtime/**"
      - "tests/reproducible-runtime/**"

  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

      - name: Install Build Dependencies
        uses: ./.github/actions/build-dependencies

      - name: Run Reproducible Runtime tests
        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-reproducible-runtime-tests
151  .github/workflows/tests.yml  vendored
@@ -4,82 +4,109 @@ on:
   push:
     branches:
       - develop
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "message-queue/**"
+      - "processor/**"
+      - "coordinator/**"
+      - "substrate/**"

   pull_request:
+    paths:
+      - "common/**"
+      - "crypto/**"
+      - "networks/**"
+      - "message-queue/**"
+      - "processor/**"
+      - "coordinator/**"
+      - "substrate/**"

   workflow_dispatch:

 jobs:
-  clippy:
+  test-infra:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

-      - name: Get nightly version to use
-        id: nightly
-        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
-
       - name: Build Dependencies
         uses: ./.github/actions/build-dependencies
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-          rust-toolchain: ${{ steps.nightly.outputs.version }}
-          rust-components: clippy
-
-      - name: Run Clippy
-        # Allow dbg_macro when run locally, yet not when pushed
-        run: cargo clippy --all-features --all-targets -- -D clippy::dbg_macro $(grep "\S" ../../clippy-config | grep -v "#")
-
-  deny:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Advisory Cache
-        uses: actions/cache@v3
-        with:
-          path: ~/.cargo/advisory-db
-          key: rust-advisory-db
-
-      - name: Install cargo
-        uses: dtolnay/rust-toolchain@stable
-
-      - name: Install cargo deny
-        run: cargo install --locked cargo-deny
-
-      - name: Run cargo deny
-        run: cargo deny -L error --all-features check
-
-  test:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Test Dependencies
-        uses: ./.github/actions/test-dependencies
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Build node
-        run: |
-          cd substrate/node
-          cargo build

       - name: Run Tests
-        run: GITHUB_CI=true cargo test --all-features
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
+            -p serai-message-queue \
+            -p serai-processor-messages \
+            -p serai-processor-key-gen \
+            -p serai-processor-view-keys \
+            -p serai-processor-frost-attempt-manager \
+            -p serai-processor-primitives \
+            -p serai-processor-scanner \
+            -p serai-processor-scheduler-primitives \
+            -p serai-processor-utxo-scheduler-primitives \
+            -p serai-processor-utxo-scheduler \
+            -p serai-processor-transaction-chaining-scheduler \
+            -p serai-processor-smart-contract-scheduler \
+            -p serai-processor-signers \
+            -p serai-processor-bin \
+            -p serai-bitcoin-processor \
+            -p serai-processor-ethereum-primitives \
+            -p serai-processor-ethereum-test-primitives \
+            -p serai-processor-ethereum-deployer \
+            -p serai-processor-ethereum-router \
+            -p serai-processor-ethereum-erc20 \
+            -p serai-ethereum-processor \
+            -p serai-monero-processor \
+            -p tendermint-machine \
+            -p tributary-sdk \
+            -p serai-cosign \
+            -p serai-coordinator-substrate \
+            -p serai-coordinator-tributary \
+            -p serai-coordinator-p2p \
+            -p serai-coordinator-libp2p-p2p \
+            -p serai-coordinator \
+            -p serai-orchestrator \
+            -p serai-docker-tests

-  fmt:
+  test-substrate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac

-      - name: Get nightly version to use
-        id: nightly
-        run: echo "version=$(cat .github/nightly-version)" >> $GITHUB_OUTPUT
-
-      - name: Install rustfmt
-        uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: ${{ steps.nightly.outputs.version }}
-          components: rustfmt
-
-      - name: Run rustfmt
-        run: cargo +${{ steps.nightly.outputs.version }} fmt -- --check
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Tests
+        run: |
+          GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features \
+            -p serai-primitives \
+            -p serai-coins-primitives \
+            -p serai-coins-pallet \
+            -p serai-dex-pallet \
+            -p serai-validator-sets-primitives \
+            -p serai-validator-sets-pallet \
+            -p serai-genesis-liquidity-primitives \
+            -p serai-genesis-liquidity-pallet \
+            -p serai-emissions-primitives \
+            -p serai-emissions-pallet \
+            -p serai-economic-security-pallet \
+            -p serai-in-instructions-primitives \
+            -p serai-in-instructions-pallet \
+            -p serai-signals-primitives \
+            -p serai-signals-pallet \
+            -p serai-abi \
+            -p serai-runtime \
+            -p serai-node
+
+  test-serai-client:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac
+
+      - name: Build Dependencies
+        uses: ./.github/actions/build-dependencies
+
+      - name: Run Tests
+        run: GITHUB_CI=true RUST_BACKTRACE=1 cargo test --all-features -p serai-client
5  .gitignore  vendored
@@ -1,2 +1,7 @@
 target
+Dockerfile
+Dockerfile.fast-epoch
+!orchestration/runtime/Dockerfile
+.test-logs
+
 .vscode
@@ -1,3 +1,4 @@
+edition = "2021"
 tab_spaces = 2

 max_width = 100
9893  Cargo.lock  generated
File diff suppressed because it is too large
216  Cargo.toml
@@ -1,8 +1,27 @@
 [workspace]
+resolver = "2"
 members = [
+  # Version patches
+  "patches/parking_lot_core",
+  "patches/parking_lot",
+  "patches/zstd",
+  "patches/rocksdb",
+
+  # std patches
+  "patches/matches",
+  "patches/is-terminal",
+
+  # Rewrites/redirects
+  "patches/option-ext",
+  "patches/directories-next",
+
   "common/std-shims",
   "common/zalloc",
+  "common/patchable-async-sleep",
   "common/db",
+  "common/env",
+  "common/task",
+  "common/request",

   "crypto/transcript",

@@ -12,61 +31,234 @@ members = [
   "crypto/ciphersuite",

   "crypto/multiexp",

   "crypto/schnorr",
   "crypto/dleq",

+  "crypto/evrf/secq256k1",
+  "crypto/evrf/embedwards25519",
+  "crypto/evrf/generalized-bulletproofs",
+  "crypto/evrf/circuit-abstraction",
+  "crypto/evrf/divisors",
+  "crypto/evrf/ec-gadgets",
+
   "crypto/dkg",
   "crypto/frost",
   "crypto/schnorrkel",

-  "coins/ethereum",
-  "coins/monero/generators",
-  "coins/monero",
+  "networks/bitcoin",
+
+  "networks/ethereum/build-contracts",
+  "networks/ethereum/schnorr",
+  "networks/ethereum/alloy-simple-request-transport",
+  "networks/ethereum/relayer",
+
+  "networks/monero/io",
+  "networks/monero/generators",
+  "networks/monero/primitives",
+  "networks/monero/ringct/mlsag",
+  "networks/monero/ringct/clsag",
+  "networks/monero/ringct/borromean",
+  "networks/monero/ringct/bulletproofs",
+  "networks/monero",
+  "networks/monero/rpc",
+  "networks/monero/rpc/simple-request",
+  "networks/monero/wallet/address",
+  "networks/monero/wallet",
+  "networks/monero/wallet/seed",
+  "networks/monero/wallet/polyseed",
+  "networks/monero/wallet/util",
+  "networks/monero/verify-chain",

   "message-queue",

   "processor/messages",
-  "processor",
+  "processor/key-gen",
+  "processor/view-keys",
+  "processor/frost-attempt-manager",
+
+  "processor/primitives",
+  "processor/scanner",
+  "processor/scheduler/primitives",
+  "processor/scheduler/utxo/primitives",
+  "processor/scheduler/utxo/standard",
+  "processor/scheduler/utxo/transaction-chaining",
+  "processor/scheduler/smart-contract",
+  "processor/signers",
+
+  "processor/bin",
+  "processor/bitcoin",
+  "processor/ethereum/primitives",
+  "processor/ethereum/test-primitives",
+  "processor/ethereum/deployer",
+  "processor/ethereum/router",
+  "processor/ethereum/erc20",
+  "processor/ethereum",
+  "processor/monero",

-  "coordinator/tributary/tendermint",
+  "coordinator/tributary-sdk/tendermint",
+  "coordinator/tributary-sdk",
+  "coordinator/cosign",
+  "coordinator/substrate",
   "coordinator/tributary",
+  "coordinator/p2p",
+  "coordinator/p2p/libp2p",
   "coordinator",

   "substrate/primitives",

-  "substrate/tokens/primitives",
-  "substrate/tokens/pallet",
+  "substrate/coins/primitives",
+  "substrate/coins/pallet",
+
+  "substrate/dex/pallet",
+
+  "substrate/validator-sets/primitives",
+  "substrate/validator-sets/pallet",
+
+  "substrate/genesis-liquidity/primitives",
+  "substrate/genesis-liquidity/pallet",
+
+  "substrate/emissions/primitives",
+  "substrate/emissions/pallet",
+
+  "substrate/economic-security/pallet",

   "substrate/in-instructions/primitives",
   "substrate/in-instructions/pallet",

-  "substrate/validator-sets/primitives",
-  "substrate/validator-sets/pallet",
+  "substrate/signals/primitives",
+  "substrate/signals/pallet",
+
+  "substrate/abi",

   "substrate/runtime",
   "substrate/node",

   "substrate/client",

+  "orchestration",
+
+  "mini",
+
   "tests/no-std",
+
+  "tests/docker",
+  "tests/message-queue",
+  "tests/processor",
+  "tests/coordinator",
+  "tests/full-stack",
+  "tests/reproducible-runtime",
 ]

 # Always compile Monero (and a variety of dependencies) with optimizations due
 # to the extensive operations required for Bulletproofs
 [profile.dev.package]
 subtle = { opt-level = 3 }
-curve25519-dalek = { opt-level = 3 }

 ff = { opt-level = 3 }
 group = { opt-level = 3 }

 crypto-bigint = { opt-level = 3 }
+secp256k1 = { opt-level = 3 }
+curve25519-dalek = { opt-level = 3 }
 dalek-ff-group = { opt-level = 3 }
 minimal-ed448 = { opt-level = 3 }

 multiexp = { opt-level = 3 }

-monero-serai = { opt-level = 3 }
+secq256k1 = { opt-level = 3 }
+embedwards25519 = { opt-level = 3 }
+generalized-bulletproofs = { opt-level = 3 }
+generalized-bulletproofs-circuit-abstraction = { opt-level = 3 }
+ec-divisors = { opt-level = 3 }
+generalized-bulletproofs-ec-gadgets = { opt-level = 3 }
+
+dkg = { opt-level = 3 }
+
+monero-generators = { opt-level = 3 }
+monero-borromean = { opt-level = 3 }
+monero-bulletproofs = { opt-level = 3 }
+monero-mlsag = { opt-level = 3 }
+monero-clsag = { opt-level = 3 }

 [profile.release]
 panic = "unwind"
+
+[patch.crates-io]
+# https://github.com/rust-lang-nursery/lazy-static.rs/issues/201
+lazy_static = { git = "https://github.com/rust-lang-nursery/lazy-static.rs", rev = "5735630d46572f1e5377c8f2ba0f79d18f53b10c" }
+
+parking_lot_core = { path = "patches/parking_lot_core" }
+parking_lot = { path = "patches/parking_lot" }
+# wasmtime pulls in an old version for this
+zstd = { path = "patches/zstd" }
+# Needed for WAL compression
+rocksdb = { path = "patches/rocksdb" }
+
+# is-terminal now has an std-based solution with an equivalent API
+is-terminal = { path = "patches/is-terminal" }
+# So does matches
+matches = { path = "patches/matches" }
+
+# directories-next was created because directories was unmaintained
+# directories-next is now unmaintained while directories is maintained
+# The directories author pulls in ridiculously pointless crates and prefers
+# copyleft licenses
+# The following two patches resolve everything
+option-ext = { path = "patches/option-ext" }
+directories-next = { path = "patches/directories-next" }
+
+# The official pasta_curves repo doesn't support Zeroize
+pasta_curves = { git = "https://github.com/kayabaNerve/pasta_curves", rev = "a46b5be95cacbff54d06aad8d3bbcba42e05d616" }
+
+[workspace.lints.clippy]
+unwrap_or_default = "allow"
+map_unwrap_or = "allow"
+borrow_as_ptr = "deny"
+cast_lossless = "deny"
+cast_possible_truncation = "deny"
+cast_possible_wrap = "deny"
+cast_precision_loss = "deny"
+cast_ptr_alignment = "deny"
+cast_sign_loss = "deny"
+checked_conversions = "deny"
+cloned_instead_of_copied = "deny"
+enum_glob_use = "deny"
+expl_impl_clone_on_copy = "deny"
+explicit_into_iter_loop = "deny"
+explicit_iter_loop = "deny"
+flat_map_option = "deny"
+float_cmp = "deny"
+fn_params_excessive_bools = "deny"
+ignored_unit_patterns = "deny"
+implicit_clone = "deny"
+inefficient_to_string = "deny"
+invalid_upcast_comparisons = "deny"
+large_stack_arrays = "deny"
+linkedlist = "deny"
+macro_use_imports = "deny"
+manual_instant_elapsed = "deny"
+manual_let_else = "deny"
+manual_ok_or = "deny"
+manual_string_new = "deny"
+match_bool = "deny"
+match_same_arms = "deny"
+missing_fields_in_debug = "deny"
+needless_continue = "deny"
+needless_pass_by_value = "deny"
+ptr_cast_constness = "deny"
+range_minus_one = "deny"
+range_plus_one = "deny"
+redundant_closure_for_method_calls = "deny"
+redundant_else = "deny"
+string_add_assign = "deny"
+string_slice = "deny"
+unchecked_duration_subtraction = "deny"
+uninlined_format_args = "deny"
+unnecessary_box_returns = "deny"
+unnecessary_join = "deny"
+unnecessary_wraps = "deny"
+unnested_or_patterns = "deny"
+unused_async = "deny"
+unused_self = "deny"
+zero_sized_map_values = "deny"
36  README.md
@@ -5,26 +5,32 @@ Bitcoin, Ethereum, DAI, and Monero, offering a liquidity-pool-based trading
 experience. Funds are stored in an economically secured threshold-multisig
 wallet.

-[Getting Started](docs/Getting%20Started.md)
+[Getting Started](spec/Getting%20Started.md)

 ### Layout

 - `audits`: Audits for various parts of Serai.

-- `docs`: Documentation on the Serai protocol.
+- `spec`: The specification of the Serai protocol, both internally and as
+  networked.
+
+- `docs`: User-facing documentation on the Serai protocol.

 - `common`: Crates containing utilities common to a variety of areas under
   Serai, none neatly fitting under another category.

 - `crypto`: A series of composable cryptographic libraries built around the
-  `ff`/`group` APIs achieving a variety of tasks. These range from generic
+  `ff`/`group` APIs, achieving a variety of tasks. These range from generic
   infrastructure, to our IETF-compliant FROST implementation, to a DLEq proof as
   needed for Bitcoin-Monero atomic swaps.

-- `coins`: Various coin libraries intended for usage in Serai yet also by the
+- `networks`: Various libraries intended for usage in Serai yet also by the
   wider community. This means they will always support the functionality Serai
   needs, yet won't disadvantage other use cases when possible.

+- `message-queue`: An ordered message server so services can talk to each other,
+  even when the other is offline.
+
 - `processor`: A generic chain processor to process data for Serai and process
   events from Serai, executing transactions as expected and needed.

@@ -33,12 +39,28 @@ wallet.

 - `substrate`: Substrate crates used to instantiate the Serai network.

-- `deploy`: Scripts to deploy a Serai node/test environment.
+- `orchestration`: Dockerfiles and scripts to deploy a Serai node/test
+  environment.
+
+- `tests`: Tests for various crates. Generally, `crate/src/tests` is used, or
+  `crate/tests`, yet any tests requiring crates' binaries are placed here.
+
+### Security
+
+Serai hosts a bug bounty program via
+[Immunefi](https://immunefi.com/bounty/serai/). For in-scope critical
+vulnerabilities, we will reward whitehats with up to $30,000.
+
+Anything not in-scope should still be submitted through Immunefi, with rewards
+issued at the discretion of the Immunefi program managers.
+
 ### Links

+- [Website](https://serai.exchange/): https://serai.exchange/
+- [Immunefi](https://immunefi.com/bounty/serai/): https://immunefi.com/bounty/serai/
 - [Twitter](https://twitter.com/SeraiDEX): https://twitter.com/SeraiDEX
 - [Mastodon](https://cryptodon.lol/@serai): https://cryptodon.lol/@serai
 - [Discord](https://discord.gg/mpEUtJR3vz): https://discord.gg/mpEUtJR3vz
-- [Matrix](https://matrix.to/#/#serai:matrix.org):
-  https://matrix.to/#/#serai:matrix.org
+- [Matrix](https://matrix.to/#/#serai:matrix.org): https://matrix.to/#/#serai:matrix.org
+- [Reddit](https://www.reddit.com/r/SeraiDEX/): https://www.reddit.com/r/SeraiDEX/
+- [Telegram](https://t.me/SeraiDEX): https://t.me/SeraiDEX
BIN  audits/Cypher Stack networks bitcoin August 2023/Audit.pdf  Normal file
Binary file not shown.
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2022-2023 Luke Parker
+Copyright (c) 2023 Cypher Stack

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -0,0 +1,7 @@
# Cypher Stack /networks/bitcoin Audit, August 2023

This audit covered the `/networks/bitcoin` folder (at the time located at
`/coins/bitcoin`), encompassing everything up to commit
5121ca75199dff7bd34230880a1fdd793012068c.

Please see https://github.com/cypherstack/serai-btc-audit for provenance.
@@ -1,51 +0,0 @@
# No warnings allowed
-D warnings

# nursery
-D clippy::nursery
# Erratic and unhelpful
-A clippy::missing_const_for_fn
# Too many false/irrelevant positives
-A clippy::redundant_pub_crate
# Flags on any debug_assert using an RNG
-A clippy::debug_assert_with_mut_call
# Stylistic preference
-A clippy::option_if_let_else

# pedantic
-D clippy::unnecessary_wraps
-D clippy::unused_async
-D clippy::unused_self

# restrictions

# Safety
-D clippy::as_conversions
-D clippy::disallowed_script_idents
-D clippy::wildcard_enum_match_arm

# Clarity
-D clippy::assertions_on_result_states
-D clippy::deref_by_slicing
-D clippy::empty_structs_with_brackets
-D clippy::get_unwrap
-D clippy::rest_pat_in_fully_bound_structs
-D clippy::semicolon_inside_block
-D clippy::tests_outside_test_module

# Quality
-D clippy::format_push_string
-D clippy::string_to_string

# These potentially should be enabled in the future
# -D clippy::missing_errors_doc
# -D clippy::missing_panics_doc
# -D clippy::doc_markdown

# TODO: Enable this
# -D clippy::cargo

# Not in nightly yet
# -D clippy::redundant_type_annotations
# -D clippy::big_endian_bytes
# -D clippy::host_endian_bytes
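Several of these lints did not disappear with this file; they reappear as `deny` entries under `[workspace.lints.clippy]` in the root `Cargo.toml` diff above (e.g. `unnecessary_wraps`, `unused_async`, `unused_self`). As a hypothetical illustration (not from this repository) of what the stricter cast and slicing lints reject:

fn main() {
  let x: u64 = 300;
  // clippy::cast_possible_truncation (denied above) flags this: 300 silently
  // truncates to 44 when cast to u8.
  let y = x as u8;

  let s = String::from("hello");
  // clippy::string_slice (denied above) flags byte-indexed slicing, which
  // panics at runtime if the range splits a multi-byte codepoint.
  let t = &s[0 .. 2];

  println!("{y} {t}");
}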
@@ -1,37 +0,0 @@
[package]
name = "bitcoin-serai"
version = "0.2.0"
description = "A Bitcoin library for FROST-signing transactions"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/bitcoin"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Vrx <vrx00@proton.me>"]
edition = "2021"

[dependencies]
lazy_static = "1"
thiserror = "1"

zeroize = "^1.5"
rand_core = "0.6"

sha2 = "0.10"

secp256k1 = { version = "0.27", features = ["global-context"] }
bitcoin = { version = "0.30", features = ["serde"] }

k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits"] }
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", features = ["recommended"] }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["secp256k1"] }

hex = "0.4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
reqwest = { version = "0.11", features = ["json"] }

[dev-dependencies]
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }

tokio = { version = "1", features = ["full"] }

[features]
hazmat = []
@@ -1,160 +0,0 @@
use core::fmt::Debug;
use std::io;

use lazy_static::lazy_static;

use zeroize::Zeroizing;
use rand_core::{RngCore, CryptoRng};

use sha2::{Digest, Sha256};
use transcript::Transcript;

use secp256k1::schnorr::Signature;
use k256::{
  elliptic_curve::{
    ops::Reduce,
    sec1::{Tag, ToEncodedPoint},
  },
  U256, Scalar, ProjectivePoint,
};
use frost::{
  curve::{Ciphersuite, Secp256k1},
  Participant, ThresholdKeys, ThresholdView, FrostError,
  algorithm::{Hram as HramTrait, Algorithm, Schnorr as FrostSchnorr},
};

use bitcoin::key::XOnlyPublicKey;

/// Get the x coordinate of a non-infinity, even point. Panics on invalid input.
pub fn x(key: &ProjectivePoint) -> [u8; 32] {
  let encoded = key.to_encoded_point(true);
  assert_eq!(encoded.tag(), Tag::CompressedEvenY, "x coordinate of odd key");
  (*encoded.x().expect("point at infinity")).into()
}

/// Convert a non-infinite even point to a XOnlyPublicKey. Panics on invalid input.
pub fn x_only(key: &ProjectivePoint) -> XOnlyPublicKey {
  XOnlyPublicKey::from_slice(&x(key)).unwrap()
}

/// Make a point even by adding the generator until it is even. Returns the even point and the
/// amount of additions required.
pub fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
  let mut c = 0;
  while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
    key += ProjectivePoint::GENERATOR;
    c += 1;
  }
  (key, c)
}

/// A BIP-340 compatible HRAm for use with the modular-frost Schnorr Algorithm.
///
/// If passed an odd nonce, it will have the generator added until it is even.
#[derive(Clone, Copy, Debug)]
pub struct Hram {}

lazy_static! {
  static ref TAG_HASH: [u8; 32] = Sha256::digest(b"BIP0340/challenge").into();
}

#[allow(non_snake_case)]
impl HramTrait<Secp256k1> for Hram {
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    // Convert the nonce to be even
    let (R, _) = make_even(*R);

    let mut data = Sha256::new();
    data.update(*TAG_HASH);
    data.update(*TAG_HASH);
    data.update(x(&R));
    data.update(x(A));
    data.update(m);

    Scalar::reduce(U256::from_be_slice(&data.finalize()))
  }
}

/// BIP-340 Schnorr signature algorithm.
///
/// This must be used with a ThresholdKeys whose group key is even. If it is odd, this will panic.
#[derive(Clone)]
pub struct Schnorr<T: Sync + Clone + Debug + Transcript>(FrostSchnorr<Secp256k1, T, Hram>);
impl<T: Sync + Clone + Debug + Transcript> Schnorr<T> {
  /// Construct a Schnorr algorithm continuing the specified transcript.
  pub fn new(transcript: T) -> Schnorr<T> {
    Schnorr(FrostSchnorr::new(transcript))
  }
}

impl<T: Sync + Clone + Debug + Transcript> Algorithm<Secp256k1> for Schnorr<T> {
  type Transcript = T;
  type Addendum = ();
  type Signature = Signature;

  fn transcript(&mut self) -> &mut Self::Transcript {
    self.0.transcript()
  }

  fn nonces(&self) -> Vec<Vec<ProjectivePoint>> {
    self.0.nonces()
  }

  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    keys: &ThresholdKeys<Secp256k1>,
  ) {
    self.0.preprocess_addendum(rng, keys)
  }

  fn read_addendum<R: io::Read>(&self, reader: &mut R) -> io::Result<Self::Addendum> {
    self.0.read_addendum(reader)
  }

  fn process_addendum(
    &mut self,
    view: &ThresholdView<Secp256k1>,
    i: Participant,
    addendum: (),
  ) -> Result<(), FrostError> {
    self.0.process_addendum(view, i, addendum)
  }

  fn sign_share(
    &mut self,
    params: &ThresholdView<Secp256k1>,
    nonce_sums: &[Vec<<Secp256k1 as Ciphersuite>::G>],
    nonces: Vec<Zeroizing<<Secp256k1 as Ciphersuite>::F>>,
    msg: &[u8],
  ) -> <Secp256k1 as Ciphersuite>::F {
    self.0.sign_share(params, nonce_sums, nonces, msg)
  }

  #[must_use]
  fn verify(
    &self,
    group_key: ProjectivePoint,
    nonces: &[Vec<ProjectivePoint>],
    sum: Scalar,
  ) -> Option<Self::Signature> {
    self.0.verify(group_key, nonces, sum).map(|mut sig| {
      // Make the R of the final signature even
      let offset;
      (sig.R, offset) = make_even(sig.R);
      // s = r + cx. Since we added to the r, add to s
      sig.s += Scalar::from(offset);
      // Convert to a secp256k1 signature
      Signature::from_slice(&sig.serialize()[1 ..]).unwrap()
    })
  }

  fn verify_share(
    &self,
    verification_share: ProjectivePoint,
    nonces: &[Vec<ProjectivePoint>],
    share: Scalar,
  ) -> Result<Vec<(Scalar, ProjectivePoint)>, ()> {
    self.0.verify_share(verification_share, nonces, share)
  }
}
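As a reading aid for the even-Y handling above: BIP-340 only admits x-only (even-Y) keys, so `make_even` walks a point forward by the generator until its compressed encoding reports even Y, and `verify` then compensates the scalar, since replacing R with R + offset*G turns s = r + cx into s + offset. A minimal standalone sketch of that invariant, assuming only the `k256` crate (this mirrors the removed code; it is not part of any crate):

use k256::{
  elliptic_curve::sec1::{Tag, ToEncodedPoint},
  ProjectivePoint, Scalar,
};

// Mirror of make_even: add G until the compressed SEC1 tag reports even Y.
fn make_even(mut key: ProjectivePoint) -> (ProjectivePoint, u64) {
  let mut c = 0;
  while key.to_encoded_point(true).tag() == Tag::CompressedOddY {
    key += ProjectivePoint::GENERATOR;
    c += 1;
  }
  (key, c)
}

fn main() {
  let key = ProjectivePoint::GENERATOR * Scalar::from(7u64);
  let (even, offset) = make_even(key);
  // The result is even, and offset records how many additions the BIP-340
  // signature's s component must be compensated by.
  assert_eq!(even.to_encoded_point(true).tag(), Tag::CompressedEvenY);
  println!("offset applied: {offset}");
}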
@@ -1,145 +0,0 @@
use core::fmt::Debug;

use thiserror::Error;

use serde::{Deserialize, de::DeserializeOwned};
use serde_json::json;

use bitcoin::{
  hashes::{Hash, hex::FromHex},
  consensus::encode,
  Txid, Transaction, BlockHash, Block,
};

#[derive(Clone, PartialEq, Eq, Debug, Deserialize)]
pub struct Error {
  code: isize,
  message: String,
}

#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
enum RpcResponse<T> {
  Ok { result: T },
  Err { error: Error },
}

/// A minimal asynchronous Bitcoin RPC client.
#[derive(Clone, Debug)]
pub struct Rpc(String);

#[derive(Clone, PartialEq, Eq, Debug, Error)]
pub enum RpcError {
  #[error("couldn't connect to node")]
  ConnectionError,
  #[error("request had an error: {0:?}")]
  RequestError(Error),
  #[error("node sent an invalid response")]
  InvalidResponse,
}

impl Rpc {
  pub async fn new(url: String) -> Result<Rpc, RpcError> {
    let rpc = Rpc(url);
    // Make an RPC request to verify the node is reachable and sane
    rpc.get_latest_block_number().await?;
    Ok(rpc)
  }

  /// Perform an arbitrary RPC call.
  pub async fn rpc_call<Response: DeserializeOwned + Debug>(
    &self,
    method: &str,
    params: serde_json::Value,
  ) -> Result<Response, RpcError> {
    let client = reqwest::Client::new();
    let res = client
      .post(&self.0)
      .json(&json!({ "jsonrpc": "2.0", "method": method, "params": params }))
      .send()
      .await
      .map_err(|_| RpcError::ConnectionError)?
      .text()
      .await
      .map_err(|_| RpcError::ConnectionError)?;

    let res: RpcResponse<Response> =
      serde_json::from_str(&res).map_err(|_| RpcError::InvalidResponse)?;
    match res {
      RpcResponse::Ok { result } => Ok(result),
      RpcResponse::Err { error } => Err(RpcError::RequestError(error)),
    }
  }

  /// Get the latest block's number.
  ///
  /// The genesis block's 'number' is zero. They increment from there.
  pub async fn get_latest_block_number(&self) -> Result<usize, RpcError> {
    // getblockcount doesn't return the amount of blocks on the current chain, yet the "height"
    // of the current chain. The "height" of the current chain is defined as the "height" of the
    // tip block of the current chain. The "height" of a block is defined as the amount of blocks
    // present when the block was created. Accordingly, the genesis block has height 0, and
    // getblockcount will return 0 when the genesis block is the only block, despite there being
    // one block.
    self.rpc_call("getblockcount", json!([])).await
  }

  /// Get the hash of a block by the block's number.
  pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
    let mut hash = *self
      .rpc_call::<BlockHash>("getblockhash", json!([number]))
      .await?
      .as_raw_hash()
      .as_byte_array();
    // bitcoin stores the inner bytes in reverse order.
    hash.reverse();
    Ok(hash)
  }

  /// Get a block's number by its hash.
  pub async fn get_block_number(&self, hash: &[u8; 32]) -> Result<usize, RpcError> {
    #[derive(Deserialize, Debug)]
    struct Number {
      height: usize,
    }
    Ok(self.rpc_call::<Number>("getblockheader", json!([hex::encode(hash)])).await?.height)
  }

  /// Get a block by its hash.
  pub async fn get_block(&self, hash: &[u8; 32]) -> Result<Block, RpcError> {
    let hex = self.rpc_call::<String>("getblock", json!([hex::encode(hash), 0])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
    let block: Block = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;

    let mut block_hash = *block.block_hash().as_raw_hash().as_byte_array();
    block_hash.reverse();
    if hash != &block_hash {
      Err(RpcError::InvalidResponse)?;
    }

    Ok(block)
  }

  /// Publish a transaction.
  pub async fn send_raw_transaction(&self, tx: &Transaction) -> Result<Txid, RpcError> {
    let txid = self.rpc_call("sendrawtransaction", json!([encode::serialize_hex(tx)])).await?;
    if txid != tx.txid() {
      Err(RpcError::InvalidResponse)?;
    }
    Ok(txid)
  }

  /// Get a transaction by its hash.
  pub async fn get_transaction(&self, hash: &[u8; 32]) -> Result<Transaction, RpcError> {
    let hex = self.rpc_call::<String>("getrawtransaction", json!([hex::encode(hash)])).await?;
    let bytes: Vec<u8> = FromHex::from_hex(&hex).map_err(|_| RpcError::InvalidResponse)?;
    let tx: Transaction = encode::deserialize(&bytes).map_err(|_| RpcError::InvalidResponse)?;

    let mut tx_hash = *tx.txid().as_raw_hash().as_byte_array();
    tx_hash.reverse();
    if hash != &tx_hash {
      Err(RpcError::InvalidResponse)?;
    }

    Ok(tx)
  }
}
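A minimal sketch of driving the (since removed) client above, assuming a reachable node; the URL and the tokio entry point are illustrative assumptions, not part of the crate:

// Hypothetical driver for the Rpc client above; the node URL is an assumption.
#[tokio::main]
async fn main() -> Result<(), RpcError> {
  // new() performs a getblockcount call, so failure here means the node is
  // unreachable or insane.
  let rpc = Rpc::new("http://127.0.0.1:8332".to_string()).await?;

  // Walk from the tip's number, to its hash, to the full block.
  let number = rpc.get_latest_block_number().await?;
  let hash = rpc.get_block_hash(number).await?;
  let block = rpc.get_block(&hash).await?;
  println!("block {number} contains {} transactions", block.txdata.len());
  Ok(())
}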
@@ -1,160 +0,0 @@
use std::{
  io::{self, Read, Write},
  collections::HashMap,
};

use k256::{
  elliptic_curve::sec1::{Tag, ToEncodedPoint},
  Scalar, ProjectivePoint,
};
use frost::{
  curve::{Ciphersuite, Secp256k1},
  ThresholdKeys,
};

use bitcoin::{
  consensus::encode::{Decodable, serialize},
  key::TweakedPublicKey,
  OutPoint, ScriptBuf, TxOut, Transaction, Block, Network, Address,
};

use crate::crypto::{x_only, make_even};

mod send;
pub use send::*;

/// Tweak keys to ensure they're usable with Bitcoin.
pub fn tweak_keys(keys: &ThresholdKeys<Secp256k1>) -> ThresholdKeys<Secp256k1> {
  let (_, offset) = make_even(keys.group_key());
  keys.offset(Scalar::from(offset))
}

/// Return the Taproot address for a public key.
pub fn address(network: Network, key: ProjectivePoint) -> Option<Address> {
  if key.to_encoded_point(true).tag() != Tag::CompressedEvenY {
    return None;
  }

  Some(Address::p2tr_tweaked(TweakedPublicKey::dangerous_assume_tweaked(x_only(&key)), network))
}

/// A spendable output.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ReceivedOutput {
  // The scalar offset to obtain the key usable to spend this output.
  offset: Scalar,
  // The output to spend.
  output: TxOut,
  // The TX ID and vout of the output to spend.
  outpoint: OutPoint,
}

impl ReceivedOutput {
  /// The offset for this output.
  pub fn offset(&self) -> Scalar {
    self.offset
  }

  /// The outpoint for this output.
  pub fn outpoint(&self) -> &OutPoint {
    &self.outpoint
  }

  /// The value of this output.
  pub fn value(&self) -> u64 {
    self.output.value
  }

  /// Read a ReceivedOutput from a generic satisfying Read.
  pub fn read<R: Read>(r: &mut R) -> io::Result<ReceivedOutput> {
    Ok(ReceivedOutput {
      offset: Secp256k1::read_F(r)?,
      output: TxOut::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid TxOut"))?,
      outpoint: OutPoint::consensus_decode(r)
        .map_err(|_| io::Error::new(io::ErrorKind::Other, "invalid OutPoint"))?,
    })
  }

  /// Write a ReceivedOutput to a generic satisfying Write.
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.offset.to_bytes())?;
    w.write_all(&serialize(&self.output))?;
    w.write_all(&serialize(&self.outpoint))
  }

  /// Serialize a ReceivedOutput to a Vec<u8>.
  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    self.write(&mut res).unwrap();
    res
  }
}

/// A transaction scanner capable of being used with HDKD schemes.
#[derive(Clone, Debug)]
pub struct Scanner {
  key: ProjectivePoint,
  scripts: HashMap<ScriptBuf, Scalar>,
}

impl Scanner {
  /// Construct a Scanner for a key.
  ///
  /// Returns None if this key can't be scanned for.
  pub fn new(key: ProjectivePoint) -> Option<Scanner> {
    let mut scripts = HashMap::new();
    // Uses Network::Bitcoin since network is irrelevant here
    scripts.insert(address(Network::Bitcoin, key)?.script_pubkey(), Scalar::ZERO);
    Some(Scanner { key, scripts })
  }

  /// Register an offset to scan for.
  ///
  /// Due to Bitcoin's requirement that points are even, not every offset may be used.
  /// If an offset isn't usable, it will be incremented until it is. If this offset is already
  /// present, None is returned. Else, Some(offset) will be, with the used offset.
  pub fn register_offset(&mut self, mut offset: Scalar) -> Option<Scalar> {
    loop {
      match address(Network::Bitcoin, self.key + (ProjectivePoint::GENERATOR * offset)) {
        Some(address) => {
          let script = address.script_pubkey();
          if self.scripts.contains_key(&script) {
            None?;
          }
          self.scripts.insert(script, offset);
          return Some(offset);
        }
        None => offset += Scalar::ONE,
      }
    }
  }

  /// Scan a transaction.
  pub fn scan_transaction(&self, tx: &Transaction) -> Vec<ReceivedOutput> {
    let mut res = vec![];
    for (vout, output) in tx.output.iter().enumerate() {
      if let Some(offset) = self.scripts.get(&output.script_pubkey) {
        res.push(ReceivedOutput {
          offset: *offset,
          output: output.clone(),
          outpoint: OutPoint::new(tx.txid(), u32::try_from(vout).unwrap()),
        });
      }
    }
    res
  }

  /// Scan a block.
  ///
  /// This will also scan the coinbase transaction, which is bound by maturity. If received
  /// outputs must be immediately spendable, a post-processing pass is needed to remove those
  /// outputs. Alternatively, scan_transaction can be called on `block.txdata[1 ..]`.
  pub fn scan_block(&self, block: &Block) -> Vec<ReceivedOutput> {
    let mut res = vec![];
    for tx in &block.txdata {
      res.extend(self.scan_transaction(tx));
    }
    res
  }
}
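A minimal sketch of how the scanner above is meant to be driven, assuming a block fetched elsewhere; the generator key and unit offset are purely illustrative:

use k256::{Scalar, ProjectivePoint};
use bitcoin::Block;

fn demo_scan(block: &Block) {
  // The secp256k1 generator happens to have an even y-coordinate, so it's scannable as-is.
  let mut scanner = Scanner::new(ProjectivePoint::GENERATOR).unwrap();
  // The offset actually registered is returned, as it may have been incremented
  // until the derived point was even.
  let used = scanner.register_offset(Scalar::ONE).unwrap();
  for received in scanner.scan_block(block) {
    // Each output reports the offset needed to derive its spending key.
    assert!((received.offset() == Scalar::ZERO) || (received.offset() == used));
    println!("received {} sats at {}", received.value(), received.outpoint());
  }
}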
coins/ethereum/.gitignore (vendored)
@@ -1,3 +0,0 @@
# solidity build outputs
cache
artifacts
@@ -1,37 +0,0 @@
[package]
name = "ethereum-serai"
version = "0.1.0"
description = "An Ethereum library supporting Schnorr signing and on-chain verification"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/ethereum"
authors = ["Luke Parker <lukeparker5132@gmail.com>", "Elizabeth Binks <elizabethjbinks@gmail.com>"]
edition = "2021"
publish = false

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
thiserror = "1"
rand_core = "0.6"

serde_json = "1"
serde = "1"

sha2 = "0.10"
sha3 = "0.10"

group = "0.13"
k256 = { version = "^0.13.1", default-features = false, features = ["std", "arithmetic", "bits", "ecdsa"] }
frost = { package = "modular-frost", path = "../../crypto/frost", features = ["secp256k1", "tests"] }

eyre = "0.6"

ethers = { version = "2", default-features = false, features = ["abigen", "ethers-solc"] }

[build-dependencies]
ethers-solc = "2"

[dev-dependencies]
tokio = { version = "1", features = ["macros"] }
@@ -1,9 +0,0 @@
# Ethereum

This package contains Ethereum-related functionality, specifically deploying and
interacting with Serai contracts.

### Dependencies

- solc
- [Foundry](https://github.com/foundry-rs/foundry)
@@ -1,16 +0,0 @@
use ethers_solc::{Project, ProjectPathsConfig};

fn main() {
  println!("cargo:rerun-if-changed=contracts");
  println!("cargo:rerun-if-changed=artifacts");

  // Configure the project with all its paths, solc, cache, etc.
  let project = Project::builder()
    .paths(ProjectPathsConfig::hardhat(env!("CARGO_MANIFEST_DIR")).unwrap())
    .build()
    .unwrap();
  project.compile().unwrap();

  // Tell Cargo that if a source file changes, to rerun this build script.
  project.rerun_if_sources_changed();
}
@@ -1,36 +0,0 @@
//SPDX-License-Identifier: AGPLv3
pragma solidity ^0.8.0;

// see https://github.com/noot/schnorr-verify for implementation details
contract Schnorr {
  // secp256k1 group order
  uint256 constant public Q =
    0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141;

  // parity := public key y-coord parity (27 or 28)
  // px := public key x-coord
  // message := 32-byte message
  // s := schnorr signature
  // e := schnorr signature challenge
  function verify(
    uint8 parity,
    bytes32 px,
    bytes32 message,
    bytes32 s,
    bytes32 e
  ) public view returns (bool) {
    // ecrecover = (m, v, r, s);
    bytes32 sp = bytes32(Q - mulmod(uint256(s), uint256(px), Q));
    bytes32 ep = bytes32(Q - mulmod(uint256(e), uint256(px), Q));

    require(sp != 0);
    // the ecrecover precompile implementation checks that the `r` and `s`
    // inputs are non-zero (in this case, `px` and `ep`), thus we don't need to
    // check if they're zero.
    address R = ecrecover(sp, parity, px, ep);
    require(R != address(0), "ecrecover failed");
    return e == keccak256(
      abi.encodePacked(R, uint8(parity), px, block.chainid, message)
    );
  }
}
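Why the ecrecover call verifies a Schnorr signature: ecrecover(z, v, r, s) returns the address of r⁻¹·(s·P − z·G), where P is the point with x-coordinate r and y-parity v. Passing z = −s·px and s = −e·px with r = px (so P is the public key A) recovers

  px⁻¹·((−e·px)·A − (−s·px)·G) = s·G − e·A = R,

the Schnorr nonce commitment, whenever the verification equation s·G = R + e·A holds. Recomputing the challenge from address(R) and comparing it against e completes the check.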
@@ -1,52 +0,0 @@
use crate::crypto::ProcessedSignature;
use ethers::{contract::ContractFactory, prelude::*, solc::artifacts::contract::ContractBytecode};
use eyre::{eyre, Result};
use std::fs::File;
use std::sync::Arc;
use thiserror::Error;

#[derive(Error, Debug)]
pub enum EthereumError {
  #[error("failed to verify Schnorr signature")]
  VerificationError,
}

abigen!(
  Schnorr,
  "./artifacts/Schnorr.sol/Schnorr.json",
  event_derives(serde::Deserialize, serde::Serialize),
);

pub async fn deploy_schnorr_verifier_contract(
  client: Arc<SignerMiddleware<Provider<Http>, LocalWallet>>,
) -> Result<Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>> {
  let path = "./artifacts/Schnorr.sol/Schnorr.json";
  let artifact: ContractBytecode = serde_json::from_reader(File::open(path).unwrap()).unwrap();
  let abi = artifact.abi.unwrap();
  let bin = artifact.bytecode.unwrap().object;
  let factory = ContractFactory::new(abi, bin.into_bytes().unwrap(), client.clone());
  let contract = factory.deploy(())?.send().await?;
  let contract = Schnorr::new(contract.address(), client);
  Ok(contract)
}

pub async fn call_verify(
  contract: &Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>,
  params: &ProcessedSignature,
) -> Result<()> {
  if contract
    .verify(
      params.parity + 27,
      params.px.to_bytes().into(),
      params.message,
      params.s.to_bytes().into(),
      params.e.to_bytes().into(),
    )
    .call()
    .await?
  {
    Ok(())
  } else {
    Err(eyre!(EthereumError::VerificationError))
  }
}
@@ -1,107 +0,0 @@
use sha3::{Digest, Keccak256};

use group::Group;
use k256::{
  elliptic_curve::{
    bigint::ArrayEncoding, ops::Reduce, point::DecompressPoint, sec1::ToEncodedPoint,
  },
  AffinePoint, ProjectivePoint, Scalar, U256,
};

use frost::{algorithm::Hram, curve::Secp256k1};

pub fn keccak256(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).try_into().unwrap()
}

pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar::reduce(U256::from_be_slice(&keccak256(data)))
}

pub fn address(point: &ProjectivePoint) -> [u8; 20] {
  let encoded_point = point.to_encoded_point(false);
  keccak256(&encoded_point.as_ref()[1 .. 65])[12 .. 32].try_into().unwrap()
}

pub fn ecrecover(message: Scalar, v: u8, r: Scalar, s: Scalar) -> Option<[u8; 20]> {
  if r.is_zero().into() || s.is_zero().into() {
    return None;
  }

  #[allow(non_snake_case)]
  let R = AffinePoint::decompress(&r.to_bytes(), v.into());
  #[allow(non_snake_case)]
  if let Some(R) = Option::<AffinePoint>::from(R) {
    #[allow(non_snake_case)]
    let R = ProjectivePoint::from(R);

    let r = r.invert().unwrap();
    let u1 = ProjectivePoint::GENERATOR * (-message * r);
    let u2 = R * (s * r);
    let key: ProjectivePoint = u1 + u2;
    if !bool::from(key.is_identity()) {
      return Some(address(&key));
    }
  }

  None
}

#[derive(Clone, Default)]
pub struct EthereumHram {}
impl Hram<Secp256k1> for EthereumHram {
  #[allow(non_snake_case)]
  fn hram(R: &ProjectivePoint, A: &ProjectivePoint, m: &[u8]) -> Scalar {
    let a_encoded_point = A.to_encoded_point(true);
    let mut a_encoded = a_encoded_point.as_ref().to_owned();
    a_encoded[0] += 25; // Ethereum uses 27/28 for point parity
    let mut data = address(R).to_vec();
    data.append(&mut a_encoded);
    data.append(&mut m.to_vec());
    Scalar::reduce(U256::from_be_slice(&keccak256(&data)))
  }
}

pub struct ProcessedSignature {
  pub s: Scalar,
  pub px: Scalar,
  pub parity: u8,
  pub message: [u8; 32],
  pub e: Scalar,
}

#[allow(non_snake_case)]
pub fn preprocess_signature_for_ecrecover(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> (Scalar, Scalar) {
  let processed_sig = process_signature_for_contract(m, R, s, A, chain_id);
  let sr = processed_sig.s.mul(&processed_sig.px).negate();
  let er = processed_sig.e.mul(&processed_sig.px).negate();
  (sr, er)
}

#[allow(non_snake_case)]
pub fn process_signature_for_contract(
  m: [u8; 32],
  R: &ProjectivePoint,
  s: Scalar,
  A: &ProjectivePoint,
  chain_id: U256,
) -> ProcessedSignature {
  let encoded_pk = A.to_encoded_point(true);
  let px = &encoded_pk.as_ref()[1 .. 33];
  let px_scalar = Scalar::reduce(U256::from_be_slice(px));
  let e = EthereumHram::hram(R, A, &[chain_id.to_be_byte_array().as_slice(), &m].concat());
  ProcessedSignature {
    s,
    px: px_scalar,
    parity: &encoded_pk.as_ref()[0] - 2,
    #[allow(non_snake_case)]
    message: m,
    e,
  }
}
@@ -1,2 +0,0 @@
pub mod contract;
pub mod crypto;
@@ -1,71 +0,0 @@
use std::{convert::TryFrom, sync::Arc, time::Duration};

use rand_core::OsRng;

use ::k256::{elliptic_curve::bigint::ArrayEncoding, U256};

use ethers::{
  prelude::*,
  utils::{keccak256, Anvil, AnvilInstance},
};

use frost::{
  curve::Secp256k1,
  Participant,
  algorithm::IetfSchnorr,
  tests::{key_gen, algorithm_machines, sign},
};

use ethereum_serai::{
  crypto,
  contract::{Schnorr, call_verify, deploy_schnorr_verifier_contract},
};

async fn deploy_test_contract(
) -> (u32, AnvilInstance, Schnorr<SignerMiddleware<Provider<Http>, LocalWallet>>) {
  let anvil = Anvil::new().spawn();

  let wallet: LocalWallet = anvil.keys()[0].clone().into();
  let provider =
    Provider::<Http>::try_from(anvil.endpoint()).unwrap().interval(Duration::from_millis(10u64));
  let chain_id = provider.get_chainid().await.unwrap().as_u32();
  let client = Arc::new(SignerMiddleware::new_with_provider_chain(provider, wallet).await.unwrap());

  (chain_id, anvil, deploy_schnorr_verifier_contract(client).await.unwrap())
}

#[tokio::test]
async fn test_deploy_contract() {
  deploy_test_contract().await;
}

#[tokio::test]
async fn test_ecrecover_hack() {
  let (chain_id, _anvil, contract) = deploy_test_contract().await;
  let chain_id = U256::from(chain_id);

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&Participant::new(1).unwrap()].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = IetfSchnorr::<Secp256k1, crypto::EthereumHram>::ietf();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );
  let mut processed_sig =
    crypto::process_signature_for_contract(hashed_message, &sig.R, sig.s, &group_key, chain_id);

  call_verify(&contract, &processed_sig).await.unwrap();

  // test invalid signature fails
  processed_sig.message[0] = 0;
  assert!(call_verify(&contract, &processed_sig).await.is_err());
}
@@ -1,92 +0,0 @@
use k256::{
  elliptic_curve::{bigint::ArrayEncoding, ops::Reduce, sec1::ToEncodedPoint},
  ProjectivePoint, Scalar, U256,
};
use frost::{curve::Secp256k1, Participant};

use ethereum_serai::crypto::*;

#[test]
fn test_ecrecover() {
  use rand_core::OsRng;
  use sha2::Sha256;
  use sha3::{Digest, Keccak256};
  use k256::ecdsa::{hazmat::SignPrimitive, signature::DigestVerifier, SigningKey, VerifyingKey};

  let private = SigningKey::random(&mut OsRng);
  let public = VerifyingKey::from(&private);

  const MESSAGE: &[u8] = b"Hello, World!";
  let (sig, recovery_id) = private
    .as_nonzero_scalar()
    .try_sign_prehashed_rfc6979::<Sha256>(&Keccak256::digest(MESSAGE), b"")
    .unwrap();
  #[allow(clippy::unit_cmp)] // Intended to assert this wasn't changed to Result<bool>
  {
    assert_eq!(public.verify_digest(Keccak256::new_with_prefix(MESSAGE), &sig).unwrap(), ());
  }

  assert_eq!(
    ecrecover(hash_to_scalar(MESSAGE), recovery_id.unwrap().is_y_odd().into(), *sig.r(), *sig.s())
      .unwrap(),
    address(&ProjectivePoint::from(public.as_affine()))
  );
}

#[test]
fn test_signing() {
  use frost::{
    algorithm::IetfSchnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let _group_key = keys[&Participant::new(1).unwrap()].group_key();

  const MESSAGE: &[u8] = b"Hello, World!";

  let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
  let _sig = sign(
    &mut OsRng,
    algo,
    keys.clone(),
    algorithm_machines(&mut OsRng, IetfSchnorr::<Secp256k1, EthereumHram>::ietf(), &keys),
    MESSAGE,
  );
}

#[test]
fn test_ecrecover_hack() {
  use frost::{
    algorithm::IetfSchnorr,
    tests::{algorithm_machines, key_gen, sign},
  };
  use rand_core::OsRng;

  let keys = key_gen::<_, Secp256k1>(&mut OsRng);
  let group_key = keys[&Participant::new(1).unwrap()].group_key();
  let group_key_encoded = group_key.to_encoded_point(true);
  let group_key_compressed = group_key_encoded.as_ref();
  let group_key_x = Scalar::reduce(U256::from_be_slice(&group_key_compressed[1 .. 33]));

  const MESSAGE: &[u8] = b"Hello, World!";
  let hashed_message = keccak256(MESSAGE);
  let chain_id = U256::ONE;

  let full_message = &[chain_id.to_be_byte_array().as_slice(), &hashed_message].concat();

  let algo = IetfSchnorr::<Secp256k1, EthereumHram>::ietf();
  let sig = sign(
    &mut OsRng,
    algo.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algo, &keys),
    full_message,
  );

  let (sr, er) =
    preprocess_signature_for_ecrecover(hashed_message, &sig.R, sig.s, &group_key, chain_id);
  let q = ecrecover(sr, group_key_compressed[0] - 2, group_key_x, er).unwrap();
  assert_eq!(q, address(&sig.R));
}
@@ -1,2 +0,0 @@
mod contract;
mod crypto;
@@ -1,107 +0,0 @@
[package]
name = "monero-serai"
version = "0.1.4-alpha"
description = "A modern Monero transaction library"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../common/std-shims", version = "0.1", default-features = false }

async-trait = { version = "0.1", default-features = false }
thiserror = { version = "1", optional = true }

zeroize = { version = "^1.5", default-features = false, features = ["zeroize_derive"] }
subtle = { version = "^2.4", default-features = false }

rand_core = { version = "0.6", default-features = false }
# Used to send transactions
rand = { version = "0.8", default-features = false }
rand_chacha = { version = "0.3", default-features = false }
# Used to select decoys
rand_distr = { version = "0.4", default-features = false }

crc = { version = "3", default-features = false }
sha3 = { version = "0.10", default-features = false }

curve25519-dalek = { version = "^3.2", default-features = false }

# Used for the hash to curve, along with the more complicated proofs
group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
multiexp = { path = "../../crypto/multiexp", version = "0.3", default-features = false, features = ["batch"] }

# Needed for multisig
transcript = { package = "flexible-transcript", path = "../../crypto/transcript", version = "0.3", default-features = false, features = ["recommended"], optional = true }
dleq = { path = "../../crypto/dleq", version = "0.3", features = ["serialize"], optional = true }
frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["ed25519"], optional = true }

monero-generators = { path = "generators", version = "0.3", default-features = false }

futures = { version = "0.3", default-features = false, features = ["alloc"], optional = true }

hex-literal = "0.4"
hex = { version = "0.4", default-features = false, features = ["alloc"] }
serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }

base58-monero = { version = "1", git = "https://github.com/monero-rs/base58-monero", rev = "5045e8d2b817b3b6c1190661f504e879bc769c29", default-features = false, features = ["check"] }

# Used for the provided RPC
digest_auth = { version = "0.3", optional = true }
reqwest = { version = "0.11", features = ["json"], optional = true }

# Used for the binaries
tokio = { version = "1", features = ["full"], optional = true }

[build-dependencies]
dalek-ff-group = { path = "../../crypto/dalek-ff-group", version = "0.3", default-features = false }
monero-generators = { path = "generators", version = "0.3", default-features = false }

[dev-dependencies]
tokio = { version = "1", features = ["full"] }
monero-rpc = "0.3"

frost = { package = "modular-frost", path = "../../crypto/frost", version = "0.7", features = ["tests"] }

[features]
std = [
  "std-shims/std",

  "thiserror",

  "zeroize/std",
  "subtle/std",

  "rand_core/std",
  "rand_chacha/std",
  "rand/std",
  "rand_distr/std",

  "sha3/std",

  "curve25519-dalek/std",

  "multiexp/std",

  "monero-generators/std",

  "futures/std",

  "hex/std",
  "serde/std",
  "serde_json/std",
]

http_rpc = ["digest_auth", "reqwest"]
multisig = ["transcript", "frost", "dleq", "std"]
binaries = ["tokio"]
experimental = []

default = ["std", "http_rpc"]
@@ -1,49 +0,0 @@
# monero-serai

A modern Monero transaction library intended for usage in wallets. It prides
itself on accuracy, correctness, and removing common pitfalls developers may
face.

monero-serai also offers the following features:

- Featured Addresses
- A FROST-based multisig orders of magnitude more performant than Monero's

### Purpose and support

monero-serai was written for Serai, a decentralized exchange aiming to support
Monero. Despite this, monero-serai is intended to be a widely usable library,
accurate to Monero. monero-serai guarantees the functionality needed for Serai,
yet will not deprive other users of functionality.

Various legacy transaction formats are not currently implemented, yet we are
willing to add support for them. There aren't active development efforts around
them, however.

### Caveats

This library DOES attempt to do the following:

- Create on-chain transactions identical to how wallet2 would (unless told not
  to)
- Not be detectable as monero-serai when scanning outputs
- Not reveal spent outputs to the connected RPC node

This library DOES NOT attempt to do the following:

- Have identical RPC behavior when creating transactions
- Be a wallet

This means that monero-serai shouldn't be fingerprintable on-chain. It also
shouldn't be fingerprintable if a targeted attack occurs to detect if the
receiving wallet is monero-serai or wallet2. It also should be generally safe
for usage with remote nodes.

It won't hide from remote nodes that it's monero-serai, however, potentially
allowing a remote node to profile you. The implications of this are left to the
user to consider.

It also won't act as a wallet, just as a transaction library. wallet2 has
several *non-transaction-level* policies, such as always attempting to use two
inputs to create transactions. These are considered out of scope for
monero-serai.
@@ -1,67 +0,0 @@
use std::{
  io::Write,
  env,
  path::Path,
  fs::{File, remove_file},
};

use dalek_ff_group::EdwardsPoint;

use monero_generators::bulletproofs_generators;

fn serialize(generators_string: &mut String, points: &[EdwardsPoint]) {
  for generator in points {
    generators_string.extend(
      format!(
        "
          dalek_ff_group::EdwardsPoint(
            curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap()
          ),
        ",
        generator.compress().to_bytes()
      )
      .chars(),
    );
  }
}

fn generators(prefix: &'static str, path: &str) {
  let generators = bulletproofs_generators(prefix.as_bytes());
  #[allow(non_snake_case)]
  let mut G_str = String::new();
  serialize(&mut G_str, &generators.G);
  #[allow(non_snake_case)]
  let mut H_str = String::new();
  serialize(&mut H_str, &generators.H);

  let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
  let _ = remove_file(&path);
  File::create(&path)
    .unwrap()
    .write_all(
      format!(
        "
          pub static GENERATORS_CELL: OnceLock<Generators> = OnceLock::new();
          pub fn GENERATORS() -> &'static Generators {{
            GENERATORS_CELL.get_or_init(|| Generators {{
              G: [
                {G_str}
              ],
              H: [
                {H_str}
              ],
            }})
          }}
        ",
      )
      .as_bytes(),
    )
    .unwrap();
}

fn main() {
  println!("cargo:rerun-if-changed=build.rs");

  generators("bulletproof", "generators.rs");
  generators("bulletproof_plus", "generators_plus.rs");
}
@@ -1,28 +0,0 @@
[package]
name = "monero-generators"
version = "0.3.0"
description = "Monero's hash_to_point and generators"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/coins/monero/generators"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
edition = "2021"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[dependencies]
std-shims = { path = "../../../common/std-shims", version = "0.1", default-features = false }

subtle = { version = "^2.4", default-features = false }

sha3 = { version = "0.10", default-features = false }

curve25519-dalek = { version = "3", default-features = false }

group = { version = "0.13", default-features = false }
dalek-ff-group = { path = "../../../crypto/dalek-ff-group", version = "0.3" }

[features]
std = ["std-shims/std"]
default = ["std"]
@@ -1,7 +0,0 @@
# Monero Generators

Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
`hash_to_point` here, is included, as needed to generate generators.

This library is usable under no_std when the `alloc` feature is enabled.
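A small usage sketch of the exported `hash_to_point`, with an arbitrary 32-byte array standing in for a Keccak output (the function's by-value `[u8; 32]` signature is inferred from its usage in the library below):

use curve25519_dalek::edwards::EdwardsPoint;
use monero_generators::hash_to_point;

fn demo() {
  // Maps 32 bytes onto the curve the way Monero's ge_fromfe_frombytes_vartime does.
  let point: EdwardsPoint = hash_to_point([0xde; 32]);
  let _ = point;
}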
@@ -1,80 +0,0 @@
//! Generators used by Monero in both its Pedersen commitments and Bulletproofs(+).
//!
//! An implementation of Monero's `ge_fromfe_frombytes_vartime`, simply called
//! `hash_to_point` here, is included, as needed to generate generators.

#![cfg_attr(not(feature = "std"), no_std)]

use std_shims::sync::OnceLock;

use sha3::{Digest, Keccak256};

use curve25519_dalek::edwards::{EdwardsPoint as DalekPoint, CompressedEdwardsY};

use group::{Group, GroupEncoding};
use dalek_ff_group::EdwardsPoint;

mod varint;
use varint::write_varint;

mod hash_to_point;
pub use hash_to_point::hash_to_point;

fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

static H_CELL: OnceLock<DalekPoint> = OnceLock::new();
/// Monero's alternate generator `H`, used for amounts in Pedersen commitments.
#[allow(non_snake_case)]
pub fn H() -> DalekPoint {
  *H_CELL.get_or_init(|| {
    CompressedEdwardsY(hash(&EdwardsPoint::generator().to_bytes()))
      .decompress()
      .unwrap()
      .mul_by_cofactor()
  })
}

static H_POW_2_CELL: OnceLock<[DalekPoint; 64]> = OnceLock::new();
/// Monero's alternate generator `H`, multiplied by 2**i for i in 0 .. 64.
#[allow(non_snake_case)]
pub fn H_pow_2() -> &'static [DalekPoint; 64] {
  H_POW_2_CELL.get_or_init(|| {
    let mut res = [H(); 64];
    for i in 1 .. 64 {
      res[i] = res[i - 1] + res[i - 1];
    }
    res
  })
}

const MAX_M: usize = 16;
const N: usize = 64;
const MAX_MN: usize = MAX_M * N;

/// Container struct for Bulletproofs(+) generators.
#[allow(non_snake_case)]
pub struct Generators {
  pub G: [EdwardsPoint; MAX_MN],
  pub H: [EdwardsPoint; MAX_MN],
}

/// Generate generators as needed for Bulletproofs(+), as Monero does.
pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
  let mut res =
    Generators { G: [EdwardsPoint::identity(); MAX_MN], H: [EdwardsPoint::identity(); MAX_MN] };
  for i in 0 .. MAX_MN {
    let i = 2 * i;

    let mut even = H().compress().to_bytes().to_vec();
    even.extend(dst);
    let mut odd = even.clone();

    write_varint(&i.try_into().unwrap(), &mut even).unwrap();
    write_varint(&(i + 1).try_into().unwrap(), &mut odd).unwrap();
    res.H[i / 2] = EdwardsPoint(hash_to_point(hash(&even)));
    res.G[i / 2] = EdwardsPoint(hash_to_point(hash(&odd)));
  }
  res
}
@@ -1,18 +0,0 @@
use std_shims::io::{self, Write};

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

#[allow(clippy::trivially_copy_pass_by_ref)] // &u64 is needed for API consistency
pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut varint = *varint;
  while {
    let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    varint >>= 7;
    if varint != 0 {
      b |= VARINT_CONTINUATION_MASK;
    }
    w.write_all(&[b])?;
    varint != 0
  } {}
  Ok(())
}
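For illustration, a matching decoder for this little-endian base-128 encoding; this is a sketch with a hypothetical name, not the crate's actual counterpart (which lives in the serialize module):

use std_shims::io::{self, Read};

pub(crate) fn read_varint_sketch<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut bits = 0;
  let mut res = 0u64;
  loop {
    let mut b = [0; 1];
    r.read_exact(&mut b)?;
    // A u64 holds at most ten 7-bit groups; reject anything longer.
    if bits > 63 {
      Err(io::Error::new(io::ErrorKind::Other, "varint exceeds u64"))?;
    }
    // Accumulate the low seven bits, least-significant group first.
    res |= u64::from(b[0] & !VARINT_CONTINUATION_MASK) << bits;
    // A clear high bit marks the final byte.
    if (b[0] & VARINT_CONTINUATION_MASK) == 0 {
      break Ok(res);
    }
    bits += 7;
  }
}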
@@ -1,155 +0,0 @@
use std::sync::Arc;

use serde::Deserialize;
use serde_json::json;

use monero_serai::{
  transaction::Transaction,
  block::Block,
  rpc::{Rpc, HttpRpc},
};

use tokio::task::JoinHandle;

async fn check_block(rpc: Arc<Rpc<HttpRpc>>, block_i: usize) {
  let hash = rpc
    .get_block_hash(block_i)
    .await
    .unwrap_or_else(|_| panic!("couldn't get block {block_i}'s hash"));

  // TODO: Grab the JSON to also check it was deserialized correctly
  #[derive(Deserialize, Debug)]
  struct BlockResponse {
    blob: String,
  }
  let res: BlockResponse = rpc
    .json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) })))
    .await
    .unwrap_or_else(|_| panic!("couldn't get block {block_i} via block.hash()"));

  let blob = hex::decode(res.blob).expect("node returned non-hex block");
  let block = Block::read(&mut blob.as_slice())
    .unwrap_or_else(|_| panic!("couldn't deserialize block {block_i}"));
  assert_eq!(block.hash(), hash, "hash differs");
  assert_eq!(block.serialize(), blob, "serialization differs");

  let txs_len = 1 + block.txs.len();

  if !block.txs.is_empty() {
    #[derive(Deserialize, Debug)]
    struct TransactionResponse {
      tx_hash: String,
      as_hex: String,
    }
    #[derive(Deserialize, Debug)]
    struct TransactionsResponse {
      #[serde(default)]
      missed_tx: Vec<String>,
      txs: Vec<TransactionResponse>,
    }

    let mut hashes_hex = block.txs.iter().map(hex::encode).collect::<Vec<_>>();
    let mut all_txs = vec![];
    while !hashes_hex.is_empty() {
      let txs: TransactionsResponse = rpc
        .rpc_call(
          "get_transactions",
          Some(json!({
            "txs_hashes": hashes_hex.drain(.. hashes_hex.len().min(100)).collect::<Vec<_>>(),
          })),
        )
        .await
        .expect("couldn't call get_transactions");
      assert!(txs.missed_tx.is_empty());
      all_txs.extend(txs.txs);
    }

    for (tx_hash, tx_res) in block.txs.into_iter().zip(all_txs.into_iter()) {
      assert_eq!(
        tx_res.tx_hash,
        hex::encode(tx_hash),
        "node returned a transaction with different hash"
      );

      let tx = Transaction::read(
        &mut hex::decode(&tx_res.as_hex).expect("node returned non-hex transaction").as_slice(),
      )
      .expect("couldn't deserialize transaction");

      assert_eq!(
        hex::encode(tx.serialize()),
        tx_res.as_hex,
        "Transaction serialization was different"
      );
      assert_eq!(tx.hash(), tx_hash, "Transaction hash was different");
    }
  }

  println!("Deserialized, hashed, and reserialized {block_i} with {txs_len} TXs");
}

#[tokio::main]
async fn main() {
  let args = std::env::args().collect::<Vec<String>>();

  // Read start block as the first arg
  let mut block_i = args[1].parse::<usize>().expect("invalid start block");

  // How many blocks to work on at once
  let async_parallelism: usize =
    args.get(2).unwrap_or(&"8".to_string()).parse::<usize>().expect("invalid parallelism argument");

  // Read further args as RPC URLs
  let default_nodes = vec![
    "http://xmr-node.cakewallet.com:18081".to_string(),
    "https://node.sethforprivacy.com".to_string(),
  ];
  let mut specified_nodes = vec![];
  {
    let mut i = 0;
    loop {
      let Some(node) = args.get(3 + i) else { break };
      specified_nodes.push(node.clone());
      i += 1;
    }
  }
  let nodes = if specified_nodes.is_empty() { default_nodes } else { specified_nodes };

  let rpc = |url: String| {
    HttpRpc::new(url.clone())
      .unwrap_or_else(|_| panic!("couldn't create HttpRpc connected to {url}"))
  };
  let main_rpc = rpc(nodes[0].clone());
  let mut rpcs = vec![];
  for i in 0 .. async_parallelism {
    rpcs.push(Arc::new(rpc(nodes[i % nodes.len()].clone())));
  }

  let mut rpc_i = 0;
  let mut handles: Vec<JoinHandle<()>> = vec![];
  let mut height = 0;
  loop {
    let new_height = main_rpc.get_height().await.expect("couldn't call get_height");
    if new_height == height {
      break;
    }
    height = new_height;

    while block_i < height {
      if handles.len() >= async_parallelism {
        // Guarantee one handle is complete
        handles.swap_remove(0).await.unwrap();

        // Remove all of the finished handles
        let mut i = 0;
        while i < handles.len() {
          if handles[i].is_finished() {
            handles.swap_remove(i).await.unwrap();
            continue;
          }
          i += 1;
        }
      }

      handles.push(tokio::spawn(check_block(rpcs[rpc_i].clone(), block_i)));
      rpc_i = (rpc_i + 1) % rpcs.len();
      block_i += 1;
    }
  }
}
@@ -1,116 +0,0 @@
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use crate::{
  hash,
  merkle::merkle_root,
  serialize::*,
  transaction::{Input, Transaction},
};

// Mainnet block 202612 was accepted under a hash other than the one its serialization
// actually produces; the correctly calculated hash is mapped to the network-accepted
// hash below.
const CORRECT_BLOCK_HASH_202612: [u8; 32] =
  hex_literal::hex!("426d16cff04c71f8b16340b722dc4010a2dd3831c22041431f772547ba6e331a");
const EXISTING_BLOCK_HASH_202612: [u8; 32] =
  hex_literal::hex!("bbd604d2ba11ba27935e006ed39c9bfdd99b76bf4a50654bc1e1e61217962698");

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BlockHeader {
  pub major_version: u64,
  pub minor_version: u64,
  pub timestamp: u64,
  pub previous: [u8; 32],
  pub nonce: u32,
}

impl BlockHeader {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.major_version, w)?;
    write_varint(&self.minor_version, w)?;
    write_varint(&self.timestamp, w)?;
    w.write_all(&self.previous)?;
    w.write_all(&self.nonce.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self {
      major_version: read_varint(r)?,
      minor_version: read_varint(r)?,
      timestamp: read_varint(r)?,
      previous: read_bytes(r)?,
      nonce: read_bytes(r).map(u32::from_le_bytes)?,
    })
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Block {
  pub header: BlockHeader,
  pub miner_tx: Transaction,
  pub txs: Vec<[u8; 32]>,
}

impl Block {
  pub fn number(&self) -> usize {
    match self.miner_tx.prefix.inputs.get(0) {
      Some(Input::Gen(number)) => (*number).try_into().unwrap(),
      _ => panic!("invalid block, miner TX didn't have a Input::Gen"),
    }
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.header.write(w)?;
    self.miner_tx.write(w)?;
    write_varint(&self.txs.len().try_into().unwrap(), w)?;
    for tx in &self.txs {
      w.write_all(tx)?;
    }
    Ok(())
  }

  fn tx_merkle_root(&self) -> [u8; 32] {
    merkle_root(self.miner_tx.hash(), &self.txs)
  }

  fn serialize_hashable(&self) -> Vec<u8> {
    let mut blob = self.header.serialize();
    blob.extend_from_slice(&self.tx_merkle_root());
    write_varint(&(1 + u64::try_from(self.txs.len()).unwrap()), &mut blob).unwrap();

    let mut out = Vec::with_capacity(8 + blob.len());
    write_varint(&u64::try_from(blob.len()).unwrap(), &mut out).unwrap();
    out.append(&mut blob);

    out
  }

  pub fn hash(&self) -> [u8; 32] {
    let hash = hash(&self.serialize_hashable());
    if hash == CORRECT_BLOCK_HASH_202612 {
      return EXISTING_BLOCK_HASH_202612;
    };

    hash
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self {
      header: BlockHeader::read(r)?,
      miner_tx: Transaction::read(r)?,
      txs: (0 .. read_varint(r)?).map(|_| read_bytes(r)).collect::<Result<_, _>>()?,
    })
  }
}
@@ -1,178 +0,0 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;

use std_shims::{sync::OnceLock, io};

use rand_core::{RngCore, CryptoRng};

use zeroize::{Zeroize, ZeroizeOnDrop};

use sha3::{Digest, Keccak256};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

pub use monero_generators::H;

mod merkle;

mod serialize;
use serialize::{read_byte, read_u16};

/// RingCT structs and functionality.
pub mod ringct;
use ringct::RctType;

/// Transaction structs.
pub mod transaction;
/// Block structs.
pub mod block;

/// Monero daemon RPC interface.
pub mod rpc;
/// Wallet functionality, enabling scanning and sending transactions.
pub mod wallet;

#[cfg(test)]
mod tests;

static INV_EIGHT_CELL: OnceLock<Scalar> = OnceLock::new();
#[allow(non_snake_case)]
pub(crate) fn INV_EIGHT() -> Scalar {
  *INV_EIGHT_CELL.get_or_init(|| Scalar::from(8u8).invert())
}

/// Monero protocol version.
///
/// v15 is omitted as v15 was simply v14 and v16 being active at the same time, with regards to the
/// transactions supported. Accordingly, v16 should be used during v15.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
#[allow(non_camel_case_types)]
pub enum Protocol {
  v14,
  v16,
  Custom { ring_len: usize, bp_plus: bool, optimal_rct_type: RctType },
}

impl Protocol {
  /// Amount of ring members under this protocol version.
  pub const fn ring_len(&self) -> usize {
    match self {
      Self::v14 => 11,
      Self::v16 => 16,
      Self::Custom { ring_len, .. } => *ring_len,
    }
  }

  /// Whether or not the specified version uses Bulletproofs or Bulletproofs+.
  ///
  /// This method will likely be reworked when versions not using Bulletproofs at all are added.
  pub const fn bp_plus(&self) -> bool {
    match self {
      Self::v14 => false,
      Self::v16 => true,
      Self::Custom { bp_plus, .. } => *bp_plus,
    }
  }

  // TODO: Make this an Option when we support pre-RCT protocols
  pub const fn optimal_rct_type(&self) -> RctType {
    match self {
      Self::v14 => RctType::Clsag,
      Self::v16 => RctType::BulletproofsPlus,
      Self::Custom { optimal_rct_type, .. } => *optimal_rct_type,
    }
  }

  pub(crate) fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::v14 => w.write_all(&[0, 14]),
      Self::v16 => w.write_all(&[0, 16]),
      Self::Custom { ring_len, bp_plus, optimal_rct_type } => {
        // Custom, version 0
        w.write_all(&[1, 0])?;
        w.write_all(&u16::try_from(*ring_len).unwrap().to_le_bytes())?;
        w.write_all(&[u8::from(*bp_plus)])?;
        w.write_all(&[optimal_rct_type.to_byte()])
      }
    }
  }

  pub(crate) fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
    Ok(match read_byte(r)? {
      // Monero protocol
      0 => match read_byte(r)? {
        14 => Self::v14,
        16 => Self::v16,
        _ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized monero protocol"))?,
      },
      // Custom
      1 => match read_byte(r)? {
        0 => Self::Custom {
          ring_len: read_u16(r)?.into(),
          bp_plus: match read_byte(r)? {
            0 => false,
            1 => true,
            _ => Err(io::Error::new(io::ErrorKind::Other, "invalid bool serialization"))?,
          },
          optimal_rct_type: RctType::from_byte(read_byte(r)?)
            .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RctType serialization"))?,
        },
        _ => {
          Err(io::Error::new(io::ErrorKind::Other, "unrecognized custom protocol serialization"))?
        }
      },
      _ => Err(io::Error::new(io::ErrorKind::Other, "unrecognized protocol serialization"))?,
    })
  }
}

/// Transparent structure representing a Pedersen commitment's contents.
#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Commitment {
  pub mask: Scalar,
  pub amount: u64,
}

impl Commitment {
  /// A commitment to zero, defined with a mask of 1 (so as to not be the identity).
  pub fn zero() -> Self {
    Self { mask: Scalar::one(), amount: 0 }
  }

  pub fn new(mask: Scalar, amount: u64) -> Self {
    Self { mask, amount }
  }

  /// Calculate a Pedersen commitment, as a point, from the transparent structure.
  pub fn calculate(&self) -> EdwardsPoint {
    (&self.mask * &ED25519_BASEPOINT_TABLE) + (Scalar::from(self.amount) * H())
  }
}

/// Support generating a random scalar using a modern rand, as dalek's is notoriously dated.
pub fn random_scalar<R: RngCore + CryptoRng>(rng: &mut R) -> Scalar {
  let mut r = [0; 64];
  rng.fill_bytes(&mut r);
  Scalar::from_bytes_mod_order_wide(&r)
}

pub(crate) fn hash(data: &[u8]) -> [u8; 32] {
  Keccak256::digest(data).into()
}

/// Hash the provided data to a scalar via keccak256(data) % l.
pub fn hash_to_scalar(data: &[u8]) -> Scalar {
  let scalar = Scalar::from_bytes_mod_order(hash(data));
  // Monero will explicitly error in this case
  // This library acknowledges the practical impossibility of it occurring, and doesn't bother to
  // code in logic to handle it. That said, if it ever occurs, something must happen in order to
  // not generate/verify a proof we believe to be valid when it isn't
  assert!(scalar != Scalar::zero(), "ZERO HASH: {data:?}");
  scalar
}
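As a small illustration of the Commitment type above: Pedersen commitments are additively homomorphic, so summing the masks and amounts commits to the sum of the commitments. A sketch using only the items defined in this file (OsRng assumes rand_core's getrandom feature):

use rand_core::OsRng;

fn pedersen_homomorphism_demo() {
  let (a, b) = (random_scalar(&mut OsRng), random_scalar(&mut OsRng));
  // Each commitment is mask * G + amount * H.
  let lhs = Commitment::new(a, 5).calculate() + Commitment::new(b, 7).calculate();
  let rhs = Commitment::new(a + b, 12).calculate();
  assert_eq!(lhs, rhs);
}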
@@ -1,102 +0,0 @@
use core::fmt::Debug;
use std_shims::io::{self, Read, Write};

use curve25519_dalek::edwards::EdwardsPoint;
#[cfg(feature = "experimental")]
use curve25519_dalek::{traits::Identity, scalar::Scalar};

#[cfg(feature = "experimental")]
use monero_generators::H_pow_2;
#[cfg(feature = "experimental")]
use crate::hash_to_scalar;
use crate::serialize::*;

/// 64 Borromean ring signatures.
///
/// This type keeps the data as raw bytes as Monero has some transactions with unreduced scalars in
/// this field. While we could use `from_bytes_mod_order`, we'd then not be able to encode this
/// back into its original form.
///
/// Those scalars also have a custom reduction algorithm...
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanSignatures {
  pub s0: [[u8; 32]; 64],
  pub s1: [[u8; 32]; 64],
  pub ee: [u8; 32],
}

impl BorromeanSignatures {
  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self { s0: read_array(read_bytes, r)?, s1: read_array(read_bytes, r)?, ee: read_bytes(r)? })
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for s0 in &self.s0 {
      w.write_all(s0)?;
    }
    for s1 in &self.s1 {
      w.write_all(s1)?;
    }
    w.write_all(&self.ee)
  }

  #[cfg(feature = "experimental")]
  fn verify(&self, keys_a: &[EdwardsPoint], keys_b: &[EdwardsPoint]) -> bool {
    let mut transcript = [0; 2048];
    for i in 0 .. 64 {
      // TODO: These aren't the correct reduction
      // TODO: Can either of these be tightened?
      #[allow(non_snake_case)]
      let LL = EdwardsPoint::vartime_double_scalar_mul_basepoint(
        &Scalar::from_bytes_mod_order(self.ee),
        &keys_a[i],
        &Scalar::from_bytes_mod_order(self.s0[i]),
      );
      #[allow(non_snake_case)]
      let LV = EdwardsPoint::vartime_double_scalar_mul_basepoint(
        &hash_to_scalar(LL.compress().as_bytes()),
        &keys_b[i],
        &Scalar::from_bytes_mod_order(self.s1[i]),
      );
      transcript[(i * 32) .. ((i + 1) * 32)].copy_from_slice(LV.compress().as_bytes());
    }

    // TODO: This isn't the correct reduction
    // TODO: Can this be tightened to from_canonical_bytes?
    hash_to_scalar(&transcript) == Scalar::from_bytes_mod_order(self.ee)
  }
}

/// A range proof premised on Borromean ring signatures.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct BorromeanRange {
  pub sigs: BorromeanSignatures,
  pub bit_commitments: [EdwardsPoint; 64],
}

impl BorromeanRange {
  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self { sigs: BorromeanSignatures::read(r)?, bit_commitments: read_array(read_point, r)? })
  }
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.sigs.write(w)?;
    write_raw_vec(write_point, &self.bit_commitments, w)
  }

  #[cfg(feature = "experimental")]
  #[must_use]
  pub fn verify(&self, commitment: &EdwardsPoint) -> bool {
    if &self.bit_commitments.iter().sum::<EdwardsPoint>() != commitment {
      return false;
    }

    #[allow(non_snake_case)]
    let H_pow_2 = H_pow_2();
    let mut commitments_sub_one = [EdwardsPoint::identity(); 64];
    for i in 0 .. 64 {
      commitments_sub_one[i] = self.bit_commitments[i] - H_pow_2[i];
    }

    self.sigs.verify(&self.bit_commitments, &commitments_sub_one)
  }
}
@@ -1,151 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use subtle::{Choice, ConditionallySelectable};

use curve25519_dalek::edwards::EdwardsPoint as DalekPoint;

use group::{ff::Field, Group};
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp as multiexp_const;

pub(crate) use monero_generators::Generators;

use crate::{INV_EIGHT as DALEK_INV_EIGHT, H as DALEK_H, Commitment, hash_to_scalar as dalek_hash};
pub(crate) use crate::ringct::bulletproofs::scalar_vector::*;

#[inline]
pub(crate) fn INV_EIGHT() -> Scalar {
  Scalar(DALEK_INV_EIGHT())
}

#[inline]
pub(crate) fn H() -> EdwardsPoint {
  EdwardsPoint(DALEK_H())
}

pub(crate) fn hash_to_scalar(data: &[u8]) -> Scalar {
  Scalar(dalek_hash(data))
}

// Components common between variants
pub(crate) const MAX_M: usize = 16;
pub(crate) const LOG_N: usize = 6; // 1 << 6 == N
pub(crate) const N: usize = 64;

pub(crate) fn prove_multiexp(pairs: &[(Scalar, EdwardsPoint)]) -> EdwardsPoint {
  multiexp_const(pairs) * INV_EIGHT()
}

pub(crate) fn vector_exponent(
  generators: &Generators,
  a: &ScalarVector,
  b: &ScalarVector,
) -> EdwardsPoint {
  debug_assert_eq!(a.len(), b.len());
  (a * &generators.G[.. a.len()]) + (b * &generators.H[.. b.len()])
}

pub(crate) fn hash_cache(cache: &mut Scalar, mash: &[[u8; 32]]) -> Scalar {
  let slice =
    &[cache.to_bytes().as_ref(), mash.iter().copied().flatten().collect::<Vec<_>>().as_ref()]
      .concat();
  *cache = hash_to_scalar(slice);
  *cache
}

pub(crate) fn MN(outputs: usize) -> (usize, usize, usize) {
  let mut logM = 0;
  let mut M;
  while {
    M = 1 << logM;
    (M <= MAX_M) && (M < outputs)
  } {
    logM += 1;
  }

  (logM + LOG_N, M, M * N)
}
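
// Illustrative sketch, not part of the diff: MN pads the output count to the next power of
// two M and returns (log2(M * N), M, M * N), with N = 64; callers enforce outputs <= MAX_M.
#[cfg(test)]
mod mn_examples {
  use super::*;

  #[test]
  fn padding() {
    assert_eq!(MN(1), (6, 1, 64)); // one output proves over 64 bits
    assert_eq!(MN(2), (7, 2, 128));
    assert_eq!(MN(3), (8, 4, 256)); // three outputs pad to M = 4
    assert_eq!(MN(16), (10, 16, 1024)); // MAX_M outputs
  }
}
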
pub(crate) fn bit_decompose(commitments: &[Commitment]) -> (ScalarVector, ScalarVector) {
  let (_, M, MN) = MN(commitments.len());

  let sv = commitments.iter().map(|c| Scalar::from(c.amount)).collect::<Vec<_>>();
  let mut aL = ScalarVector::new(MN);
  let mut aR = ScalarVector::new(MN);

  for j in 0 .. M {
    for i in (0 .. N).rev() {
      let bit =
        if j < sv.len() { Choice::from((sv[j][i / 8] >> (i % 8)) & 1) } else { Choice::from(0) };
      aL.0[(j * N) + i] = Scalar::conditional_select(&Scalar::ZERO, &Scalar::ONE, bit);
      aR.0[(j * N) + i] = Scalar::conditional_select(&-Scalar::ONE, &Scalar::ZERO, bit);
    }
  }

  (aL, aR)
}

pub(crate) fn hash_commitments<C: IntoIterator<Item = DalekPoint>>(
  commitments: C,
) -> (Scalar, Vec<EdwardsPoint>) {
  let V = commitments.into_iter().map(|c| EdwardsPoint(c) * INV_EIGHT()).collect::<Vec<_>>();
  (hash_to_scalar(&V.iter().flat_map(|V| V.compress().to_bytes()).collect::<Vec<_>>()), V)
}

pub(crate) fn alpha_rho<R: RngCore + CryptoRng>(
  rng: &mut R,
  generators: &Generators,
  aL: &ScalarVector,
  aR: &ScalarVector,
) -> (Scalar, EdwardsPoint) {
  let ar = Scalar::random(rng);
  (ar, (vector_exponent(generators, aL, aR) + (EdwardsPoint::generator() * ar)) * INV_EIGHT())
}

pub(crate) fn LR_statements(
  a: &ScalarVector,
  G_i: &[EdwardsPoint],
  b: &ScalarVector,
  H_i: &[EdwardsPoint],
  cL: Scalar,
  U: EdwardsPoint,
) -> Vec<(Scalar, EdwardsPoint)> {
  let mut res = a
    .0
    .iter()
    .copied()
    .zip(G_i.iter().copied())
    .chain(b.0.iter().copied().zip(H_i.iter().copied()))
    .collect::<Vec<_>>();
  res.push((cL, U));
  res
}

static TWO_N_CELL: OnceLock<ScalarVector> = OnceLock::new();
pub(crate) fn TWO_N() -> &'static ScalarVector {
  TWO_N_CELL.get_or_init(|| ScalarVector::powers(Scalar::from(2u8), N))
}

pub(crate) fn challenge_products(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
  let mut products = vec![Scalar::ZERO; 1 << w.len()];
  products[0] = winv[0];
  products[1] = w[0];
  for j in 1 .. w.len() {
    let mut slots = (1 << (j + 1)) - 1;
    while slots > 0 {
      products[slots] = products[slots / 2] * w[j];
      products[slots - 1] = products[slots / 2] * winv[j];
      slots = slots.saturating_sub(2);
    }
  }

  // Sanity check as if the above failed to populate, it'd be critical
  for w in &products {
    debug_assert!(!bool::from(w.is_zero()));
  }

  products
}
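
// Illustrative sketch, not part of the diff: what challenge_products builds. Entry i is the
// product over rounds j of w[j] or winv[j], chosen by the bits of i read MSB-first. A direct
// (unoptimized) equivalent, usable to cross-check the tree-based construction above:
#[cfg(test)]
fn challenge_products_reference(w: &[Scalar], winv: &[Scalar]) -> Vec<Scalar> {
  let rounds = w.len();
  (0 .. (1usize << rounds))
    .map(|i| {
      (0 .. rounds).fold(Scalar::ONE, |product, j| {
        // w[0] corresponds to the most significant bit of the index
        if ((i >> (rounds - 1 - j)) & 1) == 1 {
          product * w[j]
        } else {
          product * winv[j]
        }
      })
    })
    .collect()
}
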
@@ -1,179 +0,0 @@
#![allow(non_snake_case)]

use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::edwards::EdwardsPoint;
use multiexp::BatchVerifier;

use crate::{Commitment, wallet::TransactionError, serialize::*};

pub(crate) mod scalar_vector;
pub(crate) mod core;
use self::core::LOG_N;

pub(crate) mod original;
pub use original::GENERATORS as BULLETPROOFS_GENERATORS;
pub(crate) mod plus;
pub use plus::GENERATORS as BULLETPROOFS_PLUS_GENERATORS;

pub(crate) use self::original::OriginalStruct;
pub(crate) use self::plus::PlusStruct;

pub(crate) const MAX_OUTPUTS: usize = self::core::MAX_M;

/// Bulletproofs enum, supporting the original and plus formulations.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Bulletproofs {
  Original(OriginalStruct),
  Plus(PlusStruct),
}

impl Bulletproofs {
  pub(crate) fn fee_weight(plus: bool, outputs: usize) -> usize {
    let fields = if plus { 6 } else { 9 };

    // TODO: Shouldn't this use u32/u64?
    #[allow(non_snake_case)]
    let mut LR_len = usize::try_from(usize::BITS - (outputs - 1).leading_zeros()).unwrap();
    let padded_outputs = 1 << LR_len;
    LR_len += LOG_N;

    let len = (fields + (2 * LR_len)) * 32;
    len +
      if padded_outputs <= 2 {
        0
      } else {
        let base = ((fields + (2 * (LOG_N + 1))) * 32) / 2;
        let size = (fields + (2 * LR_len)) * 32;
        ((base * padded_outputs) - size) * 4 / 5
      }
  }

  /// Prove the list of commitments are within [0 .. 2^64).
  pub fn prove<R: RngCore + CryptoRng>(
    rng: &mut R,
    outputs: &[Commitment],
    plus: bool,
  ) -> Result<Self, TransactionError> {
    if outputs.len() > MAX_OUTPUTS {
      return Err(TransactionError::TooManyOutputs)?;
    }
    Ok(if !plus {
      Self::Original(OriginalStruct::prove(rng, outputs))
    } else {
      Self::Plus(PlusStruct::prove(rng, outputs))
    })
  }

  /// Verify the given Bulletproofs.
  #[must_use]
  pub fn verify<R: RngCore + CryptoRng>(&self, rng: &mut R, commitments: &[EdwardsPoint]) -> bool {
    match self {
      Self::Original(bp) => bp.verify(rng, commitments),
      Self::Plus(bp) => bp.verify(rng, commitments),
    }
  }

  /// Accumulate the verification for the given Bulletproofs into the specified BatchVerifier.
  /// Returns false if the Bulletproofs aren't sane, without mutating the BatchVerifier.
  /// Returns true if the Bulletproofs are sane, regardless of their validity.
  #[must_use]
  pub fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, dalek_ff_group::EdwardsPoint>,
    id: ID,
    commitments: &[EdwardsPoint],
  ) -> bool {
    match self {
      Self::Original(bp) => bp.batch_verify(rng, verifier, id, commitments),
      Self::Plus(bp) => bp.batch_verify(rng, verifier, id, commitments),
    }
  }

  fn write_core<W: Write, F: Fn(&[EdwardsPoint], &mut W) -> io::Result<()>>(
    &self,
    w: &mut W,
    specific_write_vec: F,
  ) -> io::Result<()> {
    match self {
      Self::Original(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.S, w)?;
        write_point(&bp.T1, w)?;
        write_point(&bp.T2, w)?;
        write_scalar(&bp.taux, w)?;
        write_scalar(&bp.mu, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)?;
        write_scalar(&bp.a, w)?;
        write_scalar(&bp.b, w)?;
        write_scalar(&bp.t, w)
      }

      Self::Plus(bp) => {
        write_point(&bp.A, w)?;
        write_point(&bp.A1, w)?;
        write_point(&bp.B, w)?;
        write_scalar(&bp.r1, w)?;
        write_scalar(&bp.s1, w)?;
        write_scalar(&bp.d1, w)?;
        specific_write_vec(&bp.L, w)?;
        specific_write_vec(&bp.R, w)
      }
    }
  }

  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_raw_vec(write_point, points, w))
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.write_core(w, |points, w| write_vec(write_point, points, w))
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  /// Read Bulletproofs.
  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self::Original(OriginalStruct {
      A: read_point(r)?,
      S: read_point(r)?,
      T1: read_point(r)?,
      T2: read_point(r)?,
      taux: read_scalar(r)?,
      mu: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
      a: read_scalar(r)?,
      b: read_scalar(r)?,
      t: read_scalar(r)?,
    }))
  }

  /// Read Bulletproofs+.
  pub fn read_plus<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self::Plus(PlusStruct {
      A: read_point(r)?,
      A1: read_point(r)?,
      B: read_point(r)?,
      r1: read_scalar(r)?,
      s1: read_scalar(r)?,
      d1: read_scalar(r)?,
      L: read_vec(read_point, r)?,
      R: read_vec(read_point, r)?,
    }))
  }
}
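
// Illustrative sketch, not part of the diff: round-tripping a Bulletproof+ for two outputs.
// Assumes the crate's random_scalar is in scope and rand_core's OsRng is available; this is
// not a test from the source tree.
#[cfg(test)]
fn bulletproofs_round_trip_sketch() {
  use rand_core::OsRng;
  use crate::random_scalar;

  let outputs = vec![
    Commitment::new(random_scalar(&mut OsRng), 5),
    Commitment::new(random_scalar(&mut OsRng), 10),
  ];
  let bp = Bulletproofs::prove(&mut OsRng, &outputs, true).unwrap();

  let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
  assert!(bp.verify(&mut OsRng, &commitments));

  // The serialization doesn't tag the variant, so read_plus must be used for the plus format
  let mut buf = vec![];
  bp.write(&mut buf).unwrap();
  assert_eq!(Bulletproofs::read_plus(&mut buf.as_slice()).unwrap(), bp);
}
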
@@ -1,308 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::{ff::Field, Group};
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{Commitment, ringct::bulletproofs::core::*};

include!(concat!(env!("OUT_DIR"), "/generators.rs"));

static IP12_CELL: OnceLock<Scalar> = OnceLock::new();
pub(crate) fn IP12() -> Scalar {
  *IP12_CELL.get_or_init(|| inner_product(&ScalarVector(vec![Scalar::ONE; N]), TWO_N()))
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OriginalStruct {
  pub(crate) A: DalekPoint,
  pub(crate) S: DalekPoint,
  pub(crate) T1: DalekPoint,
  pub(crate) T2: DalekPoint,
  pub(crate) taux: DalekScalar,
  pub(crate) mu: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
  pub(crate) a: DalekScalar,
  pub(crate) b: DalekScalar,
  pub(crate) t: DalekScalar,
}

impl OriginalStruct {
  #[allow(clippy::many_single_char_names)]
  pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_commitments(commitments_points.clone());

    let (sL, sR) =
      ScalarVector((0 .. (MN * 2)).map(|_| Scalar::random(&mut *rng)).collect::<Vec<_>>()).split();

    let generators = GENERATORS();
    let (mut alpha, A) = alpha_rho(&mut *rng, generators, &aL, &aR);
    let (mut rho, S) = alpha_rho(&mut *rng, generators, &sL, &sR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes(), S.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let l0 = &aL - z;
    let l1 = sL;

    let mut zero_twos = Vec::with_capacity(MN);
    let zpow = ScalarVector::powers(z, M + 2);
    for j in 0 .. M {
      for i in 0 .. N {
        zero_twos.push(zpow[j + 2] * TWO_N()[i]);
      }
    }

    let yMN = ScalarVector::powers(y, MN);
    let r0 = (&(aR + z) * &yMN) + ScalarVector(zero_twos);
    let r1 = yMN * sR;

    let (T1, T2, x, mut taux) = {
      let t1 = inner_product(&l0, &r1) + inner_product(&l1, &r0);
      let t2 = inner_product(&l1, &r1);

      let mut tau1 = Scalar::random(&mut *rng);
      let mut tau2 = Scalar::random(&mut *rng);

      let T1 = prove_multiexp(&[(t1, H()), (tau1, EdwardsPoint::generator())]);
      let T2 = prove_multiexp(&[(t2, H()), (tau2, EdwardsPoint::generator())]);

      let x =
        hash_cache(&mut cache, &[z.to_bytes(), T1.compress().to_bytes(), T2.compress().to_bytes()]);

      let taux = (tau2 * (x * x)) + (tau1 * x);

      tau1.zeroize();
      tau2.zeroize();
      (T1, T2, x, taux)
    };

    let mu = (x * rho) + alpha;
    alpha.zeroize();
    rho.zeroize();

    for (i, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      taux += zpow[i + 2] * gamma;
    }

    let l = &l0 + &(l1 * x);
    let r = &r0 + &(r1 * x);

    let t = inner_product(&l, &r);

    let x_ip =
      hash_cache(&mut cache, &[x.to_bytes(), taux.to_bytes(), mu.to_bytes(), t.to_bytes()]);

    let mut a = l;
    let mut b = r;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = generators.G[.. a.len()].to_vec();
    let mut H_proof = generators.H[.. a.len()].to_vec();
    H_proof.iter_mut().zip(yinvpow.0.iter()).for_each(|(this_H, yinvpow)| *this_H *= yinvpow);
    let U = H() * x_ip;

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = inner_product(&aL, &bR);
      let cR = inner_product(&aR, &bL);

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let L_i = prove_multiexp(&LR_statements(&aL, G_R, &bR, H_L, cL, U));
      let R_i = prove_multiexp(&LR_statements(&aR, G_L, &bL, H_R, cR, U));
      L.push(*L_i);
      R.push(*R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      a = (aL * w) + (aR * winv);
      b = (bL * winv) + (bR * w);

      if a.len() != 1 {
        G_proof = hadamard_fold(G_L, G_R, winv, w);
        H_proof = hadamard_fold(H_L, H_R, w, winv);
      }
    }

    let res = Self {
      A: *A,
      S: *S,
      T1: *T1,
      T2: *T2,
      taux: *taux,
      mu: *mu,
      L,
      R,
      a: *a[0],
      b: *b[0],
      t: *t,
    };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[allow(clippy::many_single_char_names)]
  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_commitments(commitments.iter().copied());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes(), self.S.compress().to_bytes()]);

    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let x = hash_cache(
      &mut cache,
      &[z.to_bytes(), self.T1.compress().to_bytes(), self.T2.compress().to_bytes()],
    );

    let x_ip = hash_cache(
      &mut cache,
      &[x.to_bytes(), self.taux.to_bytes(), self.mu.to_bytes(), self.t.to_bytes()],
    );

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let T1 = normalize(&self.T1);
    let T2 = normalize(&self.T2);
    let A = normalize(&self.A);
    let S = normalize(&self.S);

    let commitments = commitments.iter().map(EdwardsPoint::mul_by_cofactor).collect::<Vec<_>>();

    // Verify it
    let mut proof = Vec::with_capacity(4 + commitments.len());

    let zpow = ScalarVector::powers(z, M + 3);
    let ip1y = ScalarVector::powers(y, M * N).sum();
    let mut k = -(zpow[2] * ip1y);
    for j in 1 ..= M {
      k -= zpow[j + 2] * IP12();
    }
    let y1 = Scalar(self.t) - ((z * ip1y) + k);
    proof.push((-y1, H()));

    proof.push((-Scalar(self.taux), G));

    for (j, commitment) in commitments.iter().enumerate() {
      proof.push((zpow[j + 2], *commitment));
    }

    proof.push((x, T1));
    proof.push((x * x, T2));
    verifier.queue(&mut *rng, id, proof);

    proof = Vec::with_capacity(4 + (2 * (MN + logMN)));
    let z3 = (Scalar(self.t) - (Scalar(self.a) * Scalar(self.b))) * x_ip;
    proof.push((z3, H()));
    proof.push((-Scalar(self.mu), G));

    proof.push((Scalar::ONE, A));
    proof.push((x, S));

    {
      let ypow = ScalarVector::powers(y, MN);
      let yinv = y.invert().unwrap();
      let yinvpow = ScalarVector::powers(yinv, MN);

      let w_cache = challenge_products(&w, &winv);

      let generators = GENERATORS();
      for i in 0 .. MN {
        let g = (Scalar(self.a) * w_cache[i]) + z;
        proof.push((-g, generators.G[i]));

        let mut h = Scalar(self.b) * yinvpow[i] * w_cache[(!i) & (MN - 1)];
        h -= ((zpow[(i / N) + 2] * TWO_N()[i % N]) + (z * ypow[i])) * yinvpow[i];
        proof.push((-h, generators.H[i]));
      }
    }

    for i in 0 .. logMN {
      proof.push((w[i] * w[i], L[i]));
      proof.push((winv[i] * winv[i], R[i]));
    }
    verifier.queue(rng, id, proof);

    true
  }

  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
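
// Illustrative sketch, not part of the diff: amortizing several proofs through one shared
// BatchVerifier, as verify() above does for a single proof. proofs pairs each proof with its
// commitments; u8 satisfies the ID: Copy + Zeroize bound.
#[cfg(test)]
fn batch_verify_sketch<R: RngCore + CryptoRng>(
  rng: &mut R,
  proofs: &[(OriginalStruct, Vec<DalekPoint>)],
) -> bool {
  let mut verifier = BatchVerifier::new(proofs.len());
  for (id, (proof, commitments)) in proofs.iter().enumerate() {
    // Bail if any proof is malformed; nothing is actually checked until verify_vartime
    if !proof.batch_verify(rng, &mut verifier, u8::try_from(id).unwrap(), commitments) {
      return false;
    }
  }
  // A single multiexp validates every queued statement at once
  verifier.verify_vartime()
}
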
@@ -1,300 +0,0 @@
use std_shims::{vec::Vec, sync::OnceLock};

use rand_core::{RngCore, CryptoRng};

use zeroize::Zeroize;

use curve25519_dalek::{scalar::Scalar as DalekScalar, edwards::EdwardsPoint as DalekPoint};

use group::ff::Field;
use dalek_ff_group::{ED25519_BASEPOINT_POINT as G, Scalar, EdwardsPoint};

use multiexp::BatchVerifier;

use crate::{
  Commitment, hash,
  ringct::{hash_to_point::raw_hash_to_point, bulletproofs::core::*},
};

include!(concat!(env!("OUT_DIR"), "/generators_plus.rs"));

static TRANSCRIPT_CELL: OnceLock<[u8; 32]> = OnceLock::new();
pub(crate) fn TRANSCRIPT() -> [u8; 32] {
  *TRANSCRIPT_CELL.get_or_init(|| {
    EdwardsPoint(raw_hash_to_point(hash(b"bulletproof_plus_transcript"))).compress().to_bytes()
  })
}

// TRANSCRIPT isn't a Scalar, so we need this alternative for the first hash
fn hash_plus<C: IntoIterator<Item = DalekPoint>>(commitments: C) -> (Scalar, Vec<EdwardsPoint>) {
  let (cache, commitments) = hash_commitments(commitments);
  (hash_to_scalar(&[TRANSCRIPT().as_ref(), &cache.to_bytes()].concat()), commitments)
}

// d[j*N+i] = z**(2*(j+1)) * 2**i
fn d(z: Scalar, M: usize, MN: usize) -> (ScalarVector, ScalarVector) {
  let zpow = ScalarVector::even_powers(z, 2 * M);
  let mut d = vec![Scalar::ZERO; MN];
  for j in 0 .. M {
    for i in 0 .. N {
      d[(j * N) + i] = zpow[j] * TWO_N()[i];
    }
  }
  (zpow, ScalarVector(d))
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct PlusStruct {
  pub(crate) A: DalekPoint,
  pub(crate) A1: DalekPoint,
  pub(crate) B: DalekPoint,
  pub(crate) r1: DalekScalar,
  pub(crate) s1: DalekScalar,
  pub(crate) d1: DalekScalar,
  pub(crate) L: Vec<DalekPoint>,
  pub(crate) R: Vec<DalekPoint>,
}

impl PlusStruct {
  #[allow(clippy::many_single_char_names)]
  pub(crate) fn prove<R: RngCore + CryptoRng>(rng: &mut R, commitments: &[Commitment]) -> Self {
    let generators = GENERATORS();

    let (logMN, M, MN) = MN(commitments.len());

    let (aL, aR) = bit_decompose(commitments);
    let commitments_points = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
    let (mut cache, _) = hash_plus(commitments_points.clone());
    let (mut alpha1, A) = alpha_rho(&mut *rng, generators, &aL, &aR);

    let y = hash_cache(&mut cache, &[A.compress().to_bytes()]);
    let mut cache = hash_to_scalar(&y.to_bytes());
    let z = cache;

    let (zpow, d) = d(z, M, MN);

    let aL1 = aL - z;

    let ypow = ScalarVector::powers(y, MN + 2);
    let mut y_for_d = ScalarVector(ypow.0[1 ..= MN].to_vec());
    y_for_d.0.reverse();
    let aR1 = (aR + z) + (y_for_d * d);

    for (j, gamma) in commitments.iter().map(|c| Scalar(c.mask)).enumerate() {
      alpha1 += zpow[j] * ypow[MN + 1] * gamma;
    }

    let mut a = aL1;
    let mut b = aR1;

    let yinv = y.invert().unwrap();
    let yinvpow = ScalarVector::powers(yinv, MN);

    let mut G_proof = generators.G[.. a.len()].to_vec();
    let mut H_proof = generators.H[.. a.len()].to_vec();

    let mut L = Vec::with_capacity(logMN);
    let mut R = Vec::with_capacity(logMN);

    while a.len() != 1 {
      let (aL, aR) = a.split();
      let (bL, bR) = b.split();

      let cL = weighted_inner_product(&aL, &bR, y);
      let cR = weighted_inner_product(&(&aR * ypow[aR.len()]), &bL, y);

      let (mut dL, mut dR) = (Scalar::random(&mut *rng), Scalar::random(&mut *rng));

      let (G_L, G_R) = G_proof.split_at(aL.len());
      let (H_L, H_R) = H_proof.split_at(aL.len());

      let mut L_i = LR_statements(&(&aL * yinvpow[aL.len()]), G_R, &bR, H_L, cL, H());
      L_i.push((dL, G));
      let L_i = prove_multiexp(&L_i);
      L.push(*L_i);

      let mut R_i = LR_statements(&(&aR * ypow[aR.len()]), G_L, &bL, H_R, cR, H());
      R_i.push((dR, G));
      let R_i = prove_multiexp(&R_i);
      R.push(*R_i);

      let w = hash_cache(&mut cache, &[L_i.compress().to_bytes(), R_i.compress().to_bytes()]);
      let winv = w.invert().unwrap();

      G_proof = hadamard_fold(G_L, G_R, winv, w * yinvpow[aL.len()]);
      H_proof = hadamard_fold(H_L, H_R, w, winv);

      a = (&aL * w) + (aR * (winv * ypow[aL.len()]));
      b = (bL * winv) + (bR * w);

      alpha1 += (dL * (w * w)) + (dR * (winv * winv));

      dL.zeroize();
      dR.zeroize();
    }

    let mut r = Scalar::random(&mut *rng);
    let mut s = Scalar::random(&mut *rng);
    let mut d = Scalar::random(&mut *rng);
    let mut eta = Scalar::random(&mut *rng);

    let A1 = prove_multiexp(&[
      (r, G_proof[0]),
      (s, H_proof[0]),
      (d, G),
      ((r * y * b[0]) + (s * y * a[0]), H()),
    ]);
    let B = prove_multiexp(&[(r * y * s, H()), (eta, G)]);
    let e = hash_cache(&mut cache, &[A1.compress().to_bytes(), B.compress().to_bytes()]);

    let r1 = (a[0] * e) + r;
    r.zeroize();
    let s1 = (b[0] * e) + s;
    s.zeroize();
    let d1 = ((d * e) + eta) + (alpha1 * (e * e));
    d.zeroize();
    eta.zeroize();
    alpha1.zeroize();

    let res = Self { A: *A, A1: *A1, B: *B, r1: *r1, s1: *s1, d1: *d1, L, R };
    debug_assert!(res.verify(rng, &commitments_points));
    res
  }

  #[allow(clippy::many_single_char_names)]
  #[must_use]
  fn verify_core<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    // Verify commitments are valid
    if commitments.is_empty() || (commitments.len() > MAX_M) {
      return false;
    }

    // Verify L and R are properly sized
    if self.L.len() != self.R.len() {
      return false;
    }

    let (logMN, M, MN) = MN(commitments.len());
    if self.L.len() != logMN {
      return false;
    }

    // Rebuild all challenges
    let (mut cache, commitments) = hash_plus(commitments.iter().copied());
    let y = hash_cache(&mut cache, &[self.A.compress().to_bytes()]);
    let yinv = y.invert().unwrap();
    let z = hash_to_scalar(&y.to_bytes());
    cache = z;

    let mut w = Vec::with_capacity(logMN);
    let mut winv = Vec::with_capacity(logMN);
    for (L, R) in self.L.iter().zip(&self.R) {
      w.push(hash_cache(&mut cache, &[L.compress().to_bytes(), R.compress().to_bytes()]));
      winv.push(cache.invert().unwrap());
    }

    let e = hash_cache(&mut cache, &[self.A1.compress().to_bytes(), self.B.compress().to_bytes()]);

    // Convert the proof from * INV_EIGHT to its actual form
    let normalize = |point: &DalekPoint| EdwardsPoint(point.mul_by_cofactor());

    let L = self.L.iter().map(normalize).collect::<Vec<_>>();
    let R = self.R.iter().map(normalize).collect::<Vec<_>>();
    let A = normalize(&self.A);
    let A1 = normalize(&self.A1);
    let B = normalize(&self.B);

    // Verify it
    let mut proof = Vec::with_capacity(logMN + 5 + (2 * (MN + logMN)));

    let mut yMN = y;
    for _ in 0 .. logMN {
      yMN *= yMN;
    }
    let yMNy = yMN * y;

    let (zpow, d) = d(z, M, MN);
    let zsq = zpow[0];

    let esq = e * e;
    let minus_esq = -esq;
    let commitment_weight = minus_esq * yMNy;
    for (i, commitment) in commitments.iter().map(EdwardsPoint::mul_by_cofactor).enumerate() {
      proof.push((commitment_weight * zpow[i], commitment));
    }

    // Invert B, instead of the Scalar, as the latter is only 2x as expensive yet enables reduction
    // to a single addition under vartime for the first BP verified in the batch, which is expected
    // to be much more significant
    proof.push((Scalar::ONE, -B));
    proof.push((-e, A1));
    proof.push((minus_esq, A));
    proof.push((Scalar(self.d1), G));

    let d_sum = zpow.sum() * Scalar::from(u64::MAX);
    let y_sum = weighted_powers(y, MN).sum();
    proof.push((
      Scalar(self.r1 * y.0 * self.s1) + (esq * ((yMNy * z * d_sum) + ((zsq - z) * y_sum))),
      H(),
    ));

    let w_cache = challenge_products(&w, &winv);

    let mut e_r1_y = e * Scalar(self.r1);
    let e_s1 = e * Scalar(self.s1);
    let esq_z = esq * z;
    let minus_esq_z = -esq_z;
    let mut minus_esq_y = minus_esq * yMN;

    let generators = GENERATORS();
    for i in 0 .. MN {
      proof.push((e_r1_y * w_cache[i] + esq_z, generators.G[i]));
      proof.push((
        (e_s1 * w_cache[(!i) & (MN - 1)]) + minus_esq_z + (minus_esq_y * d[i]),
        generators.H[i],
      ));

      e_r1_y *= yinv;
      minus_esq_y *= yinv;
    }

    for i in 0 .. logMN {
      proof.push((minus_esq * w[i] * w[i], L[i]));
      proof.push((minus_esq * winv[i] * winv[i], R[i]));
    }

    verifier.queue(rng, id, proof);
    true
  }

  #[must_use]
  pub(crate) fn verify<R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    commitments: &[DalekPoint],
  ) -> bool {
    let mut verifier = BatchVerifier::new(1);
    if self.verify_core(rng, &mut verifier, (), commitments) {
      verifier.verify_vartime()
    } else {
      false
    }
  }

  #[must_use]
  pub(crate) fn batch_verify<ID: Copy + Zeroize, R: RngCore + CryptoRng>(
    &self,
    rng: &mut R,
    verifier: &mut BatchVerifier<ID, EdwardsPoint>,
    id: ID,
    commitments: &[DalekPoint],
  ) -> bool {
    self.verify_core(rng, verifier, id, commitments)
  }
}
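
// Illustrative sketch, not part of the diff: checking the layout documented above d(), i.e.
// d[(j * N) + i] == z^(2 * (j + 1)) * 2^i, where even_powers(z, 2 * M) yields [z^2, z^4, ..].
#[cfg(test)]
mod d_layout {
  use super::*;

  #[test]
  fn matches_documented_form() {
    let z = Scalar::from(3u8);
    let zsq = z * z;
    let (m, mn) = (2, 2 * N);
    let (zpow, d) = d(z, m, mn);
    assert_eq!(zpow[0], zsq);
    assert_eq!(zpow[1], zsq * zsq);
    for j in 0 .. m {
      for i in 0 .. N {
        assert_eq!(d[(j * N) + i], zpow[j] * TWO_N()[i]);
      }
    }
  }
}
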
@@ -1,137 +0,0 @@
use core::ops::{Add, Sub, Mul, Index};
use std_shims::vec::Vec;

use zeroize::{Zeroize, ZeroizeOnDrop};

use group::ff::Field;
use dalek_ff_group::{Scalar, EdwardsPoint};

use multiexp::multiexp;

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub(crate) struct ScalarVector(pub(crate) Vec<Scalar>);
macro_rules! math_op {
  ($Op: ident, $op: ident, $f: expr) => {
    impl $Op<Scalar> for ScalarVector {
      type Output = Self;
      fn $op(self, b: Scalar) -> Self {
        Self(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    impl $Op<Scalar> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Scalar) -> ScalarVector {
        ScalarVector(self.0.iter().map(|a| $f((a, &b))).collect())
      }
    }

    impl $Op<ScalarVector> for ScalarVector {
      type Output = Self;
      fn $op(self, b: Self) -> Self {
        debug_assert_eq!(self.len(), b.len());
        Self(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }

    impl $Op<Self> for &ScalarVector {
      type Output = ScalarVector;
      fn $op(self, b: Self) -> ScalarVector {
        debug_assert_eq!(self.len(), b.len());
        ScalarVector(self.0.iter().zip(b.0.iter()).map($f).collect())
      }
    }
  };
}
math_op!(Add, add, |(a, b): (&Scalar, &Scalar)| *a + *b);
math_op!(Sub, sub, |(a, b): (&Scalar, &Scalar)| *a - *b);
math_op!(Mul, mul, |(a, b): (&Scalar, &Scalar)| *a * *b);

impl ScalarVector {
  pub(crate) fn new(len: usize) -> Self {
    Self(vec![Scalar::ZERO; len])
  }

  pub(crate) fn powers(x: Scalar, len: usize) -> Self {
    debug_assert!(len != 0);

    let mut res = Vec::with_capacity(len);
    res.push(Scalar::ONE);
    for i in 1 .. len {
      res.push(res[i - 1] * x);
    }
    Self(res)
  }

  pub(crate) fn even_powers(x: Scalar, pow: usize) -> Self {
    debug_assert!(pow != 0);
    // Verify pow is a power of two
    debug_assert_eq!(((pow - 1) & pow), 0);

    let xsq = x * x;
    let mut res = Self(Vec::with_capacity(pow / 2));
    res.0.push(xsq);

    let mut prev = 2;
    while prev < pow {
      res.0.push(res[res.len() - 1] * xsq);
      prev += 2;
    }

    res
  }

  pub(crate) fn sum(mut self) -> Scalar {
    self.0.drain(..).sum()
  }

  pub(crate) fn len(&self) -> usize {
    self.0.len()
  }

  pub(crate) fn split(self) -> (Self, Self) {
    let (l, r) = self.0.split_at(self.0.len() / 2);
    (Self(l.to_vec()), Self(r.to_vec()))
  }
}

impl Index<usize> for ScalarVector {
  type Output = Scalar;
  fn index(&self, index: usize) -> &Scalar {
    &self.0[index]
  }
}

pub(crate) fn inner_product(a: &ScalarVector, b: &ScalarVector) -> Scalar {
  (a * b).sum()
}

pub(crate) fn weighted_powers(x: Scalar, len: usize) -> ScalarVector {
  ScalarVector(ScalarVector::powers(x, len + 1).0[1 ..].to_vec())
}

pub(crate) fn weighted_inner_product(a: &ScalarVector, b: &ScalarVector, y: Scalar) -> Scalar {
  // y ** 0 is not used as a power
  (a * b * weighted_powers(y, a.len())).sum()
}

impl Mul<&[EdwardsPoint]> for &ScalarVector {
  type Output = EdwardsPoint;
  fn mul(self, b: &[EdwardsPoint]) -> EdwardsPoint {
    debug_assert_eq!(self.len(), b.len());
    multiexp(&self.0.iter().copied().zip(b.iter().copied()).collect::<Vec<_>>())
  }
}

pub(crate) fn hadamard_fold(
  l: &[EdwardsPoint],
  r: &[EdwardsPoint],
  a: Scalar,
  b: Scalar,
) -> Vec<EdwardsPoint> {
  let mut res = Vec::with_capacity(l.len() / 2);
  for i in 0 .. l.len() {
    res.push(multiexp(&[(a, l[i]), (b, r[i])]));
  }
  res
}
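
// Illustrative sketch, not part of the diff: the operations above in action. powers(x, n) is
// [1, x, .., x^(n-1)], inner_product the usual dot product, and the weighted variant skips
// y^0, so it's the sum of a_i * b_i * y^(i+1).
#[cfg(test)]
mod scalar_vector_examples {
  use super::*;

  #[test]
  fn examples() {
    let two = Scalar::from(2u8);
    let powers = ScalarVector::powers(two, 4); // [1, 2, 4, 8]
    let ones = ScalarVector(vec![Scalar::ONE; 4]);
    assert_eq!(inner_product(&powers, &ones), Scalar::from(15u8));

    let y = Scalar::from(3u8);
    let pair = ScalarVector(vec![Scalar::ONE; 2]);
    // y + y^2 = 3 + 9
    assert_eq!(weighted_inner_product(&pair, &pair, y), Scalar::from(12u8));
  }
}
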
@@ -1,325 +0,0 @@
#![allow(non_snake_case)]

use core::ops::Deref;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use subtle::{ConstantTimeEq, Choice, CtOption};
use rand_core::{RngCore, CryptoRng};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  traits::{IsIdentity, VartimePrecomputedMultiscalarMul},
  edwards::{EdwardsPoint, VartimeEdwardsPrecomputation},
};

use crate::{
  INV_EIGHT, Commitment, random_scalar, hash_to_scalar, wallet::decoys::Decoys,
  ringct::hash_to_point, serialize::*,
};

#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::{ClsagDetails, ClsagAddendum, ClsagMultisig};
#[cfg(feature = "multisig")]
pub(crate) use multisig::add_key_image_share;

/// Errors returned when CLSAG signing fails.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum ClsagError {
  #[cfg_attr(feature = "std", error("internal error ({0})"))]
  InternalError(&'static str),
  #[cfg_attr(feature = "std", error("invalid ring"))]
  InvalidRing,
  #[cfg_attr(feature = "std", error("invalid ring member (member {0}, ring size {1})"))]
  InvalidRingMember(u8, u8),
  #[cfg_attr(feature = "std", error("invalid commitment"))]
  InvalidCommitment,
  #[cfg_attr(feature = "std", error("invalid key image"))]
  InvalidImage,
  #[cfg_attr(feature = "std", error("invalid D"))]
  InvalidD,
  #[cfg_attr(feature = "std", error("invalid s"))]
  InvalidS,
  #[cfg_attr(feature = "std", error("invalid c1"))]
  InvalidC1,
}

/// Input being signed for.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ClsagInput {
  // The actual commitment for the true spend
  pub(crate) commitment: Commitment,
  // True spend index, offsets, and ring
  pub(crate) decoys: Decoys,
}

impl ClsagInput {
  pub fn new(commitment: Commitment, decoys: Decoys) -> Result<Self, ClsagError> {
    let n = decoys.len();
    if n > u8::MAX.into() {
      Err(ClsagError::InternalError("max ring size in this library is u8 max"))?;
    }
    let n = u8::try_from(n).unwrap();
    if decoys.i >= n {
      Err(ClsagError::InvalidRingMember(decoys.i, n))?;
    }

    // Validate the commitment matches
    if decoys.ring[usize::from(decoys.i)][1] != commitment.calculate() {
      Err(ClsagError::InvalidCommitment)?;
    }

    Ok(Self { commitment, decoys })
  }
}

#[allow(clippy::large_enum_variant)]
enum Mode {
  Sign(usize, EdwardsPoint, EdwardsPoint),
  Verify(Scalar),
}

// Core of the CLSAG algorithm, applicable to both sign and verify with minimal differences
// Said differences are covered via the above Mode
fn core(
  ring: &[[EdwardsPoint; 2]],
  I: &EdwardsPoint,
  pseudo_out: &EdwardsPoint,
  msg: &[u8; 32],
  D: &EdwardsPoint,
  s: &[Scalar],
  A_c1: Mode,
) -> ((EdwardsPoint, Scalar, Scalar), Scalar) {
  let n = ring.len();

  let images_precomp = VartimeEdwardsPrecomputation::new([I, D]);
  let D = D * INV_EIGHT();

  // Generate the transcript
  // Instead of generating multiple, a single transcript is created and then edited as needed
  const PREFIX: &[u8] = b"CLSAG_";
  #[rustfmt::skip]
  const AGG_0: &[u8] = b"agg_0";
  #[rustfmt::skip]
  const ROUND: &[u8] = b"round";
  const PREFIX_AGG_0_LEN: usize = PREFIX.len() + AGG_0.len();

  let mut to_hash = Vec::with_capacity(((2 * n) + 5) * 32);
  to_hash.extend(PREFIX);
  to_hash.extend(AGG_0);
  to_hash.extend([0; 32 - PREFIX_AGG_0_LEN]);

  let mut P = Vec::with_capacity(n);
  for member in ring {
    P.push(member[0]);
    to_hash.extend(member[0].compress().to_bytes());
  }

  let mut C = Vec::with_capacity(n);
  for member in ring {
    C.push(member[1] - pseudo_out);
    to_hash.extend(member[1].compress().to_bytes());
  }

  to_hash.extend(I.compress().to_bytes());
  to_hash.extend(D.compress().to_bytes());
  to_hash.extend(pseudo_out.compress().to_bytes());
  // mu_P with agg_0
  let mu_P = hash_to_scalar(&to_hash);
  // mu_C with agg_1
  to_hash[PREFIX_AGG_0_LEN - 1] = b'1';
  let mu_C = hash_to_scalar(&to_hash);

  // Truncate it for the round transcript, altering the DST as needed
  to_hash.truncate(((2 * n) + 1) * 32);
  for i in 0 .. ROUND.len() {
    to_hash[PREFIX.len() + i] = ROUND[i];
  }
  // Unfortunately, it's I D pseudo_out instead of pseudo_out I D, meaning this needs to be
  // truncated just to add it back
  to_hash.extend(pseudo_out.compress().to_bytes());
  to_hash.extend(msg);

  // Configure the loop based on if we're signing or verifying
  let start;
  let end;
  let mut c;
  match A_c1 {
    Mode::Sign(r, A, AH) => {
      start = r + 1;
      end = r + n;
      to_hash.extend(A.compress().to_bytes());
      to_hash.extend(AH.compress().to_bytes());
      c = hash_to_scalar(&to_hash);
    }

    Mode::Verify(c1) => {
      start = 0;
      end = n;
      c = c1;
    }
  }

  // Perform the core loop
  let mut c1 = CtOption::new(Scalar::zero(), Choice::from(0));
  for i in (start .. end).map(|i| i % n) {
    // This will only execute once and shouldn't need to be constant time. Making it constant time
    // removes the risk of branch prediction creating timing differences depending on ring index
    // however
    c1 = c1.or_else(|| CtOption::new(c, i.ct_eq(&0)));

    let c_p = mu_P * c;
    let c_c = mu_C * c;

    let L = (&s[i] * &ED25519_BASEPOINT_TABLE) + (c_p * P[i]) + (c_c * C[i]);
    let PH = hash_to_point(P[i]);
    // Shouldn't be an issue as all of the variables in this vartime statement are public
    let R = (s[i] * PH) + images_precomp.vartime_multiscalar_mul([c_p, c_c]);

    to_hash.truncate(((2 * n) + 3) * 32);
    to_hash.extend(L.compress().to_bytes());
    to_hash.extend(R.compress().to_bytes());
    c = hash_to_scalar(&to_hash);
  }

  // This first tuple is needed to continue signing, the latter is the c to be tested/worked with
  ((D, c * mu_P, c * mu_C), c1.unwrap_or(c))
}

/// CLSAG signature, as used in Monero.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Clsag {
  pub D: EdwardsPoint,
  pub s: Vec<Scalar>,
  pub c1: Scalar,
}

impl Clsag {
  // Sign core is the extension of core as needed for signing, yet is shared between single signer
  // and multisig, hence why it's still core
  #[allow(clippy::many_single_char_names)]
  pub(crate) fn sign_core<R: RngCore + CryptoRng>(
    rng: &mut R,
    I: &EdwardsPoint,
    input: &ClsagInput,
    mask: Scalar,
    msg: &[u8; 32],
    A: EdwardsPoint,
    AH: EdwardsPoint,
  ) -> (Self, EdwardsPoint, Scalar, Scalar) {
    let r: usize = input.decoys.i.into();

    let pseudo_out = Commitment::new(mask, input.commitment.amount).calculate();
    let z = input.commitment.mask - mask;

    let H = hash_to_point(input.decoys.ring[r][0]);
    let D = H * z;
    let mut s = Vec::with_capacity(input.decoys.ring.len());
    for _ in 0 .. input.decoys.ring.len() {
      s.push(random_scalar(rng));
    }
    let ((D, p, c), c1) =
      core(&input.decoys.ring, I, &pseudo_out, msg, &D, &s, Mode::Sign(r, A, AH));

    (Self { D, s, c1 }, pseudo_out, p, c * z)
  }

  /// Generate CLSAG signatures for the given inputs.
  /// inputs is of the form (private key, key image, input).
  /// sum_outputs is for the sum of the outputs' commitment masks.
  pub fn sign<R: RngCore + CryptoRng>(
    rng: &mut R,
    mut inputs: Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>,
    sum_outputs: Scalar,
    msg: [u8; 32],
  ) -> Vec<(Self, EdwardsPoint)> {
    let mut res = Vec::with_capacity(inputs.len());
    let mut sum_pseudo_outs = Scalar::zero();
    for i in 0 .. inputs.len() {
      let mask = if i == (inputs.len() - 1) {
        sum_outputs - sum_pseudo_outs
      } else {
        let mask = random_scalar(rng);
        sum_pseudo_outs += mask;
        mask
      };

      let mut nonce = Zeroizing::new(random_scalar(rng));
      let (mut clsag, pseudo_out, p, c) = Self::sign_core(
        rng,
        &inputs[i].1,
        &inputs[i].2,
        mask,
        &msg,
        nonce.deref() * &ED25519_BASEPOINT_TABLE,
        nonce.deref() *
          hash_to_point(inputs[i].2.decoys.ring[usize::from(inputs[i].2.decoys.i)][0]),
      );
      clsag.s[usize::from(inputs[i].2.decoys.i)] =
        (-((p * inputs[i].0.deref()) + c)) + nonce.deref();
      inputs[i].0.zeroize();
      nonce.zeroize();

      debug_assert!(clsag
        .verify(&inputs[i].2.decoys.ring, &inputs[i].1, &pseudo_out, &msg)
        .is_ok());

      res.push((clsag, pseudo_out));
    }

    res
  }

  /// Verify the CLSAG signature against the given Transaction data.
  pub fn verify(
    &self,
    ring: &[[EdwardsPoint; 2]],
    I: &EdwardsPoint,
    pseudo_out: &EdwardsPoint,
    msg: &[u8; 32],
  ) -> Result<(), ClsagError> {
    // Preliminary checks. s, c1, and points must also be encoded canonically, which isn't checked
    // here
    if ring.is_empty() {
      Err(ClsagError::InvalidRing)?;
    }
    if ring.len() != self.s.len() {
      Err(ClsagError::InvalidS)?;
    }
    if I.is_identity() {
      Err(ClsagError::InvalidImage)?;
    }

    let D = self.D.mul_by_cofactor();
    if D.is_identity() {
      Err(ClsagError::InvalidD)?;
    }

    let (_, c1) = core(ring, I, pseudo_out, msg, &D, &self.s, Mode::Verify(self.c1));
    if c1 != self.c1 {
      Err(ClsagError::InvalidC1)?;
    }
    Ok(())
  }

  pub(crate) fn fee_weight(ring_len: usize) -> usize {
    (ring_len * 32) + 32 + 32
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_raw_vec(write_scalar, &self.s, w)?;
    w.write_all(&self.c1.to_bytes())?;
    write_point(&self.D, w)
  }

  pub fn read<R: Read>(decoys: usize, r: &mut R) -> io::Result<Self> {
    Ok(Self { s: read_raw_vec(read_scalar, decoys, r)?, c1: read_scalar(r)?, D: read_point(r)? })
  }
}
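
// Illustrative sketch, not part of the diff: deserializing a CLSAG and verifying it against
// a ring, key image, pseudo-out, and message. Mapping a read failure to InvalidS is a choice
// made for this sketch, not behavior the source defines.
pub fn read_and_verify_clsag(
  serialized: &[u8],
  ring: &[[EdwardsPoint; 2]],
  key_image: &EdwardsPoint,
  pseudo_out: &EdwardsPoint,
  msg: &[u8; 32],
) -> Result<(), ClsagError> {
  let mut reader = serialized;
  // Clsag::read needs the ring length up front, as s is serialized without a length prefix
  let clsag = Clsag::read(ring.len(), &mut reader).map_err(|_| ClsagError::InvalidS)?;
  clsag.verify(ring, key_image, pseudo_out, msg)
}
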
@@ -1,311 +0,0 @@
use core::{ops::Deref, fmt::Debug};
use std_shims::{
  sync::Arc,
  io::{self, Read, Write},
};
use std::sync::RwLock;

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use curve25519_dalek::{
  traits::{Identity, IsIdentity},
  scalar::Scalar,
  edwards::EdwardsPoint,
};

use group::{ff::Field, Group, GroupEncoding};

use transcript::{Transcript, RecommendedTranscript};
use dalek_ff_group as dfg;
use dleq::DLEqProof;
use frost::{
  dkg::lagrange,
  curve::Ed25519,
  Participant, FrostError, ThresholdKeys, ThresholdView,
  algorithm::{WriteAddendum, Algorithm},
};

use crate::ringct::{
  hash_to_point,
  clsag::{ClsagInput, Clsag},
};

fn dleq_transcript() -> RecommendedTranscript {
  RecommendedTranscript::new(b"monero_key_image_dleq")
}

impl ClsagInput {
  fn transcript<T: Transcript>(&self, transcript: &mut T) {
    // Doesn't domain separate as this is considered part of the larger CLSAG proof

    // Ring index
    transcript.append_message(b"real_spend", [self.decoys.i]);

    // Ring
    for (i, pair) in self.decoys.ring.iter().enumerate() {
      // Doesn't include global output indexes as CLSAG doesn't care and won't be affected by it
      // They're just an unreliable reference to this data which will be included in the message
|
|
||||||
// if in use
|
|
||||||
transcript.append_message(b"member", [u8::try_from(i).expect("ring size exceeded 255")]);
|
|
||||||
transcript.append_message(b"key", pair[0].compress().to_bytes());
|
|
||||||
transcript.append_message(b"commitment", pair[1].compress().to_bytes());
|
|
||||||
}
|
|
||||||
|
|
||||||
// Doesn't include the commitment's parts as the above ring + index includes the commitment
|
|
||||||
// The only potential malleability would be if the G/H relationship is known breaking the
|
|
||||||
// discrete log problem, which breaks everything already
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// CLSAG input and the mask to use for it.
|
|
||||||
#[derive(Clone, Debug, Zeroize, ZeroizeOnDrop)]
|
|
||||||
pub struct ClsagDetails {
|
|
||||||
input: ClsagInput,
|
|
||||||
mask: Scalar,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClsagDetails {
|
|
||||||
pub fn new(input: ClsagInput, mask: Scalar) -> Self {
|
|
||||||
Self { input, mask }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Addendum produced during the FROST signing process with relevant data.
|
|
||||||
#[derive(Clone, PartialEq, Eq, Zeroize, Debug)]
|
|
||||||
pub struct ClsagAddendum {
|
|
||||||
pub(crate) key_image: dfg::EdwardsPoint,
|
|
||||||
dleq: DLEqProof<dfg::EdwardsPoint>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl WriteAddendum for ClsagAddendum {
|
|
||||||
fn write<W: Write>(&self, writer: &mut W) -> io::Result<()> {
|
|
||||||
writer.write_all(self.key_image.compress().to_bytes().as_ref())?;
|
|
||||||
self.dleq.write(writer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
|
||||||
struct Interim {
|
|
||||||
p: Scalar,
|
|
||||||
c: Scalar,
|
|
||||||
|
|
||||||
clsag: Clsag,
|
|
||||||
pseudo_out: EdwardsPoint,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// FROST algorithm for producing a CLSAG signature.
|
|
||||||
#[allow(non_snake_case)]
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
pub struct ClsagMultisig {
|
|
||||||
transcript: RecommendedTranscript,
|
|
||||||
|
|
||||||
pub(crate) H: EdwardsPoint,
|
|
||||||
// Merged here as CLSAG needs it, passing it would be a mess, yet having it beforehand requires
|
|
||||||
// an extra round
|
|
||||||
image: EdwardsPoint,
|
|
||||||
|
|
||||||
details: Arc<RwLock<Option<ClsagDetails>>>,
|
|
||||||
|
|
||||||
msg: Option<[u8; 32]>,
|
|
||||||
interim: Option<Interim>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ClsagMultisig {
|
|
||||||
pub fn new(
|
|
||||||
transcript: RecommendedTranscript,
|
|
||||||
output_key: EdwardsPoint,
|
|
||||||
details: Arc<RwLock<Option<ClsagDetails>>>,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
transcript,
|
|
||||||
|
|
||||||
H: hash_to_point(output_key),
|
|
||||||
image: EdwardsPoint::identity(),
|
|
||||||
|
|
||||||
details,
|
|
||||||
|
|
||||||
msg: None,
|
|
||||||
interim: None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn input(&self) -> ClsagInput {
|
|
||||||
(*self.details.read().unwrap()).as_ref().unwrap().input.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn mask(&self) -> Scalar {
|
|
||||||
(*self.details.read().unwrap()).as_ref().unwrap().mask
|
|
||||||
}
|
|
||||||
}

pub(crate) fn add_key_image_share(
  image: &mut EdwardsPoint,
  generator: EdwardsPoint,
  offset: Scalar,
  included: &[Participant],
  participant: Participant,
  share: EdwardsPoint,
) {
  if image.is_identity() {
    *image = generator * offset;
  }
  *image += share * lagrange::<dfg::Scalar>(participant, included).0;
}
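
// Illustrative note (not from the original source): for the signing set {1, 2},
// the Lagrange coefficients at zero are lambda_1 = 2 and lambda_2 = -1, so the
// interpolated image is (2 * share_1) - share_2, plus the offset term seeded
// above. Key image shares interpolate exactly like FROST key shares, as each
// share's discrete logarithm (over H) is the participant's share of the key.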

impl Algorithm<Ed25519> for ClsagMultisig {
  type Transcript = RecommendedTranscript;
  type Addendum = ClsagAddendum;
  type Signature = (Clsag, EdwardsPoint);

  fn nonces(&self) -> Vec<Vec<dfg::EdwardsPoint>> {
    vec![vec![dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)]]
  }
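
  // Illustrative note (not from the original source): returning both generators
  // here has FROST commit to each nonce over G and H alike, providing nonce
  // commitments for both the signature term (over G) and the key image term
  // (over H = hash_to_point(output key)) without requiring an extra round.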

  fn preprocess_addendum<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    keys: &ThresholdKeys<Ed25519>,
  ) -> ClsagAddendum {
    ClsagAddendum {
      key_image: dfg::EdwardsPoint(self.H) * keys.secret_share().deref(),
      dleq: DLEqProof::prove(
        rng,
        // Doesn't take in a larger transcript object due to the usage of this
        // Every prover would immediately write their own DLEq proof, when they can only do so in
        // the proper order if they want to reach consensus
        // It'd be a poor API to have CLSAG define a new transcript solely to pass here, just to
        // try to merge later in some form, when it should instead just merge xH (as it does)
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        keys.secret_share(),
      ),
    }
  }

  fn read_addendum<R: Read>(&self, reader: &mut R) -> io::Result<ClsagAddendum> {
    let mut bytes = [0; 32];
    reader.read_exact(&mut bytes)?;
    // dfg ensures the point is torsion free
    let xH = Option::<dfg::EdwardsPoint>::from(dfg::EdwardsPoint::from_bytes(&bytes))
      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid key image"))?;
    // Ensure this is a canonical point
    if xH.to_bytes() != bytes {
      Err(io::Error::new(io::ErrorKind::Other, "non-canonical key image"))?;
    }

    Ok(ClsagAddendum { key_image: xH, dleq: DLEqProof::<dfg::EdwardsPoint>::read(reader)? })
  }

  fn process_addendum(
    &mut self,
    view: &ThresholdView<Ed25519>,
    l: Participant,
    addendum: ClsagAddendum,
  ) -> Result<(), FrostError> {
    if self.image.is_identity() {
      self.transcript.domain_separate(b"CLSAG");
      self.input().transcript(&mut self.transcript);
      self.transcript.append_message(b"mask", self.mask().to_bytes());
    }

    self.transcript.append_message(b"participant", l.to_bytes());

    addendum
      .dleq
      .verify(
        &mut dleq_transcript(),
        &[dfg::EdwardsPoint::generator(), dfg::EdwardsPoint(self.H)],
        &[view.original_verification_share(l), addendum.key_image],
      )
      .map_err(|_| FrostError::InvalidPreprocess(l))?;

    self.transcript.append_message(b"key_image_share", addendum.key_image.compress().to_bytes());
    add_key_image_share(
      &mut self.image,
      self.H,
      view.offset().0,
      view.included(),
      l,
      addendum.key_image.0,
    );

    Ok(())
  }

  fn transcript(&mut self) -> &mut Self::Transcript {
    &mut self.transcript
  }

  fn sign_share(
    &mut self,
    view: &ThresholdView<Ed25519>,
    nonce_sums: &[Vec<dfg::EdwardsPoint>],
    nonces: Vec<Zeroizing<dfg::Scalar>>,
    msg: &[u8],
  ) -> dfg::Scalar {
    // Use the transcript to get a seeded random number generator
    // The transcript contains private data, preventing passive adversaries from recreating this
    // process even if they have access to the commitments (specifically, the ring index being
    // signed for, along with the mask, which requires knowing not only the shared keys but also
    // the input commitment masks)
    let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

    self.msg = Some(msg.try_into().expect("CLSAG message should be 32-bytes"));

    #[allow(non_snake_case)]
    let (clsag, pseudo_out, p, c) = Clsag::sign_core(
      &mut rng,
      &self.image,
      &self.input(),
      self.mask(),
      self.msg.as_ref().unwrap(),
      nonce_sums[0][0].0,
      nonce_sums[0][1].0,
    );
    self.interim = Some(Interim { p, c, clsag, pseudo_out });

    (-(dfg::Scalar(p) * view.secret_share().deref())) + nonces[0].deref()
  }
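
  // Illustrative note (not from the original source): each signer contributes
  // r_i - (p * x_i), where x_i is their Lagrange-weighted secret share. Summed,
  // the shares yield r - (p * x); verify() below then subtracts c to complete
  // the s value at the real spend's ring index.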

  #[must_use]
  fn verify(
    &self,
    _: dfg::EdwardsPoint,
    _: &[Vec<dfg::EdwardsPoint>],
    sum: dfg::Scalar,
  ) -> Option<Self::Signature> {
    let interim = self.interim.as_ref().unwrap();
    let mut clsag = interim.clsag.clone();
    clsag.s[usize::from(self.input().decoys.i)] = sum.0 - interim.c;
    if clsag
      .verify(
        &self.input().decoys.ring,
        &self.image,
        &interim.pseudo_out,
        self.msg.as_ref().unwrap(),
      )
      .is_ok()
    {
      return Some((clsag, interim.pseudo_out));
    }
    None
  }

  fn verify_share(
    &self,
    verification_share: dfg::EdwardsPoint,
    nonces: &[Vec<dfg::EdwardsPoint>],
    share: dfg::Scalar,
  ) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
    let interim = self.interim.as_ref().unwrap();
    Ok(vec![
      (share, dfg::EdwardsPoint::generator()),
      (dfg::Scalar(interim.p), verification_share),
      (-dfg::Scalar::ONE, nonces[0][0]),
    ])
  }
}

@@ -1,8 +0,0 @@
use curve25519_dalek::edwards::EdwardsPoint;

pub use monero_generators::{hash_to_point as raw_hash_to_point};

/// Monero's hash to point function, as named `ge_fromfe_frombytes_vartime`.
pub fn hash_to_point(key: EdwardsPoint) -> EdwardsPoint {
  raw_hash_to_point(key.compress().to_bytes())
}

@@ -1,72 +0,0 @@
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use curve25519_dalek::scalar::Scalar;
#[cfg(feature = "experimental")]
use curve25519_dalek::edwards::EdwardsPoint;

use crate::serialize::*;
#[cfg(feature = "experimental")]
use crate::{hash_to_scalar, ringct::hash_to_point};

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Mlsag {
  pub ss: Vec<[Scalar; 2]>,
  pub cc: Scalar,
}

impl Mlsag {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for ss in &self.ss {
      write_raw_vec(write_scalar, ss, w)?;
    }
    write_scalar(&self.cc, w)
  }

  pub fn read<R: Read>(mixins: usize, r: &mut R) -> io::Result<Self> {
    Ok(Self {
      ss: (0 .. mixins).map(|_| read_array(read_scalar, r)).collect::<Result<_, _>>()?,
      cc: read_scalar(r)?,
    })
  }

  #[cfg(feature = "experimental")]
  #[must_use]
  pub fn verify(
    &self,
    msg: &[u8; 32],
    ring: &[[EdwardsPoint; 2]],
    key_image: &EdwardsPoint,
  ) -> bool {
    if ring.is_empty() {
      return false;
    }

    let mut buf = Vec::with_capacity(6 * 32);
    let mut ci = self.cc;
    for (i, ring_member) in ring.iter().enumerate() {
      buf.extend_from_slice(msg);

      #[allow(non_snake_case)]
      let L =
        |r| EdwardsPoint::vartime_double_scalar_mul_basepoint(&ci, &ring_member[r], &self.ss[i][r]);

      buf.extend_from_slice(ring_member[0].compress().as_bytes());
      buf.extend_from_slice(L(0).compress().as_bytes());

      #[allow(non_snake_case)]
      let R = (self.ss[i][0] * hash_to_point(ring_member[0])) + (ci * key_image);
      buf.extend_from_slice(R.compress().as_bytes());

      buf.extend_from_slice(ring_member[1].compress().as_bytes());
      buf.extend_from_slice(L(1).compress().as_bytes());

      ci = hash_to_scalar(&buf);
      buf.clear();
    }

    ci == self.cc
  }
}
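
// Illustrative note (not from the original source): verification walks the ring
// recomputing c_{i+1} = H(msg || P_i || L_{i,0} || R_i || C_i || L_{i,1}), where
// L_{i,r} = s_{i,r} G + c_i * (ring member r) and R_i = s_{i,0} Hp(P_i) + c_i I.
// The signature is accepted only if the chain closes back to cc after a full
// loop around the ring.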

@@ -1,383 +0,0 @@
use core::ops::Deref;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use zeroize::{Zeroize, Zeroizing};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

pub(crate) mod hash_to_point;
pub use hash_to_point::{raw_hash_to_point, hash_to_point};

/// MLSAG struct, along with verifying functionality.
pub mod mlsag;
/// CLSAG struct, along with signing and verifying functionality.
pub mod clsag;
/// BorromeanRange struct, along with verifying functionality.
pub mod borromean;
/// Bulletproofs(+) structs, along with proving and verifying functionality.
pub mod bulletproofs;

use crate::{
  Protocol,
  serialize::*,
  ringct::{mlsag::Mlsag, clsag::Clsag, borromean::BorromeanRange, bulletproofs::Bulletproofs},
};

/// Generate a key image for a given key. Defined as `x * hash_to_point(xG)`.
pub fn generate_key_image(secret: &Zeroizing<Scalar>) -> EdwardsPoint {
  hash_to_point(&ED25519_BASEPOINT_TABLE * secret.deref()) * secret.deref()
}
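
// Hedged sketch (not from the original source): the key image is a
// deterministic function of the key alone, which is what lets verifiers link
// two spends of the same output. This sketch assumes rand_core is available.
#[cfg(test)]
#[test]
fn key_image_determinism_sketch() {
  use rand_core::OsRng;
  let secret = Zeroizing::new(crate::random_scalar(&mut OsRng));
  // Any two derivations of the key image for the same key must be equal
  assert_eq!(generate_key_image(&secret), generate_key_image(&secret));
}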

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EncryptedAmount {
  Original { mask: [u8; 32], amount: [u8; 32] },
  Compact { amount: [u8; 8] },
}

impl EncryptedAmount {
  pub fn read<R: Read>(compact: bool, r: &mut R) -> io::Result<Self> {
    Ok(if compact {
      Self::Compact { amount: read_bytes(r)? }
    } else {
      Self::Original { mask: read_bytes(r)?, amount: read_bytes(r)? }
    })
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::Original { mask, amount } => {
        w.write_all(mask)?;
        w.write_all(amount)
      }
      Self::Compact { amount } => w.write_all(amount),
    }
  }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum RctType {
  /// No RCT proofs.
  Null,
  /// One MLSAG for a single input and a Borromean range proof (RCTTypeFull).
  MlsagAggregate,
  /// One MLSAG for each input and a Borromean range proof (RCTTypeSimple).
  MlsagIndividual,
  /// One MLSAG for each input and a Bulletproof (RCTTypeBulletproof).
  Bulletproofs,
  /// One MLSAG for each input and a Bulletproof, yet starting to use EncryptedAmount::Compact
  /// (RCTTypeBulletproof2).
  BulletproofsCompactAmount,
  /// One CLSAG for each input and a Bulletproof (RCTTypeCLSAG).
  Clsag,
  /// One CLSAG for each input and a Bulletproof+ (RCTTypeBulletproofPlus).
  BulletproofsPlus,
}

impl RctType {
  pub fn to_byte(self) -> u8 {
    match self {
      Self::Null => 0,
      Self::MlsagAggregate => 1,
      Self::MlsagIndividual => 2,
      Self::Bulletproofs => 3,
      Self::BulletproofsCompactAmount => 4,
      Self::Clsag => 5,
      Self::BulletproofsPlus => 6,
    }
  }

  pub fn from_byte(byte: u8) -> Option<Self> {
    Some(match byte {
      0 => Self::Null,
      1 => Self::MlsagAggregate,
      2 => Self::MlsagIndividual,
      3 => Self::Bulletproofs,
      4 => Self::BulletproofsCompactAmount,
      5 => Self::Clsag,
      6 => Self::BulletproofsPlus,
      _ => None?,
    })
  }

  pub fn compact_encrypted_amounts(&self) -> bool {
    match self {
      Self::Null | Self::MlsagAggregate | Self::MlsagIndividual | Self::Bulletproofs => false,
      Self::BulletproofsCompactAmount | Self::Clsag | Self::BulletproofsPlus => true,
    }
  }
}
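
// Hedged sketch (not from the original source): to_byte and from_byte are
// inverses over the seven defined types, with any other byte mapping to None.
#[cfg(test)]
#[test]
fn rct_type_byte_round_trip_sketch() {
  for byte in 0u8 ..= 6 {
    assert_eq!(RctType::from_byte(byte).unwrap().to_byte(), byte);
  }
  assert!(RctType::from_byte(7).is_none());
}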

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RctBase {
  pub fee: u64,
  pub pseudo_outs: Vec<EdwardsPoint>,
  pub encrypted_amounts: Vec<EncryptedAmount>,
  pub commitments: Vec<EdwardsPoint>,
}

impl RctBase {
  pub(crate) fn fee_weight(outputs: usize) -> usize {
    1 + 8 + (outputs * (8 + 32))
  }

  pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
    w.write_all(&[rct_type.to_byte()])?;
    match rct_type {
      RctType::Null => Ok(()),
      RctType::MlsagAggregate |
      RctType::MlsagIndividual |
      RctType::Bulletproofs |
      RctType::BulletproofsCompactAmount |
      RctType::Clsag |
      RctType::BulletproofsPlus => {
        write_varint(&self.fee, w)?;
        if rct_type == RctType::MlsagIndividual {
          write_raw_vec(write_point, &self.pseudo_outs, w)?;
        }
        for encrypted_amount in &self.encrypted_amounts {
          encrypted_amount.write(w)?;
        }
        write_raw_vec(write_point, &self.commitments, w)
      }
    }
  }

  pub fn read<R: Read>(inputs: usize, outputs: usize, r: &mut R) -> io::Result<(Self, RctType)> {
    let rct_type = RctType::from_byte(read_byte(r)?)
      .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid RCT type"))?;

    match rct_type {
      RctType::Null | RctType::MlsagAggregate | RctType::MlsagIndividual => {}
      RctType::Bulletproofs |
      RctType::BulletproofsCompactAmount |
      RctType::Clsag |
      RctType::BulletproofsPlus => {
        if outputs == 0 {
          // Because the Bulletproofs(+) layout must be canonical, there must be 1 Bulletproof if
          // Bulletproofs are in use
          // If there are Bulletproofs, there must be a matching amount of outputs, implicitly
          // banning 0 outputs
          // Since HF 12 (CLSAG being 13), a 2-output minimum has also been enforced
          Err(io::Error::new(io::ErrorKind::Other, "RCT with Bulletproofs(+) had 0 outputs"))?;
        }
      }
    }

    Ok((
      if rct_type == RctType::Null {
        Self { fee: 0, pseudo_outs: vec![], encrypted_amounts: vec![], commitments: vec![] }
      } else {
        Self {
          fee: read_varint(r)?,
          pseudo_outs: if rct_type == RctType::MlsagIndividual {
            read_raw_vec(read_point, inputs, r)?
          } else {
            vec![]
          },
          encrypted_amounts: (0 .. outputs)
            .map(|_| EncryptedAmount::read(rct_type.compact_encrypted_amounts(), r))
            .collect::<Result<_, _>>()?,
          commitments: read_raw_vec(read_point, outputs, r)?,
        }
      },
      rct_type,
    ))
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum RctPrunable {
  Null,
  MlsagBorromean {
    borromean: Vec<BorromeanRange>,
    mlsags: Vec<Mlsag>,
  },
  MlsagBulletproofs {
    bulletproofs: Bulletproofs,
    mlsags: Vec<Mlsag>,
    pseudo_outs: Vec<EdwardsPoint>,
  },
  Clsag {
    bulletproofs: Bulletproofs,
    clsags: Vec<Clsag>,
    pseudo_outs: Vec<EdwardsPoint>,
  },
}

impl RctPrunable {
  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
    1 + Bulletproofs::fee_weight(protocol.bp_plus(), outputs) +
      (inputs * (Clsag::fee_weight(protocol.ring_len()) + 32))
  }

  pub fn write<W: Write>(&self, w: &mut W, rct_type: RctType) -> io::Result<()> {
    match self {
      Self::Null => Ok(()),
      Self::MlsagBorromean { borromean, mlsags } => {
        write_raw_vec(BorromeanRange::write, borromean, w)?;
        write_raw_vec(Mlsag::write, mlsags, w)
      }
      Self::MlsagBulletproofs { bulletproofs, mlsags, pseudo_outs } => {
        if rct_type == RctType::Bulletproofs {
          w.write_all(&1u32.to_le_bytes())?;
        } else {
          w.write_all(&[1])?;
        }
        bulletproofs.write(w)?;

        write_raw_vec(Mlsag::write, mlsags, w)?;
        write_raw_vec(write_point, pseudo_outs, w)
      }
      Self::Clsag { bulletproofs, clsags, pseudo_outs } => {
        w.write_all(&[1])?;
        bulletproofs.write(w)?;

        write_raw_vec(Clsag::write, clsags, w)?;
        write_raw_vec(write_point, pseudo_outs, w)
      }
    }
  }

  pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized, rct_type).unwrap();
    serialized
  }

  pub fn read<R: Read>(
    rct_type: RctType,
    decoys: &[usize],
    outputs: usize,
    r: &mut R,
  ) -> io::Result<Self> {
    Ok(match rct_type {
      RctType::Null => Self::Null,
      RctType::MlsagAggregate | RctType::MlsagIndividual => Self::MlsagBorromean {
        borromean: read_raw_vec(BorromeanRange::read, outputs, r)?,
        mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
      },
      RctType::Bulletproofs | RctType::BulletproofsCompactAmount => Self::MlsagBulletproofs {
        bulletproofs: {
          if (if rct_type == RctType::Bulletproofs {
            u64::from(read_u32(r)?)
          } else {
            read_varint(r)?
          }) != 1
          {
            Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
          }
          Bulletproofs::read(r)?
        },
        mlsags: decoys.iter().map(|d| Mlsag::read(*d, r)).collect::<Result<_, _>>()?,
        pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
      },
      RctType::Clsag | RctType::BulletproofsPlus => Self::Clsag {
        bulletproofs: {
          if read_varint(r)? != 1 {
            Err(io::Error::new(io::ErrorKind::Other, "n bulletproofs instead of one"))?;
          }
          (if rct_type == RctType::Clsag { Bulletproofs::read } else { Bulletproofs::read_plus })(
            r,
          )?
        },
        clsags: (0 .. decoys.len()).map(|o| Clsag::read(decoys[o], r)).collect::<Result<_, _>>()?,
        pseudo_outs: read_raw_vec(read_point, decoys.len(), r)?,
      },
    })
  }

  pub(crate) fn signature_write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::Null => panic!("Serializing RctPrunable::Null for a signature"),
      Self::MlsagBorromean { borromean, .. } => borromean.iter().try_for_each(|rs| rs.write(w)),
      Self::MlsagBulletproofs { bulletproofs, .. } => bulletproofs.signature_write(w),
      Self::Clsag { bulletproofs, .. } => bulletproofs.signature_write(w),
    }
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RctSignatures {
  pub base: RctBase,
  pub prunable: RctPrunable,
}

impl RctSignatures {
  /// RctType for a given RctSignatures struct.
  pub fn rct_type(&self) -> RctType {
    match &self.prunable {
      RctPrunable::Null => RctType::Null,
      RctPrunable::MlsagBorromean { .. } => {
        /*
          This type of RctPrunable may have no outputs, yet pseudo_outs are per input
          This will only be a valid RctSignatures if it's for a TX with inputs
          That makes this valid for any valid RctSignatures

          While it will be invalid for any invalid RctSignatures, potentially letting an invalid
          MlsagAggregate be interpreted as a valid MlsagIndividual (or vice versa), they have
          incompatible deserializations

          This means it's impossible to receive a MlsagAggregate over the wire and interpret it
          as a MlsagIndividual (or vice versa)

          That only makes manual manipulation unsafe, which will always be true since these fields
          are all pub

          TODO: Consider making them private with read-only accessors?
        */
        if self.base.pseudo_outs.is_empty() {
          RctType::MlsagAggregate
        } else {
          RctType::MlsagIndividual
        }
      }
      // RctBase ensures there's at least one output, making the following
      // inferences guaranteed/expects impossible on any valid RctSignatures
      RctPrunable::MlsagBulletproofs { .. } => {
        if matches!(
          self
            .base
            .encrypted_amounts
            .get(0)
            .expect("MLSAG with Bulletproofs didn't have any outputs"),
          EncryptedAmount::Original { .. }
        ) {
          RctType::Bulletproofs
        } else {
          RctType::BulletproofsCompactAmount
        }
      }
      RctPrunable::Clsag { bulletproofs, .. } => {
        if matches!(bulletproofs, Bulletproofs::Original { .. }) {
          RctType::Clsag
        } else {
          RctType::BulletproofsPlus
        }
      }
    }
  }

  pub(crate) fn fee_weight(protocol: Protocol, inputs: usize, outputs: usize) -> usize {
    RctBase::fee_weight(outputs) + RctPrunable::fee_weight(protocol, inputs, outputs)
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    let rct_type = self.rct_type();
    self.base.write(w, rct_type)?;
    self.prunable.write(w, rct_type)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(decoys: Vec<usize>, outputs: usize, r: &mut R) -> io::Result<Self> {
    let base = RctBase::read(decoys.len(), outputs, r)?;
    Ok(Self { base: base.0, prunable: RctPrunable::read(base.1, &decoys, outputs, r)? })
  }
}

@@ -1,91 +0,0 @@
use async_trait::async_trait;

use digest_auth::AuthContext;
use reqwest::Client;

use crate::rpc::{RpcError, RpcConnection, Rpc};

#[derive(Clone, Debug)]
pub struct HttpRpc {
  client: Client,
  userpass: Option<(String, String)>,
  url: String,
}

impl HttpRpc {
  /// Create a new HTTP(S) RPC connection.
  ///
  /// A daemon requiring authentication can be used by including the username and password in
  /// the URL.
  pub fn new(mut url: String) -> Result<Rpc<Self>, RpcError> {
    // Parse out the username and password
    let userpass = if url.contains('@') {
      let url_clone = url;
      let split_url = url_clone.split('@').collect::<Vec<_>>();
      if split_url.len() != 2 {
        Err(RpcError::InvalidNode)?;
      }
      let mut userpass = split_url[0];
      url = split_url[1].to_string();

      // If there was additionally a protocol string, restore that to the daemon URL
      if userpass.contains("://") {
        let split_userpass = userpass.split("://").collect::<Vec<_>>();
        if split_userpass.len() != 2 {
          Err(RpcError::InvalidNode)?;
        }
        url = split_userpass[0].to_string() + "://" + &url;
        userpass = split_userpass[1];
      }

      let split_userpass = userpass.split(':').collect::<Vec<_>>();
      if split_userpass.len() != 2 {
        Err(RpcError::InvalidNode)?;
      }
      Some((split_userpass[0].to_string(), split_userpass[1].to_string()))
    } else {
      None
    };

    Ok(Rpc(Self { client: Client::new(), userpass, url }))
  }
}
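
// Hedged usage sketch (the URL is hypothetical, not from the original source):
//   let rpc = HttpRpc::new("http://user:pass@127.0.0.1:18081".to_string())?;
// The credentials are split out into `userpass` for digest authentication,
// while the protocol prefix is re-attached to the daemon URL.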

#[async_trait]
impl RpcConnection for HttpRpc {
  async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError> {
    let mut builder = self.client.post(self.url.clone() + "/" + route).body(body);

    if let Some((user, pass)) = &self.userpass {
      let req = self.client.post(&self.url).send().await.map_err(|_| RpcError::InvalidNode)?;
      // Only provide authentication if this daemon actually expects it
      if let Some(header) = req.headers().get("www-authenticate") {
        builder = builder.header(
          "Authorization",
          digest_auth::parse(header.to_str().map_err(|_| RpcError::InvalidNode)?)
            .map_err(|_| RpcError::InvalidNode)?
            .respond(&AuthContext::new_post::<_, _, _, &[u8]>(
              user,
              pass,
              "/".to_string() + route,
              None,
            ))
            .map_err(|_| RpcError::InvalidNode)?
            .to_header_string(),
        );
      }
    }

    Ok(
      builder
        .send()
        .await
        .map_err(|_| RpcError::ConnectionError)?
        .bytes()
        .await
        .map_err(|_| RpcError::ConnectionError)?
        .slice(..)
        .to_vec(),
    )
  }
}

@@ -1,617 +0,0 @@
use core::fmt::Debug;
#[cfg(not(feature = "std"))]
use alloc::boxed::Box;
use std_shims::{
  vec::Vec,
  io,
  string::{String, ToString},
};

use async_trait::async_trait;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use serde::{Serialize, Deserialize, de::DeserializeOwned};
use serde_json::{Value, json};

use crate::{
  Protocol,
  serialize::*,
  transaction::{Input, Timelock, Transaction},
  block::Block,
  wallet::Fee,
};

#[cfg(feature = "http_rpc")]
mod http;
#[cfg(feature = "http_rpc")]
pub use http::*;

#[derive(Deserialize, Debug)]
pub struct EmptyResponse;
#[derive(Deserialize, Debug)]
pub struct JsonRpcResponse<T> {
  result: T,
}

#[derive(Deserialize, Debug)]
struct TransactionResponse {
  tx_hash: String,
  as_hex: String,
  pruned_as_hex: String,
}
#[derive(Deserialize, Debug)]
struct TransactionsResponse {
  #[serde(default)]
  missed_tx: Vec<String>,
  txs: Vec<TransactionResponse>,
}

#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum RpcError {
  #[cfg_attr(feature = "std", error("internal error ({0})"))]
  InternalError(&'static str),
  #[cfg_attr(feature = "std", error("connection error"))]
  ConnectionError,
  #[cfg_attr(feature = "std", error("invalid node"))]
  InvalidNode,
  #[cfg_attr(feature = "std", error("unsupported protocol version ({0})"))]
  UnsupportedProtocol(usize),
  #[cfg_attr(feature = "std", error("transactions not found"))]
  TransactionsNotFound(Vec<[u8; 32]>),
  #[cfg_attr(feature = "std", error("invalid point ({0})"))]
  InvalidPoint(String),
  #[cfg_attr(feature = "std", error("pruned transaction"))]
  PrunedTransaction,
  #[cfg_attr(feature = "std", error("invalid transaction ({0:?})"))]
  InvalidTransaction([u8; 32]),
}

fn rpc_hex(value: &str) -> Result<Vec<u8>, RpcError> {
  hex::decode(value).map_err(|_| RpcError::InvalidNode)
}

fn hash_hex(hash: &str) -> Result<[u8; 32], RpcError> {
  rpc_hex(hash)?.try_into().map_err(|_| RpcError::InvalidNode)
}

fn rpc_point(point: &str) -> Result<EdwardsPoint, RpcError> {
  CompressedEdwardsY(
    rpc_hex(point)?.try_into().map_err(|_| RpcError::InvalidPoint(point.to_string()))?,
  )
  .decompress()
  .ok_or_else(|| RpcError::InvalidPoint(point.to_string()))
}

// Read an EPEE VarInt, distinct from the VarInts used throughout the rest of the protocol
fn read_epee_vi<R: io::Read>(reader: &mut R) -> io::Result<u64> {
  let vi_start = read_byte(reader)?;
  let len = match vi_start & 0b11 {
    0 => 1,
    1 => 2,
    2 => 4,
    3 => 8,
    _ => unreachable!(),
  };
  let mut vi = u64::from(vi_start >> 2);
  for i in 1 .. len {
    vi |= u64::from(read_byte(reader)?) << (((i - 1) * 8) + 6);
  }
  Ok(vi)
}
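
// Illustrative note (not from the original source): the 2 LSBs of the leading
// byte select the total width (0 ..= 3 meaning 1/2/4/8 bytes), with the
// remaining bits, little-endian, carrying the value. For example, [0xB1, 0x04]
// decodes as 0xB1 & 0b11 = 1 (two bytes), (0xB1 >> 2) | (0x04 << 6) = 300.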

#[async_trait]
pub trait RpcConnection: Send + Sync + Clone + Debug {
  /// Perform a POST request to the specified route with the specified body.
  ///
  /// The implementor is left to handle anything such as authentication.
  async fn post(&self, route: &str, body: Vec<u8>) -> Result<Vec<u8>, RpcError>;
}

// TODO: Make this provided methods for RpcConnection?
#[derive(Clone, Debug)]
pub struct Rpc<R: RpcConnection>(R);
impl<R: RpcConnection> Rpc<R> {
  /// Perform an RPC call to the specified route with the provided parameters.
  ///
  /// This is NOT a JSON-RPC call. They use a route of "json_rpc" and are available via
  /// `json_rpc_call`.
  pub async fn rpc_call<Params: Send + Serialize + Debug, Response: DeserializeOwned + Debug>(
    &self,
    route: &str,
    params: Option<Params>,
  ) -> Result<Response, RpcError> {
    serde_json::from_str(
      std_shims::str::from_utf8(
        &self
          .0
          .post(
            route,
            if let Some(params) = params {
              serde_json::to_string(&params).unwrap().into_bytes()
            } else {
              vec![]
            },
          )
          .await?,
      )
      .map_err(|_| RpcError::InvalidNode)?,
    )
    .map_err(|_| RpcError::InvalidNode)
  }

  /// Perform a JSON-RPC call to the specified method with the provided parameters.
  pub async fn json_rpc_call<Response: DeserializeOwned + Debug>(
    &self,
    method: &str,
    params: Option<Value>,
  ) -> Result<Response, RpcError> {
    let mut req = json!({ "method": method });
    if let Some(params) = params {
      req.as_object_mut().unwrap().insert("params".into(), params);
    }
    Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
  }
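
  // Hedged usage sketch (not from the original source; `BlockResponse` is a
  // caller-defined struct, as in get_block below):
  //   let res: BlockResponse =
  //     rpc.json_rpc_call("get_block", Some(json!({ "height": 0 }))).await?;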

  /// Perform a binary call to the specified route with the provided parameters.
  pub async fn bin_call(&self, route: &str, params: Vec<u8>) -> Result<Vec<u8>, RpcError> {
    self.0.post(route, params).await
  }

  /// Get the active blockchain protocol version.
  pub async fn get_protocol(&self) -> Result<Protocol, RpcError> {
    #[derive(Deserialize, Debug)]
    struct ProtocolResponse {
      major_version: usize,
    }

    #[derive(Deserialize, Debug)]
    struct LastHeaderResponse {
      block_header: ProtocolResponse,
    }

    Ok(
      match self
        .json_rpc_call::<LastHeaderResponse>("get_last_block_header", None)
        .await?
        .block_header
        .major_version
      {
        13 | 14 => Protocol::v14,
        15 | 16 => Protocol::v16,
        protocol => Err(RpcError::UnsupportedProtocol(protocol))?,
      },
    )
  }

  pub async fn get_height(&self) -> Result<usize, RpcError> {
    #[derive(Deserialize, Debug)]
    struct HeightResponse {
      height: usize,
    }
    Ok(self.rpc_call::<Option<()>, HeightResponse>("get_height", None).await?.height)
  }

  pub async fn get_transactions(&self, hashes: &[[u8; 32]]) -> Result<Vec<Transaction>, RpcError> {
    if hashes.is_empty() {
      return Ok(vec![]);
    }

    let mut hashes_hex = hashes.iter().map(hex::encode).collect::<Vec<_>>();
    let mut all_txs = Vec::with_capacity(hashes.len());
    while !hashes_hex.is_empty() {
      // Monero errors if more than 100 transactions are requested, unless using a non-restricted
      // RPC
      const TXS_PER_REQUEST: usize = 100;
      let this_count = TXS_PER_REQUEST.min(hashes_hex.len());

      let txs: TransactionsResponse = self
        .rpc_call(
          "get_transactions",
          Some(json!({
            "txs_hashes": hashes_hex.drain(.. this_count).collect::<Vec<_>>(),
          })),
        )
        .await?;

      if !txs.missed_tx.is_empty() {
        Err(RpcError::TransactionsNotFound(
          txs.missed_tx.iter().map(|hash| hash_hex(hash)).collect::<Result<_, _>>()?,
        ))?;
      }

      all_txs.extend(txs.txs);
    }

    all_txs
      .iter()
      .enumerate()
      .map(|(i, res)| {
        let tx = Transaction::read::<&[u8]>(
          &mut rpc_hex(if !res.as_hex.is_empty() { &res.as_hex } else { &res.pruned_as_hex })?
            .as_ref(),
        )
        .map_err(|_| match hash_hex(&res.tx_hash) {
          Ok(hash) => RpcError::InvalidTransaction(hash),
          Err(err) => err,
        })?;

        // https://github.com/monero-project/monero/issues/8311
        if res.as_hex.is_empty() {
          match tx.prefix.inputs.get(0) {
            Some(Input::Gen { .. }) => (),
            _ => Err(RpcError::PrunedTransaction)?,
          }
        }

        // This does run a few keccak256 hashes, which is pointless if the node is trusted
        // In exchange, this provides resilience against invalid/malicious nodes
        if tx.hash() != hashes[i] {
          Err(RpcError::InvalidNode)?;
        }

        Ok(tx)
      })
      .collect()
  }

  pub async fn get_transaction(&self, tx: [u8; 32]) -> Result<Transaction, RpcError> {
    self.get_transactions(&[tx]).await.map(|mut txs| txs.swap_remove(0))
  }

  /// Get the hash of a block from the node by the block's number.
  /// This function does not verify the returned block hash is actually for the number in question.
  pub async fn get_block_hash(&self, number: usize) -> Result<[u8; 32], RpcError> {
    #[derive(Deserialize, Debug)]
    struct BlockHeaderResponse {
      hash: String,
    }
    #[derive(Deserialize, Debug)]
    struct BlockHeaderByHeightResponse {
      block_header: BlockHeaderResponse,
    }

    let header: BlockHeaderByHeightResponse =
      self.json_rpc_call("get_block_header_by_height", Some(json!({ "height": number }))).await?;
    rpc_hex(&header.block_header.hash)?.try_into().map_err(|_| RpcError::InvalidNode)
  }

  /// Get a block from the node by its hash.
  /// This function validates the returned block actually has the hash in question.
  pub async fn get_block(&self, hash: [u8; 32]) -> Result<Block, RpcError> {
    #[derive(Deserialize, Debug)]
    struct BlockResponse {
      blob: String,
    }

    let res: BlockResponse =
      self.json_rpc_call("get_block", Some(json!({ "hash": hex::encode(hash) }))).await?;

    let block =
      Block::read::<&[u8]>(&mut rpc_hex(&res.blob)?.as_ref()).map_err(|_| RpcError::InvalidNode)?;
    if block.hash() != hash {
      Err(RpcError::InvalidNode)?;
    }
    Ok(block)
  }

  pub async fn get_block_by_number(&self, number: usize) -> Result<Block, RpcError> {
    match self.get_block(self.get_block_hash(number).await?).await {
      Ok(block) => {
        // Make sure this is actually the block for this number
        match block.miner_tx.prefix.inputs.get(0) {
          Some(Input::Gen(actual)) => {
            if usize::try_from(*actual).unwrap() == number {
              Ok(block)
            } else {
              Err(RpcError::InvalidNode)
            }
          }
          Some(Input::ToKey { .. }) | None => Err(RpcError::InvalidNode),
        }
      }
      e => e,
    }
  }

  pub async fn get_block_transactions(&self, hash: [u8; 32]) -> Result<Vec<Transaction>, RpcError> {
    let block = self.get_block(hash).await?;
    let mut res = vec![block.miner_tx];
    res.extend(self.get_transactions(&block.txs).await?);
    Ok(res)
  }

  pub async fn get_block_transactions_by_number(
    &self,
    number: usize,
  ) -> Result<Vec<Transaction>, RpcError> {
    self.get_block_transactions(self.get_block_hash(number).await?).await
  }

  /// Get the output indexes of the specified transaction.
  pub async fn get_o_indexes(&self, hash: [u8; 32]) -> Result<Vec<u64>, RpcError> {
    /*
      TODO: Use these when a suitable epee serde lib exists

      #[derive(Serialize, Debug)]
      struct Request {
        txid: [u8; 32],
      }

      #[allow(dead_code)]
      #[derive(Deserialize, Debug)]
      struct OIndexes {
        o_indexes: Vec<u64>,
      }
    */

    // Given the immaturity of Rust epee libraries, this is a homegrown one which is only validated
    // to work against this specific function

    // Header for EPEE, an 8-byte magic and a version
    const EPEE_HEADER: &[u8] = b"\x01\x11\x01\x01\x01\x01\x02\x01\x01";

    let mut request = EPEE_HEADER.to_vec();
    // Number of fields (shifted over 2 bits as the 2 LSBs are reserved for metadata)
    request.push(1 << 2);
    // Length of field name
    request.push(4);
    // Field name
    request.extend(b"txid");
    // Type of field
    request.push(10);
    // Length of string, since this byte array is technically a string
    request.push(32 << 2);
    // The "string"
    request.extend(hash);

    let indexes_buf = self.bin_call("get_o_indexes.bin", request).await?;
    let mut indexes: &[u8] = indexes_buf.as_ref();

    (|| {
      if read_bytes::<_, { EPEE_HEADER.len() }>(&mut indexes)? != EPEE_HEADER {
        Err(io::Error::new(io::ErrorKind::Other, "invalid header"))?;
      }

      let read_object = |reader: &mut &[u8]| {
        let fields = read_byte(reader)? >> 2;

        for _ in 0 .. fields {
          let name_len = read_byte(reader)?;
          let name = read_raw_vec(read_byte, name_len.into(), reader)?;

          let type_with_array_flag = read_byte(reader)?;
          let kind = type_with_array_flag & (!0x80);

          let iters = if type_with_array_flag != kind { read_epee_vi(reader)? } else { 1 };

          if (&name == b"o_indexes") && (kind != 5) {
            Err(io::Error::new(io::ErrorKind::Other, "o_indexes weren't u64s"))?;
          }

          let f = match kind {
            // i64
            1 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
            // i32
            2 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
            // i16
            3 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
            // i8
            4 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
            // u64
            5 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
            // u32
            6 => |reader: &mut &[u8]| read_raw_vec(read_byte, 4, reader),
            // u16
            7 => |reader: &mut &[u8]| read_raw_vec(read_byte, 2, reader),
            // u8
            8 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
            // double
            9 => |reader: &mut &[u8]| read_raw_vec(read_byte, 8, reader),
            // string, or any collection of bytes
            10 => |reader: &mut &[u8]| {
              let len = read_epee_vi(reader)?;
              read_raw_vec(
                read_byte,
                len
                  .try_into()
                  .map_err(|_| io::Error::new(io::ErrorKind::Other, "u64 length exceeded usize"))?,
                reader,
              )
            },
            // bool
            11 => |reader: &mut &[u8]| read_raw_vec(read_byte, 1, reader),
            // object, errors here as it shouldn't be used on this call
            12 => |_: &mut &[u8]| {
              Err(io::Error::new(
                io::ErrorKind::Other,
                "node used object in reply to get_o_indexes",
              ))
            },
            // array, so far unused
            13 => |_: &mut &[u8]| {
              Err(io::Error::new(io::ErrorKind::Other, "node used the unused array type"))
            },
            _ => {
              |_: &mut &[u8]| Err(io::Error::new(io::ErrorKind::Other, "node used an invalid type"))
            }
          };

          let mut res = vec![];
          for _ in 0 .. iters {
            res.push(f(reader)?);
          }

          let mut actual_res = Vec::with_capacity(res.len());
          if &name == b"o_indexes" {
            for o_index in res {
              actual_res.push(u64::from_le_bytes(o_index.try_into().map_err(|_| {
                io::Error::new(io::ErrorKind::Other, "node didn't provide 8 bytes for a u64")
              })?));
            }
            return Ok(actual_res);
          }
        }

        // Didn't return a response with o_indexes
        // TODO: Check if this didn't have o_indexes because it's an error response
        Err(io::Error::new(io::ErrorKind::Other, "response didn't contain o_indexes"))
      };

      read_object(&mut indexes)
    })()
    .map_err(|_| RpcError::InvalidNode)
  }

  /// Get the output distribution, from the specified start height to the specified end height
  /// (both inclusive).
  pub async fn get_output_distribution(
    &self,
    from: usize,
    to: usize,
  ) -> Result<Vec<u64>, RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct Distribution {
      distribution: Vec<u64>,
    }

    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct Distributions {
      distributions: Vec<Distribution>,
    }

    let mut distributions: Distributions = self
      .json_rpc_call(
        "get_output_distribution",
        Some(json!({
          "binary": false,
          "amounts": [0],
          "cumulative": true,
          "from_height": from,
          "to_height": to,
        })),
      )
      .await?;

    Ok(distributions.distributions.swap_remove(0).distribution)
  }

  /// Get the specified outputs from the RingCT (zero-amount) pool, but only return them if their
  /// timelock has been satisfied. This is distinct from being free of the 10-block lock applied to
  /// all Monero transactions.
  pub async fn get_unlocked_outputs(
    &self,
    indexes: &[u64],
    height: usize,
  ) -> Result<Vec<Option<[EdwardsPoint; 2]>>, RpcError> {
    #[derive(Deserialize, Debug)]
    struct Out {
      key: String,
      mask: String,
      txid: String,
    }

    #[derive(Deserialize, Debug)]
    struct Outs {
      outs: Vec<Out>,
    }

    let outs: Outs = self
      .rpc_call(
        "get_outs",
        Some(json!({
          "get_txid": true,
          "outputs": indexes.iter().map(|o| json!({
            "amount": 0,
            "index": o
          })).collect::<Vec<_>>()
        })),
      )
      .await?;

    let txs = self
      .get_transactions(
        &outs
          .outs
          .iter()
          .map(|out| rpc_hex(&out.txid)?.try_into().map_err(|_| RpcError::InvalidNode))
          .collect::<Result<Vec<_>, _>>()?,
      )
      .await?;

    // TODO: https://github.com/serai-dex/serai/issues/104
    outs
      .outs
      .iter()
      .enumerate()
      .map(|(i, out)| {
        Ok(
          Some([rpc_point(&out.key)?, rpc_point(&out.mask)?])
            .filter(|_| Timelock::Block(height) >= txs[i].prefix.timelock),
        )
      })
      .collect()
  }

  /// Get the currently estimated fee from the node. This may be manipulated to unsafe levels and
  /// MUST be sanity checked.
  // TODO: Take a sanity check argument
  pub async fn get_fee(&self) -> Result<Fee, RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct FeeResponse {
      fee: u64,
      quantization_mask: u64,
    }

    let res: FeeResponse = self.json_rpc_call("get_fee_estimate", None).await?;
    Ok(Fee { per_weight: res.fee, mask: res.quantization_mask })
  }

  pub async fn publish_transaction(&self, tx: &Transaction) -> Result<(), RpcError> {
    #[allow(dead_code)]
    #[derive(Deserialize, Debug)]
    struct SendRawResponse {
      status: String,
      double_spend: bool,
      fee_too_low: bool,
      invalid_input: bool,
      invalid_output: bool,
      low_mixin: bool,
      not_relayed: bool,
      overspend: bool,
      too_big: bool,
      too_few_outputs: bool,
      reason: String,
    }

    let res: SendRawResponse = self
      .rpc_call("send_raw_transaction", Some(json!({ "tx_as_hex": hex::encode(tx.serialize()) })))
      .await?;

    if res.status != "OK" {
      Err(RpcError::InvalidTransaction(tx.hash()))?;
    }

    Ok(())
  }

  pub async fn generate_blocks(&self, address: &str, block_count: usize) -> Result<(), RpcError> {
    self
      .rpc_call::<_, EmptyResponse>(
        "json_rpc",
        Some(json!({
          "method": "generateblocks",
          "params": {
            "wallet_address": address,
            "amount_of_blocks": block_count
          },
        })),
      )
      .await?;

    Ok(())
  }
}

@@ -1,156 +0,0 @@
use core::fmt::Debug;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use curve25519_dalek::{
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

pub(crate) fn varint_len(varint: usize) -> usize {
  ((usize::try_from(usize::BITS - varint.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
}
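
// Illustrative note (not from the original source): a value of bit-length n
// takes ceil(n / 7) bytes, e.g. varint_len(300) = ((9 - 1) / 7) + 1 = 2, while
// 0 (bit-length 0) still takes a single byte.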

pub(crate) fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
  w.write_all(&[*byte])
}

pub(crate) fn write_varint<W: Write>(varint: &u64, w: &mut W) -> io::Result<()> {
  let mut varint = *varint;
  while {
    let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
    varint >>= 7;
    if varint != 0 {
      b |= VARINT_CONTINUATION_MASK;
    }
    write_byte(&b, w)?;
    varint != 0
  } {}
  Ok(())
}

pub(crate) fn write_scalar<W: Write>(scalar: &Scalar, w: &mut W) -> io::Result<()> {
  w.write_all(&scalar.to_bytes())
}

pub(crate) fn write_point<W: Write>(point: &EdwardsPoint, w: &mut W) -> io::Result<()> {
  w.write_all(&point.compress().to_bytes())
}

pub(crate) fn write_raw_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
  f: F,
  values: &[T],
  w: &mut W,
) -> io::Result<()> {
  for value in values {
    f(value, w)?;
  }
  Ok(())
}

pub(crate) fn write_vec<T, W: Write, F: Fn(&T, &mut W) -> io::Result<()>>(
  f: F,
  values: &[T],
  w: &mut W,
) -> io::Result<()> {
  write_varint(&values.len().try_into().unwrap(), w)?;
  write_raw_vec(f, values, w)
}

pub(crate) fn read_bytes<R: Read, const N: usize>(r: &mut R) -> io::Result<[u8; N]> {
  let mut res = [0; N];
  r.read_exact(&mut res)?;
  Ok(res)
}

pub(crate) fn read_byte<R: Read>(r: &mut R) -> io::Result<u8> {
  Ok(read_bytes::<_, 1>(r)?[0])
}

pub(crate) fn read_u16<R: Read>(r: &mut R) -> io::Result<u16> {
  read_bytes(r).map(u16::from_le_bytes)
}

pub(crate) fn read_u32<R: Read>(r: &mut R) -> io::Result<u32> {
  read_bytes(r).map(u32::from_le_bytes)
}

pub(crate) fn read_u64<R: Read>(r: &mut R) -> io::Result<u64> {
  read_bytes(r).map(u64::from_le_bytes)
}

pub(crate) fn read_varint<R: Read>(r: &mut R) -> io::Result<u64> {
  let mut bits = 0;
  let mut res = 0;
  while {
    let b = read_byte(r)?;
    if (bits != 0) && (b == 0) {
      Err(io::Error::new(io::ErrorKind::Other, "non-canonical varint"))?;
    }
    if ((bits + 7) > 64) && (b >= (1 << (64 - bits))) {
      Err(io::Error::new(io::ErrorKind::Other, "varint overflow"))?;
    }

    res += u64::from(b & (!VARINT_CONTINUATION_MASK)) << bits;
    bits += 7;
    b & VARINT_CONTINUATION_MASK == VARINT_CONTINUATION_MASK
  } {}
  Ok(res)
}
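
// Hedged sketch (not from the original source): write_varint and read_varint
// are inverses, with 7 value bits per byte and the MSB as a continuation flag,
// so 127 fits in one byte while 128 takes two.
#[cfg(test)]
#[test]
fn varint_round_trip_sketch() {
  for value in [0u64, 1, 127, 128, 300, u64::MAX] {
    let mut buf = Vec::new();
    write_varint(&value, &mut buf).unwrap();
    assert_eq!(read_varint(&mut buf.as_slice()).unwrap(), value);
  }
}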

// All scalar fields supported by monero-serai are checked to be canonical for valid transactions
// While from_bytes_mod_order would be more flexible, it's not currently needed and would be
// inaccurate to include now. While casting a wide net may be preferable, it'd also be inaccurate
// for now. There's also further edge cases as noted by
// https://github.com/monero-project/monero/issues/8438, where some scalars had an archaic
// reduction applied
pub(crate) fn read_scalar<R: Read>(r: &mut R) -> io::Result<Scalar> {
  Scalar::from_canonical_bytes(read_bytes(r)?)
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "unreduced scalar"))
}

pub(crate) fn read_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
  let bytes = read_bytes(r)?;
  CompressedEdwardsY(bytes)
    .decompress()
    // Ban points which are either unreduced or -0
    .filter(|point| point.compress().to_bytes() == bytes)
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
}

pub(crate) fn read_torsion_free_point<R: Read>(r: &mut R) -> io::Result<EdwardsPoint> {
  read_point(r)
    .ok()
    .filter(EdwardsPoint::is_torsion_free)
    .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid point"))
}

pub(crate) fn read_raw_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
  f: F,
  len: usize,
  r: &mut R,
) -> io::Result<Vec<T>> {
  let mut res = vec![];
  for _ in 0 .. len {
    res.push(f(r)?);
  }
  Ok(res)
}

pub(crate) fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: usize>(
  f: F,
  r: &mut R,
) -> io::Result<[T; N]> {
  read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
}

pub(crate) fn read_vec<R: Read, T, F: Fn(&mut R) -> io::Result<T>>(
  f: F,
  r: &mut R,
) -> io::Result<Vec<T>> {
  read_raw_vec(f, read_varint(r)?.try_into().unwrap(), r)
}

@@ -1,92 +0,0 @@
use hex_literal::hex;
use rand_core::OsRng;

use curve25519_dalek::{scalar::Scalar, edwards::CompressedEdwardsY};
use multiexp::BatchVerifier;

use crate::{
  Commitment, random_scalar,
  ringct::bulletproofs::{Bulletproofs, original::OriginalStruct},
};

#[test]
fn bulletproofs_vector() {
  let scalar = |scalar| Scalar::from_canonical_bytes(scalar).unwrap();
  let point = |point| CompressedEdwardsY(point).decompress().unwrap();

  // Generated from Monero
  assert!(Bulletproofs::Original(OriginalStruct {
    A: point(hex!("ef32c0b9551b804decdcb107eb22aa715b7ce259bf3c5cac20e24dfa6b28ac71")),
    S: point(hex!("e1285960861783574ee2b689ae53622834eb0b035d6943103f960cd23e063fa0")),
    T1: point(hex!("4ea07735f184ba159d0e0eb662bac8cde3eb7d39f31e567b0fbda3aa23fe5620")),
    T2: point(hex!("b8390aa4b60b255630d40e592f55ec6b7ab5e3a96bfcdcd6f1cd1d2fc95f441e")),
    taux: scalar(hex!("5957dba8ea9afb23d6e81cc048a92f2d502c10c749dc1b2bd148ae8d41ec7107")),
    mu: scalar(hex!("923023b234c2e64774b820b4961f7181f6c1dc152c438643e5a25b0bf271bc02")),
    L: vec![
      point(hex!("c45f656316b9ebf9d357fb6a9f85b5f09e0b991dd50a6e0ae9b02de3946c9d99")),
      point(hex!("9304d2bf0f27183a2acc58cc755a0348da11bd345485fda41b872fee89e72aac")),
      point(hex!("1bb8b71925d155dd9569f64129ea049d6149fdc4e7a42a86d9478801d922129b")),
      point(hex!("5756a7bf887aa72b9a952f92f47182122e7b19d89e5dd434c747492b00e1c6b7")),
      point(hex!("6e497c910d102592830555356af5ff8340e8d141e3fb60ea24cfa587e964f07d")),
      point(hex!("f4fa3898e7b08e039183d444f3d55040f3c790ed806cb314de49f3068bdbb218")),
      point(hex!("0bbc37597c3ead517a3841e159c8b7b79a5ceaee24b2a9a20350127aab428713")),
    ],
    R: vec![
      point(hex!("609420ba1702781692e84accfd225adb3d077aedc3cf8125563400466b52dbd9")),
      point(hex!("fb4e1d079e7a2b0ec14f7e2a3943bf50b6d60bc346a54fcf562fb234b342abf8")),
      point(hex!("6ae3ac97289c48ce95b9c557289e82a34932055f7f5e32720139824fe81b12e5")),
      point(hex!("d071cc2ffbdab2d840326ad15f68c01da6482271cae3cf644670d1632f29a15c")),
      point(hex!("e52a1754b95e1060589ba7ce0c43d0060820ebfc0d49dc52884bc3c65ad18af5")),
      point(hex!("41573b06140108539957df71aceb4b1816d2409ce896659aa5c86f037ca5e851")),
      point(hex!("a65970b2cc3c7b08b2b5b739dbc8e71e646783c41c625e2a5b1535e3d2e0f742")),
    ],
    a: scalar(hex!("0077c5383dea44d3cd1bc74849376bd60679612dc4b945255822457fa0c0a209")),
    b: scalar(hex!("fe80cf5756473482581e1d38644007793ddc66fdeb9404ec1689a907e4863302")),
    t: scalar(hex!("40dfb08e09249040df997851db311bd6827c26e87d6f0f332c55be8eef10e603"))
  })
  .verify(
    &mut OsRng,
    &[
      // For some reason, these vectors are * INV_EIGHT
      point(hex!("8e8f23f315edae4f6c2f948d9a861e0ae32d356b933cd11d2f0e031ac744c41f"))
        .mul_by_cofactor(),
      point(hex!("2829cbd025aa54cd6e1b59a032564f22f0b2e5627f7f2c4297f90da438b5510f"))
        .mul_by_cofactor(),
    ]
  ));
}
|
|
||||||
|
|
||||||
macro_rules! bulletproofs_tests {
|
|
||||||
($name: ident, $max: ident, $plus: literal) => {
|
|
||||||
#[test]
|
|
||||||
fn $name() {
|
|
||||||
// Create Bulletproofs for all possible output quantities
|
|
||||||
let mut verifier = BatchVerifier::new(16);
|
|
||||||
for i in 1 .. 17 {
|
|
||||||
let commitments = (1 ..= i)
|
|
||||||
.map(|i| Commitment::new(random_scalar(&mut OsRng), u64::try_from(i).unwrap()))
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let bp = Bulletproofs::prove(&mut OsRng, &commitments, $plus).unwrap();
|
|
||||||
|
|
||||||
let commitments = commitments.iter().map(Commitment::calculate).collect::<Vec<_>>();
|
|
||||||
assert!(bp.verify(&mut OsRng, &commitments));
|
|
||||||
assert!(bp.batch_verify(&mut OsRng, &mut verifier, i, &commitments));
|
|
||||||
}
|
|
||||||
assert!(verifier.verify_vartime());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn $max() {
|
|
||||||
// Check Bulletproofs errors if we try to prove for too many outputs
|
|
||||||
let mut commitments = vec![];
|
|
||||||
for _ in 0 .. 17 {
|
|
||||||
commitments.push(Commitment::new(Scalar::zero(), 0));
|
|
||||||
}
|
|
||||||
assert!(Bulletproofs::prove(&mut OsRng, &commitments, $plus).is_err());
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
bulletproofs_tests!(bulletproofs, bulletproofs_max, false);
|
|
||||||
bulletproofs_tests!(bulletproofs_plus, bulletproofs_plus_max, true);
|
|
||||||
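// The INV_EIGHT vectors above store commitments pre-multiplied by 8^-1, undone via
// mul_by_cofactor. A sketch (hypothetical test, not from the original file) of the relation
// being relied on, using only curve25519-dalek:
#[test]
fn inv_eight_sketch() {
  use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;

  let p = ED25519_BASEPOINT_POINT;
  // Multiplying by 8^-1, then clearing the cofactor (multiplying by 8), is the identity on
  // prime-order points such as the basepoint
  assert_eq!((p * Scalar::from(8u8).invert()).mul_by_cofactor(), p);
}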
@@ -1,127 +0,0 @@
use core::ops::Deref;
#[cfg(feature = "multisig")]
use std_shims::sync::Arc;
#[cfg(feature = "multisig")]
use std::sync::RwLock;

use zeroize::Zeroizing;
use rand_core::{RngCore, OsRng};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

#[cfg(feature = "multisig")]
use transcript::{Transcript, RecommendedTranscript};
#[cfg(feature = "multisig")]
use frost::curve::Ed25519;

use crate::{
  Commitment, random_scalar,
  wallet::Decoys,
  ringct::{
    generate_key_image,
    clsag::{ClsagInput, Clsag},
  },
};
#[cfg(feature = "multisig")]
use crate::ringct::clsag::{ClsagDetails, ClsagMultisig};

#[cfg(feature = "multisig")]
use frost::{
  Participant,
  tests::{key_gen, algorithm_machines, sign},
};

const RING_LEN: u64 = 11;
const AMOUNT: u64 = 1337;

#[cfg(feature = "multisig")]
const RING_INDEX: u8 = 3;

#[test]
fn clsag() {
  for real in 0 .. RING_LEN {
    let msg = [1; 32];

    let mut secrets = (Zeroizing::new(Scalar::zero()), Scalar::zero());
    let mut ring = vec![];
    for i in 0 .. RING_LEN {
      let dest = Zeroizing::new(random_scalar(&mut OsRng));
      let mask = random_scalar(&mut OsRng);
      let amount = if i == real {
        secrets = (dest.clone(), mask);
        AMOUNT
      } else {
        OsRng.next_u64()
      };
      ring
        .push([dest.deref() * &ED25519_BASEPOINT_TABLE, Commitment::new(mask, amount).calculate()]);
    }

    let image = generate_key_image(&secrets.0);
    let (clsag, pseudo_out) = Clsag::sign(
      &mut OsRng,
      vec![(
        secrets.0,
        image,
        ClsagInput::new(
          Commitment::new(secrets.1, AMOUNT),
          Decoys {
            i: u8::try_from(real).unwrap(),
            offsets: (1 ..= RING_LEN).collect(),
            ring: ring.clone(),
          },
        )
        .unwrap(),
      )],
      random_scalar(&mut OsRng),
      msg,
    )
    .swap_remove(0);
    clsag.verify(&ring, &image, &pseudo_out, &msg).unwrap();
  }
}

#[cfg(feature = "multisig")]
#[test]
fn clsag_multisig() {
  let keys = key_gen::<_, Ed25519>(&mut OsRng);

  let randomness = random_scalar(&mut OsRng);
  let mut ring = vec![];
  for i in 0 .. RING_LEN {
    let dest;
    let mask;
    let amount = if i == u64::from(RING_INDEX) {
      dest = keys[&Participant::new(1).unwrap()].group_key().0;
      mask = randomness;
      AMOUNT
    } else {
      dest = &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE;
      mask = random_scalar(&mut OsRng);
      OsRng.next_u64()
    };
    ring.push([dest, Commitment::new(mask, amount).calculate()]);
  }

  let mask_sum = random_scalar(&mut OsRng);
  let algorithm = ClsagMultisig::new(
    RecommendedTranscript::new(b"Monero Serai CLSAG Test"),
    keys[&Participant::new(1).unwrap()].group_key().0,
    Arc::new(RwLock::new(Some(ClsagDetails::new(
      ClsagInput::new(
        Commitment::new(randomness, AMOUNT),
        Decoys { i: RING_INDEX, offsets: (1 ..= RING_LEN).collect(), ring: ring.clone() },
      )
      .unwrap(),
      mask_sum,
    )))),
  );

  sign(
    &mut OsRng,
    algorithm.clone(),
    keys.clone(),
    algorithm_machines(&mut OsRng, algorithm, &keys),
    &[1; 32],
  );
}
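// A small sketch (not from the original file) of the property the tests above rely on:
// generate_key_image is deterministic per spend key, so re-deriving it for the same key
// must match, while distinct keys yield distinct images:
#[test]
fn key_image_determinism_sketch() {
  let a = Zeroizing::new(random_scalar(&mut OsRng));
  let b = Zeroizing::new(random_scalar(&mut OsRng));
  assert_eq!(generate_key_image(&a), generate_key_image(&a));
  assert!(generate_key_image(&a) != generate_key_image(&b));
}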
@@ -1,4 +0,0 @@
mod clsag;
mod bulletproofs;
mod address;
mod seed;
@@ -1,378 +0,0 @@
use core::cmp::Ordering;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use zeroize::Zeroize;

use curve25519_dalek::{
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

use crate::{
  Protocol, hash,
  serialize::*,
  ringct::{RctBase, RctPrunable, RctSignatures},
};

#[derive(Clone, PartialEq, Eq, Debug)]
pub enum Input {
  Gen(u64),
  ToKey { amount: Option<u64>, key_offsets: Vec<u64>, key_image: EdwardsPoint },
}

impl Input {
  // Worst-case predictive len
  pub(crate) fn fee_weight(ring_len: usize) -> usize {
    // Uses 1 byte for the VarInt amount due to amount being 0
    // Uses 1 byte for the VarInt encoding of the length of the ring as well
    1 + 1 + 1 + (8 * ring_len) + 32
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::Gen(height) => {
        w.write_all(&[255])?;
        write_varint(height, w)
      }

      Self::ToKey { amount, key_offsets, key_image } => {
        w.write_all(&[2])?;
        write_varint(&amount.unwrap_or(0), w)?;
        write_vec(write_varint, key_offsets, w)?;
        write_point(key_image, w)
      }
    }
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    self.write(&mut res).unwrap();
    res
  }

  pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
    Ok(match read_byte(r)? {
      255 => Self::Gen(read_varint(r)?),
      2 => {
        let amount = read_varint(r)?;
        let amount = if (amount == 0) && interpret_as_rct { None } else { Some(amount) };
        Self::ToKey {
          amount,
          key_offsets: read_vec(read_varint, r)?,
          key_image: read_torsion_free_point(r)?,
        }
      }
      _ => {
        Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown/unused input type"))?
      }
    })
  }
}
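// A hypothetical bound check (not in the original file): fee_weight should upper-bound an
// actual serialization, as its 8-bytes-per-offset term covers any VarInt below 2^56:
#[cfg(test)]
#[test]
fn input_fee_weight_sketch() {
  use curve25519_dalek::constants::ED25519_BASEPOINT_POINT;

  let ring_len = 11;
  let input = Input::ToKey {
    amount: None,
    key_offsets: (1 ..= u64::try_from(ring_len).unwrap()).collect(),
    key_image: ED25519_BASEPOINT_POINT,
  };
  assert!(input.serialize().len() <= Input::fee_weight(ring_len));
}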

// Doesn't bother moving to an enum for the unused Script classes
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Output {
  pub amount: Option<u64>,
  pub key: CompressedEdwardsY,
  pub view_tag: Option<u8>,
}

impl Output {
  pub(crate) fn fee_weight() -> usize {
    1 + 1 + 32 + 1
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.amount.unwrap_or(0), w)?;
    w.write_all(&[2 + u8::from(self.view_tag.is_some())])?;
    w.write_all(&self.key.to_bytes())?;
    if let Some(view_tag) = self.view_tag {
      w.write_all(&[view_tag])?;
    }
    Ok(())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut res = Vec::with_capacity(8 + 1 + 32);
    self.write(&mut res).unwrap();
    res
  }

  pub fn read<R: Read>(interpret_as_rct: bool, r: &mut R) -> io::Result<Self> {
    let amount = read_varint(r)?;
    let amount = if interpret_as_rct {
      if amount != 0 {
        Err(io::Error::new(io::ErrorKind::Other, "RCT TX output wasn't 0"))?;
      }
      None
    } else {
      Some(amount)
    };

    let view_tag = match read_byte(r)? {
      2 => false,
      3 => true,
      _ => Err(io::Error::new(
        io::ErrorKind::Other,
        "Tried to deserialize unknown/unused output type",
      ))?,
    };

    Ok(Self {
      amount,
      key: CompressedEdwardsY(read_bytes(r)?),
      view_tag: if view_tag { Some(read_byte(r)?) } else { None },
    })
  }
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Timelock {
  None,
  Block(usize),
  Time(u64),
}

impl Timelock {
  fn from_raw(raw: u64) -> Self {
    if raw == 0 {
      Self::None
    } else if raw < 500_000_000 {
      Self::Block(usize::try_from(raw).unwrap())
    } else {
      Self::Time(raw)
    }
  }

  fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(
      &match self {
        Self::None => 0,
        Self::Block(block) => (*block).try_into().unwrap(),
        Self::Time(time) => *time,
      },
      w,
    )
  }
}

impl PartialOrd for Timelock {
  fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
    match (self, other) {
      (Self::None, _) => Some(Ordering::Less),
      (Self::Block(a), Self::Block(b)) => a.partial_cmp(b),
      (Self::Time(a), Self::Time(b)) => a.partial_cmp(b),
      _ => None,
    }
  }
}
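// A sketch (hypothetical test, not from the original file) of the two behaviors above:
// 500_000_000 is Monero's cutoff between block heights and UNIX timestamps, and the
// PartialOrd deems the two kinds incomparable:
#[cfg(test)]
#[test]
fn timelock_sketch() {
  assert_eq!(Timelock::from_raw(0), Timelock::None);
  assert_eq!(Timelock::from_raw(499_999_999), Timelock::Block(499_999_999));
  assert_eq!(Timelock::from_raw(500_000_000), Timelock::Time(500_000_000));
  // A height and a timestamp can't be ordered against each other
  assert_eq!(Timelock::Block(1).partial_cmp(&Timelock::Time(1)), None);
}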

#[derive(Clone, PartialEq, Eq, Debug)]
pub struct TransactionPrefix {
  pub version: u64,
  pub timelock: Timelock,
  pub inputs: Vec<Input>,
  pub outputs: Vec<Output>,
  pub extra: Vec<u8>,
}

impl TransactionPrefix {
  pub(crate) fn fee_weight(ring_len: usize, inputs: usize, outputs: usize, extra: usize) -> usize {
    // Assumes Timelock::None since this library won't let you create a TX with a timelock
    1 + 1 +
      varint_len(inputs) +
      (inputs * Input::fee_weight(ring_len)) +
      1 +
      (outputs * Output::fee_weight()) +
      varint_len(extra) +
      extra
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    write_varint(&self.version, w)?;
    self.timelock.write(w)?;
    write_vec(Input::write, &self.inputs, w)?;
    write_vec(Output::write, &self.outputs, w)?;
    write_varint(&self.extra.len().try_into().unwrap(), w)?;
    w.write_all(&self.extra)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut res = vec![];
    self.write(&mut res).unwrap();
    res
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    let version = read_varint(r)?;
    // TODO: Create an enum out of version
    if (version == 0) || (version > 2) {
      Err(io::Error::new(io::ErrorKind::Other, "unrecognized transaction version"))?;
    }

    let timelock = Timelock::from_raw(read_varint(r)?);

    let inputs = read_vec(|r| Input::read(version == 2, r), r)?;
    if inputs.is_empty() {
      Err(io::Error::new(io::ErrorKind::Other, "transaction had no inputs"))?;
    }
    let is_miner_tx = matches!(inputs[0], Input::Gen { .. });

    let mut prefix = Self {
      version,
      timelock,
      inputs,
      outputs: read_vec(|r| Output::read((!is_miner_tx) && (version == 2), r), r)?,
      extra: vec![],
    };
    prefix.extra = read_vec(read_byte, r)?;
    Ok(prefix)
  }

  pub fn hash(&self) -> [u8; 32] {
    hash(&self.serialize())
  }
}

/// Monero transaction. For version 1, rct_signatures still contains an accurate fee value.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Transaction {
  pub prefix: TransactionPrefix,
  pub signatures: Vec<Vec<(Scalar, Scalar)>>,
  pub rct_signatures: RctSignatures,
}

impl Transaction {
  pub(crate) fn fee_weight(
    protocol: Protocol,
    inputs: usize,
    outputs: usize,
    extra: usize,
  ) -> usize {
    TransactionPrefix::fee_weight(protocol.ring_len(), inputs, outputs, extra) +
      RctSignatures::fee_weight(protocol, inputs, outputs)
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.prefix.write(w)?;
    if self.prefix.version == 1 {
      for sigs in &self.signatures {
        for sig in sigs {
          write_scalar(&sig.0, w)?;
          write_scalar(&sig.1, w)?;
        }
      }
      Ok(())
    } else if self.prefix.version == 2 {
      self.rct_signatures.write(w)
    } else {
      panic!("Serializing a transaction with an unknown version");
    }
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut res = Vec::with_capacity(2048);
    self.write(&mut res).unwrap();
    res
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    let prefix = TransactionPrefix::read(r)?;
    let mut signatures = vec![];
    let mut rct_signatures = RctSignatures {
      base: RctBase { fee: 0, encrypted_amounts: vec![], pseudo_outs: vec![], commitments: vec![] },
      prunable: RctPrunable::Null,
    };

    if prefix.version == 1 {
      signatures = prefix
        .inputs
        .iter()
        .filter_map(|input| match input {
          Input::ToKey { key_offsets, .. } => Some(
            key_offsets
              .iter()
              .map(|_| Ok((read_scalar(r)?, read_scalar(r)?)))
              .collect::<Result<_, io::Error>>(),
          ),
          _ => None,
        })
        .collect::<Result<_, _>>()?;

      rct_signatures.base.fee = prefix
        .inputs
        .iter()
        .map(|input| match input {
          Input::Gen(..) => 0,
          Input::ToKey { amount, .. } => amount.unwrap(),
        })
        .sum::<u64>()
        .saturating_sub(prefix.outputs.iter().map(|output| output.amount.unwrap()).sum());
    } else if prefix.version == 2 {
      rct_signatures = RctSignatures::read(
        prefix
          .inputs
          .iter()
          .map(|input| match input {
            Input::Gen(_) => 0,
            Input::ToKey { key_offsets, .. } => key_offsets.len(),
          })
          .collect(),
        prefix.outputs.len(),
        r,
      )?;
    } else {
      Err(io::Error::new(io::ErrorKind::Other, "Tried to deserialize unknown version"))?;
    }

    Ok(Self { prefix, signatures, rct_signatures })
  }

  pub fn hash(&self) -> [u8; 32] {
    let mut buf = Vec::with_capacity(2048);
    if self.prefix.version == 1 {
      self.write(&mut buf).unwrap();
      hash(&buf)
    } else {
      let mut hashes = Vec::with_capacity(96);

      hashes.extend(self.prefix.hash());

      self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
      hashes.extend(hash(&buf));
      buf.clear();

      hashes.extend(&match self.rct_signatures.prunable {
        RctPrunable::Null => [0; 32],
        RctPrunable::MlsagBorromean { .. } |
        RctPrunable::MlsagBulletproofs { .. } |
        RctPrunable::Clsag { .. } => {
          self.rct_signatures.prunable.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
          hash(&buf)
        }
      });

      hash(&hashes)
    }
  }

  /// Calculate the hash of this transaction as needed for signing it.
  pub fn signature_hash(&self) -> [u8; 32] {
    let mut buf = Vec::with_capacity(2048);
    let mut sig_hash = Vec::with_capacity(96);

    sig_hash.extend(self.prefix.hash());

    self.rct_signatures.base.write(&mut buf, self.rct_signatures.rct_type()).unwrap();
    sig_hash.extend(hash(&buf));
    buf.clear();

    self.rct_signatures.prunable.signature_write(&mut buf).unwrap();
    sig_hash.extend(hash(&buf));

    hash(&sig_hash)
  }
}
@@ -1,311 +0,0 @@
use core::{marker::PhantomData, fmt::Debug};
use std_shims::string::{String, ToString};

use zeroize::Zeroize;

use curve25519_dalek::edwards::{EdwardsPoint, CompressedEdwardsY};

use base58_monero::base58::{encode_check, decode_check};

/// The network this address is for.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum Network {
  Mainnet,
  Testnet,
  Stagenet,
}

/// The address type, supporting the officially documented addresses, along with
/// [Featured Addresses](https://gist.github.com/kayabaNerve/01c50bbc35441e0bbdcee63a9d823789).
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressType {
  Standard,
  Integrated([u8; 8]),
  Subaddress,
  Featured { subaddress: bool, payment_id: Option<[u8; 8]>, guaranteed: bool },
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct SubaddressIndex {
  pub(crate) account: u32,
  pub(crate) address: u32,
}

impl SubaddressIndex {
  pub const fn new(account: u32, address: u32) -> Option<Self> {
    if (account == 0) && (address == 0) {
      return None;
    }
    Some(Self { account, address })
  }

  pub const fn account(&self) -> u32 {
    self.account
  }

  pub const fn address(&self) -> u32 {
    self.address
  }
}

/// Address specification. Used internally to create addresses.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum AddressSpec {
  Standard,
  Integrated([u8; 8]),
  Subaddress(SubaddressIndex),
  Featured { subaddress: Option<SubaddressIndex>, payment_id: Option<[u8; 8]>, guaranteed: bool },
}

impl AddressType {
  pub const fn is_subaddress(&self) -> bool {
    matches!(self, Self::Subaddress) || matches!(self, Self::Featured { subaddress: true, .. })
  }

  pub const fn payment_id(&self) -> Option<[u8; 8]> {
    if let Self::Integrated(id) = self {
      Some(*id)
    } else if let Self::Featured { payment_id, .. } = self {
      *payment_id
    } else {
      None
    }
  }

  pub const fn is_guaranteed(&self) -> bool {
    matches!(self, Self::Featured { guaranteed: true, .. })
  }
}

/// A type which returns the byte for a given address.
pub trait AddressBytes: Clone + Copy + PartialEq + Eq + Debug {
  fn network_bytes(network: Network) -> (u8, u8, u8, u8);
}

/// Address bytes for Monero.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct MoneroAddressBytes;
impl AddressBytes for MoneroAddressBytes {
  fn network_bytes(network: Network) -> (u8, u8, u8, u8) {
    match network {
      Network::Mainnet => (18, 19, 42, 70),
      Network::Testnet => (53, 54, 63, 111),
      Network::Stagenet => (24, 25, 36, 86),
    }
  }
}

/// Address metadata.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct AddressMeta<B: AddressBytes> {
  _bytes: PhantomData<B>,
  pub network: Network,
  pub kind: AddressType,
}

impl<B: AddressBytes> Zeroize for AddressMeta<B> {
  fn zeroize(&mut self) {
    self.network.zeroize();
    self.kind.zeroize();
  }
}

/// Error when decoding an address.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum AddressError {
  #[cfg_attr(feature = "std", error("invalid address byte"))]
  InvalidByte,
  #[cfg_attr(feature = "std", error("invalid address encoding"))]
  InvalidEncoding,
  #[cfg_attr(feature = "std", error("invalid length"))]
  InvalidLength,
  #[cfg_attr(feature = "std", error("invalid key"))]
  InvalidKey,
  #[cfg_attr(feature = "std", error("unknown features"))]
  UnknownFeatures,
  #[cfg_attr(feature = "std", error("different network than expected"))]
  DifferentNetwork,
}

impl<B: AddressBytes> AddressMeta<B> {
  #[allow(clippy::wrong_self_convention)]
  fn to_byte(&self) -> u8 {
    let bytes = B::network_bytes(self.network);
    match self.kind {
      AddressType::Standard => bytes.0,
      AddressType::Integrated(_) => bytes.1,
      AddressType::Subaddress => bytes.2,
      AddressType::Featured { .. } => bytes.3,
    }
  }

  /// Create an address's metadata.
  pub const fn new(network: Network, kind: AddressType) -> Self {
    Self { _bytes: PhantomData, network, kind }
  }

  // Returns an incomplete instantiation in the case of Integrated/Featured addresses
  fn from_byte(byte: u8) -> Result<Self, AddressError> {
    let mut meta = None;
    for network in [Network::Mainnet, Network::Testnet, Network::Stagenet] {
      let (standard, integrated, subaddress, featured) = B::network_bytes(network);
      if let Some(kind) = match byte {
        _ if byte == standard => Some(AddressType::Standard),
        _ if byte == integrated => Some(AddressType::Integrated([0; 8])),
        _ if byte == subaddress => Some(AddressType::Subaddress),
        _ if byte == featured => {
          Some(AddressType::Featured { subaddress: false, payment_id: None, guaranteed: false })
        }
        _ => None,
      } {
        meta = Some(Self::new(network, kind));
        break;
      }
    }

    meta.ok_or(AddressError::InvalidByte)
  }

  pub const fn is_subaddress(&self) -> bool {
    self.kind.is_subaddress()
  }

  pub const fn payment_id(&self) -> Option<[u8; 8]> {
    self.kind.payment_id()
  }

  pub const fn is_guaranteed(&self) -> bool {
    self.kind.is_guaranteed()
  }
}

/// A Monero address, composed of metadata and a spend/view key.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub struct Address<B: AddressBytes> {
  pub meta: AddressMeta<B>,
  pub spend: EdwardsPoint,
  pub view: EdwardsPoint,
}

impl<B: AddressBytes> Zeroize for Address<B> {
  fn zeroize(&mut self) {
    self.meta.zeroize();
    self.spend.zeroize();
    self.view.zeroize();
  }
}

impl<B: AddressBytes> ToString for Address<B> {
  fn to_string(&self) -> String {
    let mut data = vec![self.meta.to_byte()];
    data.extend(self.spend.compress().to_bytes());
    data.extend(self.view.compress().to_bytes());
    if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.meta.kind {
      // Technically should be a VarInt, yet we don't have enough features for it to be needed
      data.push(
        u8::from(subaddress) + (u8::from(payment_id.is_some()) << 1) + (u8::from(guaranteed) << 2),
      );
    }
    if let Some(id) = self.meta.kind.payment_id() {
      data.extend(id);
    }
    encode_check(&data).unwrap()
  }
}
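// A sketch (hypothetical test, not from the original file) that the features byte packed
// above decodes with the bit tests from_str_raw applies below:
#[cfg(test)]
#[test]
fn features_byte_sketch() {
  let (subaddress, payment_id, guaranteed) = (true, false, true);
  let byte = u8::from(subaddress) + (u8::from(payment_id) << 1) + (u8::from(guaranteed) << 2);
  // Stays under the bound from_str_raw rejects at
  assert!(byte < (2 << 3));
  assert_eq!((byte & 1) == 1, subaddress);
  assert_eq!(((byte >> 1) & 1) == 1, payment_id);
  assert_eq!(((byte >> 2) & 1) == 1, guaranteed);
}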

impl<B: AddressBytes> Address<B> {
  pub const fn new(meta: AddressMeta<B>, spend: EdwardsPoint, view: EdwardsPoint) -> Self {
    Self { meta, spend, view }
  }

  pub fn from_str_raw(s: &str) -> Result<Self, AddressError> {
    let raw = decode_check(s).map_err(|_| AddressError::InvalidEncoding)?;
    if raw.len() < (1 + 32 + 32) {
      Err(AddressError::InvalidLength)?;
    }

    let mut meta = AddressMeta::from_byte(raw[0])?;
    let spend = CompressedEdwardsY(raw[1 .. 33].try_into().unwrap())
      .decompress()
      .ok_or(AddressError::InvalidKey)?;
    let view = CompressedEdwardsY(raw[33 .. 65].try_into().unwrap())
      .decompress()
      .ok_or(AddressError::InvalidKey)?;
    let mut read = 65;

    if matches!(meta.kind, AddressType::Featured { .. }) {
      if raw[read] >= (2 << 3) {
        Err(AddressError::UnknownFeatures)?;
      }

      let subaddress = (raw[read] & 1) == 1;
      let integrated = ((raw[read] >> 1) & 1) == 1;
      let guaranteed = ((raw[read] >> 2) & 1) == 1;

      meta.kind = AddressType::Featured {
        subaddress,
        payment_id: Some([0; 8]).filter(|_| integrated),
        guaranteed,
      };
      read += 1;
    }

    // Update read early so we can verify the length
    if meta.kind.payment_id().is_some() {
      read += 8;
    }
    if raw.len() != read {
      Err(AddressError::InvalidLength)?;
    }

    if let AddressType::Integrated(ref mut id) = meta.kind {
      id.copy_from_slice(&raw[(read - 8) .. read]);
    }
    if let AddressType::Featured { payment_id: Some(ref mut id), .. } = meta.kind {
      id.copy_from_slice(&raw[(read - 8) .. read]);
    }

    Ok(Self { meta, spend, view })
  }

  pub fn from_str(network: Network, s: &str) -> Result<Self, AddressError> {
    Self::from_str_raw(s).and_then(|addr| {
      if addr.meta.network == network {
        Ok(addr)
      } else {
        Err(AddressError::DifferentNetwork)?
      }
    })
  }

  pub const fn network(&self) -> Network {
    self.meta.network
  }

  pub const fn is_subaddress(&self) -> bool {
    self.meta.is_subaddress()
  }

  pub const fn payment_id(&self) -> Option<[u8; 8]> {
    self.meta.payment_id()
  }

  pub const fn is_guaranteed(&self) -> bool {
    self.meta.is_guaranteed()
  }
}

/// Instantiation of the Address type with Monero's network bytes.
pub type MoneroAddress = Address<MoneroAddressBytes>;
// Allow re-interpreting of an arbitrary address as a Monero address so it can be used with the
// rest of this library. Doesn't use From as it was conflicting with From<T> for T.
impl MoneroAddress {
  pub const fn from<B: AddressBytes>(address: Address<B>) -> Self {
    Self::new(
      AddressMeta::new(address.meta.network, address.meta.kind),
      address.spend,
      address.view,
    )
  }
}
@@ -1,281 +0,0 @@
use std_shims::{sync::OnceLock, vec::Vec, collections::HashSet};

#[cfg(not(feature = "std"))]
use std_shims::sync::Mutex;
#[cfg(feature = "std")]
use futures::lock::Mutex;

use zeroize::{Zeroize, ZeroizeOnDrop};

use rand_core::{RngCore, CryptoRng};
use rand_distr::{Distribution, Gamma};
#[cfg(not(feature = "std"))]
use rand_distr::num_traits::Float;

use curve25519_dalek::edwards::EdwardsPoint;

use crate::{
  wallet::SpendableOutput,
  rpc::{RpcError, RpcConnection, Rpc},
};

const LOCK_WINDOW: usize = 10;
const MATURITY: u64 = 60;
const RECENT_WINDOW: usize = 15;
const BLOCK_TIME: usize = 120;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
#[allow(clippy::as_conversions)]
const TIP_APPLICATION: f64 = (LOCK_WINDOW * BLOCK_TIME) as f64;

// TODO: Expose an API to reset this in case a reorg occurs/the RPC fails/returns garbage
// TODO: Update this when scanning a block, as possible
static DISTRIBUTION_CELL: OnceLock<Mutex<Vec<u64>>> = OnceLock::new();
#[allow(non_snake_case)]
fn DISTRIBUTION() -> &'static Mutex<Vec<u64>> {
  DISTRIBUTION_CELL.get_or_init(|| Mutex::new(Vec::with_capacity(3_000_000)))
}

#[allow(clippy::too_many_arguments, clippy::as_conversions)]
async fn select_n<'a, R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
  rng: &mut R,
  rpc: &Rpc<RPC>,
  distribution: &[u64],
  height: usize,
  high: u64,
  per_second: f64,
  real: &[u64],
  used: &mut HashSet<u64>,
  count: usize,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
  if height >= rpc.get_height().await? {
    // TODO: Don't use InternalError for the caller's failure
    Err(RpcError::InternalError("decoys being requested from too young blocks"))?;
  }

  #[cfg(test)]
  let mut iters = 0;
  let mut confirmed = Vec::with_capacity(count);
  // Retries on failure. Retries are obvious as decoys, yet should be minimal
  while confirmed.len() != count {
    let remaining = count - confirmed.len();
    let mut candidates = Vec::with_capacity(remaining);
    while candidates.len() != remaining {
      #[cfg(test)]
      {
        iters += 1;
        // This is cheap and, on fresh chains, a lot of rounds may be needed
        if iters == 100 {
          Err(RpcError::InternalError("hit decoy selection round limit"))?;
        }
      }

      // Use a gamma distribution
      let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
      if age > TIP_APPLICATION {
        age -= TIP_APPLICATION;
      } else {
        // f64 does not have try_from available, which is why these are written with `as`
        age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
      }

      let o = (age * per_second) as u64;
      if o < high {
        let i = distribution.partition_point(|s| *s < (high - 1 - o));
        let prev = i.saturating_sub(1);
        let n = distribution[i] - distribution[prev];
        if n != 0 {
          let o = distribution[prev] + (rng.next_u64() % n);
          if !used.contains(&o) {
            // It will either actually be used, or is unusable and this prevents trying it again
            used.insert(o);
            candidates.push(o);
          }
        }
      }
    }

    // If this is the first time we're requesting these outputs, include the real one as well
    // Prevents the node we're connected to from having a list of known decoys and then seeing a
    // TX which uses all of them, with one additional output (the true spend)
    let mut real_indexes = HashSet::with_capacity(real.len());
    if confirmed.is_empty() {
      for real in real {
        candidates.push(*real);
      }
      // Sort candidates so the real spends aren't the ones at the end
      candidates.sort();
      for real in real {
        real_indexes.insert(candidates.binary_search(real).unwrap());
      }
    }

    for (i, output) in rpc.get_unlocked_outputs(&candidates, height).await?.iter_mut().enumerate() {
      // Don't include the real spend as a decoy, despite requesting it
      if real_indexes.contains(&i) {
        continue;
      }

      if let Some(output) = output.take() {
        confirmed.push((candidates[i], output));
      }
    }
  }

  Ok(confirmed)
}
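// A standalone sketch (hypothetical test; assumes rand_core's OsRng is available as a dev
// dependency) of the age sampler above: ln-age is drawn from a gamma distribution,
// exponentiated, and converted to an output index via the chain's output rate:
#[cfg(test)]
#[test]
fn gamma_age_sketch() {
  use rand_core::OsRng;

  let per_second = 0.1; // hypothetical outputs per second
  let age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(&mut OsRng).exp();
  assert!(age > 0.0);
  // An output roughly this many seconds' worth of outputs back from the tip gets selected
  let _o = (age * per_second) as u64;
}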

fn offset(ring: &[u64]) -> Vec<u64> {
  let mut res = vec![ring[0]];
  res.resize(ring.len(), 0);
  for m in (1 .. ring.len()).rev() {
    res[m] = ring[m] - ring[m - 1];
  }
  res
}
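// offset() emits Monero's relative form: the first element is absolute, the rest are deltas.
// A sketch (hypothetical test, not from the original file) of the inverse, recovering
// absolute indices via a running sum:
#[cfg(test)]
#[test]
fn offset_round_trip_sketch() {
  let ring = vec![5, 10, 12, 40];
  let offsets = offset(&ring);
  assert_eq!(offsets, vec![5, 5, 2, 28]);
  let mut sum = 0;
  let absolute = offsets.iter().map(|o| { sum += o; sum }).collect::<Vec<_>>();
  assert_eq!(absolute, ring);
}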

/// Decoy data, containing the actual member as well (at index `i`).
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Decoys {
  pub i: u8,
  pub offsets: Vec<u64>,
  pub ring: Vec<[EdwardsPoint; 2]>,
}

impl Decoys {
  pub fn len(&self) -> usize {
    self.offsets.len()
  }

  /// Select decoys using the same distribution as Monero.
  #[allow(clippy::as_conversions)]
  pub async fn select<R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
    rng: &mut R,
    rpc: &Rpc<RPC>,
    ring_len: usize,
    height: usize,
    inputs: &[SpendableOutput],
  ) -> Result<Vec<Self>, RpcError> {
    #[cfg(not(feature = "std"))]
    let mut distribution = DISTRIBUTION().lock();
    #[cfg(feature = "std")]
    let mut distribution = DISTRIBUTION().lock().await;

    let decoy_count = ring_len - 1;

    // Convert the inputs in question to the raw output data
    let mut real = Vec::with_capacity(inputs.len());
    let mut outputs = Vec::with_capacity(inputs.len());
    for input in inputs {
      real.push(input.global_index);
      outputs.push((real[real.len() - 1], [input.key(), input.commitment().calculate()]));
    }

    if distribution.len() <= height {
      let extension = rpc.get_output_distribution(distribution.len(), height).await?;
      distribution.extend(extension);
    }
    // If asked to use an older height than previously asked, truncate to ensure accuracy
    // Should never happen, yet risks desyncing if it did
    distribution.truncate(height + 1); // height is inclusive, and 0 is a valid height

    let high = distribution[distribution.len() - 1];
    let per_second = {
      let blocks = distribution.len().min(BLOCKS_PER_YEAR);
      let outputs = high - distribution[distribution.len().saturating_sub(blocks + 1)];
      (outputs as f64) / ((blocks * BLOCK_TIME) as f64)
    };

    let mut used = HashSet::<u64>::new();
    for o in &outputs {
      used.insert(o.0);
    }

    // TODO: Create a TX with less than the target amount, as allowed by the protocol
    if (high - MATURITY) < u64::try_from(inputs.len() * ring_len).unwrap() {
      Err(RpcError::InternalError("not enough decoy candidates"))?;
    }

    // Select all decoys for this transaction, assuming we generate a sane transaction
    // We should almost never naturally generate an insane transaction, hence why this doesn't
    // bother with an overage
    let mut decoys = select_n(
      rng,
      rpc,
      &distribution,
      height,
      high,
      per_second,
      &real,
      &mut used,
      inputs.len() * decoy_count,
    )
    .await?;
    real.zeroize();

    let mut res = Vec::with_capacity(inputs.len());
    for o in outputs {
      // Grab the decoys for this specific output
      let mut ring = decoys.drain((decoys.len() - decoy_count) ..).collect::<Vec<_>>();
      ring.push(o);
      ring.sort_by(|a, b| a.0.cmp(&b.0));

      // Sanity checks are only run when 1000 outputs are available in Monero
      // We run this check whenever the highest output index, which we acknowledge, is > 500
      // This means we assume (for presumably test blockchains) a chain which hasn't yet reached
      // 500 outputs at the height in use is not a sufficiently mature blockchain
      // Considering Monero's p2p layer doesn't actually check transaction sanity, it should be
      // fine for us to not have perfectly matching rules, especially since this code will infinite
      // loop if it can't determine sanity, which is possible with sufficient inputs on
      // sufficiently small chains
      if high > 500 {
        // Make sure the TX passes the sanity check that the median output is within the last 40%
        let target_median = high * 3 / 5;
        while ring[ring_len / 2].0 < target_median {
          // If it's not, update the bottom half with new values to ensure the median only moves up
          #[allow(clippy::needless_collect)] // Needed for ownership reasons
          for removed in ring.drain(0 .. (ring_len / 2)).collect::<Vec<_>>() {
            // If we removed the real spend, add it back
            if removed.0 == o.0 {
              ring.push(o);
            } else {
              // We could have kept this in `used`, saving CPU time and removing low values as
              // candidates, yet that'd increase the amount of decoys required to create this
              // transaction, and some of the removed outputs may be the best options (as we drop
              // the first half, not just the bottom n)
              used.remove(&removed.0);
            }
          }

          // Select new outputs until we have a full sized ring again
          ring.extend(
            select_n(
              rng,
              rpc,
              &distribution,
              height,
              high,
              per_second,
              &[],
              &mut used,
              ring_len - ring.len(),
            )
            .await?,
          );
          ring.sort_by(|a, b| a.0.cmp(&b.0));
        }

        // The other sanity check rule is about duplicates, yet we already enforce unique ring
        // members
      }

      res.push(Self {
        // Binary searches for the real spend since we don't know where it sorted to
        i: u8::try_from(ring.partition_point(|x| x.0 < o.0)).unwrap(),
        offsets: offset(&ring.iter().map(|output| output.0).collect::<Vec<_>>()),
        ring: ring.iter().map(|output| output.1).collect(),
      });
    }

    Ok(res)
  }
}
@@ -1,218 +0,0 @@
use core::ops::BitXor;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use zeroize::Zeroize;

use curve25519_dalek::edwards::EdwardsPoint;

use crate::serialize::{
  varint_len, read_byte, read_bytes, read_varint, read_point, read_vec, write_byte, write_varint,
  write_point, write_vec,
};

pub const MAX_TX_EXTRA_NONCE_SIZE: usize = 255;

pub const PAYMENT_ID_MARKER: u8 = 0;
pub const ENCRYPTED_PAYMENT_ID_MARKER: u8 = 1;
// Used as it's the highest value not interpretable as a continued VarInt
pub const ARBITRARY_DATA_MARKER: u8 = 127;

// 1 byte is used for the marker
pub const MAX_ARBITRARY_DATA_SIZE: usize = MAX_TX_EXTRA_NONCE_SIZE - 1;
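// 127 = 0x7f is the largest byte without the VarInt continuation bit set; a sketch
// (hypothetical test, not from the original file) of the property the comment above relies on:
#[cfg(test)]
#[test]
fn arbitrary_data_marker_sketch() {
  assert_eq!(ARBITRARY_DATA_MARKER, 127);
  assert_eq!(ARBITRARY_DATA_MARKER & 0b1000_0000, 0);
}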

#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub enum PaymentId {
  Unencrypted([u8; 32]),
  Encrypted([u8; 8]),
}

impl BitXor<[u8; 8]> for PaymentId {
  type Output = Self;

  fn bitxor(self, bytes: [u8; 8]) -> Self {
    match self {
      // Don't perform the xor since this isn't intended to be encrypted with xor
      Self::Unencrypted(_) => self,
      Self::Encrypted(id) => {
        Self::Encrypted((u64::from_le_bytes(id) ^ u64::from_le_bytes(bytes)).to_le_bytes())
      }
    }
  }
}
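// The XOR pad applied above is an involution; a sketch (hypothetical test, not from the
// original file) that applying the same pad twice recovers the original encrypted ID:
#[cfg(test)]
#[test]
fn payment_id_xor_sketch() {
  let pad = [0xaa; 8];
  let id = PaymentId::Encrypted([1, 2, 3, 4, 5, 6, 7, 8]);
  assert_eq!((id ^ pad) ^ pad, id);
  // Unencrypted IDs pass through untouched
  assert_eq!(PaymentId::Unencrypted([0; 32]) ^ pad, PaymentId::Unencrypted([0; 32]));
}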

impl PaymentId {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::Unencrypted(id) => {
        w.write_all(&[PAYMENT_ID_MARKER])?;
        w.write_all(id)?;
      }
      Self::Encrypted(id) => {
        w.write_all(&[ENCRYPTED_PAYMENT_ID_MARKER])?;
        w.write_all(id)?;
      }
    }
    Ok(())
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(match read_byte(r)? {
      0 => Self::Unencrypted(read_bytes(r)?),
      1 => Self::Encrypted(read_bytes(r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown payment ID type"))?,
    })
  }
}

// Doesn't bother with padding nor MinerGate
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub enum ExtraField {
  PublicKey(EdwardsPoint),
  Nonce(Vec<u8>),
  MergeMining(usize, [u8; 32]),
  PublicKeys(Vec<EdwardsPoint>),
}

impl ExtraField {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    match self {
      Self::PublicKey(key) => {
        w.write_all(&[1])?;
        w.write_all(&key.compress().to_bytes())?;
      }
      Self::Nonce(data) => {
        w.write_all(&[2])?;
        write_vec(write_byte, data, w)?;
      }
      Self::MergeMining(height, merkle) => {
        w.write_all(&[3])?;
        write_varint(&u64::try_from(*height).unwrap(), w)?;
        w.write_all(merkle)?;
      }
      Self::PublicKeys(keys) => {
        w.write_all(&[4])?;
        write_vec(write_point, keys, w)?;
      }
    }
    Ok(())
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(match read_byte(r)? {
      1 => Self::PublicKey(read_point(r)?),
      2 => Self::Nonce({
        let nonce = read_vec(read_byte, r)?;
        if nonce.len() > MAX_TX_EXTRA_NONCE_SIZE {
          Err(io::Error::new(io::ErrorKind::Other, "too long nonce"))?;
        }
        nonce
      }),
      3 => Self::MergeMining(
        usize::try_from(read_varint(r)?)
          .map_err(|_| io::Error::new(io::ErrorKind::Other, "varint for height exceeds usize"))?,
        read_bytes(r)?,
      ),
      4 => Self::PublicKeys(read_vec(read_point, r)?),
      _ => Err(io::Error::new(io::ErrorKind::Other, "unknown extra field"))?,
    })
  }
}

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Extra(Vec<ExtraField>);
impl Extra {
  pub fn keys(&self) -> Option<(EdwardsPoint, Option<Vec<EdwardsPoint>>)> {
    let mut key = None;
    let mut additional = None;
    for field in &self.0 {
      match field.clone() {
        ExtraField::PublicKey(this_key) => key = key.or(Some(this_key)),
        ExtraField::PublicKeys(these_additional) => {
          additional = additional.or(Some(these_additional));
        }
        ExtraField::Nonce(_) | ExtraField::MergeMining(..) => (),
      }
    }
    // Don't return any keys if this was non-standard and didn't include the primary key
    key.map(|key| (key, additional))
  }

  pub fn payment_id(&self) -> Option<PaymentId> {
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        return PaymentId::read::<&[u8]>(&mut data.as_ref()).ok();
      }
    }
    None
  }

  pub fn data(&self) -> Vec<Vec<u8>> {
    let mut res = vec![];
    for field in &self.0 {
      if let ExtraField::Nonce(data) = field {
        // Guard against an empty nonce before checking the marker byte
        if data.first() == Some(&ARBITRARY_DATA_MARKER) {
          res.push(data[1 ..].to_vec());
        }
      }
    }
    res
  }

  pub(crate) fn new(key: EdwardsPoint, additional: Vec<EdwardsPoint>) -> Self {
    let mut res = Self(Vec::with_capacity(3));
    res.push(ExtraField::PublicKey(key));
    if !additional.is_empty() {
      res.push(ExtraField::PublicKeys(additional));
    }
    res
  }

  pub(crate) fn push(&mut self, field: ExtraField) {
    self.0.push(field);
  }

  #[rustfmt::skip]
  pub(crate) fn fee_weight(
    outputs: usize,
    additional: bool,
    payment_id: bool,
    data: &[Vec<u8>]
  ) -> usize {
    // PublicKey, key
    (1 + 32) +
    // PublicKeys, length, additional keys
    (if additional { 1 + 1 + (outputs * 32) } else { 0 }) +
    // PaymentId (Nonce), length, encrypted, ID
    (if payment_id { 1 + 1 + 1 + 8 } else { 0 }) +
    // Nonce, length, ARBITRARY_DATA_MARKER, data
    data.iter().map(|v| 1 + varint_len(1 + v.len()) + 1 + v.len()).sum::<usize>()
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    for field in &self.0 {
      field.write(w)?;
    }
    Ok(())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = vec![];
    self.write(&mut buf).unwrap();
    buf
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    let mut res = Self(vec![]);
    // Read fields until the reader is exhausted or errors
    while let Ok(field) = ExtraField::read(r) {
      res.0.push(field);
    }
    Ok(res)
  }
}
@@ -1,268 +0,0 @@
use core::ops::Deref;
use std_shims::collections::{HashSet, HashMap};

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use curve25519_dalek::{
  constants::ED25519_BASEPOINT_TABLE,
  scalar::Scalar,
  edwards::{EdwardsPoint, CompressedEdwardsY},
};

use crate::{
  hash, hash_to_scalar, serialize::write_varint, ringct::EncryptedAmount, transaction::Input,
};

pub mod extra;
pub(crate) use extra::{PaymentId, ExtraField, Extra};

/// Seed creation and parsing functionality.
pub mod seed;

/// Address encoding and decoding functionality.
pub mod address;
use address::{Network, AddressType, SubaddressIndex, AddressSpec, AddressMeta, MoneroAddress};

mod scan;
pub use scan::{ReceivedOutput, SpendableOutput, Timelocked};

pub(crate) mod decoys;
pub(crate) use decoys::Decoys;

mod send;
pub use send::{Fee, TransactionError, Change, SignableTransaction, Eventuality};
#[cfg(feature = "std")]
pub use send::SignableTransactionBuilder;
#[cfg(feature = "multisig")]
pub(crate) use send::InternalPayment;
#[cfg(feature = "multisig")]
pub use send::TransactionMachine;

fn key_image_sort(x: &EdwardsPoint, y: &EdwardsPoint) -> core::cmp::Ordering {
  x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
}

// https://gist.github.com/kayabaNerve/8066c13f1fe1573286ba7a2fd79f6100
pub(crate) fn uniqueness(inputs: &[Input]) -> [u8; 32] {
  let mut u = b"uniqueness".to_vec();
  for input in inputs {
    match input {
      // If Gen, this should be the only input, making this loop somewhat pointless
      // This works and even if there were somehow multiple inputs, it'd be a false negative
      Input::Gen(height) => {
        write_varint(height, &mut u).unwrap();
      }
      Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
    }
  }
  hash(&u)
}

// Hs("view_tag" || 8Ra || o), Hs(8Ra || o), and H(8Ra || 0x8d) with uniqueness inclusion in the
// Scalar as an option
#[allow(non_snake_case)]
pub(crate) fn shared_key(
  uniqueness: Option<[u8; 32]>,
  ecdh: EdwardsPoint,
  o: usize,
) -> (u8, Scalar, [u8; 8]) {
  // 8Ra
  let mut output_derivation = ecdh.mul_by_cofactor().compress().to_bytes().to_vec();

  let mut payment_id_xor = [0; 8];
  payment_id_xor
    .copy_from_slice(&hash(&[output_derivation.as_ref(), [0x8d].as_ref()].concat())[.. 8]);

  // || o
  write_varint(&o.try_into().unwrap(), &mut output_derivation).unwrap();

  let view_tag = hash(&[b"view_tag".as_ref(), &output_derivation].concat())[0];

  // uniqueness ||
  let shared_key = if let Some(uniqueness) = uniqueness {
    [uniqueness.as_ref(), &output_derivation].concat()
  } else {
    output_derivation
  };

  (view_tag, hash_to_scalar(&shared_key), payment_id_xor)
}

pub(crate) fn commitment_mask(shared_key: Scalar) -> Scalar {
  let mut mask = b"commitment_mask".to_vec();
  mask.extend(shared_key.to_bytes());
  hash_to_scalar(&mask)
}

pub(crate) fn amount_encryption(amount: u64, key: Scalar) -> [u8; 8] {
  let mut amount_mask = b"amount".to_vec();
  amount_mask.extend(key.to_bytes());
  (amount ^ u64::from_le_bytes(hash(&amount_mask)[.. 8].try_into().unwrap())).to_le_bytes()
}
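// amount_encryption is a XOR pad keyed by the shared secret, so applying it twice is the
// identity; a sketch (hypothetical test, not from the original file) of the symmetry
// amount_decryption relies on below:
#[cfg(test)]
#[test]
fn amount_encryption_sketch() {
  let key = Scalar::from(42u8);
  let amount = 1337u64;
  let enc = amount_encryption(amount, key);
  assert_eq!(u64::from_le_bytes(amount_encryption(u64::from_le_bytes(enc), key)), amount);
}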
|
|
||||||
// TODO: Move this under EncryptedAmount?
|
|
||||||
fn amount_decryption(amount: &EncryptedAmount, key: Scalar) -> (Scalar, u64) {
|
|
||||||
match amount {
|
|
||||||
EncryptedAmount::Original { mask, amount } => {
|
|
||||||
#[cfg(feature = "experimental")]
|
|
||||||
{
|
|
||||||
let mask_shared_sec = hash(key.as_bytes());
|
|
||||||
let mask =
|
|
||||||
Scalar::from_bytes_mod_order(*mask) - Scalar::from_bytes_mod_order(mask_shared_sec);
|
|
||||||
|
|
||||||
let amount_shared_sec = hash(&mask_shared_sec);
|
|
||||||
let amount_scalar =
|
|
||||||
Scalar::from_bytes_mod_order(*amount) - Scalar::from_bytes_mod_order(amount_shared_sec);
|
|
||||||
// d2b from rctTypes.cpp
|
|
||||||
let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
|
|
||||||
|
|
||||||
(mask, amount)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(not(feature = "experimental"))]
|
|
||||||
{
|
|
||||||
let _ = mask;
|
|
||||||
let _ = amount;
|
|
||||||
todo!("decrypting a legacy monero transaction's amount")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
EncryptedAmount::Compact { amount } => (
|
|
||||||
commitment_mask(key),
|
|
||||||
u64::from_le_bytes(amount_encryption(u64::from_le_bytes(*amount), key)),
|
|
||||||
),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The private view key and public spend key, enabling scanning transactions.
|
|
||||||
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
|
|
||||||
pub struct ViewPair {
|
|
||||||
spend: EdwardsPoint,
|
|
||||||
view: Zeroizing<Scalar>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ViewPair {
|
|
||||||
pub const fn new(spend: EdwardsPoint, view: Zeroizing<Scalar>) -> Self {
|
|
||||||
Self { spend, view }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn spend(&self) -> EdwardsPoint {
|
|
||||||
self.spend
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn view(&self) -> EdwardsPoint {
|
|
||||||
self.view.deref() * &ED25519_BASEPOINT_TABLE
|
|
||||||
}
|
|
||||||
|
|
||||||
fn subaddress_derivation(&self, index: SubaddressIndex) -> Scalar {
|
|
||||||
hash_to_scalar(&Zeroizing::new(
|
|
||||||
[
|
|
||||||
b"SubAddr\0".as_ref(),
|
|
||||||
Zeroizing::new(self.view.to_bytes()).as_ref(),
|
|
||||||
&index.account().to_le_bytes(),
|
|
||||||
&index.address().to_le_bytes(),
|
|
||||||
]
|
|
||||||
.concat(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn subaddress_keys(&self, index: SubaddressIndex) -> (EdwardsPoint, EdwardsPoint) {
|
|
||||||
let scalar = self.subaddress_derivation(index);
|
|
||||||
let spend = self.spend + (&scalar * &ED25519_BASEPOINT_TABLE);
|
|
||||||
let view = self.view.deref() * spend;
|
|
||||||
(spend, view)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns an address with the provided specification.
|
|
||||||
pub fn address(&self, network: Network, spec: AddressSpec) -> MoneroAddress {
|
|
||||||
let mut spend = self.spend;
|
|
||||||
let mut view: EdwardsPoint = self.view.deref() * &ED25519_BASEPOINT_TABLE;
|
|
||||||
|
|
||||||
// construct the address meta
|
|
||||||
let meta = match spec {
|
|
||||||
AddressSpec::Standard => AddressMeta::new(network, AddressType::Standard),
|
|
||||||
AddressSpec::Integrated(payment_id) => {
|
|
||||||
AddressMeta::new(network, AddressType::Integrated(payment_id))
|
|
||||||
}
|
|
||||||
AddressSpec::Subaddress(index) => {
|
|
||||||
(spend, view) = self.subaddress_keys(index);
|
|
||||||
AddressMeta::new(network, AddressType::Subaddress)
|
|
||||||
}
|
|
||||||
AddressSpec::Featured { subaddress, payment_id, guaranteed } => {
|
|
||||||
if let Some(index) = subaddress {
|
|
||||||
(spend, view) = self.subaddress_keys(index);
|
|
||||||
}
|
|
||||||
AddressMeta::new(
|
|
||||||
network,
|
|
||||||
AddressType::Featured { subaddress: subaddress.is_some(), payment_id, guaranteed },
|
|
||||||
)
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
MoneroAddress::new(meta, spend, view)
|
|
||||||
}
|
|
||||||
}
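
// Illustrative sketch (not part of the original source), assuming an already-constructed
// `view_pair: ViewPair`: deriving addresses for differing specifications.
//
// let standard = view_pair.address(Network::Mainnet, AddressSpec::Standard);
// let subaddress = view_pair.address(
//   Network::Mainnet,
//   AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap()),
// );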

/// Transaction scanner.
/// This scanner is capable of generating subaddresses, additionally scanning for them once
/// they've been explicitly generated. If the burning bug is attempted, any secondary outputs
/// will be ignored.
#[derive(Clone)]
pub struct Scanner {
  pair: ViewPair,
  // Also contains the spend key as None
  pub(crate) subaddresses: HashMap<CompressedEdwardsY, Option<SubaddressIndex>>,
  pub(crate) burning_bug: Option<HashSet<CompressedEdwardsY>>,
}

impl Zeroize for Scanner {
  fn zeroize(&mut self) {
    self.pair.zeroize();

    // These may not be effective, unfortunately
    for (mut key, mut value) in self.subaddresses.drain() {
      key.zeroize();
      value.zeroize();
    }
    if let Some(ref mut burning_bug) = self.burning_bug.take() {
      for mut output in burning_bug.drain() {
        output.zeroize();
      }
    }
  }
}

impl Drop for Scanner {
  fn drop(&mut self) {
    self.zeroize();
  }
}

impl ZeroizeOnDrop for Scanner {}

impl Scanner {
  /// Create a Scanner from a ViewPair.
  ///
  /// burning_bug is a HashSet of used keys, intended to prevent key reuse which would burn funds.
  ///
  /// When an output is successfully scanned, the output key MUST be saved to disk.
  ///
  /// When a new scanner is created, ALL saved output keys must be passed in to be secure.
  ///
  /// If None is passed, a modified shared key derivation is used which is immune to the burning
  /// bug (specifically the Guaranteed feature from Featured Addresses).
  pub fn from_view(pair: ViewPair, burning_bug: Option<HashSet<CompressedEdwardsY>>) -> Self {
    let mut subaddresses = HashMap::new();
    subaddresses.insert(pair.spend.compress(), None);
    Self { pair, subaddresses, burning_bug }
  }

  /// Register a subaddress.
  // There used to be an address function here, yet it wasn't safe. It could generate addresses
  // incompatible with the Scanner. While we could return None for that, then we have the issue
  // of runtime failures to generate an address.
  // Removing that API was the simplest option.
  pub fn register_subaddress(&mut self, subaddress: SubaddressIndex) {
    let (spend, _) = self.pair.subaddress_keys(subaddress);
    self.subaddresses.insert(spend.compress(), Some(subaddress));
  }
}
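
// Illustrative sketch (not part of the original source): constructing a Scanner with
// burning-bug protection and registering a subaddress before scanning. `view_pair` and
// `saved_output_keys` (the persisted HashSet of previously-seen output keys) are assumed to
// exist.
//
// let mut scanner = Scanner::from_view(view_pair, Some(saved_output_keys));
// scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap());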

@@ -1,474 +0,0 @@
use core::ops::Deref;
use std_shims::{
  vec::Vec,
  io::{self, Read, Write},
};

use zeroize::{Zeroize, ZeroizeOnDrop};

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar, edwards::EdwardsPoint};

use crate::{
  Commitment,
  serialize::{read_byte, read_u32, read_u64, read_bytes, read_scalar, read_point, read_raw_vec},
  transaction::{Input, Timelock, Transaction},
  block::Block,
  rpc::{RpcError, RpcConnection, Rpc},
  wallet::{
    PaymentId, Extra, address::SubaddressIndex, Scanner, uniqueness, shared_key, amount_decryption,
  },
};

/// An absolute output ID, defined as its transaction hash and output index.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct AbsoluteId {
  pub tx: [u8; 32],
  pub o: u8,
}

impl AbsoluteId {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.tx)?;
    w.write_all(&[self.o])
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(32 + 1);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self { tx: read_bytes(r)?, o: read_byte(r)? })
  }
}

/// The data contained with an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct OutputData {
  pub key: EdwardsPoint,
  /// Absolute difference between the spend key and the key in this output
  pub key_offset: Scalar,
  pub commitment: Commitment,
}

impl OutputData {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    w.write_all(&self.key.compress().to_bytes())?;
    w.write_all(&self.key_offset.to_bytes())?;
    w.write_all(&self.commitment.mask.to_bytes())?;
    w.write_all(&self.commitment.amount.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(32 + 32 + 32 + 8);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self {
      key: read_point(r)?,
      key_offset: read_scalar(r)?,
      commitment: Commitment::new(read_scalar(r)?, read_u64(r)?),
    })
  }
}

/// The metadata for an output.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct Metadata {
  /// The subaddress this output was sent to.
  pub subaddress: Option<SubaddressIndex>,
  /// The payment ID included with this output.
  /// This will be gibberish if the payment ID wasn't intended for the recipient or wasn't
  /// included.
  // Could be an Option, as extra doesn't necessarily have a payment ID, yet all Monero TXs should
  // have this, making it simplest for it to be as-is.
  pub payment_id: [u8; 8],
  /// Arbitrary data encoded in TX extra.
  pub arbitrary_data: Vec<Vec<u8>>,
}

impl Metadata {
  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    if let Some(subaddress) = self.subaddress {
      w.write_all(&[1])?;
      w.write_all(&subaddress.account().to_le_bytes())?;
      w.write_all(&subaddress.address().to_le_bytes())?;
    } else {
      w.write_all(&[0])?;
    }
    w.write_all(&self.payment_id)?;

    w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
    for part in &self.arbitrary_data {
      w.write_all(&[u8::try_from(part.len()).unwrap()])?;
      w.write_all(part)?;
    }
    Ok(())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = Vec::with_capacity(1 + 8 + 1);
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    #[allow(clippy::if_then_some_else_none)] // The Result usage makes this invalid
    let subaddress = if read_byte(r)? == 1 {
      Some(
        SubaddressIndex::new(read_u32(r)?, read_u32(r)?)
          .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid subaddress in metadata"))?,
      )
    } else {
      None
    };

    Ok(Self {
      subaddress,
      payment_id: read_bytes(r)?,
      arbitrary_data: {
        let mut data = vec![];
        for _ in 0 .. read_u32(r)? {
          let len = read_byte(r)?;
          data.push(read_raw_vec(read_byte, usize::from(len), r)?);
        }
        data
      },
    })
  }
}

/// A received output, defined as its absolute ID, data, and metadata.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct ReceivedOutput {
  pub absolute: AbsoluteId,
  pub data: OutputData,
  pub metadata: Metadata,
}

impl ReceivedOutput {
  pub fn key(&self) -> EdwardsPoint {
    self.data.key
  }

  pub fn key_offset(&self) -> Scalar {
    self.data.key_offset
  }

  pub fn commitment(&self) -> Commitment {
    self.data.commitment.clone()
  }

  pub fn arbitrary_data(&self) -> &[Vec<u8>] {
    &self.metadata.arbitrary_data
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.absolute.write(w)?;
    self.data.write(w)?;
    self.metadata.write(w)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self {
      absolute: AbsoluteId::read(r)?,
      data: OutputData::read(r)?,
      metadata: Metadata::read(r)?,
    })
  }
}
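
// Illustrative sketch (not part of the original source): the write/read pairs above are
// symmetric, so a ReceivedOutput survives a serialization round trip. `output` is assumed to
// be a previously scanned ReceivedOutput.
//
// let bytes = output.serialize();
// let deserialized = ReceivedOutput::read::<&[u8]>(&mut bytes.as_ref()).unwrap();
// assert_eq!(output, deserialized);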

/// A spendable output, defined as a received output and its index on the Monero blockchain.
/// This index is dependent on the Monero blockchain and will only be known once the output is
/// included within a block. This may change if there's a reorganization.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SpendableOutput {
  pub output: ReceivedOutput,
  pub global_index: u64,
}

impl SpendableOutput {
  /// Update the spendable output's global index. This is intended to be called if a
  /// re-organization occurred.
  pub async fn refresh_global_index<RPC: RpcConnection>(
    &mut self,
    rpc: &Rpc<RPC>,
  ) -> Result<(), RpcError> {
    self.global_index =
      rpc.get_o_indexes(self.output.absolute.tx).await?[usize::from(self.output.absolute.o)];
    Ok(())
  }

  pub async fn from<RPC: RpcConnection>(
    rpc: &Rpc<RPC>,
    output: ReceivedOutput,
  ) -> Result<Self, RpcError> {
    let mut output = Self { output, global_index: 0 };
    output.refresh_global_index(rpc).await?;
    Ok(output)
  }

  pub fn key(&self) -> EdwardsPoint {
    self.output.key()
  }

  pub fn key_offset(&self) -> Scalar {
    self.output.key_offset()
  }

  pub fn commitment(&self) -> Commitment {
    self.output.commitment()
  }

  pub fn arbitrary_data(&self) -> &[Vec<u8>] {
    self.output.arbitrary_data()
  }

  pub fn write<W: Write>(&self, w: &mut W) -> io::Result<()> {
    self.output.write(w)?;
    w.write_all(&self.global_index.to_le_bytes())
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut serialized = vec![];
    self.write(&mut serialized).unwrap();
    serialized
  }

  pub fn read<R: Read>(r: &mut R) -> io::Result<Self> {
    Ok(Self { output: ReceivedOutput::read(r)?, global_index: read_u64(r)? })
  }
}

/// A collection of timelocked outputs, either received or spendable.
#[derive(Zeroize)]
pub struct Timelocked<O: Clone + Zeroize>(Timelock, Vec<O>);
impl<O: Clone + Zeroize> Drop for Timelocked<O> {
  fn drop(&mut self) {
    self.zeroize();
  }
}
impl<O: Clone + Zeroize> ZeroizeOnDrop for Timelocked<O> {}

impl<O: Clone + Zeroize> Timelocked<O> {
  pub fn timelock(&self) -> Timelock {
    self.0
  }

  /// Return the outputs if they're not timelocked, or an empty vector if they are.
  #[must_use]
  pub fn not_locked(&self) -> Vec<O> {
    if self.0 == Timelock::None {
      return self.1.clone();
    }
    vec![]
  }

  /// Returns None if the Timelocks aren't comparable. Returns Some(vec![]) if none are unlocked.
  #[must_use]
  pub fn unlocked(&self, timelock: Timelock) -> Option<Vec<O>> {
    // If the Timelocks are comparable, return the outputs if they're now unlocked
    if self.0 <= timelock {
      Some(self.1.clone())
    } else {
      None
    }
  }

  #[must_use]
  pub fn ignore_timelock(&self) -> Vec<O> {
    self.1.clone()
  }
}
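
// Illustrative sketch (not part of the original source): filtering scanned outputs by
// timelock. `timelocked` is assumed to be a Timelocked<ReceivedOutput> returned by
// scan_transaction, and `current_height` a hypothetical chain height.
//
// let spendable_now = timelocked.not_locked();
// let unlocked_at_height = timelocked.unlocked(Timelock::Block(current_height));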

impl Scanner {
  /// Scan a transaction to discover the received outputs.
  pub fn scan_transaction(&mut self, tx: &Transaction) -> Timelocked<ReceivedOutput> {
    // Only scan RCT TXs since we can only spend RCT outputs
    if tx.prefix.version != 2 {
      return Timelocked(tx.prefix.timelock, vec![]);
    }

    let Ok(extra) = Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref()) else {
      return Timelocked(tx.prefix.timelock, vec![]);
    };

    let Some((tx_key, additional)) = extra.keys() else {
      return Timelocked(tx.prefix.timelock, vec![]);
    };

    let payment_id = extra.payment_id();

    let mut res = vec![];
    for (o, output) in tx.prefix.outputs.iter().enumerate() {
      // https://github.com/serai-dex/serai/issues/106
      if let Some(burning_bug) = self.burning_bug.as_ref() {
        if burning_bug.contains(&output.key) {
          continue;
        }
      }

      let output_key = output.key.decompress();
      if output_key.is_none() {
        continue;
      }
      let output_key = output_key.unwrap();

      for key in [Some(Some(&tx_key)), additional.as_ref().map(|additional| additional.get(o))] {
        let Some(Some(key)) = key else {
          if key == Some(None) {
            // This is non-standard. There were additional keys, yet not one for this output
            // https://github.com/monero-project/monero/
            // blob/04a1e2875d6e35e27bb21497988a6c822d319c28/
            // src/cryptonote_basic/cryptonote_format_utils.cpp#L1062
            // TODO: Should this return? Where does Monero set the trap handler for this exception?
            continue;
          } else {
            break;
          }
        };
        let (view_tag, shared_key, payment_id_xor) = shared_key(
          if self.burning_bug.is_none() { Some(uniqueness(&tx.prefix.inputs)) } else { None },
          self.pair.view.deref() * key,
          o,
        );

        let payment_id =
          if let Some(PaymentId::Encrypted(id)) = payment_id.map(|id| id ^ payment_id_xor) {
            id
          } else {
            payment_id_xor
          };

        if let Some(actual_view_tag) = output.view_tag {
          if actual_view_tag != view_tag {
            continue;
          }
        }

        // P - shared == spend
        let subaddress = self
          .subaddresses
          .get(&(output_key - (&shared_key * &ED25519_BASEPOINT_TABLE)).compress());
        if subaddress.is_none() {
          continue;
        }
        let subaddress = *subaddress.unwrap();

        // If it has torsion, subtracting the non-torsioned shared key will leave a torsioned key
        // We will not have a torsioned key in our HashMap of keys, so we wouldn't identify it as
        // ours
        // If we did though, it'd enable bypassing the included burning bug protection
        assert!(output_key.is_torsion_free());

        let mut key_offset = shared_key;
        if let Some(subaddress) = subaddress {
          key_offset += self.pair.subaddress_derivation(subaddress);
        }
        // Since we've found an output to us, get its amount
        let mut commitment = Commitment::zero();

        // Miner transaction
        if let Some(amount) = output.amount {
          commitment.amount = amount;
        // Regular transaction
        } else {
          let (mask, amount) = match tx.rct_signatures.base.encrypted_amounts.get(o) {
            Some(amount) => amount_decryption(amount, shared_key),
            // This should never happen, yet it may be possible with miner transactions?
            // Using get just decreases the possibility of a panic and lets us move on in that case
            None => break,
          };

          // Rebuild the commitment to verify it
          commitment = Commitment::new(mask, amount);
          // If this is a malicious commitment, move to the next output
          // Any other R value will calculate to a different spend key and is therefore ignorable
          if Some(&commitment.calculate()) != tx.rct_signatures.base.commitments.get(o) {
            break;
          }
        }

        if commitment.amount != 0 {
          res.push(ReceivedOutput {
            absolute: AbsoluteId { tx: tx.hash(), o: o.try_into().unwrap() },

            data: OutputData { key: output_key, key_offset, commitment },

            metadata: Metadata { subaddress, payment_id, arbitrary_data: extra.data() },
          });

          if let Some(burning_bug) = self.burning_bug.as_mut() {
            burning_bug.insert(output.key);
          }
        }
        // Break to prevent public keys from being included multiple times, triggering multiple
        // inclusions of the same output
        break;
      }
    }

    Timelocked(tx.prefix.timelock, res)
  }

  /// Scan a block to obtain its spendable outputs. It's the presence in a block which gives
  /// these transactions their global index, and this must be batched, as asking for the index
  /// of specific transactions is a dead giveaway for which transactions you successfully
  /// scanned. This function obtains the output indexes for the miner transaction and increments
  /// from there instead.
  pub async fn scan<RPC: RpcConnection>(
    &mut self,
    rpc: &Rpc<RPC>,
    block: &Block,
  ) -> Result<Vec<Timelocked<SpendableOutput>>, RpcError> {
    let mut index = rpc.get_o_indexes(block.miner_tx.hash()).await?[0];
    let mut txs = vec![block.miner_tx.clone()];
    txs.extend(rpc.get_transactions(&block.txs).await?);

    let map = |mut timelock: Timelocked<ReceivedOutput>, index| {
      if timelock.1.is_empty() {
        None
      } else {
        Some(Timelocked(
          timelock.0,
          timelock
            .1
            .drain(..)
            .map(|output| SpendableOutput {
              global_index: index + u64::from(output.absolute.o),
              output,
            })
            .collect(),
        ))
      }
    };

    let mut res = vec![];
    for tx in txs {
      if let Some(timelock) = map(self.scan_transaction(&tx), index) {
        res.push(timelock);
      }
      index += u64::try_from(
        tx.prefix
          .outputs
          .iter()
          // Filter to v2 miner TX outputs/RCT outputs since we're tracking the RCT output index
          .filter(|output| {
            ((tx.prefix.version == 2) && matches!(tx.prefix.inputs.get(0), Some(Input::Gen(..)))) ||
              output.amount.is_none()
          })
          .count(),
      )
      .unwrap();
    }
    Ok(res)
  }
}
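
// Illustrative sketch (not part of the original source): scanning a block fetched over the
// RPC. `rpc`, `scanner`, and `height` are assumed to exist, as is a hypothetical
// `get_block_by_number` RPC method.
//
// let block = rpc.get_block_by_number(height).await?;
// for timelocked in scanner.scan(&rpc, &block).await? {
//   for output in timelocked.not_locked() {
//     // Persist output.key().compress() so future Scanners can be constructed securely
//   }
// }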
@@ -1,271 +0,0 @@
use core::ops::Deref;
use std_shims::{
  sync::OnceLock,
  vec::Vec,
  string::{String, ToString},
  collections::HashMap,
};

use zeroize::{Zeroize, Zeroizing};
use rand_core::{RngCore, CryptoRng};

use crc::{Crc, CRC_32_ISO_HDLC};

use curve25519_dalek::scalar::Scalar;

use crate::{
  random_scalar,
  wallet::seed::{SeedError, Language},
};

pub(crate) const CLASSIC_SEED_LENGTH: usize = 24;
pub(crate) const CLASSIC_SEED_LENGTH_WITH_CHECKSUM: usize = 25;

fn trim(word: &str, len: usize) -> Zeroizing<String> {
  Zeroizing::new(word.chars().take(len).collect())
}

struct WordList {
  word_list: Vec<&'static str>,
  word_map: HashMap<&'static str, usize>,
  trimmed_word_map: HashMap<String, usize>,
  unique_prefix_length: usize,
}

impl WordList {
  fn new(word_list: Vec<&'static str>, prefix_length: usize) -> Self {
    let mut lang = Self {
      word_list,
      word_map: HashMap::new(),
      trimmed_word_map: HashMap::new(),
      unique_prefix_length: prefix_length,
    };

    for (i, word) in lang.word_list.iter().enumerate() {
      lang.word_map.insert(word, i);
      lang.trimmed_word_map.insert(trim(word, lang.unique_prefix_length).deref().clone(), i);
    }

    lang
  }
}

static LANGUAGES_CELL: OnceLock<HashMap<Language, WordList>> = OnceLock::new();
#[allow(non_snake_case)]
fn LANGUAGES() -> &'static HashMap<Language, WordList> {
  LANGUAGES_CELL.get_or_init(|| {
    HashMap::from([
      (Language::Chinese, WordList::new(include!("./classic/zh.rs"), 1)),
      (Language::English, WordList::new(include!("./classic/en.rs"), 3)),
      (Language::Dutch, WordList::new(include!("./classic/nl.rs"), 4)),
      (Language::French, WordList::new(include!("./classic/fr.rs"), 4)),
      (Language::Spanish, WordList::new(include!("./classic/es.rs"), 4)),
      (Language::German, WordList::new(include!("./classic/de.rs"), 4)),
      (Language::Italian, WordList::new(include!("./classic/it.rs"), 4)),
      (Language::Portuguese, WordList::new(include!("./classic/pt.rs"), 4)),
      (Language::Japanese, WordList::new(include!("./classic/ja.rs"), 3)),
      (Language::Russian, WordList::new(include!("./classic/ru.rs"), 4)),
      (Language::Esperanto, WordList::new(include!("./classic/eo.rs"), 4)),
      (Language::Lojban, WordList::new(include!("./classic/jbo.rs"), 4)),
      (Language::EnglishOld, WordList::new(include!("./classic/ang.rs"), 4)),
    ])
  })
}

#[cfg(test)]
pub(crate) fn trim_by_lang(word: &str, lang: Language) -> String {
  if lang == Language::EnglishOld {
    word.to_string()
  } else {
    word.chars().take(LANGUAGES()[&lang].unique_prefix_length).collect()
  }
}

fn checksum_index(words: &[Zeroizing<String>], lang: &WordList) -> usize {
  let mut trimmed_words = Zeroizing::new(String::new());
  for w in words {
    *trimmed_words += &trim(w, lang.unique_prefix_length);
  }

  let crc = Crc::<u32>::new(&CRC_32_ISO_HDLC);
  let mut digest = crc.digest();
  digest.update(trimmed_words.as_bytes());

  usize::try_from(digest.finalize()).unwrap() % words.len()
}

// Convert a private key to a seed
fn key_to_seed(lang: Language, key: Zeroizing<Scalar>) -> ClassicSeed {
  let bytes = Zeroizing::new(key.to_bytes());

  // Get the language's words
  let words = &LANGUAGES()[&lang].word_list;
  let list_len = u64::try_from(words.len()).unwrap();

  // To store the found words and add the checksum word later
  let mut seed = Vec::with_capacity(25);

  // Convert to words
  // 4 bytes -> 3 words. 8 digits base 16 -> 3 digits base 1626
  let mut segment = [0; 4];
  let mut indices = [0; 4];
  for i in 0 .. 8 {
    // Convert the next 4 bytes to a u32 and get the word indices
    let start = i * 4;
    segment.copy_from_slice(&bytes[start .. (start + 4)]);
    // Actually convert to a u64 so we can add without overflowing
    indices[0] = u64::from(u32::from_le_bytes(segment));
    indices[1] = indices[0];
    indices[0] /= list_len;
    indices[2] = indices[0] + indices[1];
    indices[0] /= list_len;
    indices[3] = indices[0] + indices[2];

    // Append the words to the seed
    for i in indices.iter().skip(1) {
      let word = usize::try_from(i % list_len).unwrap();
      seed.push(Zeroizing::new(words[word].to_string()));
    }
  }
  segment.zeroize();
  indices.zeroize();

  // Create a checksum word for all languages except Old English
  if lang != Language::EnglishOld {
    let checksum = seed[checksum_index(&seed, &LANGUAGES()[&lang])].clone();
    seed.push(checksum);
  }

  let mut res = Zeroizing::new(String::new());
  for (i, word) in seed.iter().enumerate() {
    if i != 0 {
      *res += " ";
    }
    *res += word;
  }
  ClassicSeed(res)
}
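
// Worked example (not part of the original source), with a list length of 1626: the u32 value
// 1626 yields indices[1] = 1626 (word 1626 % 1626 = 0), indices[2] = 1 + 1626 = 1627 (word 1),
// and indices[3] = 0 + 1627 = 1627 (word 1). seed_to_bytes below inverts this as
// 0 + ((1626 - 0 + 1) % 1626) * 1626 + ((1626 - 1 + 1) % 1626) * 1626^2 = 1626.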

// Convert a seed to bytes
pub(crate) fn seed_to_bytes(words: &str) -> Result<(Language, Zeroizing<[u8; 32]>), SeedError> {
  // get seed words
  let words = words.split_whitespace().map(|w| Zeroizing::new(w.to_string())).collect::<Vec<_>>();
  if (words.len() != CLASSIC_SEED_LENGTH) && (words.len() != CLASSIC_SEED_LENGTH_WITH_CHECKSUM) {
    panic!("invalid seed passed to seed_to_bytes");
  }

  // find the language
  let (matched_indices, lang_name, lang) = (|| {
    let has_checksum = words.len() == CLASSIC_SEED_LENGTH_WITH_CHECKSUM;
    let mut matched_indices = Zeroizing::new(vec![]);

    // Iterate through all the languages
    'language: for (lang_name, lang) in LANGUAGES().iter() {
      matched_indices.zeroize();
      matched_indices.clear();

      // Iterate through all the words and see if they're all present
      for word in &words {
        let trimmed = trim(word, lang.unique_prefix_length);
        let word = if has_checksum { &trimmed } else { word };

        if let Some(index) = if has_checksum {
          lang.trimmed_word_map.get(word.deref())
        } else {
          lang.word_map.get(&word.as_str())
        } {
          matched_indices.push(*index);
        } else {
          continue 'language;
        }
      }

      if has_checksum {
        if lang_name == &Language::EnglishOld {
          Err(SeedError::EnglishOldWithChecksum)?;
        }

        // exclude the last word when calculating a checksum
        let last_word = words.last().unwrap().clone();
        let checksum = words[checksum_index(&words[.. words.len() - 1], lang)].clone();

        // check the trimmed checksum and trimmed last word line up
        if trim(&checksum, lang.unique_prefix_length) != trim(&last_word, lang.unique_prefix_length)
        {
          Err(SeedError::InvalidChecksum)?;
        }
      }

      return Ok((matched_indices, lang_name, lang));
    }

    Err(SeedError::UnknownLanguage)?
  })()?;

  // convert to bytes
  let mut res = Zeroizing::new([0; 32]);
  let mut indices = Zeroizing::new([0; 4]);
  for i in 0 .. 8 {
    // read 3 indices at a time
    let i3 = i * 3;
    indices[1] = matched_indices[i3];
    indices[2] = matched_indices[i3 + 1];
    indices[3] = matched_indices[i3 + 2];

    let inner = |i| {
      let mut base = (lang.word_list.len() - indices[i] + indices[i + 1]) % lang.word_list.len();
      // Shift the index over
      for _ in 0 .. i {
        base *= lang.word_list.len();
      }
      base
    };
    // set the last index
    indices[0] = indices[1] + inner(1) + inner(2);
    if (indices[0] % lang.word_list.len()) != indices[1] {
      Err(SeedError::InvalidSeed)?;
    }

    let pos = i * 4;
    let mut bytes = u32::try_from(indices[0]).unwrap().to_le_bytes();
    res[pos .. (pos + 4)].copy_from_slice(&bytes);
    bytes.zeroize();
  }

  Ok((*lang_name, res))
}

#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct ClassicSeed(Zeroizing<String>);
impl ClassicSeed {
  pub(crate) fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Self {
    key_to_seed(lang, Zeroizing::new(random_scalar(rng)))
  }

  pub fn from_string(words: Zeroizing<String>) -> Result<Self, SeedError> {
    let (lang, entropy) = seed_to_bytes(&words)?;

    // Make sure this is a valid scalar
    let mut scalar = Scalar::from_canonical_bytes(*entropy);
    if scalar.is_none() {
      Err(SeedError::InvalidSeed)?;
    }
    scalar.zeroize();

    // Call from_entropy so a trimmed seed becomes a full seed
    Ok(Self::from_entropy(lang, entropy).unwrap())
  }

  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Self> {
    Scalar::from_canonical_bytes(*entropy).map(|scalar| key_to_seed(lang, Zeroizing::new(scalar)))
  }

  pub(crate) fn to_string(&self) -> Zeroizing<String> {
    self.0.clone()
  }

  pub(crate) fn entropy(&self) -> Zeroizing<[u8; 32]> {
    seed_to_bytes(&self.0).unwrap().1
  }
}
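
// Illustrative round-trip sketch (not part of the original source), assuming some
// `rng: impl RngCore + CryptoRng`: a seed's entropy reproduces the same seed string.
//
// let seed = ClassicSeed::new(&mut rng, Language::English);
// let rebuilt = ClassicSeed::from_entropy(Language::English, seed.entropy()).unwrap();
// assert_eq!(*seed.to_string(), *rebuilt.to_string());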
@@ -1,92 +0,0 @@
use core::fmt;
use std_shims::string::String;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use rand_core::{RngCore, CryptoRng};

pub(crate) mod classic;
use classic::{CLASSIC_SEED_LENGTH, CLASSIC_SEED_LENGTH_WITH_CHECKSUM, ClassicSeed};

/// Error when decoding a seed.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum SeedError {
  #[cfg_attr(feature = "std", error("invalid number of words in seed"))]
  InvalidSeedLength,
  #[cfg_attr(feature = "std", error("unknown language"))]
  UnknownLanguage,
  #[cfg_attr(feature = "std", error("invalid checksum"))]
  InvalidChecksum,
  #[cfg_attr(feature = "std", error("english old seeds don't support checksums"))]
  EnglishOldWithChecksum,
  #[cfg_attr(feature = "std", error("invalid seed"))]
  InvalidSeed,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum Language {
  Chinese,
  English,
  Dutch,
  French,
  Spanish,
  German,
  Italian,
  Portuguese,
  Japanese,
  Russian,
  Esperanto,
  Lojban,
  EnglishOld,
}

/// A Monero seed.
// TODO: Add polyseed to enum
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub enum Seed {
  Classic(ClassicSeed),
}

impl fmt::Debug for Seed {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    match self {
      Self::Classic(_) => f.debug_struct("Seed::Classic").finish_non_exhaustive(),
    }
  }
}

impl Seed {
  /// Create a new seed.
  pub fn new<R: RngCore + CryptoRng>(rng: &mut R, lang: Language) -> Self {
    Self::Classic(ClassicSeed::new(rng, lang))
  }

  /// Parse a seed from a String.
  pub fn from_string(words: Zeroizing<String>) -> Result<Self, SeedError> {
    match words.split_whitespace().count() {
      CLASSIC_SEED_LENGTH | CLASSIC_SEED_LENGTH_WITH_CHECKSUM => {
        ClassicSeed::from_string(words).map(Self::Classic)
      }
      _ => Err(SeedError::InvalidSeedLength)?,
    }
  }

  /// Create a Seed from entropy.
  pub fn from_entropy(lang: Language, entropy: Zeroizing<[u8; 32]>) -> Option<Self> {
    ClassicSeed::from_entropy(lang, entropy).map(Self::Classic)
  }

  /// Convert a seed to a String.
  pub fn to_string(&self) -> Zeroizing<String> {
    match self {
      Self::Classic(seed) => seed.to_string(),
    }
  }

  /// Return the entropy for this seed.
  pub fn entropy(&self) -> Zeroizing<[u8; 32]> {
    match self {
      Self::Classic(seed) => seed.entropy(),
    }
  }
}
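
// Illustrative sketch (not part of the original source), assuming some
// `rng: impl RngCore + CryptoRng`: generating a seed and re-parsing its string form.
//
// let seed = Seed::new(&mut rng, Language::English);
// let parsed = Seed::from_string(seed.to_string()).unwrap();
// assert_eq!(*seed.entropy(), *parsed.entropy());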
@@ -1,145 +0,0 @@
use std::sync::{Arc, RwLock};

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use crate::{
  Protocol,
  wallet::{
    address::MoneroAddress, Fee, SpendableOutput, Change, SignableTransaction, TransactionError,
    extra::MAX_ARBITRARY_DATA_SIZE,
  },
};

#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SignableTransactionBuilderInternal {
  protocol: Protocol,
  fee: Fee,

  r_seed: Option<Zeroizing<[u8; 32]>>,
  inputs: Vec<SpendableOutput>,
  payments: Vec<(MoneroAddress, u64)>,
  change_address: Option<Change>,
  data: Vec<Vec<u8>>,
}

impl SignableTransactionBuilderInternal {
  // Takes in the change address so users don't miss that they have to manually set one
  // If they don't, all leftover funds will become part of the fee
  fn new(protocol: Protocol, fee: Fee, change_address: Option<Change>) -> Self {
    Self {
      protocol,
      fee,
      r_seed: None,
      inputs: vec![],
      payments: vec![],
      change_address,
      data: vec![],
    }
  }

  fn set_r_seed(&mut self, r_seed: Zeroizing<[u8; 32]>) {
    self.r_seed = Some(r_seed);
  }

  fn add_input(&mut self, input: SpendableOutput) {
    self.inputs.push(input);
  }
  fn add_inputs(&mut self, inputs: &[SpendableOutput]) {
    self.inputs.extend(inputs.iter().cloned());
  }

  fn add_payment(&mut self, dest: MoneroAddress, amount: u64) {
    self.payments.push((dest, amount));
  }
  fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) {
    self.payments.extend(payments);
  }

  fn add_data(&mut self, data: Vec<u8>) {
    self.data.push(data);
  }
}

/// A Transaction Builder for Monero transactions.
/// All methods provided will modify self while also returning a shallow copy, enabling efficient
/// chaining with a clean API.
/// In order to fork the builder at some point, clone will still return a deep copy.
#[derive(Debug)]
pub struct SignableTransactionBuilder(Arc<RwLock<SignableTransactionBuilderInternal>>);
impl Clone for SignableTransactionBuilder {
  fn clone(&self) -> Self {
    Self(Arc::new(RwLock::new((*self.0.read().unwrap()).clone())))
  }
}

impl PartialEq for SignableTransactionBuilder {
  fn eq(&self, other: &Self) -> bool {
    *self.0.read().unwrap() == *other.0.read().unwrap()
  }
}
impl Eq for SignableTransactionBuilder {}

impl Zeroize for SignableTransactionBuilder {
  fn zeroize(&mut self) {
    self.0.write().unwrap().zeroize();
  }
}

#[allow(clippy::return_self_not_must_use)]
impl SignableTransactionBuilder {
  fn shallow_copy(&self) -> Self {
    Self(self.0.clone())
  }

  pub fn new(protocol: Protocol, fee: Fee, change_address: Option<Change>) -> Self {
    Self(Arc::new(RwLock::new(SignableTransactionBuilderInternal::new(
      protocol,
      fee,
      change_address,
    ))))
  }

  pub fn set_r_seed(&mut self, r_seed: Zeroizing<[u8; 32]>) -> Self {
    self.0.write().unwrap().set_r_seed(r_seed);
    self.shallow_copy()
  }

  pub fn add_input(&mut self, input: SpendableOutput) -> Self {
    self.0.write().unwrap().add_input(input);
    self.shallow_copy()
  }
  pub fn add_inputs(&mut self, inputs: &[SpendableOutput]) -> Self {
    self.0.write().unwrap().add_inputs(inputs);
    self.shallow_copy()
  }

  pub fn add_payment(&mut self, dest: MoneroAddress, amount: u64) -> Self {
    self.0.write().unwrap().add_payment(dest, amount);
    self.shallow_copy()
  }
  pub fn add_payments(&mut self, payments: &[(MoneroAddress, u64)]) -> Self {
    self.0.write().unwrap().add_payments(payments);
    self.shallow_copy()
  }

  pub fn add_data(&mut self, data: Vec<u8>) -> Result<Self, TransactionError> {
    if data.len() > MAX_ARBITRARY_DATA_SIZE {
      Err(TransactionError::TooMuchData)?;
    }
    self.0.write().unwrap().add_data(data);
    Ok(self.shallow_copy())
  }

  pub fn build(self) -> Result<SignableTransaction, TransactionError> {
    let read = self.0.read().unwrap();
    SignableTransaction::new(
      read.protocol,
      read.r_seed.clone(),
      read.inputs.clone(),
      read.payments.clone(),
      read.change_address.clone(),
      read.data.clone(),
      read.fee,
    )
  }
}
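
// Illustrative sketch (not part of the original source): assembling a transaction with the
// builder. `protocol`, `fee_rate`, `change`, `spendable`, and `dest` are assumed to exist.
//
// let signable = SignableTransactionBuilder::new(protocol, fee_rate, Some(change))
//   .add_input(spendable)
//   .add_payment(dest, 1_000_000_000_000) // 1 XMR in atomic units
//   .build()?;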
@@ -1,854 +0,0 @@
use core::{ops::Deref, fmt};
use std_shims::{
  vec::Vec,
  io,
  string::{String, ToString},
};

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use rand::seq::SliceRandom;

use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

use group::Group;
use curve25519_dalek::{
  constants::{ED25519_BASEPOINT_POINT, ED25519_BASEPOINT_TABLE},
  scalar::Scalar,
  edwards::EdwardsPoint,
};
use dalek_ff_group as dfg;

#[cfg(feature = "multisig")]
use frost::FrostError;

use crate::{
  Protocol, Commitment, hash, random_scalar,
  serialize::{
    read_byte, read_bytes, read_u64, read_scalar, read_point, read_vec, write_byte, write_scalar,
    write_point, write_raw_vec, write_vec,
  },
  ringct::{
    generate_key_image,
    clsag::{ClsagError, ClsagInput, Clsag},
    bulletproofs::{MAX_OUTPUTS, Bulletproofs},
    RctBase, RctPrunable, RctSignatures,
  },
  transaction::{Input, Output, Timelock, TransactionPrefix, Transaction},
  rpc::{RpcError, RpcConnection, Rpc},
  wallet::{
    address::{Network, AddressSpec, MoneroAddress},
    ViewPair, SpendableOutput, Decoys, PaymentId, ExtraField, Extra, key_image_sort, uniqueness,
    shared_key, commitment_mask, amount_encryption,
    extra::{ARBITRARY_DATA_MARKER, MAX_ARBITRARY_DATA_SIZE},
  },
};

#[cfg(feature = "std")]
mod builder;
#[cfg(feature = "std")]
pub use builder::SignableTransactionBuilder;

#[cfg(feature = "multisig")]
mod multisig;
#[cfg(feature = "multisig")]
pub use multisig::TransactionMachine;
use crate::ringct::EncryptedAmount;

#[allow(non_snake_case)]
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
struct SendOutput {
  R: EdwardsPoint,
  view_tag: u8,
  dest: EdwardsPoint,
  commitment: Commitment,
  amount: [u8; 8],
}

impl SendOutput {
  #[allow(non_snake_case)]
  fn internal(
    unique: [u8; 32],
    output: (usize, (MoneroAddress, u64)),
    ecdh: EdwardsPoint,
    R: EdwardsPoint,
  ) -> (Self, Option<[u8; 8]>) {
    let o = output.0;
    let output = output.1;

    let (view_tag, shared_key, payment_id_xor) =
      shared_key(Some(unique).filter(|_| output.0.is_guaranteed()), ecdh, o);

    (
      Self {
        R,
        view_tag,
        dest: ((&shared_key * &ED25519_BASEPOINT_TABLE) + output.0.spend),
        commitment: Commitment::new(commitment_mask(shared_key), output.1),
        amount: amount_encryption(output.1, shared_key),
      },
      output
        .0
        .payment_id()
        .map(|id| (u64::from_le_bytes(id) ^ u64::from_le_bytes(payment_id_xor)).to_le_bytes()),
    )
  }

  fn new(
    r: &Zeroizing<Scalar>,
    unique: [u8; 32],
    output: (usize, (MoneroAddress, u64)),
  ) -> (Self, Option<[u8; 8]>) {
    let address = output.1 .0;
    Self::internal(
      unique,
      output,
      r.deref() * address.view,
      if address.is_subaddress() {
        r.deref() * address.spend
      } else {
        r.deref() * &ED25519_BASEPOINT_TABLE
      },
    )
  }

  fn change(
    ecdh: EdwardsPoint,
    unique: [u8; 32],
    output: (usize, (MoneroAddress, u64)),
  ) -> (Self, Option<[u8; 8]>) {
    Self::internal(unique, output, ecdh, ED25519_BASEPOINT_POINT)
  }
}

#[derive(Clone, PartialEq, Eq, Debug)]
#[cfg_attr(feature = "std", derive(thiserror::Error))]
pub enum TransactionError {
  #[cfg_attr(feature = "std", error("multiple addresses with payment IDs"))]
  MultiplePaymentIds,
  #[cfg_attr(feature = "std", error("no inputs"))]
  NoInputs,
  #[cfg_attr(feature = "std", error("no outputs"))]
  NoOutputs,
  #[cfg_attr(feature = "std", error("only one output and no change address"))]
  NoChange,
  #[cfg_attr(feature = "std", error("too many outputs"))]
  TooManyOutputs,
  #[cfg_attr(feature = "std", error("too much data"))]
  TooMuchData,
  #[cfg_attr(feature = "std", error("too many inputs/too much arbitrary data"))]
  TooLargeTransaction,
  #[cfg_attr(feature = "std", error("not enough funds (in {0}, out {1})"))]
  NotEnoughFunds(u64, u64),
  #[cfg_attr(feature = "std", error("wrong spend private key"))]
  WrongPrivateKey,
  #[cfg_attr(feature = "std", error("rpc error ({0})"))]
  RpcError(RpcError),
  #[cfg_attr(feature = "std", error("clsag error ({0})"))]
  ClsagError(ClsagError),
  #[cfg_attr(feature = "std", error("invalid transaction ({0})"))]
  InvalidTransaction(RpcError),
  #[cfg(feature = "multisig")]
  #[cfg_attr(feature = "std", error("frost error {0}"))]
  FrostError(FrostError),
}

async fn prepare_inputs<R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
  rng: &mut R,
  rpc: &Rpc<RPC>,
  ring_len: usize,
  inputs: &[SpendableOutput],
  spend: &Zeroizing<Scalar>,
  tx: &mut Transaction,
) -> Result<Vec<(Zeroizing<Scalar>, EdwardsPoint, ClsagInput)>, TransactionError> {
  let mut signable = Vec::with_capacity(inputs.len());

  // Select decoys
  let decoys = Decoys::select(
    rng,
    rpc,
    ring_len,
    rpc.get_height().await.map_err(TransactionError::RpcError)? - 1,
    inputs,
  )
  .await
  .map_err(TransactionError::RpcError)?;

  for (i, input) in inputs.iter().enumerate() {
    let input_spend = Zeroizing::new(input.key_offset() + spend.deref());
    let image = generate_key_image(&input_spend);
    signable.push((
      input_spend,
      image,
      ClsagInput::new(input.commitment().clone(), decoys[i].clone())
        .map_err(TransactionError::ClsagError)?,
    ));

    tx.prefix.inputs.push(Input::ToKey {
      amount: None,
      key_offsets: decoys[i].offsets.clone(),
      key_image: signable[i].1,
    });
  }

  signable.sort_by(|x, y| x.1.compress().to_bytes().cmp(&y.1.compress().to_bytes()).reverse());
  tx.prefix.inputs.sort_by(|x, y| {
    if let (Input::ToKey { key_image: x, .. }, Input::ToKey { key_image: y, .. }) = (x, y) {
      x.compress().to_bytes().cmp(&y.compress().to_bytes()).reverse()
    } else {
      panic!("Input wasn't ToKey")
    }
  });

  Ok(signable)
}

/// Fee struct, defined as a per-unit cost and a mask for rounding purposes.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Zeroize)]
pub struct Fee {
  pub per_weight: u64,
  pub mask: u64,
}

impl Fee {
  pub fn calculate(&self, weight: usize) -> u64 {
    ((((self.per_weight * u64::try_from(weight).unwrap()) - 1) / self.mask) + 1) * self.mask
  }
}
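
// Worked example (not part of the original source): calculate rounds the raw product up to a
// multiple of the mask. With per_weight = 8 and mask = 10, a weight of 1_000 yields a raw fee
// of 8_000, already a multiple of 10, so the result is 8_000; raw fees of 8_001 through 8_010
// would all round up to 8_010.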

#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub(crate) enum InternalPayment {
  Payment((MoneroAddress, u64)),
  Change(Change, u64),
}

/// The eventual output of a SignableTransaction.
///
/// If the SignableTransaction has a Change with a view key, this will also have the view key.
/// Accordingly, it must be treated securely.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize)]
pub struct Eventuality {
  protocol: Protocol,
  r_seed: Zeroizing<[u8; 32]>,
  inputs: Vec<EdwardsPoint>,
  payments: Vec<InternalPayment>,
  extra: Vec<u8>,
}

/// A signable transaction, either in a single-signer or multisig context.
#[derive(Clone, PartialEq, Eq, Debug, Zeroize, ZeroizeOnDrop)]
pub struct SignableTransaction {
  protocol: Protocol,
  r_seed: Option<Zeroizing<[u8; 32]>>,
  inputs: Vec<SpendableOutput>,
  payments: Vec<InternalPayment>,
  data: Vec<Vec<u8>>,
  fee: u64,
}

/// Specification for a change output.
#[derive(Clone, PartialEq, Eq, Zeroize)]
pub struct Change {
  address: MoneroAddress,
  view: Option<Zeroizing<Scalar>>,
}

impl fmt::Debug for Change {
  fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    f.debug_struct("Change").field("address", &self.address).finish_non_exhaustive()
  }
}

impl Change {
  /// Create a change output specification from a ViewPair, as needed to maintain privacy.
  pub fn new(view: &ViewPair, guaranteed: bool) -> Self {
    Self {
      address: view.address(
        Network::Mainnet,
        if !guaranteed {
          AddressSpec::Standard
        } else {
          AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true }
        },
      ),
      view: Some(view.view.clone()),
    }
  }

  /// Create a fingerprintable change output specification which will harm privacy.
  ///
  /// Only use this if you know what you're doing.
  pub const fn fingerprintable(address: MoneroAddress) -> Self {
    Self { address, view: None }
  }
}

impl SignableTransaction {
  /// Create a signable transaction.
  ///
  /// `r_seed` refers to a seed used to derive the transaction's ephemeral keys (colloquially
  /// called Rs). If None is provided, one will be automatically generated.
  ///
  /// Up to 16 outputs may be present, including the change output. If the change address is
  /// specified, leftover funds will be sent to it.
  ///
  /// Each chunk of data must not exceed MAX_ARBITRARY_DATA_SIZE and will be embedded in TX extra.
  pub fn new(
    protocol: Protocol,
    r_seed: Option<Zeroizing<[u8; 32]>>,
    inputs: Vec<SpendableOutput>,
    payments: Vec<(MoneroAddress, u64)>,
    change_address: Option<Change>,
    data: Vec<Vec<u8>>,
    fee_rate: Fee,
  ) -> Result<Self, TransactionError> {
    // Make sure there's only one payment ID
    let mut has_payment_id = {
      let mut payment_ids = 0;
      let mut count = |addr: MoneroAddress| {
        if addr.payment_id().is_some() {
          payment_ids += 1;
        }
      };
      for payment in &payments {
        count(payment.0);
      }
      if let Some(change) = change_address.as_ref() {
        count(change.address);
      }
      if payment_ids > 1 {
        Err(TransactionError::MultiplePaymentIds)?;
      }
      payment_ids == 1
    };

    if inputs.is_empty() {
      Err(TransactionError::NoInputs)?;
    }
    if payments.is_empty() {
      Err(TransactionError::NoOutputs)?;
    }

    for part in &data {
      if part.len() > MAX_ARBITRARY_DATA_SIZE {
        Err(TransactionError::TooMuchData)?;
      }
    }

    // If we don't have two outputs, as required by Monero, error
    if (payments.len() == 1) && change_address.is_none() {
      Err(TransactionError::NoChange)?;
    }
    let outputs = payments.len() + usize::from(change_address.is_some());
    // Add a dummy payment ID if there's only 2 payments
    has_payment_id |= outputs == 2;

    // Calculate the extra length
    // Assume additional keys are needed in order to cause a worst-case estimation
    let extra = Extra::fee_weight(outputs, true, has_payment_id, data.as_ref());

    // https://github.com/monero-project/monero/pull/8733
    const MAX_EXTRA_SIZE: usize = 1060;
    if extra > MAX_EXTRA_SIZE {
      Err(TransactionError::TooMuchData)?;
    }
|
|
||||||
// This is a extremely heavy fee weight estimation which can only be trusted for two things
|
|
||||||
// 1) Ensuring we have enough for whatever fee we end up using
|
|
||||||
// 2) Ensuring we aren't over the max size
|
|
||||||
let estimated_tx_size = Transaction::fee_weight(protocol, inputs.len(), outputs, extra);
|
|
||||||
|
|
||||||
// The actual limit is half the block size, and for the minimum block size of 300k, that'd be
|
|
||||||
// 150k
|
|
||||||
// wallet2 will only create transactions up to 100k bytes however
|
|
||||||
const MAX_TX_SIZE: usize = 100_000;
|
|
||||||
|
|
||||||
// This uses the weight (estimated_tx_size) despite the BP clawback
|
|
||||||
// The clawback *increases* the weight, so this will over-estimate, yet it's still safe
|
|
||||||
if estimated_tx_size >= MAX_TX_SIZE {
|
|
||||||
Err(TransactionError::TooLargeTransaction)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate the fee.
|
|
||||||
let fee = fee_rate.calculate(estimated_tx_size);
|
|
||||||
|
|
||||||
// Make sure we have enough funds
|
|
||||||
let in_amount = inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
|
|
||||||
let out_amount = payments.iter().map(|payment| payment.1).sum::<u64>() + fee;
|
|
||||||
if in_amount < out_amount {
|
|
||||||
Err(TransactionError::NotEnoughFunds(in_amount, out_amount))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if outputs > MAX_OUTPUTS {
|
|
||||||
Err(TransactionError::TooManyOutputs)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut payments = payments.into_iter().map(InternalPayment::Payment).collect::<Vec<_>>();
|
|
||||||
if let Some(change) = change_address {
|
|
||||||
payments.push(InternalPayment::Change(change, in_amount - out_amount));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Self { protocol, r_seed, inputs, payments, data, fee })
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn fee(&self) -> u64 {
|
|
||||||
self.fee
|
|
||||||
}
|
|
||||||
|
|
||||||
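
  // --- Illustrative sketch, not part of the original file ---------------------
  // A minimal example of driving the constructor above. It assumes a Protocol,
  // Fee, spendable input, destination address, and change specification are
  // already in scope; the amount (1_000_000 piconero) is a placeholder.
  #[allow(dead_code)]
  fn example_build(
    protocol: Protocol,
    fee_rate: Fee,
    input: SpendableOutput,
    dest: MoneroAddress,
    change: Change,
  ) -> Result<SignableTransaction, TransactionError> {
    SignableTransaction::new(
      protocol,
      None,              // no explicit seed; sign() will generate one
      vec![input],       // at least one input, else TransactionError::NoInputs
      vec![(dest, 1_000_000)],
      Some(change),      // required with a single payment, else NoChange
      vec![],            // no arbitrary data for the TX extra
      fee_rate,
    )
  }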
  #[allow(clippy::type_complexity)]
  fn prepare_payments(
    seed: &Zeroizing<[u8; 32]>,
    inputs: &[EdwardsPoint],
    payments: &mut Vec<InternalPayment>,
    uniqueness: [u8; 32],
  ) -> (EdwardsPoint, Vec<Zeroizing<Scalar>>, Vec<SendOutput>, Option<[u8; 8]>) {
    let mut rng = {
      // Hash the inputs into the seed so we don't re-use Rs
      // Doesn't re-use uniqueness as that's based on key images, which requires interactivity
      // to generate. The output keys do not
      // This remains private so long as the seed is private
      let mut r_uniqueness = vec![];
      for input in inputs {
        r_uniqueness.extend(input.compress().to_bytes());
      }
      ChaCha20Rng::from_seed(hash(
        &[b"monero-serai_outputs".as_ref(), seed.as_ref(), &r_uniqueness].concat(),
      ))
    };

    // Shuffle the payments
    payments.shuffle(&mut rng);

    // Used for all non-subaddress outputs, or if there's only one subaddress output and a change
    let tx_key = Zeroizing::new(random_scalar(&mut rng));
    let mut tx_public_key = tx_key.deref() * &ED25519_BASEPOINT_TABLE;

    // If any of these outputs are to a subaddress, we need keys distinct to them
    // The only time this *does not* force having additional keys is when the only other output
    // is a change output we have the view key for, enabling rewriting rA to aR
    let mut has_change_view = false;
    let subaddresses = payments
      .iter()
      .filter(|payment| match *payment {
        InternalPayment::Payment(payment) => payment.0.is_subaddress(),
        InternalPayment::Change(change, _) => {
          if change.view.is_some() {
            has_change_view = true;
            // It should not be possible to construct a change specification to a subaddress with
            // a view key
            debug_assert!(!change.address.is_subaddress());
          }
          change.address.is_subaddress()
        }
      })
      .count() != 0;

    // We need additional keys if we have any subaddresses
    // UNLESS there's only two payments and we have the view-key for the change output
    let additional = if (payments.len() == 2) && has_change_view { false } else { subaddresses };
    let modified_change_ecdh = subaddresses && (!additional);

    // If we're using the aR rewrite, update tx_public_key from rG to rB
    if modified_change_ecdh {
      for payment in &*payments {
        match payment {
          InternalPayment::Payment(payment) => {
            // This should be the only payment and it should be a subaddress
            debug_assert!(payment.0.is_subaddress());
            tx_public_key = tx_key.deref() * payment.0.spend;
          }
          InternalPayment::Change(_, _) => {}
        }
      }
      debug_assert!(tx_public_key != (tx_key.deref() * &ED25519_BASEPOINT_TABLE));
    }

    // Actually create the outputs
    let mut additional_keys = vec![];
    let mut outputs = Vec::with_capacity(payments.len());
    let mut id = None;
    for (o, mut payment) in payments.drain(..).enumerate() {
      // Downcast the change output to a payment output if it doesn't require special handling
      // regarding its view key
      payment = if !modified_change_ecdh {
        if let InternalPayment::Change(change, amount) = &payment {
          InternalPayment::Payment((change.address, *amount))
        } else {
          payment
        }
      } else {
        payment
      };

      let (output, payment_id) = match payment {
        InternalPayment::Payment(payment) => {
          // If this is a subaddress, generate a dedicated r. Else, reuse the TX key
          let dedicated = Zeroizing::new(random_scalar(&mut rng));
          let use_dedicated = additional && payment.0.is_subaddress();
          let r = if use_dedicated { &dedicated } else { &tx_key };

          let (mut output, payment_id) = SendOutput::new(r, uniqueness, (o, payment));
          if modified_change_ecdh {
            debug_assert_eq!(tx_public_key, output.R);
          }

          if use_dedicated {
            additional_keys.push(dedicated);
          } else {
            // If this used tx_key, randomize its R
            // This is so when extra is created, there's a distinct R for it to use
            output.R = dfg::EdwardsPoint::random(&mut rng).0;
          }
          (output, payment_id)
        }
        InternalPayment::Change(change, amount) => {
          // Instead of rA, use Ra, where R is r * subaddress_spend_key
          // change.view must be Some as if it's None, this payment would've been downcast
          let ecdh = tx_public_key * change.view.unwrap().deref();
          SendOutput::change(ecdh, uniqueness, (o, (change.address, amount)))
        }
      };

      outputs.push(output);
      id = id.or(payment_id);
    }

    // Include a random payment ID if we don't actually have one
    // It prevents transactions from leaking whether they're sending to integrated addresses
    // Only do this if we only have two outputs though, as Monero won't add a dummy if there's
    // more than two outputs
    if outputs.len() <= 2 {
      let mut rand = [0; 8];
      rng.fill_bytes(&mut rand);
      id = id.or(Some(rand));
    }

    (tx_public_key, additional_keys, outputs, id)
  }
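
  // --- Illustrative sketch, not part of the original file ---------------------
  // The RNG above is just ChaCha20 seeded by a hash binding a domain label, the
  // secret seed, and the input keys. The same pattern in isolation, assuming
  // this crate's `hash` and the imports already present in this file:
  #[allow(dead_code)]
  fn example_seeded_rng(seed: &Zeroizing<[u8; 32]>, inputs: &[EdwardsPoint]) -> ChaCha20Rng {
    let mut bound = Vec::with_capacity(32 * inputs.len());
    for input in inputs {
      bound.extend(input.compress().to_bytes());
    }
    // Anyone knowing the seed and the input set can recreate this RNG, which is
    // what lets Eventuality::matches re-derive the outputs deterministically
    ChaCha20Rng::from_seed(hash(
      &[b"monero-serai_outputs".as_ref(), seed.as_ref(), &bound].concat(),
    ))
  }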
  #[allow(non_snake_case)]
  fn extra(
    tx_key: EdwardsPoint,
    additional: bool,
    Rs: Vec<EdwardsPoint>,
    id: Option<[u8; 8]>,
    data: &mut Vec<Vec<u8>>,
  ) -> Vec<u8> {
    #[allow(non_snake_case)]
    let Rs_len = Rs.len();
    let mut extra = Extra::new(tx_key, if additional { Rs } else { vec![] });

    if let Some(id) = id {
      let mut id_vec = Vec::with_capacity(1 + 8);
      PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
      extra.push(ExtraField::Nonce(id_vec));
    }

    // Include data if present
    let extra_len = Extra::fee_weight(Rs_len, additional, id.is_some(), data.as_ref());
    for part in data.drain(..) {
      let mut arb = vec![ARBITRARY_DATA_MARKER];
      arb.extend(part);
      extra.push(ExtraField::Nonce(arb));
    }

    let mut serialized = Vec::with_capacity(extra_len);
    extra.write(&mut serialized).unwrap();
    debug_assert_eq!(extra_len, serialized.len());
    serialized
  }

  /// Returns the eventuality of this transaction.
  ///
  /// The eventuality is defined as the TX extra/outputs this transaction will create, if signed
  /// with the specified seed. This eventuality can be compared to on-chain transactions to see
  /// if the transaction has already been signed and published.
  pub fn eventuality(&self) -> Option<Eventuality> {
    let inputs = self.inputs.iter().map(SpendableOutput::key).collect::<Vec<_>>();
    let (tx_key, additional, outputs, id) = Self::prepare_payments(
      self.r_seed.as_ref()?,
      &inputs,
      &mut self.payments.clone(),
      // Lie about the uniqueness, used when determining output keys/commitments yet not the
      // ephemeral keys, which is what we want here
      // While we do still grab the outputs variable, it's so we can get its Rs
      [0; 32],
    );
    #[allow(non_snake_case)]
    let Rs = outputs.iter().map(|output| output.R).collect();
    drop(outputs);

    let additional = !additional.is_empty();
    let extra = Self::extra(tx_key, additional, Rs, id, &mut self.data.clone());

    Some(Eventuality {
      protocol: self.protocol,
      r_seed: self.r_seed.clone()?,
      inputs,
      payments: self.payments.clone(),
      extra,
    })
  }
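
  // --- Illustrative sketch, not part of the original file ---------------------
  // The intended flow: capture the eventuality before handing the transaction
  // off for signing, then use it to recognize the published transaction later.
  // eventuality() returns None when no r_seed was provided, as the output keys
  // then aren't deterministic.
  #[allow(dead_code)]
  fn example_was_published(signable: &SignableTransaction, on_chain: &Transaction) -> bool {
    signable.eventuality().map_or(false, |eventuality| eventuality.matches(on_chain))
  }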
  fn prepare_transaction<R: RngCore + CryptoRng>(
    &mut self,
    rng: &mut R,
    uniqueness: [u8; 32],
  ) -> (Transaction, Scalar) {
    // If no seed for the ephemeral keys was provided, make one
    let r_seed = self.r_seed.clone().unwrap_or_else(|| {
      let mut res = Zeroizing::new([0; 32]);
      rng.fill_bytes(res.as_mut());
      res
    });

    let (tx_key, additional, outputs, id) = Self::prepare_payments(
      &r_seed,
      &self.inputs.iter().map(SpendableOutput::key).collect::<Vec<_>>(),
      &mut self.payments,
      uniqueness,
    );
    // This function only cares if additional keys were necessary, not what they were
    let additional = !additional.is_empty();

    let commitments = outputs.iter().map(|output| output.commitment.clone()).collect::<Vec<_>>();
    let sum = commitments.iter().map(|commitment| commitment.mask).sum();

    // Safe due to the constructor checking MAX_OUTPUTS
    let bp = Bulletproofs::prove(rng, &commitments, self.protocol.bp_plus()).unwrap();

    // Create the TX extra
    let extra = Self::extra(
      tx_key,
      additional,
      outputs.iter().map(|output| output.R).collect(),
      id,
      &mut self.data,
    );

    let mut fee = self.inputs.iter().map(|input| input.commitment().amount).sum::<u64>();
    let mut tx_outputs = Vec::with_capacity(outputs.len());
    let mut encrypted_amounts = Vec::with_capacity(outputs.len());
    for output in &outputs {
      fee -= output.commitment.amount;
      tx_outputs.push(Output {
        amount: None,
        key: output.dest.compress(),
        view_tag: Some(output.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
      });
      encrypted_amounts.push(EncryptedAmount::Compact { amount: output.amount });
    }

    (
      Transaction {
        prefix: TransactionPrefix {
          version: 2,
          timelock: Timelock::None,
          inputs: vec![],
          outputs: tx_outputs,
          extra,
        },
        signatures: vec![],
        rct_signatures: RctSignatures {
          base: RctBase {
            fee,
            encrypted_amounts,
            pseudo_outs: vec![],
            commitments: commitments.iter().map(Commitment::calculate).collect(),
          },
          prunable: RctPrunable::Clsag { bulletproofs: bp, clsags: vec![], pseudo_outs: vec![] },
        },
      },
      sum,
    )
  }
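
  // --- Illustrative sketch, not part of the original file ---------------------
  // The fee never comes from fee_rate here: it's the remainder of input amounts
  // minus output amounts, which is exactly what the `fee -=` loop above
  // computes. In miniature, with placeholder piconero values:
  #[allow(dead_code)]
  fn example_fee_is_remainder() {
    let inputs: u64 = 60 + 50; // sum of spent outputs
    let outputs: u64 = 70 + 39; // payments plus change
    let fee = inputs - outputs;
    assert_eq!(fee, 1);
    // A verifier re-derives the same balance, so the amounts always reconcile
    assert_eq!(outputs + fee, inputs);
  }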
  /// Sign this transaction.
  pub async fn sign<R: Send + RngCore + CryptoRng, RPC: RpcConnection>(
    mut self,
    rng: &mut R,
    rpc: &Rpc<RPC>,
    spend: &Zeroizing<Scalar>,
  ) -> Result<Transaction, TransactionError> {
    let mut images = Vec::with_capacity(self.inputs.len());
    for input in &self.inputs {
      let mut offset = Zeroizing::new(spend.deref() + input.key_offset());
      if (offset.deref() * &ED25519_BASEPOINT_TABLE) != input.key() {
        Err(TransactionError::WrongPrivateKey)?;
      }

      images.push(generate_key_image(&offset));
      offset.zeroize();
    }
    images.sort_by(key_image_sort);

    let (mut tx, mask_sum) = self.prepare_transaction(
      rng,
      uniqueness(
        &images
          .iter()
          .map(|image| Input::ToKey { amount: None, key_offsets: vec![], key_image: *image })
          .collect::<Vec<_>>(),
      ),
    );

    let signable =
      prepare_inputs(rng, rpc, self.protocol.ring_len(), &self.inputs, spend, &mut tx).await?;

    let clsag_pairs = Clsag::sign(rng, signable, mask_sum, tx.signature_hash());
    match tx.rct_signatures.prunable {
      RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
      RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
        clsags.append(&mut clsag_pairs.iter().map(|clsag| clsag.0.clone()).collect::<Vec<_>>());
        pseudo_outs.append(&mut clsag_pairs.iter().map(|clsag| clsag.1).collect::<Vec<_>>());
      }
      RctPrunable::MlsagBorromean { .. } | RctPrunable::MlsagBulletproofs { .. } => {
        unreachable!("attempted to sign a TX which wasn't CLSAG")
      }
    }
    Ok(tx)
  }
}
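
// --- Illustrative sketch, not part of the original file -----------------------
// Single-signer flow, mirroring how the integration tests below drive sign():
// build the SignableTransaction, sign with a fresh OS RNG, then publish the
// result via the RPC. rand_core::OsRng is assumed as the RNG, as in the tests.
#[allow(dead_code)]
async fn example_sign_and_publish<RPC: RpcConnection>(
  signable: SignableTransaction,
  rpc: &Rpc<RPC>,
  spend: &Zeroizing<Scalar>,
) -> Result<Transaction, TransactionError> {
  let tx = signable.sign(&mut rand_core::OsRng, rpc, spend).await?;
  // Publishing is separate from signing; the tests call rpc.publish_transaction(&tx)
  Ok(tx)
}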
impl Eventuality {
  /// Enables building a HashMap of Extra -> Eventuality for efficiently checking if an on-chain
  /// transaction may match this eventuality.
  ///
  /// This extra is cryptographically bound to:
  /// 1) A specific set of inputs (via their output key)
  /// 2) A specific seed for the ephemeral keys
  ///
  /// This extra may be used with a transaction with a distinct set of inputs, yet no honest
  /// transaction which doesn't satisfy this Eventuality will contain it.
  pub fn extra(&self) -> &[u8] {
    &self.extra
  }

  #[must_use]
  pub fn matches(&self, tx: &Transaction) -> bool {
    if self.payments.len() != tx.prefix.outputs.len() {
      return false;
    }

    // Verify extra.
    // Even if all the outputs were correct, a malicious extra could still cause a recipient to
    // fail to receive their funds.
    // This is the cheapest check available to perform as it does not require TX-specific ECC ops.
    if self.extra != tx.prefix.extra {
      return false;
    }

    // Also ensure no timelock was set.
    if tx.prefix.timelock != Timelock::None {
      return false;
    }

    // Generate the outputs. This is TX-specific due to uniqueness.
    let (_, _, outputs, _) = SignableTransaction::prepare_payments(
      &self.r_seed,
      &self.inputs,
      &mut self.payments.clone(),
      uniqueness(&tx.prefix.inputs),
    );

    let rct_type = tx.rct_signatures.rct_type();
    if rct_type != self.protocol.optimal_rct_type() {
      return false;
    }

    // TODO: Remove this when the following for loop is updated
    assert!(
      rct_type.compact_encrypted_amounts(),
      "created an Eventuality for a very old RctType we don't support proving for"
    );

    for (o, (expected, actual)) in outputs.iter().zip(tx.prefix.outputs.iter()).enumerate() {
      // Verify the output, commitment, and encrypted amount.
      if (&Output {
        amount: None,
        key: expected.dest.compress(),
        view_tag: Some(expected.view_tag).filter(|_| matches!(self.protocol, Protocol::v16)),
      } != actual) ||
        (Some(&expected.commitment.calculate()) != tx.rct_signatures.base.commitments.get(o)) ||
        (Some(&EncryptedAmount::Compact { amount: expected.amount }) !=
          tx.rct_signatures.base.encrypted_amounts.get(o))
      {
        return false;
      }
    }

    true
  }
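
  // --- Illustrative sketch, not part of the original file ---------------------
  // Using extra() as its doc comment above suggests: index pending eventualities
  // by their extra, run the cheap map lookup first, and only then pay for the
  // full matches() check. Assumes std's HashMap is available.
  #[allow(dead_code)]
  fn example_scan(
    pending: &std::collections::HashMap<Vec<u8>, Eventuality>,
    tx: &Transaction,
  ) -> bool {
    pending.get(&tx.prefix.extra).map_or(false, |eventuality| eventuality.matches(tx))
  }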
  pub fn write<W: io::Write>(&self, w: &mut W) -> io::Result<()> {
    self.protocol.write(w)?;
    write_raw_vec(write_byte, self.r_seed.as_ref(), w)?;
    write_vec(write_point, &self.inputs, w)?;

    fn write_payment<W: io::Write>(payment: &InternalPayment, w: &mut W) -> io::Result<()> {
      match payment {
        InternalPayment::Payment(payment) => {
          w.write_all(&[0])?;
          write_vec(write_byte, payment.0.to_string().as_bytes(), w)?;
          w.write_all(&payment.1.to_le_bytes())
        }
        InternalPayment::Change(change, amount) => {
          w.write_all(&[1])?;
          write_vec(write_byte, change.address.to_string().as_bytes(), w)?;
          if let Some(view) = change.view.as_ref() {
            w.write_all(&[1])?;
            write_scalar(view, w)?;
          } else {
            w.write_all(&[0])?;
          }
          w.write_all(&amount.to_le_bytes())
        }
      }
    }
    write_vec(write_payment, &self.payments, w)?;

    write_vec(write_byte, &self.extra, w)
  }

  pub fn serialize(&self) -> Vec<u8> {
    let mut buf = Vec::with_capacity(128);
    self.write(&mut buf).unwrap();
    buf
  }

  pub fn read<R: io::Read>(r: &mut R) -> io::Result<Self> {
    fn read_address<R: io::Read>(r: &mut R) -> io::Result<MoneroAddress> {
      String::from_utf8(read_vec(read_byte, r)?)
        .ok()
        .and_then(|str| MoneroAddress::from_str_raw(&str).ok())
        .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "invalid address"))
    }

    fn read_payment<R: io::Read>(r: &mut R) -> io::Result<InternalPayment> {
      Ok(match read_byte(r)? {
        0 => InternalPayment::Payment((read_address(r)?, read_u64(r)?)),
        1 => InternalPayment::Change(
          Change {
            address: read_address(r)?,
            view: match read_byte(r)? {
              0 => None,
              1 => Some(Zeroizing::new(read_scalar(r)?)),
              _ => Err(io::Error::new(io::ErrorKind::Other, "invalid change payment"))?,
            },
          },
          read_u64(r)?,
        ),
        _ => Err(io::Error::new(io::ErrorKind::Other, "invalid payment"))?,
      })
    }

    Ok(Self {
      protocol: Protocol::read(r)?,
      r_seed: Zeroizing::new(read_bytes::<_, 32>(r)?),
      inputs: read_vec(read_point, r)?,
      payments: read_vec(read_payment, r)?,
      extra: read_vec(read_byte, r)?,
    })
  }
}
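
// --- Illustrative sketch, not part of the original file -----------------------
// write()/read() above are inverses, with serialize() as the buffering
// shorthand; a round-trip looks like:
#[allow(dead_code)]
fn example_round_trip(eventuality: &Eventuality) -> io::Result<Eventuality> {
  let buf = eventuality.serialize();
  Eventuality::read(&mut buf.as_slice())
}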
@@ -1,442 +0,0 @@
use std_shims::{
  sync::Arc,
  vec::Vec,
  io::{self, Read},
  collections::HashMap,
};
use std::sync::RwLock;

use zeroize::Zeroizing;

use rand_core::{RngCore, CryptoRng, SeedableRng};
use rand_chacha::ChaCha20Rng;

use group::ff::Field;
use curve25519_dalek::{traits::Identity, scalar::Scalar, edwards::EdwardsPoint};
use dalek_ff_group as dfg;

use transcript::{Transcript, RecommendedTranscript};
use frost::{
  curve::Ed25519,
  Participant, FrostError, ThresholdKeys,
  sign::{
    Writable, Preprocess, CachedPreprocess, SignatureShare, PreprocessMachine, SignMachine,
    SignatureMachine, AlgorithmMachine, AlgorithmSignMachine, AlgorithmSignatureMachine,
  },
};

use crate::{
  random_scalar,
  ringct::{
    clsag::{ClsagInput, ClsagDetails, ClsagAddendum, ClsagMultisig, add_key_image_share},
    RctPrunable,
  },
  transaction::{Input, Transaction},
  rpc::{RpcConnection, Rpc},
  wallet::{
    TransactionError, InternalPayment, SignableTransaction, Decoys, key_image_sort, uniqueness,
  },
};

/// FROST signing machine to produce a signed transaction.
pub struct TransactionMachine {
  signable: SignableTransaction,

  i: Participant,
  transcript: RecommendedTranscript,

  decoys: Vec<Decoys>,

  // Hashed key and scalar offset
  key_images: Vec<(EdwardsPoint, Scalar)>,
  inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
  clsags: Vec<AlgorithmMachine<Ed25519, ClsagMultisig>>,
}

pub struct TransactionSignMachine {
  signable: SignableTransaction,

  i: Participant,
  transcript: RecommendedTranscript,

  decoys: Vec<Decoys>,

  key_images: Vec<(EdwardsPoint, Scalar)>,
  inputs: Vec<Arc<RwLock<Option<ClsagDetails>>>>,
  clsags: Vec<AlgorithmSignMachine<Ed25519, ClsagMultisig>>,

  our_preprocess: Vec<Preprocess<Ed25519, ClsagAddendum>>,
}

pub struct TransactionSignatureMachine {
  tx: Transaction,
  clsags: Vec<AlgorithmSignatureMachine<Ed25519, ClsagMultisig>>,
}

impl SignableTransaction {
  /// Create a FROST signing machine out of this signable transaction.
  /// The height is the Monero blockchain height to synchronize around.
  pub async fn multisig<RPC: RpcConnection>(
    self,
    rpc: &Rpc<RPC>,
    keys: ThresholdKeys<Ed25519>,
    mut transcript: RecommendedTranscript,
    height: usize,
  ) -> Result<TransactionMachine, TransactionError> {
    let mut inputs = vec![];
    for _ in 0 .. self.inputs.len() {
      // Doesn't resize as that would use a single Arc for the entire Vec
      inputs.push(Arc::new(RwLock::new(None)));
    }
    let mut clsags = vec![];

    // Create an RNG out of the input shared keys, which either requires the view key or being
    // every sender, and the payments (address and amount), which a passive adversary may be able
    // to know depending on how these transactions are coordinated
    // Being every sender would already let you note rings which happen to use your transactions
    // multiple times, already breaking privacy there

    transcript.domain_separate(b"monero_transaction");

    // Include the height we're using for our data
    // The data itself will be included, making this unnecessary, yet a lot of this is technically
    // unnecessary. Anything which further increases security at almost no cost should be followed
    transcript.append_message(b"height", u64::try_from(height).unwrap().to_le_bytes());

    // Also include the spend_key as below only the key offset is included, so this transcripts
    // the sum product
    // Useful as transcripting the sum product effectively transcripts the key image, further
    // guaranteeing the one time properties noted below
    transcript.append_message(b"spend_key", keys.group_key().0.compress().to_bytes());

    if let Some(r_seed) = &self.r_seed {
      transcript.append_message(b"r_seed", r_seed);
    }

    for input in &self.inputs {
      // These outputs can only be spent once. Therefore, it forces all RNGs derived from this
      // transcript (such as the one used to create one time keys) to be unique
      transcript.append_message(b"input_hash", input.output.absolute.tx);
      transcript.append_message(b"input_output_index", [input.output.absolute.o]);
      // Not including this, with a doxxed list of payments, would allow brute forcing the inputs
      // to determine RNG seeds and therefore the true spends
      transcript.append_message(b"input_shared_key", input.key_offset().to_bytes());
    }

    for payment in &self.payments {
      match payment {
        InternalPayment::Payment(payment) => {
          transcript.append_message(b"payment_address", payment.0.to_string().as_bytes());
          transcript.append_message(b"payment_amount", payment.1.to_le_bytes());
        }
        InternalPayment::Change(change, amount) => {
          transcript.append_message(b"change_address", change.address.to_string().as_bytes());
          if let Some(view) = change.view.as_ref() {
            transcript.append_message(b"change_view_key", Zeroizing::new(view.to_bytes()));
          }
          transcript.append_message(b"change_amount", amount.to_le_bytes());
        }
      }
    }

    let mut key_images = vec![];
    for (i, input) in self.inputs.iter().enumerate() {
      // Check this is the right set of keys
      let offset = keys.offset(dfg::Scalar(input.key_offset()));
      if offset.group_key().0 != input.key() {
        Err(TransactionError::WrongPrivateKey)?;
      }

      let clsag = ClsagMultisig::new(transcript.clone(), input.key(), inputs[i].clone());
      key_images.push((
        clsag.H,
        keys.current_offset().unwrap_or(dfg::Scalar::ZERO).0 + self.inputs[i].key_offset(),
      ));
      clsags.push(AlgorithmMachine::new(clsag, offset));
    }

    // Select decoys
    // Ideally, this would be done post entropy, instead of now, yet doing so would require sign
    // to be async which isn't preferable. This should be suitably competent though
    // While this inability means we can immediately create the input, moving it out of the
    // Arc RwLock, keeping it within an Arc RwLock keeps our options flexible
    let decoys = Decoys::select(
      // Using a seeded RNG with a specific height, committed to above, should make these decoys
      // committed to. They'll also be committed to later via the TX message as a whole
      &mut ChaCha20Rng::from_seed(transcript.rng_seed(b"decoys")),
      rpc,
      self.protocol.ring_len(),
      height,
      &self.inputs,
    )
    .await
    .map_err(TransactionError::RpcError)?;

    Ok(TransactionMachine {
      signable: self,

      i: keys.params().i(),
      transcript,

      decoys,

      key_images,
      inputs,
      clsags,
    })
  }
}
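
// --- Illustrative sketch, not part of the original file -----------------------
// Mirrors how the test! macro later instantiates the machines: every signer
// clones the SignableTransaction and calls multisig() with their own
// ThresholdKeys, the same transcript premise, and the same height, yielding one
// TransactionMachine per participant. The transcript label is a placeholder.
#[allow(dead_code)]
async fn example_machine<RPC: RpcConnection>(
  signable: SignableTransaction,
  rpc: &Rpc<RPC>,
  keys: ThresholdKeys<Ed25519>,
  height: usize,
) -> Result<TransactionMachine, TransactionError> {
  signable
    .multisig(rpc, keys, RecommendedTranscript::new(b"Example Monero Transaction"), height)
    .await
}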
impl PreprocessMachine for TransactionMachine {
  type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
  type Signature = Transaction;
  type SignMachine = TransactionSignMachine;

  fn preprocess<R: RngCore + CryptoRng>(
    mut self,
    rng: &mut R,
  ) -> (TransactionSignMachine, Self::Preprocess) {
    // Iterate over each CLSAG calling preprocess
    let mut preprocesses = Vec::with_capacity(self.clsags.len());
    let clsags = self
      .clsags
      .drain(..)
      .map(|clsag| {
        let (clsag, preprocess) = clsag.preprocess(rng);
        preprocesses.push(preprocess);
        clsag
      })
      .collect();
    let our_preprocess = preprocesses.clone();

    // We could add further entropy here, and previous versions of this library did so
    // As of right now, the multisig's key, the inputs being spent, and the FROST data itself
    // will be used for RNG seeds. In order to recreate these RNG seeds, breaking privacy,
    // counterparties must have knowledge of the multisig, either the view key or access to the
    // coordination layer, and then access to the actual FROST signing process
    // If the commitments are sent in plain text, then entropy here also would be, making it not
    // increase privacy. If they're not sent in plain text, or are otherwise inaccessible, they
    // already offer sufficient entropy. That's why further entropy is not included

    (
      TransactionSignMachine {
        signable: self.signable,

        i: self.i,
        transcript: self.transcript,

        decoys: self.decoys,

        key_images: self.key_images,
        inputs: self.inputs,
        clsags,

        our_preprocess,
      },
      preprocesses,
    )
  }
}

impl SignMachine<Transaction> for TransactionSignMachine {
  type Params = ();
  type Keys = ThresholdKeys<Ed25519>;
  type Preprocess = Vec<Preprocess<Ed25519, ClsagAddendum>>;
  type SignatureShare = Vec<SignatureShare<Ed25519>>;
  type SignatureMachine = TransactionSignatureMachine;

  fn cache(self) -> CachedPreprocess {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn from_cache(_: (), _: ThresholdKeys<Ed25519>, _: CachedPreprocess) -> Result<Self, FrostError> {
    unimplemented!(
      "Monero transactions don't support caching their preprocesses due to {}",
      "being already bound to a specific transaction"
    );
  }

  fn read_preprocess<R: Read>(&self, reader: &mut R) -> io::Result<Self::Preprocess> {
    self.clsags.iter().map(|clsag| clsag.read_preprocess(reader)).collect()
  }

  fn sign(
    mut self,
    mut commitments: HashMap<Participant, Self::Preprocess>,
    msg: &[u8],
  ) -> Result<(TransactionSignatureMachine, Self::SignatureShare), FrostError> {
    assert!(
      msg.is_empty(),
      "message was passed to the TransactionMachine when it generates its own"
    );

    // Find out who's included
    // This may not be a valid set of signers yet the algorithm machine will error if it's not
    commitments.remove(&self.i); // Remove, if it was included for some reason
    let mut included = commitments.keys().copied().collect::<Vec<_>>();
    included.push(self.i);
    included.sort_unstable();

    // Convert the unified commitments to a Vec of the individual commitments
    let mut images = vec![EdwardsPoint::identity(); self.clsags.len()];
    let mut commitments = (0 .. self.clsags.len())
      .map(|c| {
        included
          .iter()
          .map(|l| {
            // Add all commitments to the transcript for their entropy
            // While each CLSAG will do this as they need to for security, they have their own
            // transcripts cloned from this TX's initial premise's transcript. For our TX
            // transcript to have the CLSAG data for entropy, it'll have to be added ourselves here
            self.transcript.append_message(b"participant", (*l).to_bytes());

            let preprocess = if *l == self.i {
              self.our_preprocess[c].clone()
            } else {
              commitments.get_mut(l).ok_or(FrostError::MissingParticipant(*l))?[c].clone()
            };

            {
              let mut buf = vec![];
              preprocess.write(&mut buf).unwrap();
              self.transcript.append_message(b"preprocess", buf);
            }

            // While here, calculate the key image
            // Clsag will parse/calculate/validate this as needed, yet doing so here as well
            // provides the easiest API overall, as this is where the TX is (which needs the key
            // images in its message), along with where the outputs are determined (where our
            // outputs may need these in order to guarantee uniqueness)
            add_key_image_share(
              &mut images[c],
              self.key_images[c].0,
              self.key_images[c].1,
              &included,
              *l,
              preprocess.addendum.key_image.0,
            );

            Ok((*l, preprocess))
          })
          .collect::<Result<HashMap<_, _>, _>>()
      })
      .collect::<Result<Vec<_>, _>>()?;

    // Remove our preprocess which shouldn't be here. It was just the easiest way to implement the
    // above
    for map in &mut commitments {
      map.remove(&self.i);
    }

    // Create the actual transaction
    let (mut tx, output_masks) = {
      let mut sorted_images = images.clone();
      sorted_images.sort_by(key_image_sort);

      self.signable.prepare_transaction(
        // Technically, r_seed is used for the transaction keys if it's provided
        &mut ChaCha20Rng::from_seed(self.transcript.rng_seed(b"transaction_keys_bulletproofs")),
        uniqueness(
          &sorted_images
            .iter()
            .map(|image| Input::ToKey { amount: None, key_offsets: vec![], key_image: *image })
            .collect::<Vec<_>>(),
        ),
      )
    };

    // Sort the inputs, as expected
    let mut sorted = Vec::with_capacity(self.clsags.len());
    while !self.clsags.is_empty() {
      sorted.push((
        images.swap_remove(0),
        self.signable.inputs.swap_remove(0),
        self.decoys.swap_remove(0),
        self.inputs.swap_remove(0),
        self.clsags.swap_remove(0),
        commitments.swap_remove(0),
      ));
    }
    sorted.sort_by(|x, y| key_image_sort(&x.0, &y.0));

    let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"pseudo_out_masks"));
    let mut sum_pseudo_outs = Scalar::zero();
    while !sorted.is_empty() {
      let value = sorted.remove(0);

      let mask = if sorted.is_empty() {
        output_masks - sum_pseudo_outs
      } else {
        let mask = random_scalar(&mut rng);
        sum_pseudo_outs += mask;
        mask
      };

      tx.prefix.inputs.push(Input::ToKey {
        amount: None,
        key_offsets: value.2.offsets.clone(),
        key_image: value.0,
      });

      *value.3.write().unwrap() = Some(ClsagDetails::new(
        ClsagInput::new(value.1.commitment().clone(), value.2).map_err(|_| {
          panic!("Signing an input which isn't present in the ring we created for it")
        })?,
        mask,
      ));

      self.clsags.push(value.4);
      commitments.push(value.5);
    }

    let msg = tx.signature_hash();

    // Iterate over each CLSAG calling sign
    let mut shares = Vec::with_capacity(self.clsags.len());
    let clsags = self
      .clsags
      .drain(..)
      .map(|clsag| {
        let (clsag, share) = clsag.sign(commitments.remove(0), &msg)?;
        shares.push(share);
        Ok(clsag)
      })
      .collect::<Result<_, _>>()?;

    Ok((TransactionSignatureMachine { tx, clsags }, shares))
  }
}

impl SignatureMachine<Transaction> for TransactionSignatureMachine {
  type SignatureShare = Vec<SignatureShare<Ed25519>>;

  fn read_share<R: Read>(&self, reader: &mut R) -> io::Result<Self::SignatureShare> {
    self.clsags.iter().map(|clsag| clsag.read_share(reader)).collect()
  }

  fn complete(
    mut self,
    shares: HashMap<Participant, Self::SignatureShare>,
  ) -> Result<Transaction, FrostError> {
    let mut tx = self.tx;
    match tx.rct_signatures.prunable {
      RctPrunable::Null => panic!("Signing for RctPrunable::Null"),
      RctPrunable::Clsag { ref mut clsags, ref mut pseudo_outs, .. } => {
        for (c, clsag) in self.clsags.drain(..).enumerate() {
          let (clsag, pseudo_out) = clsag.complete(
            shares.iter().map(|(l, shares)| (*l, shares[c].clone())).collect::<HashMap<_, _>>(),
          )?;
          clsags.push(clsag);
          pseudo_outs.push(pseudo_out);
        }
      }
      RctPrunable::MlsagBorromean { .. } | RctPrunable::MlsagBulletproofs { .. } => {
        unreachable!("attempted to sign a multisig TX which wasn't CLSAG")
      }
    }
    Ok(tx)
  }
}
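
// --- Illustrative sketch, not part of the original file -----------------------
// The shape of a two-of-two session over the three machines above, with network
// transport elided (real setups exchange the preprocesses/shares and re-read
// them via read_preprocess/read_share). sign() takes an empty message, as the
// machine derives the signature hash itself. rand_core::OsRng is assumed.
#[allow(dead_code)]
fn example_session(
  (alice_i, alice): (Participant, TransactionMachine),
  (bob_i, bob): (Participant, TransactionMachine),
) -> Result<Transaction, FrostError> {
  // Round 1: each party generates a preprocess and sends it to the other
  let (alice, alice_pre) = alice.preprocess(&mut rand_core::OsRng);
  let (bob, bob_pre) = bob.preprocess(&mut rand_core::OsRng);

  // Round 2: each party signs with the counterparty's preprocess
  let mut for_alice = HashMap::new();
  for_alice.insert(bob_i, bob_pre);
  let (alice, _alice_share) = alice.sign(for_alice, &[])?;

  let mut for_bob = HashMap::new();
  for_bob.insert(alice_i, alice_pre);
  let (_bob, bob_share) = bob.sign(for_bob, &[])?;

  // Finalization: a party holding all counterparty shares completes the TX
  let mut shares = HashMap::new();
  shares.insert(bob_i, bob_share);
  alice.complete(shares)
}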
@@ -1,287 +0,0 @@
use core::ops::Deref;
use std_shims::{sync::OnceLock, collections::HashSet};

use zeroize::Zeroizing;
use rand_core::OsRng;

use curve25519_dalek::{constants::ED25519_BASEPOINT_TABLE, scalar::Scalar};

use tokio::sync::Mutex;

use monero_serai::{
  random_scalar,
  rpc::{HttpRpc, Rpc},
  wallet::{
    ViewPair, Scanner,
    address::{Network, AddressType, AddressSpec, AddressMeta, MoneroAddress},
    SpendableOutput,
  },
};

pub fn random_address() -> (Scalar, ViewPair, MoneroAddress) {
  let spend = random_scalar(&mut OsRng);
  let spend_pub = &spend * &ED25519_BASEPOINT_TABLE;
  let view = Zeroizing::new(random_scalar(&mut OsRng));
  (
    spend,
    ViewPair::new(spend_pub, view.clone()),
    MoneroAddress {
      meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
      spend: spend_pub,
      view: view.deref() * &ED25519_BASEPOINT_TABLE,
    },
  )
}

// TODO: Support transactions already on-chain
// TODO: Don't have a side effect of mining more blocks than needed under race conditions
// TODO: mine as much as needed instead of default 10 blocks
pub async fn mine_until_unlocked(rpc: &Rpc<HttpRpc>, addr: &str, tx_hash: [u8; 32]) {
  // mine until tx is in a block
  let mut height = rpc.get_height().await.unwrap();
  let mut found = false;
  while !found {
    let block = rpc.get_block_by_number(height - 1).await.unwrap();
    found = match block.txs.iter().find(|&&x| x == tx_hash) {
      Some(_) => true,
      None => {
        rpc.generate_blocks(addr, 1).await.unwrap();
        height += 1;
        false
      }
    }
  }

  // mine 9 more blocks to unlock the tx
  rpc.generate_blocks(addr, 9).await.unwrap();
}

// Mines 60 blocks and returns an unlocked miner TX output.
#[allow(dead_code)]
pub async fn get_miner_tx_output(rpc: &Rpc<HttpRpc>, view: &ViewPair) -> SpendableOutput {
  let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));

  // Mine 60 blocks to unlock a miner TX
  let start = rpc.get_height().await.unwrap();
  rpc
    .generate_blocks(&view.address(Network::Mainnet, AddressSpec::Standard).to_string(), 60)
    .await
    .unwrap();

  let block = rpc.get_block_by_number(start).await.unwrap();
  scanner.scan(rpc, &block).await.unwrap().swap_remove(0).ignore_timelock().swap_remove(0)
}

pub async fn rpc() -> Rpc<HttpRpc> {
  let rpc = HttpRpc::new("http://127.0.0.1:18081".to_string()).unwrap();

  // Only run once
  if rpc.get_height().await.unwrap() != 1 {
    return rpc;
  }

  let addr = MoneroAddress {
    meta: AddressMeta::new(Network::Mainnet, AddressType::Standard),
    spend: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
    view: &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
  }
  .to_string();

  // Mine 40 blocks to ensure decoy availability
  rpc.generate_blocks(&addr, 40).await.unwrap();

  // Make sure we recognize the protocol
  rpc.get_protocol().await.unwrap();

  rpc
}

pub static SEQUENTIAL: OnceLock<Mutex<()>> = OnceLock::new();

#[macro_export]
macro_rules! async_sequential {
  ($(async fn $name: ident() $body: block)*) => {
    $(
      #[allow(clippy::tests_outside_test_module)]
      #[tokio::test]
      async fn $name() {
        let guard = runner::SEQUENTIAL.get_or_init(|| tokio::sync::Mutex::new(())).lock().await;
        let local = tokio::task::LocalSet::new();
        local.run_until(async move {
          if let Err(err) = tokio::task::spawn_local(async move { $body }).await {
            drop(guard);
            Err(err).unwrap()
          }
        }).await;
      }
    )*
  }
}
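
// --- Illustrative sketch, not part of the original file -----------------------
// Hypothetical usage of the macro above, from a test crate with `mod runner;`
// declared. It expands to a #[tokio::test] holding the global SEQUENTIAL lock,
// so tests sharing the regtest node never interleave.
async_sequential! {
  async fn example_sequential_test() {
    let rpc = runner::rpc().await;
    assert!(rpc.get_height().await.unwrap() >= 1);
  }
}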
#[macro_export]
macro_rules! test {
  (
    $name: ident,
    (
      $first_tx: expr,
      $first_checks: expr,
    ),
    $((
      $tx: expr,
      $checks: expr,
    )$(,)?),*
  ) => {
    async_sequential! {
      async fn $name() {
        use core::{ops::Deref, any::Any};
        use std::collections::HashSet;
        #[cfg(feature = "multisig")]
        use std::collections::HashMap;

        use zeroize::Zeroizing;
        use rand_core::OsRng;

        use curve25519_dalek::constants::ED25519_BASEPOINT_TABLE;

        #[cfg(feature = "multisig")]
        use transcript::{Transcript, RecommendedTranscript};
        #[cfg(feature = "multisig")]
        use frost::{
          curve::Ed25519,
          Participant,
          tests::{THRESHOLD, key_gen},
        };

        use monero_serai::{
          random_scalar,
          wallet::{
            address::{Network, AddressSpec}, ViewPair, Scanner, Change, SignableTransaction,
            SignableTransactionBuilder,
          },
        };

        use runner::{random_address, rpc, mine_until_unlocked, get_miner_tx_output};

        type Builder = SignableTransactionBuilder;

        // Run each function as both a single signer and as a multisig
        #[allow(clippy::redundant_closure_call)]
        for multisig in [false, true] {
          // Only run the multisig variant if multisig is enabled
          if multisig {
            #[cfg(not(feature = "multisig"))]
            continue;
          }

          let spend = Zeroizing::new(random_scalar(&mut OsRng));
          #[cfg(feature = "multisig")]
          let keys = key_gen::<_, Ed25519>(&mut OsRng);

          let spend_pub = if !multisig {
            spend.deref() * &ED25519_BASEPOINT_TABLE
          } else {
            #[cfg(not(feature = "multisig"))]
            panic!("Multisig branch called without the multisig feature");
            #[cfg(feature = "multisig")]
            keys[&Participant::new(1).unwrap()].group_key().0
          };

          let rpc = rpc().await;

          let view = ViewPair::new(spend_pub, Zeroizing::new(random_scalar(&mut OsRng)));
          let addr = view.address(Network::Mainnet, AddressSpec::Standard);

          let miner_tx = get_miner_tx_output(&rpc, &view).await;

          let builder = SignableTransactionBuilder::new(
            rpc.get_protocol().await.unwrap(),
            rpc.get_fee().await.unwrap(),
            Some(Change::new(
              &ViewPair::new(
                &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
                Zeroizing::new(random_scalar(&mut OsRng))
              ),
              false
            )),
          );

          let sign = |tx: SignableTransaction| {
            let rpc = rpc.clone();
            let spend = spend.clone();
            #[cfg(feature = "multisig")]
            let keys = keys.clone();
            async move {
              if !multisig {
                tx.sign(&mut OsRng, &rpc, &spend).await.unwrap()
              } else {
                #[cfg(not(feature = "multisig"))]
                panic!("Multisig branch called without the multisig feature");
                #[cfg(feature = "multisig")]
                {
                  let mut machines = HashMap::new();
                  for i in (1 ..= THRESHOLD).map(|i| Participant::new(i).unwrap()) {
                    machines.insert(
                      i,
                      tx
                        .clone()
                        .multisig(
                          &rpc,
                          keys[&i].clone(),
                          RecommendedTranscript::new(b"Monero Serai Test Transaction"),
                          rpc.get_height().await.unwrap() - 10,
                        )
                        .await
                        .unwrap(),
                    );
                  }

                  frost::tests::sign_without_caching(&mut OsRng, machines, &[])
                }
              }
            }
          };

          // TODO: Generate a distinct wallet for each transaction to prevent overlap
          let next_addr = addr;

          let temp = Box::new({
            let mut builder = builder.clone();
            builder.add_input(miner_tx);
            let (tx, state) = ($first_tx)(rpc.clone(), builder, next_addr).await;

            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
            ($first_checks)(rpc.clone(), tx, scanner, state).await
          });
          #[allow(unused_variables, unused_mut, unused_assignments)]
          let mut carried_state: Box<dyn Any> = temp;

          $(
            let (tx, state) = ($tx)(
              rpc.clone(),
              builder.clone(),
              next_addr,
              *carried_state.downcast().unwrap()
            ).await;

            let signed = sign(tx).await;
            rpc.publish_transaction(&signed).await.unwrap();
            mine_until_unlocked(&rpc, &random_address().2.to_string(), signed.hash()).await;
            let tx = rpc.get_transaction(signed.hash()).await.unwrap();
            #[allow(unused_assignments)]
            {
              let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
              carried_state = Box::new(($checks)(rpc.clone(), tx, scanner, state).await);
            }
          )*
        }
      }
    }
  }
}
@@ -1,300 +0,0 @@
|
|||||||
use rand::RngCore;
|
|
||||||
|
|
||||||
use monero_serai::{transaction::Transaction, wallet::address::SubaddressIndex};
|
|
||||||
|
|
||||||
mod runner;
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_standard_address,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Standard), 5);
|
|
||||||
(builder.build().unwrap(), scanner)
|
|
||||||
},
|
|
||||||
|_, tx: Transaction, _, mut state: Scanner| async move {
|
|
||||||
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
|
|
||||||
assert_eq!(output.commitment().amount, 5);
|
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_subaddress,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let subaddress = SubaddressIndex::new(0, 1).unwrap();
|
|
||||||
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
scanner.register_subaddress(subaddress);
|
|
||||||
|
|
||||||
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Subaddress(subaddress)), 5);
|
|
||||||
(builder.build().unwrap(), (scanner, subaddress))
|
|
||||||
},
|
|
||||||
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
|
|
||||||
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
|
||||||
assert_eq!(output.commitment().amount, 5);
|
|
||||||
assert_eq!(output.metadata.subaddress, Some(state.1));
|
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_integrated_address,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
|
|
||||||
let mut payment_id = [0u8; 8];
|
|
||||||
OsRng.fill_bytes(&mut payment_id);
|
|
||||||
|
|
||||||
builder.add_payment(view.address(Network::Mainnet, AddressSpec::Integrated(payment_id)), 5);
|
|
||||||
(builder.build().unwrap(), (scanner, payment_id))
|
|
||||||
},
|
|
||||||
|_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
|
|
||||||
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
|
||||||
assert_eq!(output.commitment().amount, 5);
|
|
||||||
assert_eq!(output.metadata.payment_id, state.1);
|
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_featured_standard,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
builder.add_payment(
|
|
||||||
view.address(
|
|
||||||
Network::Mainnet,
|
|
||||||
AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: false },
|
|
||||||
),
|
|
||||||
5,
|
|
||||||
);
|
|
||||||
(builder.build().unwrap(), scanner)
|
|
||||||
},
|
|
||||||
|_, tx: Transaction, _, mut state: Scanner| async move {
|
|
||||||
let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
|
|
||||||
assert_eq!(output.commitment().amount, 5);
|
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_featured_subaddress,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let subaddress = SubaddressIndex::new(0, 2).unwrap();
|
|
||||||
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
scanner.register_subaddress(subaddress);
|
|
||||||
|
|
||||||
builder.add_payment(
|
|
||||||
view.address(
|
|
||||||
Network::Mainnet,
|
|
||||||
AddressSpec::Featured {
|
|
||||||
subaddress: Some(subaddress),
|
|
||||||
payment_id: None,
|
|
||||||
guaranteed: false,
|
|
||||||
},
|
|
||||||
),
|
|
||||||
5,
|
|
||||||
);
|
|
||||||
(builder.build().unwrap(), (scanner, subaddress))
|
|
||||||
},
|
|
||||||
|_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
|
|
||||||
let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
|
|
||||||
assert_eq!(output.commitment().amount, 5);
|
|
||||||
assert_eq!(output.metadata.subaddress, Some(state.1));
|
|
||||||
},
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
test!(
|
|
||||||
scan_featured_integrated,
|
|
||||||
(
|
|
||||||
|_, mut builder: Builder, _| async move {
|
|
||||||
let view = runner::random_address().1;
|
|
||||||
let scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
|
|
||||||
let mut payment_id = [0u8; 8];
|
|
||||||
OsRng.fill_bytes(&mut payment_id);
|
|
||||||
|
|
||||||
builder.add_payment(
|
|
||||||
view.address(
|
|
||||||
Network::Mainnet,
|
|
||||||
AddressSpec::Featured {
|
|
||||||
subaddress: None,
|
|
||||||
payment_id: Some(payment_id),
|
|
||||||
guaranteed: false,
|
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
    },
  ),
);

test!(
  scan_featured_integrated_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(0, 3).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), Some(HashSet::new()));
      scanner.register_subaddress(subaddress);

      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: Some(payment_id),
            guaranteed: false,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
      assert_eq!(output.metadata.subaddress, Some(state.2));
    },
  ),
);

test!(
  scan_guaranteed_standard,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), None);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured { subaddress: None, payment_id: None, guaranteed: true },
        ),
        5,
      );
      (builder.build().unwrap(), scanner)
    },
    |_, tx: Transaction, _, mut state: Scanner| async move {
      let output = state.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  scan_guaranteed_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(1, 0).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), None);
      scanner.register_subaddress(subaddress);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: None,
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.subaddress, Some(state.1));
    },
  ),
);

test!(
  scan_guaranteed_integrated,
  (
    |_, mut builder: Builder, _| async move {
      let view = runner::random_address().1;
      let scanner = Scanner::from_view(view.clone(), None);
      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: None,
            payment_id: Some(payment_id),
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8])| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
    },
  ),
);

test!(
  scan_guaranteed_integrated_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      let subaddress = SubaddressIndex::new(1, 1).unwrap();

      let view = runner::random_address().1;
      let mut scanner = Scanner::from_view(view.clone(), None);
      scanner.register_subaddress(subaddress);

      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);

      builder.add_payment(
        view.address(
          Network::Mainnet,
          AddressSpec::Featured {
            subaddress: Some(subaddress),
            payment_id: Some(payment_id),
            guaranteed: true,
          },
        ),
        5,
      );
      (builder.build().unwrap(), (scanner, payment_id, subaddress))
    },
    |_, tx: Transaction, _, mut state: (Scanner, [u8; 8], SubaddressIndex)| async move {
      let output = state.0.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
      assert_eq!(output.metadata.payment_id, state.1);
      assert_eq!(output.metadata.subaddress, Some(state.2));
    },
  ),
);
@@ -1,118 +0,0 @@
use monero_serai::{
  transaction::Transaction,
  wallet::{extra::Extra, address::SubaddressIndex, ReceivedOutput, SpendableOutput},
  rpc::Rpc,
};

mod runner;

test!(
  spend_miner_output,
  (
    |_, mut builder: Builder, addr| async move {
      builder.add_payment(addr, 5);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 5);
    },
  ),
);

test!(
  spend_multiple_outputs,
  (
    |_, mut builder: Builder, addr| async move {
      builder.add_payment(addr, 1000000000000);
      builder.add_payment(addr, 2000000000000);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let mut outputs = scanner.scan_transaction(&tx).not_locked();
      outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
      assert_eq!(outputs[0].commitment().amount, 1000000000000);
      assert_eq!(outputs[1].commitment().amount, 2000000000000);
      outputs
    },
  ),
  (
    |rpc, mut builder: Builder, addr, outputs: Vec<ReceivedOutput>| async move {
      for output in outputs {
        builder.add_input(SpendableOutput::from(&rpc, output).await.unwrap());
      }
      builder.add_payment(addr, 6);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);
      assert_eq!(output.commitment().amount, 6);
    },
  ),
);

test!(
  // Ideally, this would be single_R, yet it isn't feasible to apply allow(non_snake_case) here
  single_r_subaddress_send,
  (
    // Consume this builder for an output we can use in the future
    // This is needed because we can't get the input from the passed in builder
    |_, mut builder: Builder, addr| async move {
      builder.add_payment(addr, 1000000000000);
      (builder.build().unwrap(), ())
    },
    |_, tx: Transaction, mut scanner: Scanner, _| async move {
      let mut outputs = scanner.scan_transaction(&tx).not_locked();
      outputs.sort_by(|x, y| x.commitment().amount.cmp(&y.commitment().amount));
      assert_eq!(outputs[0].commitment().amount, 1000000000000);
      outputs
    },
  ),
  (
    |rpc: Rpc<_>, _, _, mut outputs: Vec<ReceivedOutput>| async move {
      let change_view = ViewPair::new(
        &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
        Zeroizing::new(random_scalar(&mut OsRng)),
      );

      let mut builder = SignableTransactionBuilder::new(
        rpc.get_protocol().await.unwrap(),
        rpc.get_fee().await.unwrap(),
        Some(Change::new(&change_view, false)),
      );
      builder.add_input(SpendableOutput::from(&rpc, outputs.swap_remove(0)).await.unwrap());

      // Send to a subaddress
      let sub_view = ViewPair::new(
        &random_scalar(&mut OsRng) * &ED25519_BASEPOINT_TABLE,
        Zeroizing::new(random_scalar(&mut OsRng)),
      );
      builder.add_payment(
        sub_view
          .address(Network::Mainnet, AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())),
        1,
      );
      (builder.build().unwrap(), (change_view, sub_view))
    },
    |_, tx: Transaction, _, views: (ViewPair, ViewPair)| async move {
      // Make sure the change can pick up its output
      let mut change_scanner = Scanner::from_view(views.0, Some(HashSet::new()));
      assert!(change_scanner.scan_transaction(&tx).not_locked().len() == 1);

      // Make sure the subaddress can pick up its output
      let mut sub_scanner = Scanner::from_view(views.1, Some(HashSet::new()));
      sub_scanner.register_subaddress(SubaddressIndex::new(0, 1).unwrap());
      let sub_outputs = sub_scanner.scan_transaction(&tx).not_locked();
      assert!(sub_outputs.len() == 1);
      assert_eq!(sub_outputs[0].commitment().amount, 1);

      // Make sure only one R was included in TX extra
      assert!(Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref())
        .unwrap()
        .keys()
        .unwrap()
        .1
        .is_none());
    },
  ),
);
@@ -1,247 +0,0 @@
use std::{
  collections::{HashSet, HashMap},
  str::FromStr,
};

use rand_core::{OsRng, RngCore};

use serde::Deserialize;
use serde_json::json;

use monero_rpc::{
  monero::{
    Amount, Address,
    cryptonote::{hash::Hash, subaddress::Index},
    util::address::PaymentId,
  },
  TransferOptions, WalletClient,
};

use monero_serai::{
  transaction::Transaction,
  rpc::{HttpRpc, Rpc},
  wallet::{
    address::{Network, AddressSpec, SubaddressIndex, MoneroAddress},
    extra::{MAX_TX_EXTRA_NONCE_SIZE, Extra},
    Scanner,
  },
};

mod runner;

async fn make_integrated_address(payment_id: [u8; 8]) -> String {
  #[derive(Deserialize, Debug)]
  struct IntegratedAddressResponse {
    integrated_address: String,
  }

  let rpc = HttpRpc::new("http://127.0.0.1:6061".to_string()).unwrap();
  let res = rpc
    .json_rpc_call::<IntegratedAddressResponse>(
      "make_integrated_address",
      Some(json!({ "payment_id": hex::encode(payment_id) })),
    )
    .await
    .unwrap();

  res.integrated_address
}

async fn initialize_rpcs() -> (WalletClient, Rpc<HttpRpc>, monero_rpc::monero::Address) {
  let wallet_rpc =
    monero_rpc::RpcClientBuilder::new().build("http://127.0.0.1:6061").unwrap().wallet();
  let daemon_rpc = runner::rpc().await;

  let address_resp = wallet_rpc.get_address(0, None).await;
  let wallet_rpc_addr = if address_resp.is_ok() {
    address_resp.unwrap().address
  } else {
    wallet_rpc.create_wallet("wallet".to_string(), None, "English".to_string()).await.unwrap();
    let addr = wallet_rpc.get_address(0, None).await.unwrap().address;
    daemon_rpc.generate_blocks(&addr.to_string(), 70).await.unwrap();
    addr
  };
  (wallet_rpc, daemon_rpc, wallet_rpc_addr)
}

async fn from_wallet_rpc_to_self(spec: AddressSpec) {
  // initialize rpc
  let (wallet_rpc, daemon_rpc, wallet_rpc_addr) = initialize_rpcs().await;

  // make an addr
  let (_, view_pair, _) = runner::random_address();
  let addr = Address::from_str(&view_pair.address(Network::Mainnet, spec).to_string()).unwrap();

  // refresh & make a tx
  wallet_rpc.refresh(None).await.unwrap();
  let tx = wallet_rpc
    .transfer(
      HashMap::from([(addr, Amount::ONE_XMR)]),
      monero_rpc::TransferPriority::Default,
      TransferOptions::default(),
    )
    .await
    .unwrap();
  let tx_hash: [u8; 32] = tx.tx_hash.0.try_into().unwrap();

  // unlock it
  runner::mine_until_unlocked(&daemon_rpc, &wallet_rpc_addr.to_string(), tx_hash).await;

  // create the scanner
  let mut scanner = Scanner::from_view(view_pair, Some(HashSet::new()));
  if let AddressSpec::Subaddress(index) = spec {
    scanner.register_subaddress(index);
  }

  // retrieve it and confirm
  let tx = daemon_rpc.get_transaction(tx_hash).await.unwrap();
  let output = scanner.scan_transaction(&tx).not_locked().swap_remove(0);

  match spec {
    AddressSpec::Subaddress(index) => assert_eq!(output.metadata.subaddress, Some(index)),
    AddressSpec::Integrated(payment_id) => {
      assert_eq!(output.metadata.payment_id, payment_id);
      assert_eq!(output.metadata.subaddress, None);
    }
    AddressSpec::Standard | AddressSpec::Featured { .. } => {
      assert_eq!(output.metadata.subaddress, None)
    }
  }
  assert_eq!(output.commitment().amount, 1000000000000);
}

async_sequential!(
  async fn receipt_of_wallet_rpc_tx_standard() {
    from_wallet_rpc_to_self(AddressSpec::Standard).await;
  }

  async fn receipt_of_wallet_rpc_tx_subaddress() {
    from_wallet_rpc_to_self(AddressSpec::Subaddress(SubaddressIndex::new(0, 1).unwrap())).await;
  }

  async fn receipt_of_wallet_rpc_tx_integrated() {
    let mut payment_id = [0u8; 8];
    OsRng.fill_bytes(&mut payment_id);
    from_wallet_rpc_to_self(AddressSpec::Integrated(payment_id)).await;
  }
);

test!(
  send_to_wallet_rpc_standard,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;

      // add destination
      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &wallet_rpc_addr.to_string()).unwrap(),
        1000000,
      );
      (builder.build().unwrap(), (wallet_rpc,))
    },
    |_, tx: Transaction, _, data: (WalletClient,)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
    },
  ),
);

test!(
  send_to_wallet_rpc_subaddress,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, _) = initialize_rpcs().await;

      // make the addr
      let (subaddress, index) = wallet_rpc.create_address(0, None).await.unwrap();

      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &subaddress.to_string()).unwrap(),
        1000000,
      );
      (builder.build().unwrap(), (wallet_rpc, index))
    },
    |_, tx: Transaction, _, data: (WalletClient, u32)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: data.1 });

      // Make sure only one R was included in TX extra
      assert!(Extra::read::<&[u8]>(&mut tx.prefix.extra.as_ref())
        .unwrap()
        .keys()
        .unwrap()
        .1
        .is_none());
    },
  ),
);

test!(
  send_to_wallet_rpc_integrated,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, _) = initialize_rpcs().await;

      // make the addr
      let mut payment_id = [0u8; 8];
      OsRng.fill_bytes(&mut payment_id);
      let addr = make_integrated_address(payment_id).await;

      builder.add_payment(MoneroAddress::from_str(Network::Mainnet, &addr).unwrap(), 1000000);
      (builder.build().unwrap(), (wallet_rpc, payment_id))
    },
    |_, tx: Transaction, _, data: (WalletClient, [u8; 8])| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
      assert_eq!(transfer.payment_id.0, PaymentId::from_slice(&data.1));
    },
  ),
);

test!(
  send_to_wallet_rpc_with_arb_data,
  (
    |_, mut builder: Builder, _| async move {
      // initialize rpc
      let (wallet_rpc, _, wallet_rpc_addr) = initialize_rpcs().await;

      // add destination
      builder.add_payment(
        MoneroAddress::from_str(Network::Mainnet, &wallet_rpc_addr.to_string()).unwrap(),
        1000000,
      );

      // Make 2 data that is the full 255 bytes
      for _ in 0 .. 2 {
        // Subtract 1 since we prefix data with 127
        let data = vec![b'a'; MAX_TX_EXTRA_NONCE_SIZE - 1];
        builder.add_data(data).unwrap();
      }

      (builder.build().unwrap(), (wallet_rpc,))
    },
    |_, tx: Transaction, _, data: (WalletClient,)| async move {
      // confirm receipt
      data.0.refresh(None).await.unwrap();
      let transfer =
        data.0.get_transfer(Hash::from_slice(&tx.hash()), None).await.unwrap().unwrap();
      assert_eq!(transfer.amount.as_pico(), 1000000);
      assert_eq!(transfer.subaddr_index, Index { major: 0, minor: 0 });
    },
  ),
);
@@ -1,13 +1,25 @@
 [package]
 name = "serai-db"
-version = "0.1.0"
+version = "0.1.1"
 description = "A simple database trait and backends for it"
 license = "MIT"
 repository = "https://github.com/serai-dex/serai/tree/develop/common/db"
 authors = ["Luke Parker <lukeparker5132@gmail.com>"]
 keywords = []
 edition = "2021"
+rust-version = "1.71"
 
 [package.metadata.docs.rs]
 all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
+
+[lints]
+workspace = true
+
+[dependencies]
+parity-db = { version = "0.4", default-features = false, optional = true }
+rocksdb = { version = "0.23", default-features = false, features = ["zstd"], optional = true }
+
+[features]
+parity-db = ["dep:parity-db"]
+rocksdb = ["dep:rocksdb"]
common/db/README.md (new file, 8 lines)
@@ -0,0 +1,8 @@
# Serai DB

An inefficient, minimal abstraction around databases.

The abstraction offers `get`, `put`, and `del` with helper functions and macros
built on top. Database iteration is not offered, forcing the caller to manually
implement indexing schemes. This ensures wide compatibility across abstracted
databases.
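
Because iteration is deliberately absent, an index is layered by hand, typically as a counter key plus numbered entry keys. A minimal sketch of such a scheme over the in-memory backend (the `items_count` and `item_` key names are illustrative, not part of the crate):

use serai_db::{Get, DbTxn, Db, MemDb};

// Append an item, tracking how many exist under a dedicated counter key
fn append(db: &mut MemDb, item: &[u8]) {
  let mut txn = db.txn();
  let count = txn.get(b"items_count").map_or(0, |c| u32::from_le_bytes(c.try_into().unwrap()));
  txn.put([b"item_".as_ref(), count.to_le_bytes().as_ref()].concat(), item);
  txn.put(b"items_count", (count + 1).to_le_bytes());
  txn.commit();
}

// "Iterate" by walking every index the counter says exists
fn read_all(db: &MemDb) -> Vec<Vec<u8>> {
  let count = db.get(b"items_count").map_or(0, |c| u32::from_le_bytes(c.try_into().unwrap()));
  (0 .. count)
    .map(|i| db.get([b"item_".as_ref(), i.to_le_bytes().as_ref()].concat()).unwrap())
    .collect()
}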
common/db/src/create_db.rs (new file, 180 lines)
@@ -0,0 +1,180 @@
#[doc(hidden)]
pub fn serai_db_key(
  db_dst: &'static [u8],
  item_dst: &'static [u8],
  key: impl AsRef<[u8]>,
) -> Vec<u8> {
  let db_len = u8::try_from(db_dst.len()).unwrap();
  let dst_len = u8::try_from(item_dst.len()).unwrap();
  [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
}

/// Creates a series of structs which provide namespacing for keys
///
/// # Description
///
/// Creates a unit struct and a default implementation for the `key`, `get`, and `set`. The macro
/// uses a syntax similar to defining a function. Parameters are concatenated to produce a key;
/// they must be `scale` encodable. The return type is used to auto encode and decode the database
/// value bytes using `borsh`.
///
/// # Arguments
///
/// * `db_name` - A database name
/// * `field_name` - An item name
/// * `args` - Comma separated list of key arguments
/// * `field_type` - The return type
///
/// # Example
///
/// ```ignore
/// create_db!(
///   TributariesDb {
///     AttemptsDb: (key_bytes: &[u8], attempt_id: u32) -> u64,
///     ExpiredDb: (genesis: [u8; 32]) -> Vec<u8>
///   }
/// )
/// ```
#[macro_export]
macro_rules! create_db {
  ($db_name: ident {
    $(
      $field_name: ident:
        $(<$($generic_name: tt: $generic_type: tt),+>)?(
          $($arg: ident: $arg_type: ty),*
        ) -> $field_type: ty$(,)?
    )*
  }) => {
    $(
      #[derive(Clone, Debug)]
      pub(crate) struct $field_name$(
        <$($generic_name: $generic_type),+>
      )?$(
        (core::marker::PhantomData<($($generic_name),+)>)
      )?;
      impl$(<$($generic_name: $generic_type),+>)? $field_name$(<$($generic_name),+>)? {
        pub(crate) fn key($($arg: $arg_type),*) -> Vec<u8> {
          use scale::Encode;
          $crate::serai_db_key(
            stringify!($db_name).as_bytes(),
            stringify!($field_name).as_bytes(),
            ($($arg),*).encode()
          )
        }
        pub(crate) fn set(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*,
          data: &$field_type
        ) {
          let key = Self::key($($arg),*);
          txn.put(&key, borsh::to_vec(data).unwrap());
        }
        pub(crate) fn get(
          getter: &impl Get,
          $($arg: $arg_type),*
        ) -> Option<$field_type> {
          getter.get(Self::key($($arg),*)).map(|data| {
            borsh::from_slice(data.as_ref()).unwrap()
          })
        }
        // Returns a PhantomData of all generic types so if the generic was only used in the value,
        // not the keys, this doesn't have unused generic types
        #[allow(dead_code)]
        pub(crate) fn del(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> core::marker::PhantomData<($($($generic_name),+)?)> {
          txn.del(&Self::key($($arg),*));
          core::marker::PhantomData
        }

        pub(crate) fn take(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let key = Self::key($($arg),*);
          let res = txn.get(&key).map(|data| borsh::from_slice(data.as_ref()).unwrap());
          if res.is_some() {
            txn.del(key);
          }
          res
        }
      }
    )*
  };
}
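
As a sketch of what the generated API looks like in use, here is a hypothetical invocation inside the defining crate (the `ExampleDb` and `LastBlock` names are invented; `scale` and `borsh` must be dependencies of that crate, since the expansion calls both):

create_db!(
  ExampleDb {
    LastBlock: (chain_id: u32) -> u64,
  }
);

fn demo(db: &mut impl Db) {
  let mut txn = db.txn();
  // The key is the "ExampleDb"/"LastBlock" namespace plus the SCALE-encoded chain_id
  LastBlock::set(&mut txn, 0, &123);
  // Values round-trip through borsh
  assert_eq!(LastBlock::get(&txn, 0), Some(123));
  txn.commit();
}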

#[macro_export]
macro_rules! db_channel {
  ($db_name: ident {
    $($field_name: ident:
      $(<$($generic_name: tt: $generic_type: tt),+>)?(
        $($arg: ident: $arg_type: ty),*
      ) -> $field_type: ty$(,)?
    )*
  }) => {
    $(
      create_db! {
        $db_name {
          $field_name: $(<$($generic_name: $generic_type),+>)?(
            $($arg: $arg_type,)*
            index: u32
          ) -> $field_type
        }
      }

      impl$(<$($generic_name: $generic_type),+>)? $field_name$(<$($generic_name),+>)? {
        pub(crate) fn send(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
          , value: &$field_type
        ) {
          // Use index 0 to store the amount of messages
          let messages_sent_key = Self::key($($arg,)* 0);
          let messages_sent = txn.get(&messages_sent_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);
          txn.put(&messages_sent_key, (messages_sent + 1).to_le_bytes());

          // + 2 as index 1 is used for the amount of messages read
          // Using distinct counters enables send to be called without mutating anything recv may
          // read at the same time
          let index_to_use = messages_sent + 2;

          Self::set(txn, $($arg,)* index_to_use, value);
        }
        pub(crate) fn peek(
          getter: &impl Get
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let messages_recvd_key = Self::key($($arg,)* 1);
          let messages_recvd = getter.get(&messages_recvd_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);

          let index_to_read = messages_recvd + 2;

          Self::get(getter, $($arg,)* index_to_read)
        }
        pub(crate) fn try_recv(
          txn: &mut impl DbTxn
          $(, $arg: $arg_type)*
        ) -> Option<$field_type> {
          let messages_recvd_key = Self::key($($arg,)* 1);
          let messages_recvd = txn.get(&messages_recvd_key).map(|counter| {
            u32::from_le_bytes(counter.try_into().unwrap())
          }).unwrap_or(0);

          let index_to_read = messages_recvd + 2;

          let res = Self::get(txn, $($arg,)* index_to_read);
          if res.is_some() {
            Self::del(txn, $($arg,)* index_to_read);
            txn.put(&messages_recvd_key, (messages_recvd + 1).to_le_bytes());
          }
          res
        }
      }
    )*
  };
}
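
The net effect is an on-disk FIFO: index 0 holds the send counter, index 1 the receive counter, and message i lives at index i + 2, so senders and receivers never write the same counter key. A hypothetical channel built on the macro (all names invented):

db_channel!(
  ExampleChannels {
    Events: (genesis: [u8; 32]) -> u64,
  }
);

fn demo(db: &mut impl Db) {
  let genesis = [0; 32];
  let mut txn = db.txn();
  Events::send(&mut txn, genesis, &1);
  Events::send(&mut txn, genesis, &2);
  // Receipt is FIFO; peek reads without advancing, try_recv deletes what it returns
  assert_eq!(Events::try_recv(&mut txn, genesis), Some(1));
  assert_eq!(Events::peek(&txn, genesis), Some(2));
  assert_eq!(Events::try_recv(&mut txn, genesis), Some(2));
  assert_eq!(Events::try_recv(&mut txn, genesis), None);
  txn.commit();
}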
@@ -1,102 +1,100 @@
-use core::fmt::Debug;
-extern crate alloc;
-use alloc::sync::Arc;
-use std::{
-  sync::RwLock,
-  collections::{HashSet, HashMap},
-};
-
-/// An object implementing get.
-pub trait Get: Send + Sync + Debug {
+mod create_db;
+pub use create_db::*;
+
+mod mem;
+pub use mem::*;
+
+#[cfg(feature = "rocksdb")]
+mod rocks;
+#[cfg(feature = "rocksdb")]
+pub use rocks::{RocksDB, new_rocksdb};
+
+#[cfg(feature = "parity-db")]
+mod parity_db;
+#[cfg(feature = "parity-db")]
+pub use parity_db::{ParityDb, new_parity_db};
+
+/// An object implementing `get`.
+pub trait Get {
+  /// Get a value from the database.
   fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>>;
 }
 
-/// An atomic database operation.
+/// An atomic database transaction.
+///
+/// A transaction is only required to atomically commit. It is not required that two `Get` calls
+/// made with the same transaction return the same result, if another transaction wrote to that
+/// key.
+///
+/// If two transactions are created, and both write (including deletions) to the same key, behavior
+/// is undefined. The transaction may block, deadlock, panic, overwrite one of the two values
+/// randomly, or any other action, at time of write or at time of commit.
 #[must_use]
-pub trait DbTxn: Send + Sync + Debug + Get {
+pub trait DbTxn: Sized + Send + Get {
+  /// Write a value to this key.
   fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>);
+  /// Delete the value from this key.
   fn del(&mut self, key: impl AsRef<[u8]>);
+  /// Commit this transaction.
   fn commit(self);
+  /// Close this transaction.
+  ///
+  /// This is equivalent to `Drop` on transactions which can be dropped. This is explicit and works
+  /// with transactions which can't be dropped.
+  fn close(self) {
+    drop(self);
+  }
 }
 
-/// A database supporting atomic operations.
-pub trait Db: 'static + Send + Sync + Clone + Debug + Get {
+// Credit for the idea goes to https://jack.wrenn.fyi/blog/undroppable
+pub struct Undroppable<T>(Option<T>);
+impl<T> Drop for Undroppable<T> {
+  fn drop(&mut self) {
+    // Use an assertion at compile time to prevent this code from compiling if generated
+    #[allow(clippy::assertions_on_constants)]
+    const {
+      assert!(false, "Undroppable DbTxn was dropped. Ensure all code paths call commit or close");
+    }
+  }
+}
+impl<T: DbTxn> Get for Undroppable<T> {
+  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
+    self.0.as_ref().unwrap().get(key)
+  }
+}
+impl<T: DbTxn> DbTxn for Undroppable<T> {
+  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
+    self.0.as_mut().unwrap().put(key, value);
+  }
+  fn del(&mut self, key: impl AsRef<[u8]>) {
+    self.0.as_mut().unwrap().del(key);
+  }
+  fn commit(mut self) {
+    self.0.take().unwrap().commit();
+    let _ = core::mem::ManuallyDrop::new(self);
+  }
+  fn close(mut self) {
+    drop(self.0.take().unwrap());
+    let _ = core::mem::ManuallyDrop::new(self);
+  }
+}
+
+/// A database supporting atomic transaction.
+pub trait Db: 'static + Send + Sync + Clone + Get {
+  /// The type representing a database transaction.
   type Transaction<'a>: DbTxn;
+  /// Calculate a key for a database entry.
+  ///
+  /// Keys are separated by the database, the item within the database, and the item's key itself.
   fn key(db_dst: &'static [u8], item_dst: &'static [u8], key: impl AsRef<[u8]>) -> Vec<u8> {
     let db_len = u8::try_from(db_dst.len()).unwrap();
     let dst_len = u8::try_from(item_dst.len()).unwrap();
     [[db_len].as_ref(), db_dst, [dst_len].as_ref(), item_dst, key.as_ref()].concat()
   }
-  fn txn(&mut self) -> Self::Transaction<'_>;
-}
-
-/// An atomic operation for the in-memory databae.
-#[must_use]
-#[derive(PartialEq, Eq, Debug)]
-pub struct MemDbTxn<'a>(&'a MemDb, HashMap<Vec<u8>, Vec<u8>>, HashSet<Vec<u8>>);
-
-impl<'a> Get for MemDbTxn<'a> {
-  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
-    if self.2.contains(key.as_ref()) {
-      return None;
-    }
-    self.1.get(key.as_ref()).cloned().or_else(|| self.0 .0.read().unwrap().get(key.as_ref()).cloned())
-  }
-}
-impl<'a> DbTxn for MemDbTxn<'a> {
-  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
-    self.2.remove(key.as_ref());
-    self.1.insert(key.as_ref().to_vec(), value.as_ref().to_vec());
-  }
-  fn del(&mut self, key: impl AsRef<[u8]>) {
-    self.1.remove(key.as_ref());
-    self.2.insert(key.as_ref().to_vec());
-  }
-  fn commit(mut self) {
-    let mut db = self.0 .0.write().unwrap();
-    for (key, value) in self.1.drain() {
-      db.insert(key, value);
-    }
-    for key in self.2 {
-      db.remove(&key);
-    }
-  }
-}
-
-/// An in-memory database.
-#[derive(Clone, Debug)]
-pub struct MemDb(Arc<RwLock<HashMap<Vec<u8>, Vec<u8>>>>);
-
-impl PartialEq for MemDb {
-  fn eq(&self, other: &Self) -> bool {
-    *self.0.read().unwrap() == *other.0.read().unwrap()
-  }
-}
-impl Eq for MemDb {}
-
-impl Default for MemDb {
-  fn default() -> Self {
-    Self(Arc::new(RwLock::new(HashMap::new())))
-  }
-}
-
-impl MemDb {
-  /// Create a new in-memory database.
-  pub fn new() -> Self {
-    Self::default()
-  }
-}
-
-impl Get for MemDb {
-  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
-    self.0.read().unwrap().get(key.as_ref()).cloned()
-  }
-}
-impl Db for MemDb {
-  type Transaction<'a> = MemDbTxn<'a>;
-  fn txn(&mut self) -> MemDbTxn<'_> {
-    MemDbTxn(self, HashMap::new(), HashSet::new())
-  }
-}
-
-// TODO: Also bind RocksDB
+  /// Open a new transaction which may be dropped.
+  fn unsafe_txn(&mut self) -> Self::Transaction<'_>;
+  /// Open a new transaction which must be committed or closed.
+  fn txn(&mut self) -> Undroppable<Self::Transaction<'_>> {
+    Undroppable(Some(self.unsafe_txn()))
+  }
+}
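
The upshot of `Undroppable`: any transaction opened via `txn` must end in `commit` or `close`, and forgetting to do so fails the build, since the `const` assertion errors whenever drop glue for the type is actually generated. A minimal sketch of both paths against the in-memory backend:

use serai_db::{Db, DbTxn, MemDb};

fn demo(db: &mut MemDb) {
  let mut txn = db.txn();
  txn.put(b"key", b"value");
  txn.commit(); // consumes the transaction without ever running its Drop glue

  let txn = db.txn();
  txn.close(); // explicitly discards the transaction

  // let leaked = db.txn(); // with no commit/close, drop glue would be generated,
  //                        // and the const { assert!(false, ..) } aborts compilation
}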
common/db/src/mem.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use core::fmt::Debug;
use std::{
  sync::{Arc, RwLock},
  collections::{HashSet, HashMap},
};

use crate::*;

/// An atomic operation for the in-memory database.
#[must_use]
#[derive(PartialEq, Eq, Debug)]
pub struct MemDbTxn<'a>(&'a MemDb, HashMap<Vec<u8>, Vec<u8>>, HashSet<Vec<u8>>);

impl Get for MemDbTxn<'_> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    if self.2.contains(key.as_ref()) {
      return None;
    }
    self
      .1
      .get(key.as_ref())
      .cloned()
      .or_else(|| self.0 .0.read().unwrap().get(key.as_ref()).cloned())
  }
}
impl DbTxn for MemDbTxn<'_> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.2.remove(key.as_ref());
    self.1.insert(key.as_ref().to_vec(), value.as_ref().to_vec());
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.1.remove(key.as_ref());
    self.2.insert(key.as_ref().to_vec());
  }
  fn commit(mut self) {
    let mut db = self.0 .0.write().unwrap();
    for (key, value) in self.1.drain() {
      db.insert(key, value);
    }
    for key in self.2 {
      db.remove(&key);
    }
  }
}

/// An in-memory database.
#[derive(Clone, Debug)]
pub struct MemDb(Arc<RwLock<HashMap<Vec<u8>, Vec<u8>>>>);

impl PartialEq for MemDb {
  fn eq(&self, other: &MemDb) -> bool {
    *self.0.read().unwrap() == *other.0.read().unwrap()
  }
}
impl Eq for MemDb {}

impl Default for MemDb {
  fn default() -> MemDb {
    MemDb(Arc::new(RwLock::new(HashMap::new())))
  }
}

impl MemDb {
  /// Create a new in-memory database.
  pub fn new() -> MemDb {
    MemDb::default()
  }
}

impl Get for MemDb {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    self.0.read().unwrap().get(key.as_ref()).cloned()
  }
}
impl Db for MemDb {
  type Transaction<'a> = MemDbTxn<'a>;
  fn unsafe_txn(&mut self) -> MemDbTxn<'_> {
    MemDbTxn(self, HashMap::new(), HashSet::new())
  }
}
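
MemDbTxn is a write overlay: pending puts sit in its HashMap, pending deletions in its HashSet, and get consults the overlay before falling through to the shared map, so a transaction observes its own uncommitted writes. A quick sketch of the visible behavior:

use serai_db::{Db, DbTxn, Get, MemDb};

fn demo() {
  let mut db = MemDb::new();

  let mut txn = db.txn();
  txn.put(b"k", b"v");
  // The overlay makes the write visible inside the transaction pre-commit
  assert_eq!(txn.get(b"k"), Some(b"v".to_vec()));
  txn.del(b"k");
  // A pending delete masks the key, even one just written in this same overlay
  assert!(txn.get(b"k").is_none());
  txn.commit();

  // After commit, the shared map reflects the overlay's net effect
  assert!(db.get(b"k").is_none());
}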
common/db/src/parity_db.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
use std::sync::Arc;

pub use ::parity_db::{Options, Db as ParityDb};

use crate::*;

#[must_use]
pub struct Transaction<'a>(&'a Arc<ParityDb>, Vec<(u8, Vec<u8>, Option<Vec<u8>>)>);

impl Get for Transaction<'_> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    let mut res = self.0.get(&key);
    for change in &self.1 {
      if change.1 == key.as_ref() {
        res.clone_from(&change.2);
      }
    }
    res
  }
}
impl DbTxn for Transaction<'_> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.1.push((0, key.as_ref().to_vec(), Some(value.as_ref().to_vec())))
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.1.push((0, key.as_ref().to_vec(), None))
  }
  fn commit(self) {
    self.0.commit(self.1).unwrap()
  }
}

impl Get for Arc<ParityDb> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    ParityDb::get(self, 0, key.as_ref()).unwrap()
  }
}
impl Db for Arc<ParityDb> {
  type Transaction<'a> = Transaction<'a>;
  fn unsafe_txn(&mut self) -> Self::Transaction<'_> {
    Transaction(self, vec![])
  }
}

pub fn new_parity_db(path: &str) -> Arc<ParityDb> {
  Arc::new(ParityDb::open_or_create(&Options::with_columns(std::path::Path::new(path), 1)).unwrap())
}
common/db/src/rocks.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use std::sync::Arc;

use rocksdb::{
  DBCompressionType, ThreadMode, SingleThreaded, LogLevel, WriteOptions,
  Transaction as RocksTransaction, Options, OptimisticTransactionDB,
};

use crate::*;

#[must_use]
pub struct Transaction<'a, T: ThreadMode>(
  RocksTransaction<'a, OptimisticTransactionDB<T>>,
  &'a OptimisticTransactionDB<T>,
);

impl<T: ThreadMode> Get for Transaction<'_, T> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    self.0.get(key).expect("couldn't read from RocksDB via transaction")
  }
}
impl<T: ThreadMode> DbTxn for Transaction<'_, T> {
  fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
    self.0.put(key, value).expect("couldn't write to RocksDB via transaction")
  }
  fn del(&mut self, key: impl AsRef<[u8]>) {
    self.0.delete(key).expect("couldn't delete from RocksDB via transaction")
  }
  fn commit(self) {
    self.0.commit().expect("couldn't commit to RocksDB via transaction");
    self.1.flush_wal(true).expect("couldn't flush RocksDB WAL");
    self.1.flush().expect("couldn't flush RocksDB");
  }
}

impl<T: ThreadMode> Get for Arc<OptimisticTransactionDB<T>> {
  fn get(&self, key: impl AsRef<[u8]>) -> Option<Vec<u8>> {
    OptimisticTransactionDB::get(self, key).expect("couldn't read from RocksDB")
  }
}
impl<T: Send + ThreadMode + 'static> Db for Arc<OptimisticTransactionDB<T>> {
  type Transaction<'a> = Transaction<'a, T>;
  fn unsafe_txn(&mut self) -> Self::Transaction<'_> {
    let mut opts = WriteOptions::default();
    opts.set_sync(true);
    Transaction(self.transaction_opt(&opts, &Default::default()), &**self)
  }
}

pub type RocksDB = Arc<OptimisticTransactionDB<SingleThreaded>>;
pub fn new_rocksdb(path: &str) -> RocksDB {
  let mut options = Options::default();
  options.create_if_missing(true);
  options.set_compression_type(DBCompressionType::Zstd);

  options.set_wal_compression_type(DBCompressionType::Zstd);
  // 10 MB
  options.set_max_total_wal_size(10 * 1024 * 1024);
  options.set_wal_size_limit_mb(10);

  options.set_log_level(LogLevel::Warn);
  // 1 MB
  options.set_max_log_file_size(1024 * 1024);
  options.set_recycle_log_file_num(1);

  Arc::new(OptimisticTransactionDB::open(&options, path).unwrap())
}
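
Tying the binding together: new_rocksdb opens (or creates) an optimistic-transaction database with Zstd compression and capped WAL/log sizes, and every committed transaction is synchronously flushed. A usage sketch, assuming the rocksdb feature is enabled and an illustrative path:

use serai_db::{Db, DbTxn, Get, new_rocksdb};

fn demo() {
  // Opens ./serai-db-example, creating it if missing
  let mut db = new_rocksdb("./serai-db-example");
  let mut txn = db.txn();
  txn.put(b"height", 42u64.to_le_bytes());
  txn.commit(); // commits, then flushes the WAL and the memtables

  assert_eq!(db.get(b"height"), Some(42u64.to_le_bytes().to_vec()));
}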
common/env/Cargo.toml (new file, vendored, 17 lines)
@@ -0,0 +1,17 @@
[package]
name = "serai-env"
version = "0.1.0"
description = "A common library for Serai apps to access environment variables"
license = "AGPL-3.0-only"
repository = "https://github.com/serai-dex/serai/tree/develop/common/env"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = []
edition = "2021"
rust-version = "1.71"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true
common/env/src/lib.rs (new file, vendored, 9 lines)
@@ -0,0 +1,9 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

// Obtain a variable from the Serai environment/secret store.
pub fn var(variable: &str) -> Option<String> {
  // TODO: Move this to a proper secret store
  // TODO: Unset this variable
  std::env::var(variable).ok()
}
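
Usage is a plain optional lookup; a sketch with an invented variable name:

fn main() {
  // Hypothetical variable name; var returns None if it's unset
  match serai_env::var("DB_PATH") {
    Some(path) => println!("using database at {path}"),
    None => println!("DB_PATH unset, using a default"),
  }
}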
common/patchable-async-sleep/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[package]
name = "patchable-async-sleep"
version = "0.1.0"
description = "An async sleep function, patchable to the preferred runtime"
license = "MIT"
repository = "https://github.com/serai-dex/serai/tree/develop/common/patchable-async-sleep"
authors = ["Luke Parker <lukeparker5132@gmail.com>"]
keywords = ["async", "sleep", "tokio", "smol", "async-std"]
edition = "2021"
rust-version = "1.71"

[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]

[lints]
workspace = true

[dependencies]
tokio = { version = "1", default-features = false, features = [ "time"] }