Mirror of https://github.com/serai-dex/serai.git (synced 2025-12-08 20:29:23 +00:00)
Response to usage of unwrap in non-test code
This commit replaces all usage of `unwrap` with `expect` within `networks/monero`, clarifying why each risked panic is unreachable. It also replaces some uses of `unwrap` with solutions which are guaranteed not to fail. Notably, compilation on 128-bit systems is prevented, ensuring `u64::try_from(usize::MAX)` will never panic at runtime. Slight breaking changes are included as necessary to remove some otherwise-avoidable panics.
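The 128-bit guard mentioned above is a const-evaluation trick. A minimal standalone sketch of the idea (the constant name and `main` here are illustrative, not the crate's code): if `usize` were wider than `u64`, the subtraction below would underflow during const evaluation and the crate would fail to build, so the `usize -> u64` conversions in the diff can never panic at runtime.

```rust
// Compile-time guard: refuses to build on any platform where `usize` is wider than `u64`.
// On such a platform, `u64::BITS - usize::BITS` underflows during const evaluation,
// which is a compile error rather than a runtime panic.
const _USIZE_FITS_IN_U64: [(); (u64::BITS - usize::BITS) as usize] =
  [(); (u64::BITS - usize::BITS) as usize];

fn main() {
  // With the guard in place, this conversion is statically known to succeed.
  let max = u64::try_from(usize::MAX).expect("usize fits in u64 per the guard above");
  println!("usize::MAX as u64: {max}");
}
```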
@@ -29,7 +29,11 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
 let uv3 = u * v3;
 let v7 = v3 * v3 * v;
 let uv7 = u * v7;
-uv3 * uv7.pow((-FieldElement::from(5u8)) * FieldElement::from(8u8).invert().unwrap())
+uv3 *
+uv7.pow(
+(-FieldElement::from(5u8)) *
+FieldElement::from(8u8).invert().expect("eight was coprime with the prime 2^{255}-19"),
+)
 };
 let x = X.square() * x;

@@ -45,9 +49,23 @@ pub fn hash_to_point(bytes: [u8; 32]) -> EdwardsPoint {
 #[allow(non_snake_case)]
 let mut Y = z - w;

-Y *= Z.invert().unwrap();
+/*
+If sign, `z = -486662`, else, `z = -486662 * v`
+`w = v + 1`
+
+We need `z + w \ne 0`, which would require `z \cong -w \mod 2^{255}-19`. This requires:
+- If `sign`, `v \mod 2^{255}-19 \ne 486661`.
+- If `!sign`, `(v + 1) \mod 2^{255}-19 \ne (v * 486662) \mod 2^{255}-19` which is equivalent to
+`(v * 486661) \mod 2^{255}-19 \ne 1`.
+
+In summary, if `sign`, `v` must not `486661`, and if `!sign`, `v` must not be the
+multiplicative inverse of `486661`. Since `v` is the output of a hash function, this should
+have negligible probability. Additionally, since the definition of `sign` is dependent on `v`,
+it may be truly impossible to reach.
+*/
+Y *= Z.invert().expect("if sign, v was 486661. if !sign, v was 486661^{-1}");
 let mut bytes = Y.to_repr();
 bytes[31] |= sign.unwrap_u8() << 7;

-decompress_point(bytes).unwrap().mul_by_cofactor()
+decompress_point(bytes).expect("point from hash-to-curve wasn't on-curve").mul_by_cofactor()
 }
@@ -28,7 +28,7 @@ fn keccak256(data: &[u8]) -> [u8; 32] {
 #[allow(non_snake_case)]
 pub static H: LazyLock<EdwardsPoint> = LazyLock::new(|| {
 decompress_point(keccak256(&ED25519_BASEPOINT_POINT.compress().to_bytes()))
-.unwrap()
+.expect("known on-curve point wasn't on-curve")
 .mul_by_cofactor()
 });

@@ -78,11 +78,11 @@ pub fn bulletproofs_generators(dst: &'static [u8]) -> Generators {
 let i = 2 * i;

 let mut even = preimage.clone();
-write_varint(&i, &mut even).unwrap();
+write_varint(&i, &mut even).expect("write failed but <Vec as io::Write> doesn't fail");
 res.H.push(hash_to_point(keccak256(&even)));

 let mut odd = preimage.clone();
-write_varint(&(i + 1), &mut odd).unwrap();
+write_varint(&(i + 1), &mut odd).expect("write failed but <Vec as io::Write> doesn't fail");
 res.G.push(hash_to_point(keccak256(&odd)));
 }
 res
@@ -18,10 +18,12 @@ use curve25519_dalek::{
 const VARINT_CONTINUATION_MASK: u8 = 0b1000_0000;

 mod sealed {
+use core::fmt::Debug;
+
 /// A trait for a number readable/writable as a VarInt.
 ///
 /// This is sealed to prevent unintended implementations.
-pub trait VarInt: TryInto<u64> + TryFrom<u64> + Copy {
+pub trait VarInt: TryInto<u64, Error: Debug> + TryFrom<u64, Error: Debug> + Copy {
 const BITS: usize;
 }

@@ -34,6 +36,10 @@ mod sealed {
 impl VarInt for u64 {
 const BITS: usize = 64;
 }
+// Don't compile for platforms where `usize` exceeds `u64`, preventing various possible runtime
+// exceptions
+const _NO_128_BIT_PLATFORMS: [(); (u64::BITS - usize::BITS) as usize] =
+[(); (u64::BITS - usize::BITS) as usize];
 impl VarInt for usize {
 const BITS: usize = core::mem::size_of::<usize>() * 8;
 }

@@ -43,8 +49,12 @@ mod sealed {
 ///
 /// This function will panic if the VarInt exceeds u64::MAX.
 pub fn varint_len<V: sealed::VarInt>(varint: V) -> usize {
-let varint_u64: u64 = varint.try_into().map_err(|_| "varint exceeded u64").unwrap();
-((usize::try_from(u64::BITS - varint_u64.leading_zeros()).unwrap().saturating_sub(1)) / 7) + 1
+let varint_u64: u64 = varint.try_into().expect("varint exceeded u64");
+((usize::try_from(u64::BITS - varint_u64.leading_zeros())
+.expect("64 > usize::MAX")
+.saturating_sub(1)) /
+7) +
+1
 }

 /// Write a byte.

@@ -58,9 +68,10 @@ pub fn write_byte<W: Write>(byte: &u8, w: &mut W) -> io::Result<()> {
 ///
 /// This will panic if the VarInt exceeds u64::MAX.
 pub fn write_varint<W: Write, U: sealed::VarInt>(varint: &U, w: &mut W) -> io::Result<()> {
-let mut varint: u64 = (*varint).try_into().map_err(|_| "varint exceeded u64").unwrap();
+let mut varint: u64 = (*varint).try_into().expect("varint exceeded u64");
 while {
-let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK)).unwrap();
+let mut b = u8::try_from(varint & u64::from(!VARINT_CONTINUATION_MASK))
+.expect("& eight_bit_mask left more than 8 bits set");
 varint >>= 7;
 if varint != 0 {
 b |= VARINT_CONTINUATION_MASK;
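For reference, a self-contained sketch of the encoding `write_varint` implements: seven payload bits per byte, with the continuation bit set while more bytes follow. This is a simplified free function over `u64` (the crate's version is generic over the sealed `VarInt` trait shown above).

```rust
use std::io::Write;

// LEB128-style varint: 7 bits per byte, high bit = "more bytes follow".
fn write_varint<W: Write>(mut varint: u64, w: &mut W) -> std::io::Result<()> {
  loop {
    let mut b = u8::try_from(varint & 0x7f).expect("masked to 7 bits");
    varint >>= 7;
    if varint != 0 {
      b |= 0x80;
    }
    w.write_all(&[b])?;
    if varint == 0 {
      break Ok(());
    }
  }
}

fn main() {
  let mut buf = Vec::new();
  write_varint(300, &mut buf).expect("<Vec as io::Write> doesn't fail");
  assert_eq!(buf, [0xac, 0x02]); // 300 -> 0x2c with continuation bit, then 0x02
  println!("{buf:02x?}");
}
```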
@@ -210,7 +221,11 @@ pub fn read_array<R: Read, T: Debug, F: Fn(&mut R) -> io::Result<T>, const N: us
 f: F,
 r: &mut R,
 ) -> io::Result<[T; N]> {
-read_raw_vec(f, N, r).map(|vec| vec.try_into().unwrap())
+read_raw_vec(f, N, r).map(|vec| {
+vec.try_into().expect(
+"read vector of specific length yet couldn't transform to an array of the same length",
+)
+})
 }

 /// Read a length-prefixed variable-length list of elements.
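The `read_array` change above leans on the standard `Vec<T>` to `[T; N]` conversion, which only fails on a length mismatch. A tiny illustrative example of that conversion:

```rust
fn main() {
  let v = vec![1u8, 2, 3, 4];
  // Vec<T> -> [T; N] only errors if the lengths differ; here both are 4.
  let arr: [u8; 4] = v.try_into().expect("read exactly N elements");
  println!("{arr:?}");
}
```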
@@ -124,7 +124,7 @@ impl Commitment {
 /// defined serialization.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(32 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -230,7 +230,7 @@ impl Decoys {
 pub fn serialize(&self) -> Vec<u8> {
 let mut res =
 Vec::with_capacity((1 + (2 * self.offsets.len())) + 1 + 1 + (self.ring.len() * 64));
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }
@@ -14,7 +14,8 @@ use monero_io::*;
 static PRECOMPUTED_SCALARS: LazyLock<[Scalar; 8]> = LazyLock::new(|| {
 let mut precomputed_scalars = [Scalar::ONE; 8];
 for (i, scalar) in precomputed_scalars.iter_mut().enumerate().skip(1) {
-*scalar = Scalar::from(u8::try_from((i * 2) + 1).unwrap());
+*scalar =
+Scalar::from(u64::try_from((i * 2) + 1).expect("enumerating more than u64::MAX / 2 items"));
 }
 precomputed_scalars
 });

@@ -59,7 +60,7 @@ impl UnreducedScalar {
 let bits = self.as_bits();
 let mut naf = [0i8; 256];
 for (b, bit) in bits.into_iter().enumerate() {
-naf[b] = i8::try_from(bit).unwrap();
+naf[b] = i8::try_from(bit).expect("bit didn't fit within an i8");
 }

 for i in 0 .. 256 {

@@ -129,8 +130,13 @@ impl UnreducedScalar {
 for &numb in self.non_adjacent_form().iter().rev() {
 recovered += recovered;
 match numb.cmp(&0) {
-Ordering::Greater => recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).unwrap() / 2],
-Ordering::Less => recovered -= PRECOMPUTED_SCALARS[usize::try_from(-numb).unwrap() / 2],
+Ordering::Greater => {
+recovered += PRECOMPUTED_SCALARS[usize::try_from(numb).expect("positive i8 -> usize") / 2]
+}
+Ordering::Less => {
+recovered -=
+PRECOMPUTED_SCALARS[usize::try_from(-numb).expect("negated negative i8 -> usize") / 2]
+}
 Ordering::Equal => (),
 }
 }
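The scalar-recovery loop above walks a non-adjacent form from the most significant digit down, doubling and then adding or subtracting the precomputed odd multiple. A minimal sketch of the same recovery over plain integers (the function name and the `i64` type are illustrative, not the crate's API):

```rust
// Recover an integer from a little-endian NAF-style digit string by
// doubling and adding, mirroring the loop in the hunk above.
fn recover_from_naf(naf: &[i8]) -> i64 {
  let mut recovered = 0i64;
  for &digit in naf.iter().rev() {
    recovered += recovered; // double
    recovered += i64::from(digit);
  }
  recovered
}

fn main() {
  // 7 = 8 - 1 has the little-endian NAF [-1, 0, 0, 1]
  assert_eq!(recover_from_naf(&[-1, 0, 0, 1]), 7);
  println!("{}", recover_from_naf(&[-1, 0, 0, 1]));
}
```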
@@ -16,7 +16,9 @@ fn generators(prefix: &'static str, path: &str) {
 generators_string.extend(
 format!(
 "
-curve25519_dalek::edwards::CompressedEdwardsY({:?}).decompress().unwrap(),
+curve25519_dalek::edwards::CompressedEdwardsY({:?})
+.decompress()
+.expect(\"generator from build script wasn't on-curve\"),
 ",
 generator.compress().to_bytes()
 )

@@ -33,10 +35,10 @@ fn generators(prefix: &'static str, path: &str) {
 let mut H_str = String::new();
 serialize(&mut H_str, &generators.H);

-let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
+let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
 let _ = remove_file(&path);
 File::create(&path)
-.unwrap()
+.expect("failed to create file in $OUT_DIR")
 .write_all(
 format!(
 "

@@ -52,15 +54,15 @@ fn generators(prefix: &'static str, path: &str) {
 )
 .as_bytes(),
 )
-.unwrap();
+.expect("couldn't write generated source code to file on disk");
 }

 #[cfg(not(feature = "compile-time-generators"))]
 fn generators(prefix: &'static str, path: &str) {
-let path = Path::new(&env::var("OUT_DIR").unwrap()).join(path);
+let path = Path::new(&env::var("OUT_DIR").expect("cargo didn't set $OUT_DIR")).join(path);
 let _ = remove_file(&path);
 File::create(&path)
-.unwrap()
+.expect("failed to create file in $OUT_DIR")
 .write_all(
 format!(
 r#"

@@ -71,7 +73,7 @@ fn generators(prefix: &'static str, path: &str) {
 )
 .as_bytes(),
 )
-.unwrap();
+.expect("couldn't write generated source code to file on disk");
 }

 fn main() {
@@ -5,7 +5,6 @@
 #![allow(non_snake_case)]

 use std_shims::{
-vec,
 vec::Vec,
 io::{self, Read, Write},
 };

@@ -124,9 +123,15 @@ impl Bulletproof {
 let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
 Ok(Bulletproof::Original(
 OriginalStatement::new(&commitments)
-.unwrap()
-.prove(rng, OriginalWitness::new(outputs).unwrap())
-.unwrap(),
+.expect("failed to create statement despite checking amount of commitments")
+.prove(
+rng,
+OriginalWitness::new(outputs)
+.expect("failed to create witness despite checking amount of commitments"),
+)
+.expect(
+"failed to prove Bulletproof::Original despite ensuring statement/witness consistency",
+),
 ))
 }

@@ -144,9 +149,15 @@ impl Bulletproof {
 let commitments = outputs.iter().map(Commitment::calculate).collect::<Vec<_>>();
 Ok(Bulletproof::Plus(
 PlusStatement::new(&commitments)
-.unwrap()
-.prove(rng, &Zeroizing::new(PlusWitness::new(outputs).unwrap()))
-.unwrap(),
+.expect("failed to create statement despite checking amount of commitments")
+.prove(
+rng,
+&Zeroizing::new(
+PlusWitness::new(outputs)
+.expect("failed to create witness despite checking amount of commitments"),
+),
+)
+.expect("failed to prove Bulletproof::Plus despite ensuring statement/witness consistency"),
 ))
 }

@@ -255,8 +266,8 @@ impl Bulletproof {

 /// Serialize a Bulletproof(+) to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
-let mut serialized = vec![];
-self.write(&mut serialized).unwrap();
+let mut serialized = Vec::with_capacity(512);
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }
@@ -174,7 +174,11 @@ impl IpStatement {
 R_vec.push(R * INV_EIGHT());

 // Now that we've calculate L, R, transcript them to receive x (26-27)
-transcript = Self::transcript_L_R(transcript, *L_vec.last().unwrap(), *R_vec.last().unwrap());
+transcript = Self::transcript_L_R(
+transcript,
+*L_vec.last().expect("couldn't get last L_vec despite always being non-empty"),
+*R_vec.last().expect("couldn't get last R_vec despite always being non-empty"),
+);
 let x = transcript;
 let x_inv = x.invert();
@@ -227,8 +227,11 @@ impl<'a> AggregateRangeStatement<'a> {
 let x_ip = transcript;

 let ip = IpStatement::new_without_P_transcript(y_inv_pow_n, x_ip)
-.prove(transcript, IpWitness::new(l, r).unwrap())
-.unwrap();
+.prove(
+transcript,
+IpWitness::new(l, r).expect("Bulletproofs::Original created an invalid IpWitness"),
+)
+.expect("Bulletproofs::Original failed to prove the inner-product");

 let res = AggregateRangeProof { A, S, T1, T2, tau_x, mu, t_hat, ip };
 #[cfg(debug_assertions)]
@@ -106,7 +106,9 @@ impl<'a> AggregateRangeStatement<'a> {

 let mut d = ScalarVector::new(mn);
 for j in 1 ..= V.len() {
-z_pow.push(*z_pow.last().unwrap() * z_pow[0]);
+z_pow.push(
+*z_pow.last().expect("couldn't get last z_pow despite always being non-empty") * z_pow[0],
+);
 d = d + &(Self::d_j(j, V.len()) * (z_pow[j - 1]));
 }

@@ -229,8 +231,15 @@ impl<'a> AggregateRangeStatement<'a> {
 Some(AggregateRangeProof {
 A,
 wip: WipStatement::new(generators, A_hat, y)
-.prove(rng, transcript, &Zeroizing::new(WipWitness::new(a_l, a_r, alpha).unwrap()))
-.unwrap(),
+.prove(
+rng,
+transcript,
+&Zeroizing::new(
+WipWitness::new(a_l, a_r, alpha)
+.expect("Bulletproofs::Plus created an invalid WipWitness"),
+),
+)
+.expect("Bulletproof::Plus failed to prove the weighted inner-product"),
 })
 }
@@ -230,7 +230,9 @@ impl WipStatement {
 let c_l = a1.clone().weighted_inner_product(&b2, &y);
 let c_r = (a2.clone() * y_n_hat).weighted_inner_product(&b1, &y);

-let y_inv_n_hat = y_inv.pop().unwrap();
+let y_inv_n_hat = y_inv
+.pop()
+.expect("couldn't pop y_inv despite y_inv being of same length as times iterated");

 let mut L_terms = (a1.clone() * y_inv_n_hat)
 .0

@@ -331,7 +333,9 @@ impl WipStatement {
 let mut res = Vec::with_capacity(y.len());
 res.push(inv_y);
 while res.len() < y.len() {
-res.push(inv_y * res.last().unwrap());
+res.push(
+inv_y * res.last().expect("couldn't get last inv_y despite inv_y always being non-empty"),
+);
 }
 res
 };
@@ -199,9 +199,10 @@ fn core(
 // (c_p * I) + (c_c * D) + (s_i * PH)
 let R = match A_c1 {
 Mode::Sign(..) => EdwardsPoint::multiscalar_mul([c_p, c_c, s[i]], [I, D, &PH]),
-Mode::Verify(..) => {
-images_precomp.as_ref().unwrap().vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH])
-}
+Mode::Verify(..) => images_precomp
+.as_ref()
+.expect("value populated when verifying wasn't populated")
+.vartime_mixed_multiscalar_mul([c_p, c_c], [s[i]], [PH]),
 };

 to_hash.truncate(((2 * n) + 3) * 32);
@@ -223,7 +223,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 self
 .mask_recv
 .take()
-.unwrap()
+.expect("image was none multiple times, despite setting to Some on first iteration")
 .recv()
 .ok_or(FrostError::InternalError("CLSAG mask was not provided"))?,
 );

@@ -243,7 +243,8 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 // Accumulate the interpolated share
 let interpolated_key_image_share =
 addendum.key_image_share * lagrange::<dfg::Scalar>(l, view.included());
-*self.image.as_mut().unwrap() += interpolated_key_image_share;
+*self.image.as_mut().expect("image populated on first iteration wasn't Some") +=
+interpolated_key_image_share;

 self
 .key_image_shares

@@ -272,14 +273,15 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 // opening of the commitment being re-randomized (and what it's re-randomized to)
 let mut rng = ChaCha20Rng::from_seed(self.transcript.rng_seed(b"decoy_responses"));

-self.msg_hash = Some(msg_hash.try_into().expect("CLSAG message hash should be 32-bytes"));
+let msg_hash = msg_hash.try_into().expect("CLSAG message hash should be 32-bytes");
+self.msg_hash = Some(msg_hash);

 let sign_core = Clsag::sign_core(
 &mut rng,
 &self.image.expect("verifying a share despite never processing any addendums").0,
 &self.context,
 self.mask.expect("mask wasn't set"),
-self.msg_hash.as_ref().unwrap(),
+&msg_hash,
 nonce_sums[0][0].0,
 nonce_sums[0][1].0,
 );

@@ -301,7 +303,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 _: &[Vec<dfg::EdwardsPoint>],
 sum: dfg::Scalar,
 ) -> Option<Self::Signature> {
-let interim = self.interim.as_ref().unwrap();
+let interim = self.interim.as_ref().expect("verify called before sign_share");
 let mut clsag = interim.clsag.clone();
 // We produced shares as `r - p x`, yet the signature is actually `r - p x - c x`
 // Substract `c x` (saved as `c`) now

@@ -311,7 +313,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 self.context.decoys.ring(),
 &self.image.expect("verifying a signature despite never processing any addendums").0,
 &interim.pseudo_out,
-self.msg_hash.as_ref().unwrap(),
+self.msg_hash.as_ref().expect("verify called before sign_share"),
 )
 .is_ok()
 {

@@ -326,7 +328,7 @@ impl Algorithm<Ed25519> for ClsagMultisig {
 nonces: &[Vec<dfg::EdwardsPoint>],
 share: dfg::Scalar,
 ) -> Result<Vec<(dfg::Scalar, dfg::EdwardsPoint)>, ()> {
-let interim = self.interim.as_ref().unwrap();
+let interim = self.interim.as_ref().expect("verify_share called before sign_share");

 // For a share `r - p x`, the following two equalities should hold:
 // - `(r - p x)G == R.0 - pV`, where `V = xG`
@@ -135,35 +135,13 @@ impl SimpleRequestRpc {
 };

 async fn body_from_response(response: Response<'_>) -> Result<Vec<u8>, RpcError> {
-/*
-let length = usize::try_from(
-response
-.headers()
-.get("content-length")
-.ok_or(RpcError::InvalidNode("no content-length header"))?
-.to_str()
-.map_err(|_| RpcError::InvalidNode("non-ascii content-length value"))?
-.parse::<u32>()
-.map_err(|_| RpcError::InvalidNode("non-u32 content-length value"))?,
-)
-.unwrap();
-// Only pre-allocate 1 MB so a malicious node which claims a content-length of 1 GB actually
-// has to send 1 GB of data to cause a 1 GB allocation
-let mut res = Vec::with_capacity(length.max(1024 * 1024));
-let mut body = response.into_body();
-while res.len() < length {
-let Some(data) = body.data().await else { break };
-res.extend(data.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?.as_ref());
-}
-*/
-
 let mut res = Vec::with_capacity(128);
 response
 .body()
 .await
 .map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?
 .read_to_end(&mut res)
-.unwrap();
+.map_err(|e| RpcError::ConnectionError(format!("{e:?}")))?;
 Ok(res)
 }
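The comment block deleted above described capping the up-front allocation so a node advertising a huge content-length can't force a huge allocation. A hedged sketch of that idea over a plain `Read` source (this is not the crate's actual HTTP plumbing, and the cap value is only illustrative):

```rust
use std::io::{self, Read};

// Trust the claimed length only up to a 1 MiB cap when pre-allocating; a peer
// claiming 1 GiB must actually send the data before we ever hold 1 GiB.
fn read_body_capped(mut body: impl Read, claimed_len: usize) -> io::Result<Vec<u8>> {
  const CAP: usize = 1024 * 1024;
  let mut res = Vec::with_capacity(claimed_len.min(CAP));
  body.read_to_end(&mut res)?;
  Ok(res)
}

fn main() -> io::Result<()> {
  let body: &[u8] = b"hello";
  let res = read_body_capped(body, 1 << 30)?; // claims 1 GiB, sends 5 bytes
  assert_eq!(res, b"hello");
  Ok(())
}
```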
@@ -219,7 +197,12 @@ impl SimpleRequestRpc {
 })?
 .to_header_string(),
 )
-.unwrap(),
+.map_err(|_| {
+RpcError::InternalError(
+"digest-auth challenge response wasn't a valid string for an HTTP header"
+.to_string(),
+)
+})?,
 );
 }

@@ -269,7 +252,7 @@ impl SimpleRequestRpc {
 ))?
 }
 } else {
-body_from_response(response.unwrap()).await?
+body_from_response(response.expect("no response yet also no error?")).await?
 }
 }
 });
@@ -121,7 +121,7 @@ impl FeeRate {
 /// defined serialization.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(16);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -139,15 +139,22 @@ impl FeeRate {
 ///
 /// This function may panic upon overflow.
 pub fn calculate_fee_from_weight(&self, weight: usize) -> u64 {
-let fee = self.per_weight * u64::try_from(weight).unwrap();
+let fee =
+self.per_weight * u64::try_from(weight).expect("couldn't convert weight (usize) to u64");
 let fee = fee.div_ceil(self.mask) * self.mask;
-debug_assert_eq!(weight, self.calculate_weight_from_fee(fee), "Miscalculated weight from fee");
+debug_assert_eq!(
+Some(weight),
+self.calculate_weight_from_fee(fee),
+"Miscalculated weight from fee"
+);
 fee
 }

 /// Calculate the weight from the fee.
-pub fn calculate_weight_from_fee(&self, fee: u64) -> usize {
-usize::try_from(fee / self.per_weight).unwrap()
+///
+/// Returns `None` if the weight would not fit within a `usize`.
+pub fn calculate_weight_from_fee(&self, fee: u64) -> Option<usize> {
+usize::try_from(fee / self.per_weight).ok()
 }
 }
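The fee math this hunk touches multiplies the fee rate by the weight and then rounds the result up to a multiple of the quantization mask, which is why `calculate_weight_from_fee` can only approximately invert it. A standalone sketch with illustrative values (these are not Monero's actual fee constants):

```rust
fn calculate_fee_from_weight(per_weight: u64, mask: u64, weight: u64) -> u64 {
  let fee = per_weight * weight;
  // Round up to the next multiple of the quantization mask
  fee.div_ceil(mask) * mask
}

fn main() {
  // 3 per weight unit, quantized to multiples of 10_000
  let fee = calculate_fee_from_weight(3, 10_000, 1_537);
  assert_eq!(fee, 10_000); // 3 * 1537 = 4611, rounded up to 10_000
  println!("fee: {fee}");
}
```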
@@ -272,8 +279,14 @@ pub trait Rpc: Sync + Clone {
 let res = self
 .post(
 route,
-if let Some(params) = params {
-serde_json::to_string(&params).unwrap().into_bytes()
+if let Some(params) = params.as_ref() {
+serde_json::to_string(params)
+.map_err(|e| {
+RpcError::InternalError(format!(
+"couldn't convert parameters ({params:?}) to JSON: {e:?}"
+))
+})?
+.into_bytes()
 } else {
 vec![]
 },

@@ -295,7 +308,10 @@ pub trait Rpc: Sync + Clone {
 async move {
 let mut req = json!({ "method": method });
 if let Some(params) = params {
-req.as_object_mut().unwrap().insert("params".into(), params);
+req
+.as_object_mut()
+.expect("accessing object as object failed?")
+.insert("params".into(), params);
 }
 Ok(self.rpc_call::<_, JsonRpcResponse<Response>>("json_rpc", Some(req)).await?.result)
 }
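For the JSON-RPC change above: `json!({ ... })` with a map literal always produces a JSON object, which is why `as_object_mut` can be expected to succeed before inserting the `params` field. A minimal sketch using serde_json (the method name and params here are made up):

```rust
use serde_json::json;

fn main() {
  let mut req = json!({ "method": "get_block_count" });
  // An object literal is always Value::Object, so as_object_mut() is Some here
  req
    .as_object_mut()
    .expect("object literal wasn't an object")
    .insert("params".into(), json!({ "height": 3 }));
  println!("{req}");
}
```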
@@ -51,7 +51,7 @@ impl BlockHeader {
 /// Serialize the BlockHeader to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut serialized = vec![];
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }

@@ -111,7 +111,7 @@ impl Block {
 /// Serialize the Block to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut serialized = vec![];
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }

@@ -122,7 +122,13 @@ impl Block {
 pub fn serialize_pow_hash(&self) -> Vec<u8> {
 let mut blob = self.header.serialize();
 blob.extend_from_slice(&merkle_root(self.miner_transaction.hash(), &self.transactions));
-write_varint(&(1 + u64::try_from(self.transactions.len()).unwrap()), &mut blob).unwrap();
+write_varint(
+&(1 +
+u64::try_from(self.transactions.len())
+.expect("amount of transactions in block exceeded u64::MAX")),
+&mut blob,
+)
+.expect("write failed but <Vec as io::Write> doesn't fail");
 blob
 }

@@ -132,7 +138,11 @@ impl Block {
 // Monero pre-appends a VarInt of the block-to-hash'ss length before getting the block hash,
 // but doesn't do this when getting the proof of work hash :)
 let mut hashing_blob = Vec::with_capacity(9 + hashable.len());
-write_varint(&u64::try_from(hashable.len()).unwrap(), &mut hashing_blob).unwrap();
+write_varint(
+&u64::try_from(hashable.len()).expect("length of block hash's preimage exceeded u64::MAX"),
+&mut hashing_blob,
+)
+.expect("write failed but <Vec as io::Write> doesn't fail");
 hashing_blob.append(&mut hashable);

 let hash = keccak256(hashing_blob);
@@ -28,7 +28,7 @@ pub(crate) fn merkle_root(root: [u8; 32], leafs: &[[u8; 32]]) -> [u8; 32] {

 let mut paired_hashes = Vec::with_capacity(overage);
 while let Some(left) = rightmost.next() {
-let right = rightmost.next().unwrap();
+let right = rightmost.next().expect("rightmost is of even length");
 paired_hashes.push(keccak256([left.as_ref(), &right].concat()));
 }
 drop(rightmost);
@@ -326,7 +326,9 @@ impl RctPrunable {
 /// Serialize the RctPrunable to a `Vec<u8>`.
 pub fn serialize(&self, rct_type: RctType) -> Vec<u8> {
 let mut serialized = vec![];
-self.write(&mut serialized, rct_type).unwrap();
+self
+.write(&mut serialized, rct_type)
+.expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }

@@ -441,7 +443,7 @@ impl RctProofs {
 /// Serialize the RctProofs to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut serialized = vec![];
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }
@@ -53,7 +53,7 @@ impl Input {
 /// Serialize the Input to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = vec![];
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -106,7 +106,7 @@ impl Output {
 /// Write the Output to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(8 + 1 + 32);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -163,7 +163,7 @@ impl Timelock {
 /// Serialize the Timelock to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(1);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }
@@ -259,8 +259,8 @@ impl TransactionPrefix {

 fn hash(&self, version: u64) -> [u8; 32] {
 let mut buf = vec![];
-write_varint(&version, &mut buf).unwrap();
-self.write(&mut buf).unwrap();
+write_varint(&version, &mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
+self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
 keccak256(buf)
 }
 }

@@ -451,7 +451,7 @@ impl<P: PotentiallyPruned> Transaction<P> {
 /// Write the Transaction to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(2048);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -493,15 +493,16 @@ impl<P: PotentiallyPruned> Transaction<P> {
 let mut buf = Vec::with_capacity(512);

 // We don't use `self.write` as that may write the signatures (if this isn't pruned)
-write_varint(&self.version(), &mut buf).unwrap();
-prefix.write(&mut buf).unwrap();
+write_varint(&self.version(), &mut buf)
+.expect("write failed but <Vec as io::Write> doesn't fail");
+prefix.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");

 // We explicitly write the signatures ourselves here
 let PrunableHash::V1(signatures) = prunable else {
 panic!("hashing v1 TX with non-v1 prunable data")
 };
 for signature in signatures {
-signature.write(&mut buf).unwrap();
+signature.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
 }

 keccak256(buf)

@@ -513,7 +514,10 @@ impl<P: PotentiallyPruned> Transaction<P> {

 if let Some(proofs) = proofs {
 let mut buf = Vec::with_capacity(512);
-proofs.base().write(&mut buf, proofs.rct_type()).unwrap();
+proofs
+.base()
+.write(&mut buf, proofs.rct_type())
+.expect("write failed but <Vec as io::Write> doesn't fail");
 hashes.extend(keccak256(&buf));
 } else {
 // Serialization of RctBase::Null

@@ -540,7 +544,10 @@ impl Transaction<NotPruned> {
 Transaction::V2 { proofs, .. } => {
 self.hash_with_prunable_hash(PrunableHash::V2(if let Some(proofs) = proofs {
 let mut buf = Vec::with_capacity(1024);
-proofs.prunable.write(&mut buf, proofs.rct_type()).unwrap();
+proofs
+.prunable
+.write(&mut buf, proofs.rct_type())
+.expect("write failed but <Vec as io::Write> doesn't fail");
 keccak256(buf)
 } else {
 [0; 32]

@@ -563,7 +570,10 @@ impl Transaction<NotPruned> {
 Transaction::V2 { proofs, .. } => self.hash_with_prunable_hash({
 let Some(proofs) = proofs else { None? };
 let mut buf = Vec::with_capacity(1024);
-proofs.prunable.signature_write(&mut buf).unwrap();
+proofs
+.prunable
+.signature_write(&mut buf)
+.expect("write failed but <Vec as io::Write> doesn't fail");
 PrunableHash::V2(keccak256(buf))
 }),
 })
@@ -76,8 +76,10 @@ pub(crate) fn decode(data: &str) -> Option<Vec<u8>> {
 break;
 }
 }
+let used_bytes = used_bytes
+.expect("chunk of bounded length exhaustively searched but couldn't find matching length");
 // Only push on the used bytes
-res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]);
+res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]);
 }

 Some(res)
@@ -357,21 +357,21 @@ pub struct Address<const ADDRESS_BYTES: u128> {

 impl<const ADDRESS_BYTES: u128> fmt::Debug for Address<ADDRESS_BYTES> {
 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
-let hex = |bytes: &[u8]| -> String {
+let hex = |bytes: &[u8]| -> Result<String, fmt::Error> {
 let mut res = String::with_capacity(2 + (2 * bytes.len()));
 res.push_str("0x");
 for b in bytes {
-write!(&mut res, "{b:02x}").unwrap();
+write!(&mut res, "{b:02x}")?;
 }
-res
+Ok(res)
 };

 fmt
 .debug_struct("Address")
 .field("network", &self.network)
 .field("kind", &self.kind)
-.field("spend", &hex(&self.spend.compress().to_bytes()))
-.field("view", &hex(&self.view.compress().to_bytes()))
+.field("spend", &hex(&self.spend.compress().to_bytes())?)
+.field("view", &hex(&self.view.compress().to_bytes())?)
 // This is not a real field yet is the most valuable thing to know when debugging
 .field("(address)", &self.to_string())
 .finish()

@@ -389,7 +389,8 @@ impl<const ADDRESS_BYTES: u128> fmt::Display for Address<ADDRESS_BYTES> {
 if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind {
 let features_uint =
 (u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress);
-write_varint(&features_uint, &mut data).unwrap();
+write_varint(&features_uint, &mut data)
+.expect("write failed but <Vec as io::Write> doesn't fail");
 }
 if let Some(id) = self.kind.payment_id() {
 data.extend(id);
@@ -17,7 +17,7 @@ use crate::{
 WalletOutput,
 };

-const RECENT_WINDOW: usize = 15;
+const RECENT_WINDOW: u64 = 15;
 const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
 #[allow(clippy::cast_precision_loss)]
 const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;

@@ -27,7 +27,7 @@ async fn select_n(
 rpc: &impl DecoyRpc,
 height: usize,
 real_output: u64,
-ring_len: usize,
+ring_len: u8,
 fingerprintable_deterministic: bool,
 ) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
 if height < DEFAULT_LOCK_WINDOW {

@@ -48,8 +48,9 @@ async fn select_n(
 // This assumes that each miner TX had one output (as sane) and checks we have sufficient
 // outputs even when excluding them (due to their own timelock requirements)
 // Considering this a temporal error for very new chains, it's sufficiently sane to have
-if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) <
-u64::try_from(ring_len).unwrap()
+if highest_output_exclusive_bound.saturating_sub(
+u64::try_from(COINBASE_LOCK_WINDOW).expect("coinbase lock window exceeds 2^{64}"),
+) < u64::from(ring_len)
 {
 Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
 }

@@ -67,7 +68,7 @@ async fn select_n(
 let mut do_not_select = HashSet::new();
 do_not_select.insert(real_output);

-let decoy_count = ring_len - 1;
+let decoy_count = usize::from(ring_len - 1);
 let mut res = Vec::with_capacity(decoy_count);

 let mut iters = 0;

@@ -87,8 +88,9 @@ async fn select_n(
 // We check both that we aren't at the maximum amount of iterations and that the not-yet
 // selected candidates exceed the amount of candidates necessary to trigger the next iteration
 if (iters == MAX_ITERS) ||
-((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) <
-u64::try_from(ring_len).unwrap())
+((highest_output_exclusive_bound -
+u64::try_from(do_not_select.len()).expect("amount of ignored decoys exceeds 2^{64}")) <
+u64::from(ring_len))
 {
 Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?;
 }

@@ -99,13 +101,18 @@ async fn select_n(
 // Use a gamma distribution, as Monero does
 // https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
 // /src/wallet/wallet2.cpp#L142-L143
-let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
+let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61)
+.expect("constant Gamma distribution could no longer be created")
+.sample(rng)
+.exp();
 #[allow(clippy::cast_precision_loss)]
 if age > TIP_APPLICATION {
 age -= TIP_APPLICATION;
 } else {
 // f64 does not have try_from available, which is why these are written with `as`
-age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
+age = (rng.next_u64() %
+(RECENT_WINDOW * u64::try_from(BLOCK_TIME).expect("BLOCK_TIME exceeded u64::MAX")))
+as f64;
 }

 #[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
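The decoy-age sampling rewrapped above draws from a gamma distribution and exponentiates the sample, matching wallet2's constants. A hedged sketch using the rand and rand_distr crates (the crate setup is an assumption; the shape and scale constants are the ones in the hunk):

```rust
use rand::rngs::OsRng;
use rand_distr::{Distribution, Gamma};

fn main() {
  // Shape 19.28, scale 1/1.61, then exp() of the sample, as in wallet2
  let gamma = Gamma::<f64>::new(19.28, 1.0 / 1.61).expect("constant shape/scale are valid");
  let age_seconds = gamma.sample(&mut OsRng).exp();
  println!("sampled decoy age: {age_seconds:.0} seconds");
}
```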
@@ -135,7 +142,11 @@ async fn select_n(
 candidates.push(real_output);
 // Sort candidates so the real spends aren't the ones at the end
 candidates.sort();
-Some(candidates.binary_search(&real_output).unwrap())
+Some(
+candidates
+.binary_search(&real_output)
+.expect("selected a ring which didn't include the real spend"),
+)
 } else {
 None
 };

@@ -169,11 +180,15 @@ async fn select_n(
 async fn select_decoys<R: RngCore + CryptoRng>(
 rng: &mut R,
 rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
 height: usize,
 input: &WalletOutput,
 fingerprintable_deterministic: bool,
 ) -> Result<Decoys, RpcError> {
+if ring_len == 0 {
+Err(RpcError::InternalError("requesting a ring of length 0".to_string()))?;
+}
+
 // Select all decoys for this transaction, assuming we generate a sane transaction
 // We should almost never naturally generate an insane transaction, hence why this doesn't
 // bother with an overage

@@ -215,10 +230,13 @@ async fn select_decoys<R: RngCore + CryptoRng>(
 Decoys::new(
 offsets,
 // Binary searches for the real spend since we don't know where it sorted to
-u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(),
+// TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
+// with types
+u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain))
+.expect("ring of size <= u8::MAX had an index exceeding u8::MAX"),
 ring.into_iter().map(|output| output.1).collect(),
 )
-.unwrap(),
+.expect("selected a syntactically-invalid set of Decoys"),
 )
 }
@@ -234,7 +252,7 @@ impl OutputWithDecoys {
 pub async fn new(
 rng: &mut (impl Send + Sync + RngCore + CryptoRng),
 rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
 height: usize,
 output: WalletOutput,
 ) -> Result<OutputWithDecoys, RpcError> {

@@ -253,7 +271,7 @@ impl OutputWithDecoys {
 pub async fn fingerprintable_deterministic_new(
 rng: &mut (impl Send + Sync + RngCore + CryptoRng),
 rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
 height: usize,
 output: WalletOutput,
 ) -> Result<OutputWithDecoys, RpcError> {

@@ -297,7 +315,7 @@ impl OutputWithDecoys {
 /// defined serialization.
 pub fn serialize(&self) -> Vec<u8> {
 let mut serialized = Vec::with_capacity(128);
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
 serialized
 }
@@ -67,7 +67,7 @@ impl PaymentId {
 /// Serialize the PaymentId to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(1 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -100,7 +100,7 @@ pub enum ExtraField {
 ///
 /// This is used within miner transactions who are merge-mining Monero to specify the foreign
 /// block they mined.
-MergeMining(usize, [u8; 32]),
+MergeMining(u64, [u8; 32]),
 /// The additional transaction keys.
 ///
 /// These are the per-output commitments to the randomness used for deriving outputs.

@@ -132,7 +132,7 @@ impl ExtraField {
 }
 ExtraField::MergeMining(height, merkle) => {
 w.write_all(&[3])?;
-write_varint(&u64::try_from(*height).unwrap(), w)?;
+write_varint(height, w)?;
 w.write_all(merkle)?;
 }
 ExtraField::PublicKeys(keys) => {

@@ -150,7 +150,7 @@ impl ExtraField {
 /// Serialize the ExtraField to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut res = Vec::with_capacity(1 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
 res
 }

@@ -280,7 +280,7 @@ impl Extra {
 /// Serialize the Extra to a `Vec<u8>`.
 pub fn serialize(&self) -> Vec<u8> {
 let mut buf = vec![];
-self.write(&mut buf).unwrap();
+self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
 buf
 }
@@ -61,7 +61,7 @@ impl SharedKeyDerivations {
       // If Gen, this should be the only input, making this loop somewhat pointless
       // This works and even if there were somehow multiple inputs, it'd be a false negative
       Input::Gen(height) => {
-        write_varint(height, &mut u).unwrap();
+        write_varint(height, &mut u).expect("write failed but <Vec as io::Write> doesn't fail");
       }
       Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
     }
@@ -83,7 +83,8 @@ impl SharedKeyDerivations {
     // || o
     {
       let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
-      write_varint(&o, output_derivation).unwrap();
+      write_varint(&o, output_derivation)
+        .expect("write failed but <Vec as io::Write> doesn't fail");
     }

     let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
@@ -145,7 +146,11 @@ impl SharedKeyDerivations {
     let amount_scalar = Scalar::from_bytes_mod_order(*amount) - amount_shared_sec_scalar;

     // d2b from rctTypes.cpp
-    let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
+    let amount = u64::from_le_bytes(
+      amount_scalar.to_bytes()[.. 8]
+        .try_into()
+        .expect("32-byte array couldn't have an 8-byte slice taken"),
+    );

     Commitment::new(mask, amount)
   }
@@ -18,7 +18,7 @@ use crate::{
 #[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
 pub(crate) struct AbsoluteId {
   pub(crate) transaction: [u8; 32],
-  pub(crate) index_in_transaction: u32,
+  pub(crate) index_in_transaction: u64,
 }

 impl core::fmt::Debug for AbsoluteId {
@@ -46,7 +46,7 @@ impl AbsoluteId {
   /// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
   /// defined serialization.
   fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
-    Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? })
+    Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u64(r)? })
   }
 }

@@ -128,11 +128,11 @@ impl OutputData {
     self.commitment.write(w)
   }

-  /*
+  /* Commented as it's unused, due to self being private
   /// Serialize the OutputData to a `Vec<u8>`.
   pub fn serialize(&self) -> Vec<u8> {
     let mut res = Vec::with_capacity(32 + 32 + 40);
-    self.write(&mut res).unwrap();
+    self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
     res
   }
   */
@@ -194,9 +194,17 @@ impl Metadata {
       w.write_all(&[0])?;
     }

-    w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
+    w.write_all(
+      &u64::try_from(self.arbitrary_data.len())
+        .expect("amount of arbitrary data chunks exceeded u64::MAX")
+        .to_le_bytes(),
+    )?;
     for part in &self.arbitrary_data {
-      w.write_all(&[u8::try_from(part.len()).unwrap()])?;
+      // TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
+      // with types
+      w.write_all(&[
+        u8::try_from(part.len()).expect("piece of arbitrary data exceeded max length of u8::MAX")
+      ])?;
       w.write_all(part)?;
     }
     Ok(())
@@ -224,7 +232,7 @@ impl Metadata {
       payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
       arbitrary_data: {
         let mut data = vec![];
-        for _ in 0 .. read_u32(r)? {
+        for _ in 0 .. read_u64(r)? {
           let len = read_byte(r)?;
           data.push(read_raw_vec(read_byte, usize::from(len), r)?);
         }
@@ -260,7 +268,7 @@ impl WalletOutput {
   }

   /// The index of the output within the transaction.
-  pub fn index_in_transaction(&self) -> u32 {
+  pub fn index_in_transaction(&self) -> u64 {
     self.absolute_id.index_in_transaction
   }

@@ -349,7 +357,7 @@ impl WalletOutput {
   /// defined serialization.
   pub fn serialize(&self) -> Vec<u8> {
     let mut serialized = Vec::with_capacity(128);
-    self.write(&mut serialized).unwrap();
+    self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
     serialized
   }

@@ -228,14 +228,11 @@ impl InternalScanner {
       // Decrypt the payment ID
       let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));

+      let o = u64::try_from(o).expect("couldn't convert output index (usize) to u64");
+
       res.push(WalletOutput {
-        absolute_id: AbsoluteId {
-          transaction: tx_hash,
-          index_in_transaction: o.try_into().unwrap(),
-        },
-        relative_id: RelativeId {
-          index_on_blockchain: output_index_for_first_ringct_output + u64::try_from(o).unwrap(),
-        },
+        absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o },
+        relative_id: RelativeId { index_on_blockchain: output_index_for_first_ringct_output + o },
         data: OutputData { key: output_key, key_offset, commitment },
         metadata: Metadata {
           additional_timelock: tx.prefix().additional_timelock,
@@ -295,7 +292,8 @@ impl InternalScanner {

       // Update the RingCT starting index for the next TX
       if matches!(tx, Transaction::V2 { .. }) {
-        output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()).unwrap()
+        output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len())
+          .expect("couldn't convert amount of outputs (usize) to u64")
       }
     }

@@ -446,7 +446,7 @@ impl SignableTransaction {
   /// defined serialization.
   pub fn serialize(&self) -> Vec<u8> {
     let mut buf = Vec::with_capacity(256);
-    self.write(&mut buf).unwrap();
+    self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
     buf
   }

@@ -553,8 +553,12 @@ impl SignableTransaction {
     let mut tx = tx.transaction_without_signatures();

     // Sign the CLSAGs
-    let clsags_and_pseudo_outs =
-      Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap())
+    let clsags_and_pseudo_outs = Clsag::sign(
+      rng,
+      clsag_signs,
+      mask_sum,
+      tx.signature_hash().expect("signing a transaction which isn't signed?"),
+    )
     .map_err(SendError::ClsagError)?;

     // Fill in the CLSAGs/pseudo-outs
@@ -251,7 +251,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
     }

     let tx = tx.transaction_without_signatures();
-    let msg = tx.signature_hash().unwrap();
+    let msg = tx.signature_hash().expect("signing a transaction which isn't signed?");

     // Iterate over each CLSAG calling sign
     let mut shares = Vec::with_capacity(to_sign.len());
@@ -73,7 +73,9 @@ impl SignableTransaction {
       {
         let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
         let mut id_vec = Vec::with_capacity(1 + 8);
-        PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
+        PaymentId::Encrypted(id)
+          .write(&mut id_vec)
+          .expect("write failed but <Vec as io::Write> doesn't fail");
         extra.push_nonce(id_vec);
       } else {
         /*
@@ -96,7 +98,9 @@ impl SignableTransaction {
         .expect("multiple change outputs?");
       let mut id_vec = Vec::with_capacity(1 + 8);
       // The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
-      PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
+      PaymentId::Encrypted(*payment_id_xor)
+        .write(&mut id_vec)
+        .expect("write failed but <Vec as io::Write> doesn't fail");
       extra.push_nonce(id_vec);
     }
   }
@@ -109,7 +113,7 @@ impl SignableTransaction {
     }

     let mut serialized = Vec::with_capacity(32 * amount_of_keys);
-    extra.write(&mut serialized).unwrap();
+    extra.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
     serialized
   }

@@ -180,7 +184,8 @@ impl SignableTransaction {
       push_scalar(&mut bp);
     }
     for _ in 0 .. 2 {
-      write_varint(&lr_len, &mut bp).unwrap();
+      write_varint(&lr_len, &mut bp)
+        .expect("write failed but <Vec as io::Write> doesn't fail");
       for _ in 0 .. lr_len {
         push_point(&mut bp);
       }
@@ -204,7 +209,8 @@ impl SignableTransaction {
       push_scalar(&mut bp);
     }
     for _ in 0 .. 2 {
-      write_varint(&lr_len, &mut bp).unwrap();
+      write_varint(&lr_len, &mut bp)
+        .expect("write failed but <Vec as io::Write> doesn't fail");
       for _ in 0 .. lr_len {
         push_point(&mut bp);
       }
@@ -261,7 +267,8 @@ impl SignableTransaction {
           break;
         }
       }
-      weight_and_fee.unwrap()
+      weight_and_fee
+        .expect("length of highest possible fee was greater than highest possible fee length")
     }
   }

@@ -21,7 +21,9 @@ fn seeded_rng(
   mut input_keys: Vec<EdwardsPoint>,
 ) -> ChaCha20Rng {
   // Apply the DST
-  let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
+  let mut transcript = Zeroizing::new(vec![
+    u8::try_from(dst.len()).expect("internal RNG with constant DST had a too-long DST specified")
+  ]);
   transcript.extend(dst);

   // Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript
@@ -116,12 +118,12 @@ impl SignableTransaction {
   fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
     let mut tx_keys = TransactionKeys::new(&self.outgoing_view_key, self.input_keys());

-    let tx_key = tx_keys.next().unwrap();
+    let tx_key = tx_keys.next().expect("TransactionKeys (never-ending) was exhausted");

     let mut additional_keys = vec![];
     if self.should_use_additional_keys() {
       for _ in 0 .. self.payments.len() {
-        additional_keys.push(tx_keys.next().unwrap());
+        additional_keys.push(tx_keys.next().expect("TransactionKeys (never-ending) was exhausted"));
       }
     }
     (tx_key, additional_keys)
@@ -21,7 +21,7 @@ use monero_wallet::{
 mod builder;
 pub use builder::SignableTransactionBuilder;

-pub fn ring_len(rct_type: RctType) -> usize {
+pub fn ring_len(rct_type: RctType) -> u8 {
   match rct_type {
     RctType::ClsagBulletproof => 11,
     RctType::ClsagBulletproofPlus => 16,
@@ -118,7 +118,7 @@ pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) {
   let fee = proofs.base.fee;

   let weight = tx.weight();
-  let expected_weight = fee_rate.calculate_weight_from_fee(fee);
+  let expected_weight = fee_rate.calculate_weight_from_fee(fee).unwrap();
   assert_eq!(weight, expected_weight);

   let expected_fee = fee_rate.calculate_fee_from_weight(weight);