Response to usage of unwrap in non-test code

This commit replaces all usage of `unwrap` with `expect` within
`networks/monero`, clarifying why each risked panic is unreachable. It also
replaces some uses of `unwrap` with solutions which are guaranteed not to
fail.
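
As a sketch of the two approaches (illustrative only; these names aren't from the codebase), an `expect` documents the invariant which makes the panic unreachable, while an infallible conversion removes the panic path entirely:

```rust
// Hypothetical example of both patterns; `index` and `small` are placeholder names.
fn widen(index: usize, small: u32) -> (u64, u64) {
  // Documented invariant: the crate only compiles on targets where `usize` fits in `u64`.
  let a = u64::try_from(index).expect("usize wider than u64 despite target checks");
  // Guaranteed not to fail: `u32 -> u64` is an infallible widening conversion.
  let b = u64::from(small);
  (a, b)
}
```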

Notably, compilation on 128-bit targets is prevented, ensuring `usize` is never
wider than `u64` and `u64::try_from(usize::MAX)` will never panic at runtime.
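
One way such a compile-time guard can be expressed (a sketch of the general pattern, not necessarily the exact code in this commit):

```rust
// Refuse to compile on targets whose pointer width exceeds 64 bits, so every `usize`
// fits in a `u64` and `u64::try_from(some_usize)` can never fail.
#[cfg(not(any(
  target_pointer_width = "16",
  target_pointer_width = "32",
  target_pointer_width = "64",
)))]
compile_error!("this crate requires a target whose pointers are at most 64 bits wide");
```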

Slight breaking changes are additionally included as necessary to massage out
some avoidable panics.
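
For instance, several ring-length parameters below change from `usize` to `u8`; a condensed sketch of the effect (the function names here are illustrative, not the actual API):

```rust
// Before: the conversion could fail in theory and was unwrapped at runtime.
fn ring_len_to_u64_old(ring_len: usize) -> u64 {
  u64::try_from(ring_len).unwrap()
}

// After: `u8` bounds the value by type, and `u64::from(u8)` is infallible.
fn ring_len_to_u64_new(ring_len: u8) -> u64 {
  u64::from(ring_len)
}
```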
Author: Luke Parker
Date: 2025-08-08 21:28:47 -04:00
Parent: 4f65a0b147
Commit: a5f4c450c6
31 changed files with 310 additions and 169 deletions

View File

@@ -76,8 +76,10 @@ pub(crate) fn decode(data: &str) -> Option<Vec<u8>> {
break;
}
}
+let used_bytes = used_bytes
+.expect("chunk of bounded length exhaustively searched but couldn't find matching length");
// Only push on the used bytes
-res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes.unwrap()) ..]);
+res.extend(&sum.to_be_bytes()[(BLOCK_LEN - used_bytes) ..]);
}
Some(res)

View File

@@ -357,21 +357,21 @@ pub struct Address<const ADDRESS_BYTES: u128> {
impl<const ADDRESS_BYTES: u128> fmt::Debug for Address<ADDRESS_BYTES> {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
-let hex = |bytes: &[u8]| -> String {
+let hex = |bytes: &[u8]| -> Result<String, fmt::Error> {
let mut res = String::with_capacity(2 + (2 * bytes.len()));
res.push_str("0x");
for b in bytes {
-write!(&mut res, "{b:02x}").unwrap();
+write!(&mut res, "{b:02x}")?;
}
-res
+Ok(res)
};
fmt
.debug_struct("Address")
.field("network", &self.network)
.field("kind", &self.kind)
-.field("spend", &hex(&self.spend.compress().to_bytes()))
-.field("view", &hex(&self.view.compress().to_bytes()))
+.field("spend", &hex(&self.spend.compress().to_bytes())?)
+.field("view", &hex(&self.view.compress().to_bytes())?)
// This is not a real field yet is the most valuable thing to know when debugging
.field("(address)", &self.to_string())
.finish()
@@ -389,7 +389,8 @@ impl<const ADDRESS_BYTES: u128> fmt::Display for Address<ADDRESS_BYTES> {
if let AddressType::Featured { subaddress, payment_id, guaranteed } = self.kind {
let features_uint =
(u8::from(guaranteed) << 2) + (u8::from(payment_id.is_some()) << 1) + u8::from(subaddress);
-write_varint(&features_uint, &mut data).unwrap();
+write_varint(&features_uint, &mut data)
+.expect("write failed but <Vec as io::Write> doesn't fail");
}
if let Some(id) = self.kind.payment_id() {
data.extend(id);

View File

@@ -17,7 +17,7 @@ use crate::{
WalletOutput,
};
-const RECENT_WINDOW: usize = 15;
+const RECENT_WINDOW: u64 = 15;
const BLOCKS_PER_YEAR: usize = 365 * 24 * 60 * 60 / BLOCK_TIME;
#[allow(clippy::cast_precision_loss)]
const TIP_APPLICATION: f64 = (DEFAULT_LOCK_WINDOW * BLOCK_TIME) as f64;
@@ -27,7 +27,7 @@ async fn select_n(
rpc: &impl DecoyRpc,
height: usize,
real_output: u64,
-ring_len: usize,
+ring_len: u8,
fingerprintable_deterministic: bool,
) -> Result<Vec<(u64, [EdwardsPoint; 2])>, RpcError> {
if height < DEFAULT_LOCK_WINDOW {
@@ -48,8 +48,9 @@ async fn select_n(
// This assumes that each miner TX had one output (as sane) and checks we have sufficient
// outputs even when excluding them (due to their own timelock requirements)
// Considering this a temporal error for very new chains, it's sufficiently sane to have
-if highest_output_exclusive_bound.saturating_sub(u64::try_from(COINBASE_LOCK_WINDOW).unwrap()) <
-u64::try_from(ring_len).unwrap()
+if highest_output_exclusive_bound.saturating_sub(
+u64::try_from(COINBASE_LOCK_WINDOW).expect("coinbase lock window exceeds 2^{64}"),
+) < u64::from(ring_len)
{
Err(RpcError::InternalError("not enough decoy candidates".to_string()))?;
}
@@ -67,7 +68,7 @@ async fn select_n(
let mut do_not_select = HashSet::new();
do_not_select.insert(real_output);
-let decoy_count = ring_len - 1;
+let decoy_count = usize::from(ring_len - 1);
let mut res = Vec::with_capacity(decoy_count);
let mut iters = 0;
@@ -87,8 +88,9 @@ async fn select_n(
// We check both that we aren't at the maximum amount of iterations and that the not-yet
// selected candidates exceed the amount of candidates necessary to trigger the next iteration
if (iters == MAX_ITERS) ||
-((highest_output_exclusive_bound - u64::try_from(do_not_select.len()).unwrap()) <
-u64::try_from(ring_len).unwrap())
+((highest_output_exclusive_bound -
+u64::try_from(do_not_select.len()).expect("amount of ignored decoys exceeds 2^{64}")) <
+u64::from(ring_len))
{
Err(RpcError::InternalError("hit decoy selection round limit".to_string()))?;
}
@@ -99,13 +101,18 @@ async fn select_n(
// Use a gamma distribution, as Monero does
// https://github.com/monero-project/monero/blob/cc73fe71162d564ffda8e549b79a350bca53c45
// /src/wallet/wallet2.cpp#L142-L143
-let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61).unwrap().sample(rng).exp();
+let mut age = Gamma::<f64>::new(19.28, 1.0 / 1.61)
+.expect("constant Gamma distribution could no longer be created")
+.sample(rng)
+.exp();
#[allow(clippy::cast_precision_loss)]
if age > TIP_APPLICATION {
age -= TIP_APPLICATION;
} else {
// f64 does not have try_from available, which is why these are written with `as`
-age = (rng.next_u64() % u64::try_from(RECENT_WINDOW * BLOCK_TIME).unwrap()) as f64;
+age = (rng.next_u64() %
+(RECENT_WINDOW * u64::try_from(BLOCK_TIME).expect("BLOCK_TIME exceeded u64::MAX")))
+as f64;
}
#[allow(clippy::cast_sign_loss, clippy::cast_possible_truncation)]
@@ -135,7 +142,11 @@ async fn select_n(
candidates.push(real_output);
// Sort candidates so the real spends aren't the ones at the end
candidates.sort();
-Some(candidates.binary_search(&real_output).unwrap())
+Some(
+candidates
+.binary_search(&real_output)
+.expect("selected a ring which didn't include the real spend"),
+)
} else {
None
};
@@ -169,11 +180,15 @@ async fn select_n(
async fn select_decoys<R: RngCore + CryptoRng>(
rng: &mut R,
rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
height: usize,
input: &WalletOutput,
fingerprintable_deterministic: bool,
) -> Result<Decoys, RpcError> {
+if ring_len == 0 {
+Err(RpcError::InternalError("requesting a ring of length 0".to_string()))?;
+}
// Select all decoys for this transaction, assuming we generate a sane transaction
// We should almost never naturally generate an insane transaction, hence why this doesn't
// bother with an overage
@@ -215,10 +230,13 @@ async fn select_decoys<R: RngCore + CryptoRng>(
Decoys::new(
offsets,
// Binary searches for the real spend since we don't know where it sorted to
-u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain)).unwrap(),
+// TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
+// with types
+u8::try_from(ring.partition_point(|x| x.0 < input.relative_id.index_on_blockchain))
+.expect("ring of size <= u8::MAX had an index exceeding u8::MAX"),
ring.into_iter().map(|output| output.1).collect(),
)
-.unwrap(),
+.expect("selected a syntactically-invalid set of Decoys"),
)
}
@@ -234,7 +252,7 @@ impl OutputWithDecoys {
pub async fn new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
@@ -253,7 +271,7 @@ impl OutputWithDecoys {
pub async fn fingerprintable_deterministic_new(
rng: &mut (impl Send + Sync + RngCore + CryptoRng),
rpc: &impl DecoyRpc,
-ring_len: usize,
+ring_len: u8,
height: usize,
output: WalletOutput,
) -> Result<OutputWithDecoys, RpcError> {
@@ -297,7 +315,7 @@ impl OutputWithDecoys {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
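
The TODO above suggests enforcing the `u8` length bound with types rather than a runtime conversion. A minimal sketch of what such a collection could look like (hypothetical; this type is not part of the commit):

```rust
/// A Vec-backed collection which never holds more than `u8::MAX` elements,
/// so its length is representable as a `u8` by construction.
pub struct BoundedVec<T>(Vec<T>);

impl<T> BoundedVec<T> {
  pub fn new() -> Self {
    BoundedVec(Vec::new())
  }

  /// Push an item, returning it back if the collection is already full.
  pub fn push(&mut self, item: T) -> Result<(), T> {
    if self.0.len() == usize::from(u8::MAX) {
      return Err(item);
    }
    self.0.push(item);
    Ok(())
  }

  /// The length, which cannot exceed `u8::MAX` by construction.
  pub fn len(&self) -> u8 {
    u8::try_from(self.0.len()).expect("BoundedVec exceeded u8::MAX elements")
  }
}
```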

View File

@@ -67,7 +67,7 @@ impl PaymentId {
/// Serialize the PaymentId to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -100,7 +100,7 @@ pub enum ExtraField {
///
/// This is used within miner transactions who are merge-mining Monero to specify the foreign
/// block they mined.
-MergeMining(usize, [u8; 32]),
+MergeMining(u64, [u8; 32]),
/// The additional transaction keys.
///
/// These are the per-output commitments to the randomness used for deriving outputs.
@@ -132,7 +132,7 @@ impl ExtraField {
}
ExtraField::MergeMining(height, merkle) => {
w.write_all(&[3])?;
-write_varint(&u64::try_from(*height).unwrap(), w)?;
+write_varint(height, w)?;
w.write_all(merkle)?;
}
ExtraField::PublicKeys(keys) => {
@@ -150,7 +150,7 @@ impl ExtraField {
/// Serialize the ExtraField to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(1 + 8);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
@@ -280,7 +280,7 @@ impl Extra {
/// Serialize the Extra to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = vec![];
-self.write(&mut buf).unwrap();
+self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
buf
}

View File

@@ -61,7 +61,7 @@ impl SharedKeyDerivations {
// If Gen, this should be the only input, making this loop somewhat pointless
// This works and even if there were somehow multiple inputs, it'd be a false negative
Input::Gen(height) => {
-write_varint(height, &mut u).unwrap();
+write_varint(height, &mut u).expect("write failed but <Vec as io::Write> doesn't fail");
}
Input::ToKey { key_image, .. } => u.extend(key_image.compress().to_bytes()),
}
@@ -83,7 +83,8 @@ impl SharedKeyDerivations {
// || o
{
let output_derivation: &mut Vec<u8> = output_derivation.as_mut();
-write_varint(&o, output_derivation).unwrap();
+write_varint(&o, output_derivation)
+.expect("write failed but <Vec as io::Write> doesn't fail");
}
let view_tag = keccak256([b"view_tag".as_ref(), &output_derivation].concat())[0];
@@ -145,7 +146,11 @@ impl SharedKeyDerivations {
let amount_scalar = Scalar::from_bytes_mod_order(*amount) - amount_shared_sec_scalar;
// d2b from rctTypes.cpp
-let amount = u64::from_le_bytes(amount_scalar.to_bytes()[0 .. 8].try_into().unwrap());
+let amount = u64::from_le_bytes(
+amount_scalar.to_bytes()[.. 8]
+.try_into()
+.expect("32-byte array couldn't have an 8-byte slice taken"),
+);
Commitment::new(mask, amount)
}

View File

@@ -18,7 +18,7 @@ use crate::{
#[derive(Clone, PartialEq, Eq, Zeroize, ZeroizeOnDrop)]
pub(crate) struct AbsoluteId {
pub(crate) transaction: [u8; 32],
-pub(crate) index_in_transaction: u32,
+pub(crate) index_in_transaction: u64,
}
impl core::fmt::Debug for AbsoluteId {
@@ -46,7 +46,7 @@ impl AbsoluteId {
/// This is not a Monero protocol defined struct, and this is accordingly not a Monero protocol
/// defined serialization.
fn read<R: Read>(r: &mut R) -> io::Result<AbsoluteId> {
-Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u32(r)? })
+Ok(AbsoluteId { transaction: read_bytes(r)?, index_in_transaction: read_u64(r)? })
}
}
@@ -128,11 +128,11 @@ impl OutputData {
self.commitment.write(w)
}
-/*
+/* Commented as it's unused, due to self being private
/// Serialize the OutputData to a `Vec<u8>`.
pub fn serialize(&self) -> Vec<u8> {
let mut res = Vec::with_capacity(32 + 32 + 40);
-self.write(&mut res).unwrap();
+self.write(&mut res).expect("write failed but <Vec as io::Write> doesn't fail");
res
}
*/
@@ -194,9 +194,17 @@ impl Metadata {
w.write_all(&[0])?;
}
-w.write_all(&u32::try_from(self.arbitrary_data.len()).unwrap().to_le_bytes())?;
+w.write_all(
+&u64::try_from(self.arbitrary_data.len())
+.expect("amount of arbitrary data chunks exceeded u64::MAX")
+.to_le_bytes(),
+)?;
for part in &self.arbitrary_data {
-w.write_all(&[u8::try_from(part.len()).unwrap()])?;
+// TODO: Define our own collection whose `len` function returns `u8` to ensure this bound
+// with types
+w.write_all(&[
+u8::try_from(part.len()).expect("piece of arbitrary data exceeded max length of u8::MAX")
+])?;
w.write_all(part)?;
}
Ok(())
@@ -224,7 +232,7 @@ impl Metadata {
payment_id: if read_byte(r)? == 1 { PaymentId::read(r).ok() } else { None },
arbitrary_data: {
let mut data = vec![];
-for _ in 0 .. read_u32(r)? {
+for _ in 0 .. read_u64(r)? {
let len = read_byte(r)?;
data.push(read_raw_vec(read_byte, usize::from(len), r)?);
}
@@ -260,7 +268,7 @@ impl WalletOutput {
}
/// The index of the output within the transaction.
-pub fn index_in_transaction(&self) -> u32 {
+pub fn index_in_transaction(&self) -> u64 {
self.absolute_id.index_in_transaction
}
@@ -349,7 +357,7 @@ impl WalletOutput {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut serialized = Vec::with_capacity(128);
-self.write(&mut serialized).unwrap();
+self.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}

View File

@@ -228,14 +228,11 @@ impl InternalScanner {
// Decrypt the payment ID
let payment_id = payment_id.map(|id| id ^ SharedKeyDerivations::payment_id_xor(ecdh));
+let o = u64::try_from(o).expect("couldn't convert output index (usize) to u64");
res.push(WalletOutput {
-absolute_id: AbsoluteId {
-transaction: tx_hash,
-index_in_transaction: o.try_into().unwrap(),
-},
-relative_id: RelativeId {
-index_on_blockchain: output_index_for_first_ringct_output + u64::try_from(o).unwrap(),
-},
+absolute_id: AbsoluteId { transaction: tx_hash, index_in_transaction: o },
+relative_id: RelativeId { index_on_blockchain: output_index_for_first_ringct_output + o },
data: OutputData { key: output_key, key_offset, commitment },
metadata: Metadata {
additional_timelock: tx.prefix().additional_timelock,
@@ -295,7 +292,8 @@ impl InternalScanner {
// Update the RingCT starting index for the next TX
if matches!(tx, Transaction::V2 { .. }) {
-output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len()).unwrap()
+output_index_for_first_ringct_output += u64::try_from(tx.prefix().outputs.len())
+.expect("couldn't convert amount of outputs (usize) to u64")
}
}

View File

@@ -446,7 +446,7 @@ impl SignableTransaction {
/// defined serialization.
pub fn serialize(&self) -> Vec<u8> {
let mut buf = Vec::with_capacity(256);
-self.write(&mut buf).unwrap();
+self.write(&mut buf).expect("write failed but <Vec as io::Write> doesn't fail");
buf
}
@@ -553,9 +553,13 @@ impl SignableTransaction {
let mut tx = tx.transaction_without_signatures();
// Sign the CLSAGs
-let clsags_and_pseudo_outs =
-Clsag::sign(rng, clsag_signs, mask_sum, tx.signature_hash().unwrap())
-.map_err(SendError::ClsagError)?;
+let clsags_and_pseudo_outs = Clsag::sign(
+rng,
+clsag_signs,
+mask_sum,
+tx.signature_hash().expect("signing a transaction which isn't signed?"),
+)
+.map_err(SendError::ClsagError)?;
// Fill in the CLSAGs/pseudo-outs
let inputs_len = tx.prefix().inputs.len();

View File

@@ -251,7 +251,7 @@ impl SignMachine<Transaction> for TransactionSignMachine {
}
let tx = tx.transaction_without_signatures();
-let msg = tx.signature_hash().unwrap();
+let msg = tx.signature_hash().expect("signing a transaction which isn't signed?");
// Iterate over each CLSAG calling sign
let mut shares = Vec::with_capacity(to_sign.len());

View File

@@ -73,7 +73,9 @@ impl SignableTransaction {
{
let id = (u64::from_le_bytes(id) ^ u64::from_le_bytes(*id_xor)).to_le_bytes();
let mut id_vec = Vec::with_capacity(1 + 8);
-PaymentId::Encrypted(id).write(&mut id_vec).unwrap();
+PaymentId::Encrypted(id)
+.write(&mut id_vec)
+.expect("write failed but <Vec as io::Write> doesn't fail");
extra.push_nonce(id_vec);
} else {
/*
@@ -96,7 +98,9 @@ impl SignableTransaction {
.expect("multiple change outputs?");
let mut id_vec = Vec::with_capacity(1 + 8);
// The dummy payment ID is [0; 8], which when xor'd with the mask, is just the mask
-PaymentId::Encrypted(*payment_id_xor).write(&mut id_vec).unwrap();
+PaymentId::Encrypted(*payment_id_xor)
+.write(&mut id_vec)
+.expect("write failed but <Vec as io::Write> doesn't fail");
extra.push_nonce(id_vec);
}
}
@@ -109,7 +113,7 @@ impl SignableTransaction {
}
let mut serialized = Vec::with_capacity(32 * amount_of_keys);
-extra.write(&mut serialized).unwrap();
+extra.write(&mut serialized).expect("write failed but <Vec as io::Write> doesn't fail");
serialized
}
@@ -180,7 +184,8 @@ impl SignableTransaction {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
-write_varint(&lr_len, &mut bp).unwrap();
+write_varint(&lr_len, &mut bp)
+.expect("write failed but <Vec as io::Write> doesn't fail");
for _ in 0 .. lr_len {
push_point(&mut bp);
}
@@ -204,7 +209,8 @@ impl SignableTransaction {
push_scalar(&mut bp);
}
for _ in 0 .. 2 {
-write_varint(&lr_len, &mut bp).unwrap();
+write_varint(&lr_len, &mut bp)
+.expect("write failed but <Vec as io::Write> doesn't fail");
for _ in 0 .. lr_len {
push_point(&mut bp);
}
@@ -261,7 +267,8 @@ impl SignableTransaction {
break;
}
}
-weight_and_fee.unwrap()
+weight_and_fee
+.expect("length of highest possible fee was greater than highest possible fee length")
}
}

View File

@@ -21,7 +21,9 @@ fn seeded_rng(
mut input_keys: Vec<EdwardsPoint>,
) -> ChaCha20Rng {
// Apply the DST
-let mut transcript = Zeroizing::new(vec![u8::try_from(dst.len()).unwrap()]);
+let mut transcript = Zeroizing::new(vec![
+u8::try_from(dst.len()).expect("internal RNG with constant DST had a too-long DST specified")
+]);
transcript.extend(dst);
// Bind to the outgoing view key to prevent foreign entities from rebuilding the transcript
@@ -116,12 +118,12 @@ impl SignableTransaction {
fn transaction_keys(&self) -> (Zeroizing<Scalar>, Vec<Zeroizing<Scalar>>) {
let mut tx_keys = TransactionKeys::new(&self.outgoing_view_key, self.input_keys());
-let tx_key = tx_keys.next().unwrap();
+let tx_key = tx_keys.next().expect("TransactionKeys (never-ending) was exhausted");
let mut additional_keys = vec![];
if self.should_use_additional_keys() {
for _ in 0 .. self.payments.len() {
-additional_keys.push(tx_keys.next().unwrap());
+additional_keys.push(tx_keys.next().expect("TransactionKeys (never-ending) was exhausted"));
}
}
(tx_key, additional_keys)

View File

@@ -21,7 +21,7 @@ use monero_wallet::{
mod builder;
pub use builder::SignableTransactionBuilder;
-pub fn ring_len(rct_type: RctType) -> usize {
+pub fn ring_len(rct_type: RctType) -> u8 {
match rct_type {
RctType::ClsagBulletproof => 11,
RctType::ClsagBulletproofPlus => 16,
@@ -118,7 +118,7 @@ pub fn check_weight_and_fee(tx: &Transaction, fee_rate: FeeRate) {
let fee = proofs.base.fee;
let weight = tx.weight();
-let expected_weight = fee_rate.calculate_weight_from_fee(fee);
+let expected_weight = fee_rate.calculate_weight_from_fee(fee).unwrap();
assert_eq!(weight, expected_weight);
let expected_fee = fee_rate.calculate_fee_from_weight(weight);