mirror of
https://github.com/serai-dex/serai.git
synced 2025-12-08 12:19:24 +00:00
Populate UnbalancedMerkleTrees in headers
This commit is contained in:
@@ -64,84 +64,6 @@ impl UnbalancedMerkleTree {
|
||||
}
|
||||
Self { root: current[0] }
|
||||
}
|
||||
|
||||
  /// Calculate the Merkle tree root for a list of hashes, passed in as their SCALE encoding.
  ///
  /// This method does not perform any allocations and is quite optimized. It is intended to be
  /// called from within the Substrate runtime, a resource-constrained environment. It does take in
  /// an owned Vec, despite solely using it as a mutable slice, due to the trashing of its content.
  ///
  /// Please see the documentation of `UnbalancedMerkleTree` and `UnbalancedMerkleTree::new` for
  /// context on structure.
  ///
  /// A SCALE encoding will be length-prefixed with a Compact number per
  /// https://docs.polkadot.com/polkadot-protocol/basics/data-encoding/#data-types.
  #[doc(hidden)]
  pub fn from_scale_encoded_list_of_hashes(tag: u8, encoding: Vec<u8>) -> Self {
    let mut hashes = encoding;

    // Learn the length of the length prefix by skipping past the Compact and measuring how many
    // bytes that consumed.
    // NOTE(review): the `unwrap` assumes `encoding` is a well-formed SCALE encoding (a valid
    // Compact length prefix) — confirm all callers guarantee this, as a malformed input panics.
    let length_prefix_len = {
      let mut slice = hashes.as_slice();
      <scale::Compact<u32> as scale::Decode>::skip(&mut slice).unwrap();
      hashes.len() - slice.len()
    };

    // We calculate the hashes in-place to avoid redundant allocations
    let mut hashes = hashes.as_mut_slice();

    // Each iteration of this loop collapses one layer of the tree, shrinking `hashes` until at
    // most one 32-byte hash remains after the length prefix.
    let mut amount_of_hashes;
    while {
      amount_of_hashes = (hashes.len() - length_prefix_len) / 32;
      amount_of_hashes > 1
    } {
      let complete_pairs = amount_of_hashes / 2;
      for i in 0 .. complete_pairs {
        // We hash the i'th pair of 32-byte elements
        let hash = {
          // The starting position of these elements
          let start = length_prefix_len + ((2 * i) * 32);
          /*
            We write the tag to the byte before this pair starts.

            In the case of the first pair, this corrupts a byte of the length prefix.

            In the case of the nth pair, this corrupts the prior-hashed pair's second element.
            This is safe as it was already hashed and the data there won't be read again. While
            we do write, and later read, the carried hash outputs to this buffer, those will
            always be written to either a pair's first element or a (n * prior-)hashed pair's
            second element (where n > 2), never the immediately preceding pair's second element.
          */
          hashes[start - 1] = tag;
          // Hash the tag byte followed by the 64-byte pair.
          sp_core::blake2_256(&hashes[(start - 1) .. (start + 64)])
        };
        // We save this hash to the i'th position, compacting the next layer toward the front of
        // the buffer.
        {
          let start = length_prefix_len + (i * 32);
          hashes[start .. (start + 32)].copy_from_slice(hash.as_slice());
        }
      }

      let mut end_of_hashes_on_next_layer = length_prefix_len + (complete_pairs * 32);

      // If there was an odd hash which wasn't hashed on this layer, carry it
      // (copied via a stack buffer as the source and destination ranges may overlap).
      if (amount_of_hashes % 2) == 1 {
        let mut hash = [0xff; 32];
        hash.copy_from_slice(&hashes[(hashes.len() - 32) ..]);

        let start = end_of_hashes_on_next_layer;
        end_of_hashes_on_next_layer = start + 32;
        hashes[start .. end_of_hashes_on_next_layer].copy_from_slice(&hash);
      }

      // Shrink the working slice to just the next layer's hashes.
      hashes = &mut hashes[.. end_of_hashes_on_next_layer];
    }

    // If exactly one hash remains, it is the root. If the list was empty, the remaining slice
    // is not 32 bytes long, `try_into` fails, and the empty tree is returned.
    match hashes[length_prefix_len ..].try_into() {
      Ok(root) => Self { root },
      Err(_) => Self::EMPTY,
    }
  }
|
||||
}
|
||||
|
||||
/// An unbalanced Merkle tree which is incrementally created.
|
||||
@@ -177,6 +99,8 @@ impl IncrementalUnbalancedMerkleTree {
|
||||
/// Append a leaf to this merkle tree.
|
||||
///
|
||||
/// The conditions on this leaf are the same as defined by `UnbalancedMerkleTree::new`.
|
||||
///
|
||||
/// This will not calculate any hashes not necessary for the eventual root.
|
||||
pub fn append(&mut self, tag: u8, leaf: [u8; 32]) {
|
||||
self.branches.push((1, leaf));
|
||||
self.reduce(tag);
|
||||
|
||||
Reference in New Issue
Block a user