Thoroughly update versions and methodology

For hash-pinned dependencies, adds comments documenting the associated
versions.
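
As an illustration of the pattern only (the dependency name and both
placeholders below are hypothetical, not taken from this commit), a hash pin
with its accompanying version comment looks roughly like:

    # some-dependency v1.2.3
    some-dependency = { git = "<repository URL>", rev = "<full commit hash>" }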

Adds a pin to `slither-analyzer`, which was previously missing.

Updates to Monero 0.18.4.4.

`mimalloc` now has the correct option set when building for `musl`. A C++
compiler is no longer required in its Docker image.

The runtime's `Dockerfile` now symlinks a `libc.so` already present on the
image instead of creating one itself. It also builds the runtime within the
image, ensuring the build happens only once. The test which verifies the
methodology is reproducible has accordingly been updated to rebuild the image
entirely, rather than simply create containers from an existing image. This is
also more robust and arguably should have been done from the start.

The pin to the exact hash of the `patch-polkadot-sdk` repo in every
`Cargo.toml` has been removed. The lockfile already serves that role,
simplifying future updates.
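
For illustration, the change to each `Cargo.toml` has roughly this shape (the
crate name is hypothetical and the URL and hash are placeholders); `Cargo.lock`
continues to record the exact commit either way:

    # Before: every manifest pinned the exact commit
    sp-core = { git = "<patch-polkadot-sdk repository URL>", rev = "<commit hash>" }
    # After: only the repository is named; the lockfile still pins the commit
    sp-core = { git = "<patch-polkadot-sdk repository URL>" }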

The latest Rust nightly is adopted as well (superseding
https://github.com/serai-dex/serai/pull/697).

The `librocksdb-sys` patch is replaced with a `kvdb-rocksdb` patch, removing a
git dependency, thanks to https://github.com/paritytech/parity-common/pull/950.
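
As a minimal sketch of the resulting shape, assuming the replacement patch is a
path patch kept in-tree (the path below is hypothetical):

    [patch.crates-io]
    # Previously librocksdb-sys was patched via a git dependency; patching
    # kvdb-rocksdb instead requires no git dependency.
    kvdb-rocksdb = { path = "patches/kvdb-rocksdb" }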

Author: Luke Parker
Date: 2025-12-01 03:44:25 -05:00
Parent: 30ea9d9a06
Commit: 9a75f92864

59 changed files with 524 additions and 554 deletions


@@ -17,10 +17,5 @@ rustdoc-args = ["--cfg", "docsrs"]
 workspace = true
 
 [dependencies]
-rand_core = "0.6"
-hex = "0.4"
-dockertest = "0.5"
-serai-docker-tests = { path = "../docker" }
-tokio = { version = "1", features = ["time"] }
+rand_core = { version = "0.6", default-features = false, features = ["std"] }
+hex = { version = "0.4", default-features = false, features = ["std"] }


@@ -1,101 +1,81 @@
 #[test]
 pub fn reproducibly_builds() {
-  use std::{collections::HashSet, process::Command};
+  use std::{collections::HashSet, path::PathBuf, process::Command};
 
   use rand_core::{RngCore, OsRng};
 
-  use dockertest::{PullPolicy, Image, TestBodySpecification, DockerTest};
-
   const RUNS: usize = 3;
-  const TIMEOUT: u16 = 3 * 60 * 60; // 3 hours
-
-  serai_docker_tests::build("runtime".to_string());
 
-  let mut ids = vec![[0; 8]; RUNS];
-  for id in &mut ids {
-    OsRng.fill_bytes(id);
+  let mut images = vec![];
+  for _ in 0 .. RUNS {
+    let mut image = [0; 32];
+    OsRng.fill_bytes(&mut image);
+    images.push(format!("runtime-{}", hex::encode(image)));
   }
 
-  let mut test = DockerTest::new().with_network(dockertest::Network::Isolated);
-  for id in &ids {
-    test.provide_container(
-      TestBodySpecification::with_image(
-        Image::with_repository("serai-dev-runtime").pull_policy(PullPolicy::Never),
-      )
-      .set_handle(format!("runtime-build-{}", hex::encode(id)))
-      .replace_cmd(vec![
-        "sh".to_string(),
-        "-c".to_string(),
-        // Sleep for a minute after building to prevent the container from closing before we
-        // retrieve the hash
-        "cd /serai/substrate/runtime && cargo clean && cargo build --release &&
-          printf \"Runtime hash: \" > hash &&
-          sha256sum /serai/target/release/wbuild/serai-runtime/serai_runtime.wasm >> hash &&
-          cat hash &&
-          sleep 60"
-          .to_string(),
-      ]),
+  let path = Command::new("cargo")
+    .arg("locate-project")
+    .arg("--workspace")
+    .arg("--message-format")
+    .arg("plain")
+    .output()
+    .unwrap();
+  assert!(path.status.success());
+  let mut path = PathBuf::from(String::from_utf8(path.stdout).unwrap().trim());
+  assert_eq!(path.file_name().unwrap(), "Cargo.toml");
+  assert!(path.pop());
+
+  let mut commands = vec![];
+  for image in &images {
+    commands.push(
+      Command::new("docker")
+        .current_dir(&path)
+        .arg("build")
+        .arg("--progress")
+        .arg("plain")
+        .arg("--no-cache")
+        .arg("--file")
+        .arg("./orchestration/runtime/Dockerfile")
+        .arg("--tag")
+        .arg(image)
+        .arg(".")
+        .spawn()
+        .unwrap(),
     );
   }
 
-  test.run(|_| async {
-    let ids = ids;
-
-    let mut containers = vec![];
-    for container in String::from_utf8(
-      Command::new("docker").arg("ps").arg("--format").arg("{{.Names}}").output().unwrap().stdout,
-    )
-    .expect("output wasn't utf-8")
-    .lines()
-    {
-      for id in &ids {
-        if container.contains(&hex::encode(id)) {
-          containers.push(container.trim().to_string());
-        }
-      }
-    }
-    assert_eq!(containers.len(), RUNS, "couldn't find all containers");
-
-    let mut res = vec![None; RUNS];
-    'attempt: for _ in 0 .. (TIMEOUT / 10) {
-      tokio::time::sleep(core::time::Duration::from_secs(10)).await;
-      'runner: for (i, container) in containers.iter().enumerate() {
-        if res[i].is_some() {
-          continue;
-        }
-
-        let logs = Command::new("docker").arg("logs").arg(container).output().unwrap();
-        let Some(last_log) =
-          std::str::from_utf8(&logs.stdout).expect("output wasn't utf-8").lines().last()
-        else {
-          continue 'runner;
-        };
-
-        let split = last_log.split("Runtime hash: ").collect::<Vec<_>>();
-        if split.len() == 2 {
-          res[i] = Some(split[1].to_string());
-          continue 'runner;
-        }
-      }
-
-      for item in &res {
-        if item.is_none() {
-          continue 'attempt;
-        }
-      }
-      break;
-    }
-
-    // If we didn't get results from all runners, panic
-    for item in &res {
-      if item.is_none() {
-        panic!("couldn't get runtime hashes within allowed time");
-      }
-    }
-
-    let mut identical = HashSet::new();
-    for res in res.clone() {
-      identical.insert(res.unwrap());
-    }
-    assert_eq!(identical.len(), 1, "got different runtime hashes {res:?}");
-  });
+  let mut outputs = vec![];
+  for (image, mut command) in images.into_iter().zip(commands) {
+    assert!(command.wait().unwrap().success());
+    outputs.push(
+      Command::new("docker")
+        .arg("run")
+        .arg("--quiet")
+        .arg("--rm")
+        .arg(&image)
+        .arg("sha256sum")
+        .arg("/serai/target/release/wbuild/serai-runtime/serai_runtime.wasm")
+        .output(),
+    );
+    // Attempt to clean up the image
+    let _ = Command::new("docker").arg("rmi").arg(&image).output();
+  }
+
+  let mut expected = None;
+  for output in outputs {
+    let output = output.unwrap();
+    assert!(output.status.success());
+    if expected.is_none() {
+      expected = Some(output.stdout.clone());
+    }
+    assert_eq!(expected, Some(output.stdout));
+  }
+
+  let result = String::from_utf8(expected.unwrap()).unwrap();
+  let hash = result.split_whitespace().next().unwrap();
+  // Check this appears to be a 32-byte hash (encoded as hex)
+  assert_eq!(hash.len(), 64);
+  hex::decode(hash).unwrap();
+  println!("Hash: {hash}");
 }