Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 29 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,13 @@ permissions:
jobs:
nightly:
runs-on: ubuntu-24.04
timeout-minutes: 15
steps:
- uses: dtolnay/rust-toolchain@nightly
- uses: actions/checkout@v4
- run: cargo +nightly -Z package-workspace package

fedora:
runs-on: ubuntu-24.04
timeout-minutes: 5
container:
image: quay.io/fedora/fedora:latest
options: "--privileged --pid=host -v /var/tmp:/var/tmp --tmpfs /tmp:rw,exec,nosuid,nodev -v /:/run/host"
Expand All @@ -47,22 +45,47 @@ jobs:
smoke:
name: Unprivileged smoke test
runs-on: ubuntu-24.04
timeout-minutes: 15
steps:
- uses: actions/checkout@v5
- uses: bootc-dev/actions/bootc-ubuntu-setup@main
- uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- run: just test-integration

# Fuzz smoke test — runs each fuzz target briefly to catch panics
# on malformed input. (NOTE(review): the comment below about an extended
# run on pushes to main is not reflected by any trigger visible in this
# job — confirm that the extended run is configured elsewhere.)
fuzz:
  name: Fuzz smoke test
  runs-on: ubuntu-24.04
  # Every other job in this workflow is time-bounded (smoke: 15,
  # integration: 30, examples: 45). Fuzzing is exactly the job most
  # likely to hang on a pathological input, so bound it too rather
  # than falling back to GitHub's 6-hour default.
  timeout-minutes: 30
  steps:
    - uses: actions/checkout@v4
    - uses: dtolnay/rust-toolchain@nightly
    - uses: extractions/setup-just@v2
    - uses: Swatinem/rust-cache@v2
      with:
        # The fuzz targets live in their own sub-workspace with a
        # separate target directory; cache that workspace, not the root.
        workspaces: crates/composefs/fuzz
    - name: Install cargo-fuzz
      run: cargo install cargo-fuzz --locked
    - name: Generate corpus
      run: just generate-corpus
    - name: Run fuzz targets
      run: just fuzz-all
    # Crash reproducers and per-target logs are the only way to debug a
    # fuzz failure once the runner is gone, so preserve them on failure.
    - name: Upload fuzz artifacts on failure
      if: failure()
      uses: actions/upload-artifact@v4
      with:
        name: fuzz-artifacts
        path: |
          crates/composefs/fuzz/artifacts/
          target/fuzz-logs/

# Full integration tests: builds a bootc container image, runs all
# tests (both unprivileged and privileged). Privileged tests execute
# inside bcvk ephemeral VMs booted from the container image.
integration:
name: Integration tests (${{ matrix.name }})
needs: smoke
runs-on: ubuntu-24.04
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
Expand Down Expand Up @@ -92,7 +115,6 @@ jobs:
examples:
name: Example tests (${{ matrix.example.dir }}/${{ matrix.example.os }})
runs-on: ubuntu-24.04
timeout-minutes: 45

strategy:
matrix:
Expand Down Expand Up @@ -179,13 +201,14 @@ jobs:
# repository settings as the single required status check.
required-checks:
  # Run even when upstream jobs failed or were skipped, so this gate
  # always produces a status for branch protection to check.
  if: always()
  needs: [nightly, fedora, smoke, fuzz, integration, examples]
  runs-on: ubuntu-latest
  steps:
    # Fail the gate unless every upstream job succeeded. Checking each
    # result explicitly (rather than relying on default needs behavior)
    # also catches 'skipped' and 'cancelled' outcomes.
    - run: exit 1
      if: >-
        needs.nightly.result != 'success' ||
        needs.fedora.result != 'success' ||
        needs.smoke.result != 'success' ||
        needs.fuzz.result != 'success' ||
        needs.integration.result != 'success' ||
        needs.examples.result != 'success'
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
/Cargo.lock
/target/
/vendor/
# Fuzz sub-workspaces keep their own target/, corpus/, and artifacts/ directories
**/fuzz/target/
**/fuzz/corpus/
**/fuzz/artifacts/
30 changes: 30 additions & 0 deletions Justfile
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,36 @@ test-integration-vm: build _integration-container-build
# Run everything: checks + full integration tests including VM
ci: check test-integration-vm

# Run a specific erofs fuzz target (e.g., `just fuzz read_image -- -max_total_time=60`)
# Everything after `--` in ARGS is forwarded verbatim to the fuzz run.
# The `cd` is required: cargo-fuzz is invoked from the crate that owns
# the fuzz/ sub-workspace.
fuzz target *ARGS:
    cd crates/composefs && cargo +nightly fuzz run {{target}} {{ARGS}}

# Run all erofs fuzz targets for a given duration each (default: 120 seconds)
# Each target's output is captured in target/fuzz-logs/<target>.log; the
# loop stops at the first failing target and dumps that target's full log.
fuzz-all seconds="120":
    #!/usr/bin/env bash
    set -euo pipefail
    mkdir -p target/fuzz-logs
    # One target name per line from `cargo fuzz list`.
    for target in $(cd crates/composefs && cargo +nightly fuzz list); do
        echo "--- Fuzzing $target for {{seconds}}s ---"
        # Log path is relative to the repo root: the cd below happens in a
        # subshell, so the redirection still resolves from here.
        log="target/fuzz-logs/$target.log"
        if (cd crates/composefs && cargo +nightly fuzz run "$target" -- -max_total_time={{seconds}}) > "$log" 2>&1; then
            echo " $target: OK"
            # Show the final log line as a quick one-line summary.
            tail -1 "$log"
        else
            echo " $target: FAILED"
            # Dump the whole log so the failure is visible in CI output.
            cat "$log"
            exit 1
        fi
    done

# Generate seed corpus for fuzz targets
# Runs the corpus-generator binary that lives in the fuzz sub-workspace.
generate-corpus:
    cargo run --manifest-path crates/composefs/fuzz/Cargo.toml --bin generate-corpus

# List available fuzz targets
fuzz-list:
    cd crates/composefs && cargo +nightly fuzz list

# Clean build artifacts
clean:
cargo clean
22 changes: 11 additions & 11 deletions crates/composefs-oci/src/image.rs
Original file line number Diff line number Diff line change
Expand Up @@ -252,7 +252,7 @@ mod test {
append_tar_dir(&mut builder, "var");
append_tar_dir(&mut builder, "var/log");

// Regular files — inline (<=64 bytes, the INLINE_CONTENT_MAX threshold)
// Regular files — inline (<=64 bytes, the INLINE_CONTENT_MAX_V0 threshold)
append_tar_file(&mut builder, "etc/hostname", b"busybox-container\n");
append_tar_file(
&mut builder,
Expand Down Expand Up @@ -318,7 +318,7 @@ mod test {
/// with `get_entry()`, and verify every entry type round-trips correctly.
#[tokio::test]
async fn test_build_baseimage_roundtrip() -> Result<()> {
use composefs::{repository::Repository, test::tempdir, INLINE_CONTENT_MAX};
use composefs::{repository::Repository, test::tempdir, INLINE_CONTENT_MAX_V0};
use rustix::fs::CWD;
use std::ffi::OsStr;
use std::sync::Arc;
Expand Down Expand Up @@ -367,14 +367,14 @@ mod test {
assert_eq!(entry.stat.st_mode, 0o755, "{dir} mode");
}

// --- Inline files (<=INLINE_CONTENT_MAX bytes) ---
// --- Inline files (<=INLINE_CONTENT_MAX_V0 bytes) ---
let hostname = by_path("/etc/hostname");
match &hostname.item {
TarItem::Leaf(LeafContent::Regular(RegularFile::Inline(data))) => {
assert_eq!(data.as_ref(), b"busybox-container\n");
assert!(
data.len() <= INLINE_CONTENT_MAX,
"hostname should be inline ({} bytes <= {INLINE_CONTENT_MAX})",
data.len() <= INLINE_CONTENT_MAX_V0,
"hostname should be inline ({} bytes <= {INLINE_CONTENT_MAX_V0})",
data.len()
);
}
Expand All @@ -386,21 +386,21 @@ mod test {
TarItem::Leaf(LeafContent::Regular(RegularFile::Inline(data))) => {
assert!(data.starts_with(b"nameserver"));
assert!(
data.len() <= INLINE_CONTENT_MAX,
"resolv.conf should be inline ({} bytes <= {INLINE_CONTENT_MAX})",
data.len() <= INLINE_CONTENT_MAX_V0,
"resolv.conf should be inline ({} bytes <= {INLINE_CONTENT_MAX_V0})",
data.len()
);
}
other => panic!("expected inline file for /etc/resolv.conf, got {other:?}"),
}

// --- External files (>INLINE_CONTENT_MAX bytes) ---
// --- External files (>INLINE_CONTENT_MAX_V0 bytes) ---
let passwd = by_path("/etc/passwd");
match &passwd.item {
TarItem::Leaf(LeafContent::Regular(RegularFile::External(_, size))) => {
assert!(
*size as usize > INLINE_CONTENT_MAX,
"passwd should be external ({size} bytes > {INLINE_CONTENT_MAX})"
*size as usize > INLINE_CONTENT_MAX_V0,
"passwd should be external ({size} bytes > {INLINE_CONTENT_MAX_V0})"
);
}
other => panic!("expected external file for /etc/passwd, got {other:?}"),
Expand All @@ -426,7 +426,7 @@ mod test {
match &readme.item {
TarItem::Leaf(LeafContent::Regular(RegularFile::External(_, size))) => {
assert!(
*size as usize > INLINE_CONTENT_MAX,
*size as usize > INLINE_CONTENT_MAX_V0,
"README should be external ({size} bytes)"
);
}
Expand Down
47 changes: 24 additions & 23 deletions crates/composefs-oci/src/tar.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ use composefs::{
shared_internals::IO_BUF_CAPACITY,
splitstream::{SplitStreamBuilder, SplitStreamData, SplitStreamReader},
tree::{LeafContent, RegularFile, Stat},
INLINE_CONTENT_MAX,
INLINE_CONTENT_MAX_V0,
};

use crate::ImportStats;
Expand Down Expand Up @@ -145,7 +145,7 @@ async fn stream_large_file<ObjectID: FsVerityHashValue>(
/// Concurrency is limited to `available_parallelism()` to avoid overwhelming the
/// system with too many concurrent I/O operations.
///
/// Files larger than `INLINE_CONTENT_MAX` are stored externally in the object store,
/// Files larger than `INLINE_CONTENT_MAX_V0` are stored externally in the object store,
/// while smaller files and metadata are stored inline in the split stream.
///
/// # Arguments
Expand Down Expand Up @@ -200,7 +200,8 @@ pub async fn split_async<ObjectID: FsVerityHashValue>(
ParseEvent::Entry { consumed, entry } => {
// Extract what we need before mutating buf
let actual_size = entry.size as usize;
let is_large_file = entry.entry_type.is_file() && actual_size > INLINE_CONTENT_MAX;
let is_large_file =
entry.entry_type.is_file() && actual_size > INLINE_CONTENT_MAX_V0;

// Write all header bytes (including extension headers) inline
builder.push_inline(&buf.split_to(consumed));
Expand Down Expand Up @@ -363,7 +364,7 @@ pub fn get_entry<ObjectID: FsVerityHashValue>(
SplitStreamData::External(id) => match entry.entry_type {
EntryType::Regular | EntryType::Continuous => {
ensure!(
size as usize > INLINE_CONTENT_MAX,
size as usize > INLINE_CONTENT_MAX_V0,
"Splitstream incorrectly stored a small ({size} byte) file external"
);
TarItem::Leaf(LeafContent::Regular(RegularFile::External(id, size)))
Expand All @@ -377,7 +378,7 @@ pub fn get_entry<ObjectID: FsVerityHashValue>(
EntryType::Directory => TarItem::Directory,
EntryType::Regular | EntryType::Continuous => {
ensure!(
content.len() <= INLINE_CONTENT_MAX,
content.len() <= INLINE_CONTENT_MAX_V0,
"Splitstream incorrectly stored a large ({} byte) file inline",
content.len()
);
Expand Down Expand Up @@ -630,12 +631,12 @@ mod tests {
let mut builder = Builder::new(&mut tar_data);

// File exactly at the threshold should be inline
let threshold_content = vec![b'X'; INLINE_CONTENT_MAX];
let threshold_content = vec![b'X'; INLINE_CONTENT_MAX_V0];
let header1 =
append_file(&mut builder, "threshold_file.txt", &threshold_content).unwrap();

// File just over threshold should be external
let over_threshold_content = vec![b'Y'; INLINE_CONTENT_MAX + 1];
let over_threshold_content = vec![b'Y'; INLINE_CONTENT_MAX_V0 + 1];
let header2 = append_file(
&mut builder,
"over_threshold_file.txt",
Expand Down Expand Up @@ -687,7 +688,7 @@ mod tests {
if let TarItem::Leaf(LeafContent::Regular(RegularFile::Inline(ref content))) =
entries[0].item
{
assert_eq!(content.len(), INLINE_CONTENT_MAX);
assert_eq!(content.len(), INLINE_CONTENT_MAX_V0);
assert_eq!(content[0], b'X');
} else {
panic!("Expected inline regular file for threshold file");
Expand All @@ -702,7 +703,7 @@ mod tests {
);
if let TarItem::Leaf(LeafContent::Regular(RegularFile::External(_, size))) = entries[1].item
{
assert_eq!(size, (INLINE_CONTENT_MAX + 1) as u64);
assert_eq!(size, (INLINE_CONTENT_MAX_V0 + 1) as u64);
} else {
panic!("Expected external regular file for over-threshold file");
}
Expand All @@ -720,7 +721,7 @@ mod tests {
let header1 = append_file(&mut builder, "small.txt", small_content).unwrap();

// Add a large file
let large_content = vec![b'L'; INLINE_CONTENT_MAX + 100];
let large_content = vec![b'L'; INLINE_CONTENT_MAX_V0 + 100];
let header2 = append_file(&mut builder, "large.txt", &large_content).unwrap();

builder.finish().unwrap();
Expand Down Expand Up @@ -782,14 +783,14 @@ mod tests {
if let TarItem::Leaf(LeafContent::Regular(RegularFile::External(ref id, size))) =
entries[1].item
{
assert_eq!(size, (INLINE_CONTENT_MAX + 100) as u64);
assert_eq!(size, (INLINE_CONTENT_MAX_V0 + 100) as u64);
// Verify the external content matches
use std::io::Read;
let mut external_data = Vec::new();
std::fs::File::from(repo.open_object(id).unwrap())
.read_to_end(&mut external_data)
.unwrap();
let expected_content = vec![b'L'; INLINE_CONTENT_MAX + 100];
let expected_content = vec![b'L'; INLINE_CONTENT_MAX_V0 + 100];
assert_eq!(
external_data, expected_content,
"External file content should match"
Expand Down Expand Up @@ -1008,7 +1009,7 @@ mod tests {
append_file(&mut builder, "small.txt", small_content).unwrap();

// Large file (should be external/streamed)
let large_content = vec![b'L'; INLINE_CONTENT_MAX + 100];
let large_content = vec![b'L'; INLINE_CONTENT_MAX_V0 + 100];
append_file(&mut builder, "large.txt", &large_content).unwrap();

// Another small file
Expand Down Expand Up @@ -1068,13 +1069,13 @@ mod tests {
if let TarItem::Leaf(LeafContent::Regular(RegularFile::External(ref id, size))) =
entries[1].item
{
assert_eq!(size, (INLINE_CONTENT_MAX + 100) as u64);
assert_eq!(size, (INLINE_CONTENT_MAX_V0 + 100) as u64);
// Verify the external content matches
let mut external_data = Vec::new();
std::fs::File::from(repo.open_object(id).unwrap())
.read_to_end(&mut external_data)
.unwrap();
let expected_content = vec![b'L'; INLINE_CONTENT_MAX + 100];
let expected_content = vec![b'L'; INLINE_CONTENT_MAX_V0 + 100];
assert_eq!(
external_data, expected_content,
"External file content should match"
Expand Down Expand Up @@ -1103,7 +1104,7 @@ mod tests {

// Three large files to test parallel streaming
for i in 0..3 {
let content = vec![(i + 0x41) as u8; INLINE_CONTENT_MAX + 1000]; // 'A', 'B', 'C'
let content = vec![(i + 0x41) as u8; INLINE_CONTENT_MAX_V0 + 1000]; // 'A', 'B', 'C'
let filename = format!("file{}.bin", i);
append_file(&mut builder, &filename, &content).unwrap();
}
Expand Down Expand Up @@ -1152,12 +1153,12 @@ mod tests {
if let TarItem::Leaf(LeafContent::Regular(RegularFile::External(ref id, size))) =
entry.item
{
assert_eq!(size, (INLINE_CONTENT_MAX + 1000) as u64);
assert_eq!(size, (INLINE_CONTENT_MAX_V0 + 1000) as u64);
let mut external_data = Vec::new();
std::fs::File::from(repo.open_object(id).unwrap())
.read_to_end(&mut external_data)
.unwrap();
let expected_content = vec![(i + 0x41) as u8; INLINE_CONTENT_MAX + 1000];
let expected_content = vec![(i + 0x41) as u8; INLINE_CONTENT_MAX_V0 + 1000];
assert_eq!(
external_data, expected_content,
"External file {} content should match",
Expand Down Expand Up @@ -1477,15 +1478,15 @@ mod tests {

/// Strategy for generating a file size that exercises both the inline and
/// external code paths, with emphasis on the boundary region around
/// INLINE_CONTENT_MAX (64 bytes) and 512-byte block alignment edges.
/// INLINE_CONTENT_MAX_V0 (64 bytes) and 512-byte block alignment edges.
fn file_size_strategy() -> impl Strategy<Value = usize> {
prop_oneof![
3 => 0..=INLINE_CONTENT_MAX, // inline (small)
2 => (INLINE_CONTENT_MAX + 1)..=(INLINE_CONTENT_MAX + 2048), // just over threshold
1 => (INLINE_CONTENT_MAX + 2048)..=100_000usize, // comfortably large
3 => 0..=INLINE_CONTENT_MAX_V0, // inline (small)
2 => (INLINE_CONTENT_MAX_V0 + 1)..=(INLINE_CONTENT_MAX_V0 + 2048), // just over threshold
1 => (INLINE_CONTENT_MAX_V0 + 2048)..=100_000usize, // comfortably large
// Boundary-focused: sizes near 512-byte block alignment
2 => prop::sample::select(vec![
0, 1, 63, 64, 65, // around INLINE_CONTENT_MAX
0, 1, 63, 64, 65, // around INLINE_CONTENT_MAX_V0
511, 512, 513, // around one block
1023, 1024, 1025, // around two blocks
]),
Expand Down
Loading
Loading