Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Workspace root tying the halide-cache binary and the lager library together.
[workspace]
members = ["halide-cache", "lager"]
# NOTE(review): resolver "3" is the MSRV-aware dependency resolver and needs a
# recent toolchain (Rust 1.84+) — confirm CI's toolchain supports it.
resolver = "3"
7 changes: 7 additions & 0 deletions halide-cache/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions halide-cache/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ lager = { path = "../lager" }
blake3 = "1.8.2"
clap = { version = "4.5.53", features = ["derive"] }
named-lock = "0.4.1"
anyhow = "1.0.102"
207 changes: 83 additions & 124 deletions halide-cache/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,93 +19,99 @@ struct Args {

const MAX_CACHE_SIZE_BYTES: u64 = 10737418240; // 10 GiB

/// Everything that determines a cached artifact's content address: the
/// artifact's own path, the dependency files whose contents feed the hash,
/// and the collected environment strings.
///
/// (The stale `fn main()` header that preceded this struct was leftover
/// diff residue duplicating the real `main` below; it has been removed.)
struct Dependencies<'a> {
    path: &'a Path,
    dependencies: &'a [PathBuf],
    env: &'a [String],
}

impl<'a> Dependencies<'a> {
fn make_address(&self) -> anyhow::Result<lager::Address> {
let mut hasher = blake3::Hasher::new();

hasher.update(self.path.as_os_str().as_encoded_bytes());
hasher.update(&[0u8]);
hasher.update(&[0u8]);

let cache_dir = match env::var("HALIDE_CACHE_DIR") {
Ok(env) => env,
Err(e) => {
eprintln!("HALIDE_CACHE_DIR environment variable not set: {}", e);
std::process::exit(1);
for d in self.dependencies {
let file = std::fs::File::open(d)?;
hasher.update_reader(file)?;
hasher.update(&[0u8]);
}
};
hasher.update(&[0u8]);

for e in self.env {
hasher.update(e.as_bytes());
hasher.update(&[0u8]);
}
hasher.update(&[0u8]);

let mut buf = [0u8; _];
hasher.finalize_xof().fill(&mut buf);
Ok(buf.into())
}
}

fn main() -> anyhow::Result<()> {
let args = Args::parse();

let cache_dir = env::var("HALIDE_CACHE_DIR")?;

if !Path::new(&cache_dir).exists() {
fs::create_dir_all(&cache_dir).unwrap();
fs::create_dir_all(&cache_dir)?;
}
let lager = match Lager::new(Path::new(&cache_dir)) {
Ok(l) => l,
Err(e) => {
eprintln!("Failed to initialize Lager cache: {}", e);
std::process::exit(1);
}
};
let lager = Lager::new(Path::new(&cache_dir))?;

let zivid_env = collect_zivid_env();

let mut object_dependencies = zivid_env.clone();
object_dependencies.push(
args.generated_object
.clone()
.into_os_string()
.into_string()
.unwrap(),
);

let mut header_dependencies = zivid_env;
header_dependencies.push(
args.generated_header
.clone()
.into_os_string()
.into_string()
.unwrap(),
);

hash_all_dependencies_contents(
args.dependencies,
&mut object_dependencies,
&mut header_dependencies,
);

let object_address = hash_vector(&object_dependencies);
let header_address = hash_vector(&header_dependencies);
let object_dependencies = Dependencies {
path: &args.generated_object,
dependencies: &args.dependencies,
env: &zivid_env,
};

let header_dependencies = Dependencies {
path: &args.generated_header,
dependencies: &args.dependencies,
env: &zivid_env,
};

let header_address = header_dependencies.make_address()?;
let object_address = object_dependencies.make_address()?;
if cache_hit(
&args.generated_object,
&args.generated_header,
&lager,
&object_address,
&header_address,
) {
return;
)? {
return Ok(());
}

let status = Command::new(&args.builder[0])
.args(&args.builder[1..])
.status()
.expect("Failed to execute command");
.status()?;

if status.success() {
lager
.store_at(&object_address, &args.generated_object)
.expect("Store at failed for the Halide object");
lager
.store_at(&header_address, &args.generated_header)
.expect("Store at failed for the Halide header");
lager.store_at(&object_address, &args.generated_object)?;
lager.store_at(&header_address, &args.generated_header)?;
}

try_cleaning_up(lager);
try_cleaning_up(lager)?;

Ok(())
}

fn try_cleaning_up(lager: Lager) {
let lock = NamedLock::create("lager_lock").unwrap();
fn try_cleaning_up(lager: Lager) -> anyhow::Result<()> {
let lock = NamedLock::create("lager_lock")?;
if let Ok(_guard) = lock.lock() {
let mut lru = LRU::new(lager);
lru.scan().unwrap();
lru.scan()?;
if lru.lager_size() > MAX_CACHE_SIZE_BYTES {
lru.evict_until(MAX_CACHE_SIZE_BYTES).unwrap();
lru.evict_until(MAX_CACHE_SIZE_BYTES)?;
}
}
Ok(())
}

fn collect_zivid_env() -> Vec<String> {
Expand All @@ -116,77 +122,30 @@ fn collect_zivid_env() -> Vec<String> {
v
}

fn hash_all_dependencies_contents(
dependencies: Vec<PathBuf>,
object_dependencies: &mut Vec<String>,
header_dependencies: &mut Vec<String>,
) {
for dep in &dependencies {
let file_content_hash = match compute_hash_of_file(dep) {
Ok(hash) => hash,
Err(e) => {
eprintln!("Failed to compute hash for {:?}: {}", dependencies, e);
std::process::exit(1);
}
};
object_dependencies.push(file_content_hash.clone());
header_dependencies.push(file_content_hash);
}
}

fn cache_hit(
generated_object: &PathBuf,
generated_header: &PathBuf,
generated_object: &Path,
generated_header: &Path,
lager: &Lager,
object_address: &Address,
header_address: &Address,
) -> bool {
match lager.retrieve(object_address, generated_object.as_path()) {
Ok(_) => {
println!("Cache hit for Halide object. {:?}", generated_object);
match lager.retrieve(header_address, generated_header.as_path()) {
Ok(_) => {
println!("Cache hit for Halide header {:?}", generated_header);
return true;
}
Err(e) => {
eprintln!("Error for Halide header {}", e);
}
}
}
Err(_) => {
// Cache miss, proceed to build
) -> anyhow::Result<bool> {
match (
lager.retrieve(object_address, generated_object),
lager.retrieve(header_address, generated_header),
) {
(Ok(_), Ok(_)) => {
println!(
"Cache hits for Halide objects: {:?} and {:?}",
generated_object, generated_header
);
Ok(true)
}
}
false
}

fn serialize_vector(vec: &[String]) -> String {
format!(
"[{}]",
vec.iter()
.map(|s| format!("\"{}\"", s))
.collect::<Vec<String>>()
.join(",")
)
}
fn compute_hash_of_file<P: AsRef<std::path::Path>>(
path: P,
) -> Result<String, Box<dyn std::error::Error>> {
let mut file = std::fs::File::open(path)?;
let mut hasher = blake3::Hasher::new();
std::io::copy(&mut file, &mut hasher)?;
let hash = hasher.finalize();
Ok(hash.to_hex().to_string())
}

fn hash_vector(vector: &[String]) -> Address {
let input = serialize_vector(vector);
let mut hasher = blake3::Hasher::new();
hasher.update(input.as_bytes());
let hash = hasher.finalize().to_hex().to_string();
match hash.into_bytes().as_slice().try_into() {
Ok(addr) => addr,
Err(_) => panic!("Hash length mismatch"),
(
Err(lager::Error::NotFound { address: _ }),
Err(lager::Error::NotFound { address: _ }),
) => Ok(false),
(Err(oe), Err(he)) => Err(anyhow::anyhow!(oe).context(he)),
(Ok(_), Err(e)) => Err(anyhow::anyhow!(e).context("Retrieving the object was successful")),
(Err(e), Ok(_)) => Err(anyhow::anyhow!(e).context("Retrieving the header was successful")),
}
}
8 changes: 8 additions & 0 deletions halide-cache/tests/basic.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#[test]
fn run_basic_tests() {
    // Drive the shell-based end-to-end harness; it exercises the builder
    // script against the cache binary and reports success via its exit code.
    let outcome = std::process::Command::new("tests/run")
        .arg("tests/builder")
        .status();
    match outcome {
        Ok(status) => assert!(status.success()),
        Err(e) => panic!("failed to spawn tests/run: {e}"),
    }
}
21 changes: 21 additions & 0 deletions halide-cache/tests/builder
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
#!/bin/bash

# Test builder stub: transforms input file $1 according to the case.txt and
# direction.txt fixtures under $HALIDE_TEST_DEPENDENCIES, writing the result
# to both $2 (object) and $3 (header).

# `case` is only a keyword in command position, so it is a legal variable name.
read -r case < "$HALIDE_TEST_DEPENDENCIES/case.txt"
read -r direction < "$HALIDE_TEST_DEPENDENCIES/direction.txt"

process() {
    cat "$1" |
        # BUG FIX: the original wrote `[[ case = upper ]]`, comparing the
        # literal string "case" to "upper" — always false, so the upper-casing
        # branch could never run. It must test the variable: "$case".
        if [[ $case = upper ]]; then
            tr '[:lower:]' '[:upper:]'
        else
            cat
        fi |
        if [[ $direction = reverse ]]; then
            rev
        else
            cat
        fi > "$2"
}

process "$1" "$2"
process "$1" "$3"
Loading
Loading