Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
73 changes: 0 additions & 73 deletions crates/karva_cache/src/cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -240,40 +240,6 @@ fn collect_run_dirs(cache_dir: &Utf8Path) -> Result<Vec<String>> {
Ok(run_dirs)
}

/// Reads durations from the most recent test run.
///
/// Finds the most recent `run-{timestamp}` directory, then aggregates
/// all durations from all worker directories within it.
pub fn read_recent_durations(cache_dir: &Utf8PathBuf) -> Result<HashMap<String, Duration>> {
let run_dirs = collect_run_dirs(cache_dir)?;

let most_recent = run_dirs
.last()
.ok_or_else(|| anyhow::anyhow!("No run directories found"))?;

let run_dir = cache_dir.join(most_recent);

let mut aggregated_durations = HashMap::new();

for entry in fs::read_dir(&run_dir)? {
let entry = entry?;
let worker_path = Utf8PathBuf::try_from(entry.path())
.map_err(|e| anyhow::anyhow!("Invalid UTF-8 path: {e}"))?;

if !worker_path.is_dir() {
continue;
}

if let Some(durations) =
read_and_parse::<HashMap<String, Duration>>(&worker_path, DURATIONS_FILE)?
{
aggregated_durations.extend(durations);
}
}

Ok(aggregated_durations)
}

/// Result of a cache prune operation.
pub struct PruneResult {
/// Names of the removed run directories.
Expand Down Expand Up @@ -323,18 +289,6 @@ mod tests {

use super::*;

/// Test helper: writes `durations` as JSON into
/// `{dir}/{run_name}/worker-{worker_id}/DURATIONS_FILE`, creating the
/// directory tree first to mirror the real cache layout.
fn create_cache_with_durations(
    dir: &std::path::Path,
    run_name: &str,
    worker_id: usize,
    durations: &HashMap<String, Duration>,
) {
    let worker_dir = dir.join(run_name).join(format!("worker-{worker_id}"));
    fs::create_dir_all(&worker_dir).unwrap();
    fs::write(
        worker_dir.join(DURATIONS_FILE),
        serde_json::to_string(durations).unwrap(),
    )
    .unwrap();
}

fn create_cache_with_stats(
dir: &std::path::Path,
run_name: &str,
Expand All @@ -346,33 +300,6 @@ mod tests {
fs::write(worker_dir.join(STATS_FILE), stats_json).unwrap();
}

#[test]
fn read_recent_durations_returns_from_most_recent_run() {
    let tmp = tempfile::tempdir().unwrap();
    let cache_dir = Utf8PathBuf::try_from(tmp.path().to_path_buf()).unwrap();

    // Two runs; the larger timestamp suffix ("run-200") is the newer one.
    let old_durations: HashMap<String, Duration> =
        [("test_old".to_string(), Duration::from_millis(100))].into();
    create_cache_with_durations(tmp.path(), "run-100", 0, &old_durations);

    let new_durations: HashMap<String, Duration> =
        [("test_new".to_string(), Duration::from_millis(200))].into();
    create_cache_with_durations(tmp.path(), "run-200", 0, &new_durations);

    // Only the newest run's durations should be returned.
    let result = read_recent_durations(&cache_dir).unwrap();
    assert!(result.contains_key("test_new"));
    assert!(!result.contains_key("test_old"));
}

#[test]
fn read_recent_durations_errors_when_no_runs() {
    // A freshly created temp directory contains no `run-*` entries, so
    // the most-recent-run lookup must report an error.
    let tmp = tempfile::tempdir().unwrap();
    let cache_dir = Utf8PathBuf::try_from(tmp.path().to_path_buf()).unwrap();

    assert!(read_recent_durations(&cache_dir).is_err());
}

#[test]
fn aggregate_results_merges_stats_from_multiple_workers() {
let tmp = tempfile::tempdir().unwrap();
Expand Down
2 changes: 1 addition & 1 deletion crates/karva_cache/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ pub(crate) mod hash;

pub use cache::{
AggregatedResults, Cache, PruneResult, clean_cache, prune_cache, read_last_failed,
read_recent_durations, write_last_failed,
write_last_failed,
};
pub use hash::RunHash;

Expand Down
24 changes: 2 additions & 22 deletions crates/karva_runner/src/orchestration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,7 @@ use crossbeam_channel::{Receiver, TryRecvError};

use crate::shutdown::shutdown_receiver;
use karva_cache::{
AggregatedResults, CACHE_DIR, Cache, RunHash, read_last_failed, read_recent_durations,
write_last_failed,
AggregatedResults, CACHE_DIR, Cache, RunHash, read_last_failed, write_last_failed,
};
use karva_cli::SubTestCommand;
use karva_collector::{CollectedPackage, CollectionSettings};
Expand Down Expand Up @@ -280,20 +279,6 @@ pub fn run_parallel_tests(

let cache_dir = project.cwd().join(CACHE_DIR);

// Read durations from the most recent run to optimize partitioning
let previous_durations = if config.no_cache {
std::collections::HashMap::new()
} else {
read_recent_durations(&cache_dir).unwrap_or_default()
};

if !previous_durations.is_empty() {
tracing::debug!(
"Found {} previous test durations to guide partitioning",
previous_durations.len()
);
}

let last_failed_set: HashSet<String> = if config.last_failed {
read_last_failed(&cache_dir)
.unwrap_or_default()
Expand All @@ -303,12 +288,7 @@ pub fn run_parallel_tests(
HashSet::new()
};

let partitions = partition_collected_tests(
&collected,
num_workers,
&previous_durations,
&last_failed_set,
);
let partitions = partition_collected_tests(&collected, num_workers, &last_failed_set);

let run_hash = RunHash::current_time();

Expand Down
Loading
Loading