Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions crates/karva_test_semantic/src/diagnostic.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,10 +115,13 @@ fn report_dependency_chain(
dependency_chain: &[FixtureChainEntry],
fixture_name: &str,
) {
let reversed: Vec<_> = dependency_chain.iter().rev().collect();

for (i, entry) in reversed.iter().enumerate() {
let next_name = reversed.get(i + 1).map_or(fixture_name, |next| &next.name);
// Walk the chain top-down, pairing each entry with the fixture it depends on.
// The final entry depends on `fixture_name` (the one that actually failed).
let mut entries = dependency_chain.iter().rev().peekable();
while let Some(entry) = entries.next() {
let next_name = entries
.peek()
.map_or(fixture_name, |next| next.name.as_str());

let mut sub = SubDiagnostic::new(
SubDiagnosticSeverity::Info,
Expand Down
79 changes: 26 additions & 53 deletions crates/karva_test_semantic/src/extensions/fixtures/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -204,78 +204,51 @@ impl DiscoveredFixture {
}
}

const MISSING_FIXTURE_INFO: &str = "Could not find fixture information";

/// Get the fixture function marker from a function.
///
/// The second name is for older versions of pytest.
fn get_fixture_function_marker<'py>(function: &Bound<'py, PyAny>) -> PyResult<Bound<'py, PyAny>> {
let attribute_names = ["_fixture_function_marker", "_pytestfixturefunction"];

// Older versions of pytest
for name in attribute_names {
if let Ok(attr) = function.getattr(name) {
return Ok(attr);
}
}

Err(PyAttributeError::new_err(
"Could not find fixture information",
))
["_fixture_function_marker", "_pytestfixturefunction"]
.iter()
.find_map(|name| function.getattr(*name).ok())
.ok_or_else(|| PyAttributeError::new_err(MISSING_FIXTURE_INFO))
}

/// Get the fixture function from a function.
///
/// Falls back to the pre-8.0 pytest `__pytest_wrapped__.obj` path.
fn get_fixture_function<'py>(function: &Bound<'py, PyAny>) -> PyResult<Bound<'py, PyAny>> {
if let Ok(attr) = function.getattr("_fixture_function") {
return Ok(attr);
}

// Older versions of pytest
if let Ok(attr) = function.getattr("__pytest_wrapped__") {
if let Ok(attr) = attr.getattr("obj") {
return Ok(attr);
}
if let Ok(wrapped) = function.getattr("__pytest_wrapped__")
&& let Ok(obj) = wrapped.getattr("obj")
{
return Ok(obj);
}

Err(PyAttributeError::new_err(
"Could not find fixture information",
))
Err(PyAttributeError::new_err(MISSING_FIXTURE_INFO))
}

pub fn get_auto_use_fixtures<'a>(
parents: &'a [&'a DiscoveredPackage],
current: &'a dyn HasFixtures<'a>,
scope: FixtureScope,
) -> Vec<&'a DiscoveredFixture> {
let mut auto_use_fixtures_called = Vec::new();
let auto_use_fixtures = current.auto_use_fixtures(&scope.scopes_above());

for fixture in auto_use_fixtures {
let fixture_name = fixture.name().function_name().to_string();

if auto_use_fixtures_called
.iter()
.any(|fixture: &&DiscoveredFixture| fixture.name().function_name() == fixture_name)
{
continue;
}

auto_use_fixtures_called.push(fixture);
}

for parent in parents {
let parent_fixtures = parent.auto_use_fixtures(&[scope]);
for fixture in parent_fixtures {
let fixture_name = fixture.name().function_name().to_string();

if auto_use_fixtures_called
.iter()
.any(|fixture: &&DiscoveredFixture| fixture.name().function_name() == fixture_name)
{
continue;
}

auto_use_fixtures_called.push(fixture);
}
}

auto_use_fixtures_called
let current_fixtures = current.auto_use_fixtures(&scope.scopes_above());
let parent_fixtures = parents
.iter()
.flat_map(|parent| parent.auto_use_fixtures(&[scope]));

let mut seen: std::collections::HashSet<&str> = std::collections::HashSet::new();
current_fixtures
.into_iter()
.chain(parent_fixtures)
.filter(|fixture| seen.insert(fixture.name().function_name()))
.collect()
}

#[cfg(test)]
Expand Down
40 changes: 1 addition & 39 deletions crates/karva_test_semantic/src/extensions/functions/mod.rs
Original file line number Diff line number Diff line change
@@ -1,45 +1,7 @@
pub use self::python::{FailError, Param, SkipError, fail, param, skip};
pub use self::raises::{ExceptionInfo, RaisesContext};
pub use self::snapshot::{Command, SnapshotMismatchError, SnapshotSettings};
use pyo3::prelude::*;
pub use python::Param;

pub mod python;
pub mod raises;
pub mod snapshot;

// SkipError exception that can be raised to skip tests at runtime with an optional reason
pyo3::create_exception!(karva, SkipError, pyo3::exceptions::PyException);

// FailError exception that can be raised to fail tests at runtime with an optional reason
pyo3::create_exception!(karva, FailError, pyo3::exceptions::PyException);

/// Skip the current test at runtime with an optional reason.
///
/// This function raises a `SkipError` exception which will be caught by the test runner
/// and mark the test as skipped.
#[pyfunction]
#[pyo3(signature = (reason = None))]
pub fn skip(_py: Python<'_>, reason: Option<String>) -> PyResult<()> {
let message = reason.unwrap_or_default();
Err(SkipError::new_err(message))
}

/// Fail the current test at runtime with an optional reason.
///
/// This function raises a `FailError` exception which will be caught by the test runner
/// and mark the test as failed with the given reason.
#[pyfunction]
#[pyo3(signature = (reason = None))]
pub fn fail(_py: Python<'_>, reason: Option<String>) -> PyResult<()> {
Err(FailError::new_err(reason))
}

#[pyfunction]
#[pyo3(signature = (*values, tags = None))]
pub fn param(
py: Python<'_>,
values: Vec<Py<PyAny>>,
tags: Option<Vec<Py<PyAny>>>,
) -> PyResult<Param> {
Param::new(py, values, tags.unwrap_or_default())
}
36 changes: 36 additions & 0 deletions crates/karva_test_semantic/src/extensions/functions/python.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,12 @@ use pyo3::prelude::*;
use crate::extensions::tags::parametrize::Parametrization;
use crate::extensions::tags::{Tag, Tags};

// SkipError exception that can be raised to skip tests at runtime with an optional reason
pyo3::create_exception!(karva, SkipError, pyo3::exceptions::PyException);

// FailError exception that can be raised to fail tests at runtime with an optional reason
pyo3::create_exception!(karva, FailError, pyo3::exceptions::PyException);

#[derive(Debug, Clone)]
#[pyclass(from_py_object)]
pub struct Param {
Expand Down Expand Up @@ -37,3 +43,33 @@ impl Param {
Self { values, tags }
}
}

/// Skip the current test at runtime with an optional reason.
///
/// This function raises a `SkipError` exception which will be caught by the test runner
/// and mark the test as skipped.
#[pyfunction]
#[pyo3(signature = (reason = None))]
pub fn skip(_py: Python<'_>, reason: Option<String>) -> PyResult<()> {
    // Surfacing the signal as a raised exception lets Python test code call
    // this like `pytest.skip(...)`; the runner intercepts `SkipError`.
    let skip_signal = SkipError::new_err(reason);
    Err(skip_signal)
}

/// Fail the current test at runtime with an optional reason.
///
/// This function raises a `FailError` exception which will be caught by the test runner
/// and mark the test as failed with the given reason.
#[pyfunction]
#[pyo3(signature = (reason = None))]
pub fn fail(_py: Python<'_>, reason: Option<String>) -> PyResult<()> {
    // Mirror of `skip`: raise rather than return so Python callers can use
    // this like `pytest.fail(...)`; the runner intercepts `FailError`.
    let fail_signal = FailError::new_err(reason);
    Err(fail_signal)
}

/// Build a [`Param`] from positional values and an optional list of tags.
///
/// Omitted `tags` is treated as an empty tag list.
#[pyfunction]
#[pyo3(signature = (*values, tags = None))]
pub fn param(
    py: Python<'_>,
    values: Vec<Py<PyAny>>,
    tags: Option<Vec<Py<PyAny>>>,
) -> PyResult<Param> {
    let tags = tags.unwrap_or_default();
    Param::new(py, values, tags)
}
5 changes: 3 additions & 2 deletions crates/karva_test_semantic/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ mod context;
pub(crate) mod diagnostic;
pub(crate) mod discovery;
pub(crate) mod extensions;
mod py_attach;
mod python;
mod runner;
pub mod utils;
Expand All @@ -17,8 +18,8 @@ use karva_project::path::{TestPath, TestPathError};
use ruff_python_ast::PythonVersion;

use crate::discovery::StandardDiscoverer;
use crate::py_attach::attach_with_output;
use crate::runner::PackageRunner;
use crate::utils::attach_with_project;

/// Run tests given the system, settings, Python version, reporter, and test paths.
///
Expand All @@ -33,7 +34,7 @@ pub fn run_tests(
) -> TestRunResult {
let context = Context::new(cwd, settings, python_version, reporter);

attach_with_project(settings.terminal().show_python_output, |py| {
attach_with_output(settings.terminal().show_python_output, |py| {
let session = StandardDiscoverer::new(&context).discover_with_py(py, test_paths);

PackageRunner::new(&context).execute(py, &session);
Expand Down
71 changes: 71 additions & 0 deletions crates/karva_test_semantic/src/py_attach.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
//! Python interpreter attachment helpers.
//!
//! Wraps [`pyo3::Python::attach`] with first-time interpreter initialization
//! and optional suppression of `sys.stdout` / `sys.stderr` to `/dev/null`
//! for the duration of the callback.

use pyo3::prelude::*;

/// Initialize the Python interpreter (idempotent) and attach to it for the
/// duration of `f`, returning whatever `f` returns.
fn attach<F, R>(f: F) -> R
where
    F: for<'py> FnOnce(Python<'py>) -> R,
{
    // Must happen before `Python::attach`; repeated calls are no-ops, so
    // this helper is safe to invoke any number of times.
    Python::initialize();
    Python::attach(f)
}

/// Like [`attach`], but redirects Python's `sys.stdout` and `sys.stderr` to
/// `/dev/null` for the duration of `f` when `show_output` is `false`.
///
/// If `/dev/null` cannot be opened we fall back to unsuppressed output rather
/// than failing the test run.
pub fn attach_with_output<F, R>(show_output: bool, f: F) -> R
where
    F: for<'py> FnOnce(Python<'py>) -> R,
{
    attach(|py| {
        if show_output {
            f(py)
        } else if let Ok(null_file) = open_devnull(py) {
            // Best-effort suppression: redirection failures are ignored so a
            // broken `sys` module never aborts the run.
            let _ = redirect_stdio(py, &null_file);
            let outcome = f(py);
            let _ = flush_and_mute(py, &null_file);
            outcome
        } else {
            // No null sink available — run with output visible instead.
            f(py)
        }
    })
}

/// Open the platform null device (`os.devnull`) for writing, via Python's
/// `builtins.open`, i.e. the equivalent of `open(os.devnull, "w")`.
fn open_devnull(py: Python<'_>) -> PyResult<Bound<'_, PyAny>> {
    let devnull = py.import("os")?.getattr("devnull")?;
    let open = py.import("builtins")?.getattr("open")?;
    open.call1((devnull, "w"))
}

/// Point both `sys.stdout` and `sys.stderr` at `null_file`.
fn redirect_stdio<'py>(py: Python<'py>, null_file: &Bound<'py, PyAny>) -> PyResult<()> {
    let sys = py.import("sys")?;
    sys.setattr("stdout", null_file.clone())?;
    sys.setattr("stderr", null_file.clone())?;
    Ok(())
}

/// Flush whatever is currently on `sys.stdout`/`sys.stderr` and leave both
/// pointing at `null_file`. We don't restore the originals — the runner
/// doesn't emit to real stdout after the callback returns, and a test may
/// have swapped the streams itself.
///
/// Note: we must NOT unconditionally close the current stream. In the common
/// case it *is* `null_file` (installed by `redirect_stdio`), and closing it
/// before reinstalling it would leave `sys.stdout`/`sys.stderr` as a closed
/// file object, so any later write (e.g. from an `atexit` handler or
/// interpreter-shutdown warning) would raise `ValueError`.
fn flush_and_mute<'py>(py: Python<'py>, null_file: &Bound<'py, PyAny>) -> PyResult<()> {
    let sys = py.import("sys")?;
    for stream in ["stdout", "stderr"] {
        let current = sys.getattr(stream)?;
        if current.is(null_file) {
            // Our sink is still installed: flush pending writes, keep it open.
            current.call_method0("flush")?;
        } else {
            // A test replaced the stream: close it (which flushes it), then
            // reinstall the open null sink.
            current.call_method0("close")?;
            sys.setattr(stream, null_file.clone())?;
        }
    }
    Ok(())
}
3 changes: 1 addition & 2 deletions crates/karva_test_semantic/src/runner/fixture_resolver.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ use crate::extensions::fixtures::{
DiscoveredFixture, FixtureScope, HasFixtures, NormalizedFixture, RequiresFixtures,
get_auto_use_fixtures,
};
use crate::utils::iter_with_ancestors;

/// Resolves fixtures at runtime during test execution.
///
Expand Down Expand Up @@ -150,7 +149,7 @@ fn find_fixture<'a>(
return Some(fixture);
}

for (parent, _ancestors) in iter_with_ancestors(parents) {
for parent in parents {
if let Some(fixture) = parent.get_fixture(name)
&& current_fixture
.is_none_or(|current_fixture| current_fixture.name() != fixture.name())
Expand Down
Loading
Loading