Pytest enhancements

Merged Kevin Kuns requested to merge kevin.kuns/pygwinc:pytest-enhancements into master
1 file  +88  −0
@@ -40,6 +40,14 @@ def pytest_addoption(parser):
        help="Do not preclear tpaths",
    )
    parser.addoption(
        "--generate",
        action="store_true",
        default=False,
        dest="generate",
        help="Generate test data",
    )


@pytest.fixture
def plot(request):
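
The new flag pairs with the generate marker handled by pytest_collection_modifyitems further down. A minimal sketch of how a data-generating test might be written and invoked (the test name and body are hypothetical, not part of this diff):

import pytest

@pytest.mark.generate
def test_generate_reference_data():
    # hypothetical body: run the budgets and save the resulting
    # traces to disk as reference data
    ...

# generate the reference data (all other tests are skipped):
#   pytest --generate
# run the normal suite (generating tests are skipped):
#   pytest
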
@@ -270,3 +278,83 @@ def relfile_test(_file_, request, pre = None, post = None, fname = None):
    else:
        return relfile(_file_, testname, fname = fname)
@pytest.fixture
def compare_noise(pprint):
    """Fixture to compare two sets of noise traces.

    Lists of the noises that passed, failed, and were skipped are
    printed. A comparison is skipped if the reference PSD is
    sufficiently small (controlled by psd_tol), indicating that the
    noise is essentially zero, or if a trace is missing. An assertion
    error is raised if any noise fails.
    """
    import numpy as np

    def compare(traces, ref_traces, psd_tol=1e-52):
        passed = []
        failed = []
        skipped = []
        if ref_traces.budget:
            for ref_trace in ref_traces:
                if np.all(ref_trace.psd < psd_tol):
                    skipped.append(ref_trace.name)
                    continue
                try:
                    trace = traces[ref_trace.name]
                except KeyError:
                    skipped.append(ref_trace.name)
                    continue
                if np.allclose(trace.psd, ref_trace.psd, atol=0):
                    passed.append(trace.name)
                else:
                    failed.append(trace.name)
        else:
            if np.allclose(ref_traces.psd, traces.psd, atol=0):
                passed.append(traces.name)
            else:
                failed.append(traces.name)
        pprint('Noises failed:')
        pprint(40 * '-')
        for noise in failed:
            pprint(noise)
        pprint(40 * '+')
        pprint('Noises passed:')
        pprint(40 * '-')
        for noise in passed:
            pprint(noise)
        pprint(40 * '+')
        pprint('Noises skipped:')
        pprint(40 * '-')
        for noise in skipped:
            pprint(noise)
        assert len(failed) == 0

    return compare
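
A sketch of how a test might use this fixture; the test name, budget choice, reference path, and the gwinc.io.load_hdf5 loader call are illustrative assumptions, not part of this diff:

import gwinc
import gwinc.io


def test_noise_budget(compare_noise):
    # hypothetical budget, for illustration only
    budget = gwinc.load_budget('Aplus')
    traces = budget.run()
    # the reference file would have been produced by a --generate run;
    # the loader and path here are assumptions
    ref_traces = gwinc.io.load_hdf5('ref_traces.h5')
    compare_noise(traces, ref_traces)

Note that with atol=0 the np.allclose check is purely relative, which is why near-zero reference PSDs are skipped via psd_tol rather than compared directly.
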
def pytest_collection_modifyitems(config, items):
    """Modifies tests to be selectively skipped with command line options.

    https://docs.pytest.org/en/latest/example/simple.html#control-skipping-of-tests-according-to-command-line-option
    """
    # run tests marked as generate if and only if --generate is given;
    # skip all other tests in that case
    if config.getoption('--generate'):
        skip = pytest.mark.skip(
            reason='only running tests that generate data')
        for item in items:
            if 'generate' not in item.keywords:
                item.add_marker(skip)
    else:
        skip = pytest.mark.skip(
            reason='generates test data: needs --generate to run')
        for item in items:
            if 'generate' in item.keywords:
                item.add_marker(skip)
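
Since recent pytest versions warn about unregistered marks (and reject them under --strict-markers), the generate marker would normally be registered as well. One way to do that is sketched below; whether pygwinc already registers it is not shown in this diff:

def pytest_configure(config):
    # register the 'generate' marker so pytest does not warn about it
    config.addinivalue_line(
        'markers', 'generate: marks tests that generate reference test data')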