tests/wheel jobs failing in inference tests with the most recent version of pytest
See, for example, https://git.ligo.org/emfollow/gwcelery/-/jobs/3573146. The relevant part of the pytest log:
=================================== FAILURES ===================================
_________________________ test_start_pe[lalinference] __________________________
monkeypatch = <_pytest.monkeypatch.MonkeyPatch object at 0x7fb0c8493950>
tmp_path = PosixPath('/tmp/pytest-of-root/pytest-0/test_start_pe_lalinference_0')
pipeline = 'lalinference'
    @pytest.mark.parametrize(
        'pipeline', ['lalinference', 'bilby', 'rapidpe'])
    def test_start_pe(monkeypatch, tmp_path, pipeline):
        path_to_sub = 'pe.dag.condor.sub'

        @app.task
        def mock_task():
            return path_to_sub

        def mock_condor_submit(path):
            assert path == path_to_sub

        if pipeline == 'rapidpe':
            event_pipeline_info = {
                'gstlal': {'sid': 'S1234', 'graceid': 'G1234'},
                'pycbc': {'sid': 'S1235', 'graceid': 'G1235'}}
            for event_pipeline in event_pipeline_info:
                dag_prepare_task = Mock(return_value=mock_task.s())
                submit_rapidpe = Mock(side_effect=mock_condor_submit)
                dag_finished = Mock()
                monkeypatch.setattr('gwcelery.tasks.gracedb.upload.run', Mock())
                monkeypatch.setattr('os.path.expanduser', Mock(
                    return_value=str(tmp_path)))
                monkeypatch.setattr('gwcelery.tasks.inference.dag_prepare_task',
                                    dag_prepare_task)
                monkeypatch.setattr(
                    'gwcelery.tasks.inference.submit_rapidpe.run',
                    submit_rapidpe)
                monkeypatch.setattr(
                    'gwcelery.tasks.inference.dag_finished.run',
                    dag_finished)
                inference.start_pe({
                    'graceid': event_pipeline_info[event_pipeline]['graceid'],
                    'pipeline': event_pipeline,
                    'extra_attributes': {'CoincInspiral': {'snr': 10}}},
                    event_pipeline_info[event_pipeline]['sid'], pipeline)
                dag_prepare_task.assert_called_once()
                submit_rapidpe.assert_called_once()
                dag_finished.assert_called_once()
        else:
            dag_prepare_task = Mock(return_value=mock_task.si(),
                                    name="dag_prepare_task")
            condor_submit = Mock(side_effect=mock_condor_submit,
                                 name="condor_submit")
            dag_finished = Mock(name="dag_finished")
            monkeypatch.setattr('gwcelery.tasks.gracedb.upload.run', Mock())
            monkeypatch.setattr('distutils.dir_util.mkpath',
                                Mock(return_value=str(tmp_path)))
            monkeypatch.setattr('gwcelery.tasks.inference.dag_prepare_task',
                                dag_prepare_task)
            monkeypatch.setattr('gwcelery.tasks.condor.submit.run', condor_submit)
            monkeypatch.setattr(
                'gwcelery.tasks.inference.dag_finished.run',
                dag_finished)
>           inference.start_pe({'graceid': 'G1234'}, 'S1234', pipeline)

gwcelery/tests/test_tasks_inference.py:672:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.11/site-packages/celery/local.py:182: in __call__
    return self._get_current_object()(*a, **kw)
/usr/local/lib/python3.11/site-packages/celery/app/task.py:411: in __call__
    return self.run(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
event = {'graceid': 'G1234'}, superevent_id = 'S1234'
pe_pipeline = 'lalinference'
    @app.task(ignore_result=True, shared=False)
    def start_pe(event, superevent_id, pe_pipeline):
        """Run Parameter Estimation on a given event.

        Parameters
        ----------
        event : dict
            The json contents of a target G event retrieved from
            gracedb.get_event(), whose mass and spin information are used to
            determine analysis settings.
        superevent_id : str
            The GraceDB ID of a target superevent
        pe_pipeline : str
            The parameter estimation pipeline used,
            lalinference, bilby, or rapidpe.

        """
        # make an event directory
        pipeline_dir = os.path.expanduser('~/.cache/{}'.format(pe_pipeline))
        mkpath(pipeline_dir)
        event_dir = os.path.join(pipeline_dir, superevent_id)

        if pe_pipeline == 'bilby':
            if (
                app.conf['gracedb_host'] == 'gracedb-playground.ligo.org' and
                event['extra_attributes']['CoincInspiral']['mchirp'] >= 12
            ):
                # Count the number of BBH jobs and do not start a run if it exceeds
                # 5 so that we do not use up disk space. We assume that the job is
                # running if a data dump pickle file exists under the run
                # directory, which is the largest file produced by PE and removed
                # when the run completes.
                number_of_bbh_running = 0
                for p in glob.glob(
                    os.path.join(
                        pipeline_dir,
                        "*/*/data/*_generation_data_dump.pickle"
                    )
                ):
                    path_to_ev = os.path.join(os.path.dirname(p), "../event.json")
                    if os.path.exists(path_to_ev):
                        with open(path_to_ev, "r") as f:
                            ev = json.load(f)
                        mc = ev['extra_attributes']['CoincInspiral']['mchirp']
                        if mc >= 12:
                            number_of_bbh_running += 1
                if number_of_bbh_running > 5:
                    gracedb.upload.delay(
                        filecontents=None, filename=None, graceid=superevent_id,
                        message='Parameter estimation will not start to save disk '
                                f'space (There are {number_of_bbh_running} BBH '
                                'jobs running).',
                        tags='pe'
                    )
                    return
            modes = ["production"]
            rundirs = [os.path.join(event_dir, m) for m in modes]
            kwargs_list = [{'bilby_mode': m} for m in modes]
            analyses = [f'{m}-mode bilby' for m in modes]
            condor_submit_task = condor.submit
        elif pe_pipeline == 'rapidpe':
            rundirs = [event_dir]
            kwargs_list = [{'event_pipeline': event["pipeline"]}]
            analyses = [pe_pipeline]
            condor_submit_task = submit_rapidpe
        else:
            rundirs = [event_dir]
            kwargs_list = [{}]
            analyses = [pe_pipeline]
            condor_submit_task = condor.submit

>       os.mkdir(event_dir)
E       FileNotFoundError: [Errno 2] No such file or directory: '/root/.cache/lalinference/S1234'

gwcelery/tasks/inference.py:989: FileNotFoundError
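
The traceback makes the immediate failure mode visible: in the lalinference/bilby branch, the test replaces distutils.dir_util.mkpath with a Mock that returns str(tmp_path) but creates no directory, and, unlike the rapidpe branch, it leaves os.path.expanduser unpatched. start_pe therefore ends up calling os.mkdir on /root/.cache/lalinference/S1234 while the parent directory was never created. A possible workaround, sketched here under the assumption that redirecting the cache directory into tmp_path (exactly as the rapidpe branch already does) is acceptable, is to patch os.path.expanduser in this branch too:

    # Sketch only, not a verified fix: keep every path under tmp_path so the
    # test no longer depends on mkpath actually creating ~/.cache/<pipeline>.
    monkeypatch.setattr('os.path.expanduser',
                        Mock(return_value=str(tmp_path)))

With this in place, pipeline_dir resolves to tmp_path, which the fixture has already created, and os.mkdir(event_dir) simply creates tmp_path/S1234 instead of touching /root/.cache.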