Commit 271cb6ff authored by Cody Messick

Fixed a bug in the gstlal_inspiral offline dag cache naming scheme where dags with small template banks and short durations would crash in the data reduction stage.
parent bb2c24a8
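
The crash came from how per-node input cache files were named. Previously, generic_node always derived the cache file name from the cache entries themselves via group_T050017_filename_from_T050017_files; with a small template bank and a short analysis duration there are few enough distinct entries that, presumably, two nodes could derive the same grouped name and collide. This commit lets each call site pass an explicit input_cache_file_name derived from the node's own output file. A minimal sketch of the new naming scheme (the output_path and database name below are made-up examples, not values from the repository):

    import os

    # Hypothetical values for illustration only.
    output_path = "/path/to/output"
    db = "H1L1-ALL_LLOID-966384015-86400.sqlite"

    # New scheme from this commit: name the input cache after the node's own
    # output database, so every node gets a distinct, predictable cache file.
    cache_name = os.path.join(output_path, os.path.splitext(db)[0] + ".cache")
    print(cache_name)  # /path/to/output/H1L1-ALL_LLOID-966384015-86400.cache
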
@@ -460,7 +460,8 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcLikelihood
    sqlitenode = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = merge_nodes,
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_cache_files = {"input-cache":inputs},
-       output_files = {"database":db}
+       output_files = {"database":db},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(db)[0] + '.cache')
    )
    sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
        opts = {"sql-file":options.cluster_sql_file, "tmp-space":inspiral_pipe.condor_scratch_space()},
@@ -485,7 +486,8 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcLikelihood
    sqlitenode = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = merge_nodes,
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_cache_files = {"input-cache":inputs},
-       output_files = {"database":injdb}
+       output_files = {"database":injdb},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(injdb)[0] + '.cache')
    )
    sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
        opts = {"sql-file":options.injection_sql_file, "tmp-space":inspiral_pipe.condor_scratch_space()},
@@ -510,7 +512,8 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
    sqlitenode = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = innodes[None],
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_cache_files = {"input-cache": dbs},
-       output_files = {"database":noninjdb}
+       output_files = {"database":noninjdb},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(noninjdb)[0] + '.cache')
    )
    # cluster the final non injection database
@@ -526,7 +529,8 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_files = {"": (vetoes + [options.frame_segments_file])},
        input_cache_files = {"input-cache": [node.input_files[""] for node in chunk_nodes]},
-       output_files = {"database":noninjdb}
+       output_files = {"database":noninjdb},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(noninjdb)[0] + ".cache")
    )
    # cluster the final non injection database
@@ -554,7 +558,8 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
    sqlitenode = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = thisinjnodes,
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_cache_files = {"input-cache":dbs},
-       output_files = {"database":injdb}
+       output_files = {"database":injdb},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(injdb)[0] + '.cache')
    )
    # cluster
@@ -581,7 +586,8 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
        opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
        input_files = {"": (vetoes + [options.frame_segments_file, injections])},
        input_cache_files = {"input-cache": [node.input_files[""] for node in chunk_nodes]},
-       output_files = {"database":injdb}
+       output_files = {"database":injdb},
+       input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(injdb)[0] + ".cache")
    )
    # cluster
@@ -623,7 +629,8 @@ def compute_FAP(marginalizeJob, gstlalInspiralComputeFarFromSnrChisqHistogramsJo
    margout.append(inspiral_pipe.group_T050017_filename_from_T050017_files([CacheEntry.from_T050017("file://localhost%s" % os.path.abspath(filename)) for filename in margin[n:n+margnum]], '.xml.gz'))
    margnodes.append(inspiral_pipe.generic_node(marginalizeJob, dag, parent_nodes = final_sqlite_nodes + rankpdf_nodes,
        output_files = {"output":margout[-1]},
-       input_cache_files = {"likelihood-cache":margin[n:n+margnum]}
+       input_cache_files = {"likelihood-cache":margin[n:n+margnum]},
+       input_cache_file_name = os.path.join(marginalizeJob.output_path, os.path.splitext(margout[-1])[0] + '.cache')
    ))
    margnode = inspiral_pipe.generic_node(marginalizeJob, dag, parent_nodes = margnodes,
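
One detail worth noting in the hunk above: the margout entries are created with a '.xml.gz' extension, and os.path.splitext strips only the final extension, so the derived cache name keeps the '.xml' part. A quick check (the filename is a made-up example):

    import os

    # os.path.splitext removes only the last extension, so a '.xml.gz' output
    # yields a '<name>.xml.cache' cache file name. Filename is illustrative.
    margout = "H1L1-MARG_DIST_STATS-966384015-86400.xml.gz"
    print(os.path.splitext(margout)[0] + '.cache')
    # H1L1-MARG_DIST_STATS-966384015-86400.xml.cache
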
@@ -219,7 +219,7 @@ class generic_node(InspiralNode):
    an empty argument by setting it to "". However options set to None are simply
    ignored.
    """
-   def __init__(self, job, dag, parent_nodes, opts = {}, input_files = {}, output_files = {}, input_cache_files = {}, output_cache_files = {}):
+   def __init__(self, job, dag, parent_nodes, opts = {}, input_files = {}, output_files = {}, input_cache_files = {}, output_cache_files = {}, input_cache_file_name = None):
        InspiralNode.__init__(self, job, dag, parent_nodes)
        self.input_files = input_files.copy()
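
For context, the docstring fragment above describes how the opts dictionary becomes command-line options: a value of "" passes the option without an argument, while None skips the option entirely. A small sketch of that semantics (illustrative only; the helper name and exact formatting are assumptions, not library code):

    # Sketch of the opts semantics described in the docstring above.
    def format_opts(opts):
        args = []
        for opt, val in opts.items():
            if val is None:
                continue  # options set to None are simply ignored
            args.append("--%s" % opt)
            if val != "":
                args.append(str(val))  # "" yields the bare option
        return args

    print(format_opts({"replace": "", "tmp-space": "/tmp", "verbose": None}))
    # ['--replace', '--tmp-space', '/tmp']
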
@@ -250,7 +250,10 @@ class generic_node(InspiralNode):
        for opt, val in input_cache_files.items():
            cache_entries = [lal.CacheEntry.from_T050017("file://localhost%s" % os.path.abspath(filename)) for filename in val]
-           cache_file_name = group_T050017_filename_from_T050017_files(cache_entries, '.cache', path = job.tag_base)
+           if input_cache_file_name is None:
+               cache_file_name = group_T050017_filename_from_T050017_files(cache_entries, '.cache', path = job.tag_base)
+           else:
+               cache_file_name = input_cache_file_name
            with open(cache_file_name, "w") as cache_file:
                lal.Cache(cache_entries).tofile(cache_file)
            self.add_var_opt(opt, cache_file_name)
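
With the new keyword in place, call sites can either keep the old default (a grouped T050017 name) or pin the cache name explicitly. A usage sketch mirroring the call sites above; the surrounding objects (toSqliteJob, dag, merge_nodes, inputs, db) are the ones from this diff, and only the behavior of the new keyword is illustrated:

    # Default: cache name grouped from the entries, as before this commit.
    node = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = merge_nodes,
        input_cache_files = {"input-cache": inputs},
        output_files = {"database": db})

    # Explicit: pin the cache name to the output database name, avoiding the
    # collisions that crashed small-bank, short-duration dags.
    node = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = merge_nodes,
        input_cache_files = {"input-cache": inputs},
        output_files = {"database": db},
        input_cache_file_name = os.path.join(toSqliteJob.output_path, os.path.splitext(db)[0] + ".cache"))
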