Commit 0c533eff authored by Cody Messick

inspiral_pipe module now automatically puts cache files into cache directory

parent 140df87a
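
This commit moves cache-path construction out of the individual pipeline call sites and into the generic_node class: callers now pass only a basename for input_cache_file_name, and the node joins it onto its cache directory before writing the cache file. A minimal sketch of the resulting behavior, with a standalone helper for illustration (resolve_cache_path and its default name are hypothetical, not part of the actual inspiral_pipe API):

    import os

    def resolve_cache_path(cache_dir, input_cache_file_name=None, default_basename="node.cache"):
        # Mirror the generic_node logic from this commit: a caller-supplied
        # name is treated as a basename and placed under cache_dir; with no
        # name, fall back to an auto-generated one (hypothetical default here).
        if input_cache_file_name is None:
            return os.path.join(cache_dir, default_basename)
        return os.path.join(cache_dir, input_cache_file_name)

    # Call sites now pass only a basename, e.g. a T050017-style cache name:
    print(resolve_cache_path("cache", "H1L1-0001_MARG_DIST_STATS-0-100.cache"))
    # -> cache/H1L1-0001_MARG_DIST_STATS-0-100.cache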
@@ -457,7 +457,7 @@ def inspiral_node_gen(gstlalInspiralJob, gstlalInspiralInjJob, dag, svd_nodes, s
"injections": injections
},
input_cache_files = {"svd-bank-cache":svd_names},
- input_cache_file_name = inspiral_pipe.group_T050017_filename_from_T050017_files([CacheEntry.from_T050017(filename) for filename in svd_names], '.cache', path = os.path.join(gstlalInspiralInjJob.tag_base, 'cache')).replace('SVD', 'SVD_%s' % sim_name),
+ input_cache_file_name = inspiral_pipe.group_T050017_filename_from_T050017_files([CacheEntry.from_T050017(filename) for filename in svd_names], '.cache').replace('SVD', 'SVD_%s' % sim_name),
output_cache_files = {
"output-cache":output_names,
"likelihood-file-cache":dist_stat_names
@@ -545,7 +545,7 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcLikelihood
parent_nodes = [priornode] + parents,
input_cache_files = {"likelihood-cache":diststats + [priornode.output_files["write-likelihood"]]},
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = marginalizeJob.output_path)},
- input_cache_file_name = inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.cache', path = os.path.join(marginalizeJob.output_path, 'cache'))
+ input_cache_file_name = inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.cache')
)
calcranknode = inspiral_pipe.generic_node(calcRankPDFsJob, dag,
@@ -619,7 +619,7 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcLikelihood
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":inputs},
output_files = {"database":db},
- input_cache_file_name = db.replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(db).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
opts = {"sql-file":options.cluster_sql_file, "tmp-space":inspiral_pipe.condor_scratch_space()},
@@ -646,7 +646,7 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcLikelihood
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":inputs},
output_files = {"database":injdb},
- input_cache_file_name = injdb.replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(injdb).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
opts = {"sql-file":options.injection_sql_file, "tmp-space":inspiral_pipe.condor_scratch_space()},
@@ -683,7 +683,7 @@ def merge_in_bin(dag, toSqliteJob, lalappsRunSqliteJob, options):
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":dbs},
output_files = {"database":noninjdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), os.path.basename(noninjdb)).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(noninjdb).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
@@ -704,7 +704,7 @@ def merge_in_bin(dag, toSqliteJob, lalappsRunSqliteJob, options):
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":dbs},
output_files = {"database":injdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), os.path.basename(injdb)).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(injdb).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
@@ -730,7 +730,7 @@ def merge_in_bin(dag, toSqliteJob, lalappsRunSqliteJob, options):
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":[ce.path for ce in ce_list]},
output_files = {"database":noninjdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), os.path.basename(noninjdb)).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(noninjdb).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
@@ -750,7 +750,7 @@ def merge_in_bin(dag, toSqliteJob, lalappsRunSqliteJob, options):
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":[ce.path for ce in ce_list]},
output_files = {"database":injdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), os.path.basename(injdb)).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(injdb).replace('.sqlite','.cache')
)
sqlitenode = inspiral_pipe.generic_node(lalappsRunSqliteJob, dag, parent_nodes = [sqlitenode],
@@ -791,7 +791,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache": dbs},
output_files = {"database":noninjdb},
- input_cache_file_name = noninjdb.replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(noninjdb).replace('.sqlite','.cache')
)
# cluster the final non injection database
@@ -816,7 +816,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
input_files = {"": (vetoes + [options.frame_segments_file])},
input_cache_files = {"input-cache": [node.input_files[""] for node in chunk_nodes]},
output_files = {"database":noninjdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), noninjdb).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(noninjdb).replace('.sqlite','.cache')
)
# cluster the final non injection database
@@ -855,7 +855,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_cache_files = {"input-cache":dbs},
output_files = {"database":injdb},
- input_cache_file_name = os.path.join(os.path.join(os.path.dirname(os.path.dirname(injdb)), 'cache'), os.path.basename(injdb)).replace('.sqlite','.cache')
+ input_cache_file_name = os.path.basename(injdb).replace('.sqlite','.cache')
)
# cluster
@@ -888,7 +888,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
input_files = {"": (vetoes + [options.frame_segments_file, injections])},
input_cache_files = {"input-cache": [node.input_files[""] for node in chunk_nodes]},
output_files = {"database":injdb},
- input_cache_file_name = os.path.join(os.path.join(toSqliteJob.output_path, 'cache'), injdb.replace('.sqlite','.cache'))
+ input_cache_file_name = injdb.replace('.sqlite','.cache')
)
# cluster
@@ -937,7 +937,7 @@ def compute_FAP(marginalizeJob, gstlalInspiralComputeFarFromSnrChisqHistogramsJo
margnodes.append(inspiral_pipe.generic_node(marginalizeJob, dag, parent_nodes = parents,
output_files = {"output":margout[-1]},
input_cache_files = {"likelihood-cache":margin[n:n+margnum]},
- input_cache_file_name = os.path.join(os.path.join(os.path.dirname(margout[-1]), 'cache'), os.path.basename(margout[-1])).replace('.xml.gz','.cache')
+ input_cache_file_name = os.path.basename(margout[-1]).replace('.xml.gz','.cache')
))
if options.marginalized_likelihood_file:
@@ -949,7 +949,7 @@ def compute_FAP(marginalizeJob, gstlalInspiralComputeFarFromSnrChisqHistogramsJo
margnode = inspiral_pipe.generic_node(marginalizeJob, dag, parent_nodes = margnodes,
output_files = {"output":"marginalized_likelihood.xml.gz"},
input_cache_files = {"likelihood-cache":margout},
- input_cache_file_name = os.path.join(marginalizeJob.output_path, "cache/marginalized_likelihood.cache")
+ input_cache_file_name = "marginalized_likelihood.cache"
)
parents = [margnode]
marginalized_likelihood_file = margnode.output_files["output"]
......
@@ -258,7 +258,7 @@ class generic_node(InspiralNode):
if input_cache_file_name is None:
cache_file_name = group_T050017_filename_from_T050017_files(cache_entries, '.cache', path = cache_dir)
else:
- cache_file_name = input_cache_file_name
+ cache_file_name = os.path.join(cache_dir, input_cache_file_name)
with open(cache_file_name, "w") as cache_file:
lal.Cache(cache_entries).tofile(cache_file)
self.add_var_opt(opt, cache_file_name)
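
With the join centralized in generic_node, each call site above reduces to stripping the directory and swapping the extension. A hedged before/after sketch (the db path below is hypothetical):

    import os

    db = "/path/to/H1L1-ALL_LLOID-0-100.sqlite"  # hypothetical database path
    # Before this commit, the caller built the full cache path itself:
    #   os.path.join(toSqliteJob.output_path, 'cache', os.path.basename(db)).replace('.sqlite', '.cache')
    # After, it passes only the basename and generic_node prepends cache_dir:
    input_cache_file_name = os.path.basename(db).replace('.sqlite', '.cache')
    print(input_cache_file_name)  # -> H1L1-ALL_LLOID-0-100.cache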
......