Commit 9d6b6f2c authored by Kipp Cannon's avatar Kipp Cannon

inspiral_pipe.T050017_filename():

- move segment boundary logic from calling codes into this function
parent f77c45c7
......@@ -235,7 +235,7 @@ with open(options.output_cache, "w") as output_cache_file:
row.mtotal = row.mass1 + row.mass2
sngl_inspiral_table[:] = rows
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_GSTLAL_SPLIT_BANK" % bank_count, 0, 0, ".xml.gz", path = options.output_path)
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_GSTLAL_SPLIT_BANK" % bank_count, (0, 0), ".xml.gz", path = options.output_path)
if not options.write_svd_caches:
output_cache_file.write("%s\n" % CacheEntry.from_T050017("file://localhost%s" % os.path.abspath(output)))
else:
......@@ -251,7 +251,7 @@ with open(options.output_cache, "w") as output_cache_file:
pass
for svd_cache_group in inspiral_pipe.group(svd_caches, options.num_banks):
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_%04d_GSTLAL_SPLIT_BANK" % (svd_cache_group[0][0], svd_cache_group[-1][0]), 0, 0, ".cache", path = cache_path)
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_%04d_GSTLAL_SPLIT_BANK" % (svd_cache_group[0][0], svd_cache_group[-1][0]), (0, 0), ".cache", path = cache_path)
with open(output, 'w') as svd_cache:
for bank_count, split_bank_cache_entry in svd_cache_group:
svd_cache.write(split_bank_cache_entry)
......
......@@ -191,7 +191,7 @@ def psd_node_gen(refPSDJob, dag, parent_nodes, segsdict, channel_dict, options):
"frame-segments-name": options.frame_segments_name},
input_files = { "frame-cache":options.frame_cache,
"frame-segments-file":options.frame_segments_file},
output_files = {"write-psd":inspiral_pipe.T050017_filename(ifos, "REFERENCE_PSD", int(seg[0]), int(seg[1]), '.xml.gz', path = subdir_path([refPSDJob.output_path, str(int(seg[0]))[:5]]))}
output_files = {"write-psd":inspiral_pipe.T050017_filename(ifos, "REFERENCE_PSD", seg, '.xml.gz', path = subdir_path([refPSDJob.output_path, str(int(seg[0]))[:5]]))}
)
return psd_nodes
......@@ -232,7 +232,7 @@ def svd_node_gen(svdJob, dag, parent_nodes, psd, bank_cache, options, seg, templ
if f in template_mchirp_dict:
mchirp_interval = (min(mchirp_interval[0], template_mchirp_dict[f][0]), max(mchirp_interval[1], template_mchirp_dict[f][1]))
svd_bank_name = inspiral_pipe.T050017_filename(ifo, '%04d_SVD' % (i+bin_offset,), int(seg[0]), int(seg[1]), '.xml.gz', path = svdJob.output_path)
svd_bank_name = inspiral_pipe.T050017_filename(ifo, '%04d_SVD' % (i+bin_offset,), seg, '.xml.gz', path = svdJob.output_path)
if '%04d' % (i+bin_offset,) not in new_template_mchirp_dict and mchirp_interval != (float("inf"), 0):
new_template_mchirp_dict['%04d' % (i+bin_offset,)] = mchirp_interval
......@@ -355,8 +355,8 @@ def inspiral_node_gen(gstlalInspiralJob, gstlalInspiralInjJob, dag, svd_nodes, s
bgbin_indices = ['%04d' % (i + numchunks * chunk_counter,) for i,s in enumerate(svd_bank_strings)]
# setup output names
output_paths = [subdir_path([output_seg_path, bgbin_indices[i]]) for i, s in enumerate(svd_bank_strings)]
output_names = [inspiral_pipe.T050017_filename(ifos, '%s_LLOID' % (bgbin_indices[i],), int(seg[0]), int(seg[1]), '.xml.gz', path = output_paths[i]) for i, s in enumerate(svd_bank_strings)]
dist_stat_names = [inspiral_pipe.T050017_filename(ifos, '%s_DIST_STATS' % (bgbin_indices[i],), int(seg[0]), int(seg[1]), '.xml.gz', path = output_paths[i]) for i,s in enumerate(svd_bank_strings)]
output_names = [inspiral_pipe.T050017_filename(ifos, '%s_LLOID' % (bgbin_indices[i],), seg, '.xml.gz', path = output_paths[i]) for i, s in enumerate(svd_bank_strings)]
dist_stat_names = [inspiral_pipe.T050017_filename(ifos, '%s_DIST_STATS' % (bgbin_indices[i],), seg, '.xml.gz', path = output_paths[i]) for i,s in enumerate(svd_bank_strings)]
for bgbin in bgbin_indices:
bgbin_chunk_map.setdefault(bgbin, chunk_counter)
......@@ -425,8 +425,8 @@ def inspiral_node_gen(gstlalInspiralJob, gstlalInspiralInjJob, dag, svd_nodes, s
for chunk_counter, bgbin_list in enumerate(chunks(bgbin_svd_bank_strings, numchunks)):
bgbin_indices, svd_bank_strings = zip(*bgbin_list)
output_paths = [subdir_path([output_seg_inj_path, bgbin_index]) for bgbin_index in bgbin_indices]
output_names = [inspiral_pipe.T050017_filename(ifos, '%s_LLOID_%s' % (bgbin_index, sim_name), int(seg[0]), int(seg[1]), '.xml.gz', path = output_paths[i]) for i, bgbin_index in enumerate(bgbin_indices)]
dist_stat_names = [inspiral_pipe.T050017_filename(ifos, '%s_DIST_STATS_%s' % (bgbin_index, sim_name), int(seg[0]), int(seg[1]), '.xml.gz', path = output_paths[i]) for i, bgbin_index in enumerate(bgbin_indices)]
output_names = [inspiral_pipe.T050017_filename(ifos, '%s_LLOID_%s' % (bgbin_index, sim_name), seg, '.xml.gz', path = output_paths[i]) for i, bgbin_index in enumerate(bgbin_indices)]
dist_stat_names = [inspiral_pipe.T050017_filename(ifos, '%s_DIST_STATS_%s' % (bgbin_index, sim_name), seg, '.xml.gz', path = output_paths[i]) for i, bgbin_index in enumerate(bgbin_indices)]
svd_names = [s for i, s in enumerate(svd_bank_cache_maker(svd_bank_strings, injection = True))]
try:
reference_psd = psd_nodes[(ifos, seg)].output_files["write-psd"]
......@@ -553,7 +553,7 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcRankPDFsWi
priornode = inspiral_pipe.generic_node(createPriorDistStatsJob, dag,
parent_nodes = [],
opts = {"instrument":instrument_set, "synthesize-injection-count":10000000, "background-prior":1, "min-instruments":options.min_instruments},
output_files = {"write-likelihood":inspiral_pipe.T050017_filename(instruments, '%04d_CREATE_PRIOR_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = createPriorDistStatsJob.output_path)}
output_files = {"write-likelihood":inspiral_pipe.T050017_filename(instruments, '%04d_CREATE_PRIOR_DIST_STATS' % (n,), boundary_seg, '.xml.gz', path = createPriorDistStatsJob.output_path)}
)
# Create a file that has the priors *and* all of the diststats
# for a given bin marginalized over time. This is all that will
......@@ -561,21 +561,21 @@ def rank_and_merge(dag, createPriorDistStatsJob, calcRankPDFsJob, calcRankPDFsWi
diststats_per_bin_node = inspiral_pipe.generic_node(marginalizeJob, dag,
parent_nodes = [priornode] + parents,
input_cache_files = {"likelihood-cache":diststats + [priornode.output_files["write-likelihood"]]},
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = marginalizeJob.output_path)},
input_cache_file_name = inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.cache')
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), boundary_seg, '.xml.gz', path = marginalizeJob.output_path)},
input_cache_file_name = inspiral_pipe.T050017_filename(instruments, '%04d_MARG_DIST_STATS' % (n,), boundary_seg, '.cache')
)
calcranknode = inspiral_pipe.generic_node(calcRankPDFsJob, dag,
parent_nodes = [diststats_per_bin_node],
input_files = {"":diststats_per_bin_node.output_files["output"]},
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_CALC_RANK_PDFS' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = calcRankPDFsJob.output_path)}
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_CALC_RANK_PDFS' % (n,), boundary_seg, '.xml.gz', path = calcRankPDFsJob.output_path)}
)
calcrankzerolagnode = inspiral_pipe.generic_node(calcRankPDFsWithZerolagJob, dag,
parent_nodes = [diststats_per_bin_node],
opts = {"add-zerolag-to-background":""},
input_files = {"":diststats_per_bin_node.output_files["output"]},
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_CALC_RANK_PDFS_WZL' % (n,), int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = calcRankPDFsWithZerolagJob.output_path)}
output_files = {"output":inspiral_pipe.T050017_filename(instruments, '%04d_CALC_RANK_PDFS_WZL' % (n,), boundary_seg, '.xml.gz', path = calcRankPDFsWithZerolagJob.output_path)}
)
margnodes['%04d' %(n,)] = diststats_per_bin_node
......@@ -835,7 +835,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
noninjdb = options.non_injection_db
else:
noninjdb = inspiral_pipe.T050017_filename(instruments, 'ALL_LLOID', int(boundary_seg[0]), int(boundary_seg[1]), '.sqlite')
noninjdb = inspiral_pipe.T050017_filename(instruments, 'ALL_LLOID', boundary_seg, '.sqlite')
sqlitenode = inspiral_pipe.generic_node(toSqliteJob, dag, parent_nodes = chunk_nodes,
opts = {"replace":"", "tmp-space":inspiral_pipe.condor_scratch_space()},
input_files = {"": (vetoes + [options.frame_segments_file])},
......@@ -898,7 +898,7 @@ def finalize_runs(dag, lalappsRunSqliteJob, toXMLJob, ligolwInspinjFindJob, toSq
# Setup the final output names, etc.
injdb = inspiral_pipe.T050017_filename(instruments, 'ALL_LLOID_%s' % sim_tag_from_inj_file(injections), int(boundary_seg[0]), int(boundary_seg[1]), '.sqlite')
injdb = inspiral_pipe.T050017_filename(instruments, 'ALL_LLOID_%s' % sim_tag_from_inj_file(injections), boundary_seg, '.sqlite')
injdbs.append(injdb)
injxml = injdb.replace('.sqlite','.xml.gz')
......@@ -1332,7 +1332,7 @@ elif options.reference_psd is None:
inspiral_pipe.generic_node(horizonJob, dag,
parent_nodes = psd_nodes.values(),
input_files = {"":[node.output_files["write-psd"] for node in psd_nodes.values()]},
output_files = {"":inspiral_pipe.T050017_filename(instruments, "HORIZON", int(boundary_seg[0]), int(boundary_seg[1]), '.png', path = output_dir)}
output_files = {"":inspiral_pipe.T050017_filename(instruments, "HORIZON", boundary_seg, '.png', path = output_dir)}
)
#
......@@ -1343,7 +1343,7 @@ elif options.reference_psd is None:
inspiral_pipe.generic_node(medianPSDJob, dag,
parent_nodes = psd_nodes.values(),
input_files = {"":[node.output_files["write-psd"] for node in psd_nodes.values()]},
output_files = {"output-name": inspiral_pipe.T050017_filename(instruments, "REFERENCE_PSD", int(boundary_seg[0]), int(boundary_seg[1]), '.xml.gz', path = subdir_path([medianPSDJob.output_path, str(int(boundary_seg[0]))[:5]]))}
output_files = {"output-name": inspiral_pipe.T050017_filename(instruments, "REFERENCE_PSD", boundary_seg, '.xml.gz', path = subdir_path([medianPSDJob.output_path, str(int(boundary_seg[0]))[:5]]))}
)
ref_psd = median_psd_node.output_files["output-name"]
......
......@@ -286,7 +286,7 @@ for instrument in coinc_params_distributions.instruments:
fig = plotfar.plot_snr_chi_pdf(coinc_params_distributions, instrument, snr_chi_type, options.max_snr, sngls = sngls)
if fig is None:
continue
plotname = inspiral_pipe.T050017_filename(instrument, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%s_SNRCHI2" % (options.user_tag, snr_chi_type.upper()), int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename(instrument, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%s_SNRCHI2" % (options.user_tag, snr_chi_type.upper()), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -294,7 +294,7 @@ for instrument in coinc_params_distributions.instruments:
# Trigger and event rates
fig = plotfar.plot_rates(coinc_params_distributions)
plotname = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_RATES" % options.user_tag, int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_RATES" % options.user_tag, seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -308,7 +308,7 @@ if options.plot_snr_snr_pdfs:
horizon_distances = coinc_params_distributions.SNRPDF.quantized_horizon_distances(horizon_distances)
fig = plotfar.plot_snr_joint_pdf(coinc_params_distributions.SNRPDF, instruments, horizon_distances, coinc_params_distributions.min_instruments, options.max_snr, sngls = sngls)
if fig is not None:
plotname = inspiral_pipe.T050017_filename(instruments, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_SNR_PDF_%s" % (options.user_tag, "_".join(["%s_%s" % (k, horizon_distances[k]) for k in sorted(horizon_distances)]) ), int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename(instruments, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_SNR_PDF_%s" % (options.user_tag, "_".join(["%s_%s" % (k, horizon_distances[k]) for k in sorted(horizon_distances)]) ), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -318,7 +318,7 @@ if options.plot_snr_snr_pdfs:
if ranking_data is not None:
for instruments, binnedarray in ranking_data.background_likelihood_pdfs.items():
fig = plotfar.plot_likelihood_ratio_pdf(ranking_data, instruments, (options.min_log_lambda, options.max_log_lambda), "Noise", binnedarray_string = "background_likelihood_pdfs")
plotname = inspiral_pipe.T050017_filename(instruments or "COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_PDF" % options.user_tag, int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename(instruments or "COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_PDF" % options.user_tag, seglists.extent_all, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -330,13 +330,13 @@ if ranking_data is not None:
else:
xhi = options.max_log_lambda
fig = plotfar.plot_likelihood_ratio_ccdf(fapfar, (options.min_log_lambda, xhi), observed_ln_likelihood_ratios = zerolag_ln_likelihood_ratios, is_open_box = True)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_openbox" % options.user_tag, int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_openbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
fig = plotfar.plot_likelihood_ratio_ccdf(fapfar, (options.min_log_lambda, xhi), observed_ln_likelihood_ratios = background_ln_likelihood_ratios, is_open_box = False)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_closedbox" % options.user_tag, int(seglists.extent_all()[0]), int(seglists.extent_all()[1]), options.output_format, path = options.output_dir)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_closedbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -351,7 +351,7 @@ lsctables.use_in(LIGOLWContentHandler)
options, filenames = parse_command_line()
filename_template = inspiral_pipe.T050017_filename('H1L1V1', 'GSTLAL_INSPIRAL_PLOTBANKS_%s', 0, 0, '.png', path = options.output_dir)
filename_template = inspiral_pipe.T050017_filename('H1L1V1', 'GSTLAL_INSPIRAL_PLOTBANKS_%s', (0, 0), '.png', path = options.output_dir)
# Make svd bank plots
if options.plot_svd_bank:
......
......@@ -709,19 +709,19 @@ for bin_type in opts.bin_types:
fig_eff.tight_layout(pad = .8)
# save and close figures
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), UL.start_time, UL.end_time, ".png", path = opts.output_dir)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_far.savefig(tag)
pyplot.close(fig_far)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_RANGE_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), UL.start_time, UL.end_time, ".png", path = opts.output_dir)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_RANGE_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_far_range.savefig(tag)
pyplot.close(fig_far_range)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_SNR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), UL.start_time, UL.end_time, ".png", path = opts.output_dir)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_SNR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_snr.savefig(tag)
pyplot.close(fig_snr)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_EFFICIENCY_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), UL.start_time, UL.end_time, ".png", path = opts.output_dir)
tag = inspiral_pipe.T050017_filename(instr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_EFFICIENCY_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_eff.savefig(tag)
pyplot.close(fig_eff)
......
......@@ -1829,7 +1829,7 @@ seg_class.finish()
#
filename_template = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOTSUMMARY_%s_%02d_%s_%s", contents.seglists.extent_all()[0], contents.seglists.extent_all()[1], "%s", path = options.output_dir)
filename_template = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOTSUMMARY_%s_%02d_%s_%s", contents.seglists.extent_all(), "%s", path = options.output_dir)
while len(plots):
plot_group, plot = plots.pop(0)
for fig, filename_fragment, is_open_box in plot.finish():
......
......@@ -156,7 +156,7 @@ for i, f in enumerate(groups):
clipleft = [options.overlap / 2] * len(f) # overlap must be even
clipright = [options.overlap / 2] * len(f) # overlap must be even
bank_ids = range(bank_ids[-1] + 1, bank_ids[-1] + 1 + len(f))
svd_bank_name = inspiral_pipe.T050017_filename(ifo, "GSTLAL_SVD_BANK_%d" % i, 0, 0, ".xml.gz", path = svdJob.output_path)
svd_bank_name = inspiral_pipe.T050017_filename(ifo, "GSTLAL_SVD_BANK_%d" % i, (0, 0), ".xml.gz", path = svdJob.output_path)
svd_bank_name = os.path.join(os.getcwd(), svd_bank_name)
dag.output_cache.append(CacheEntry(ifo, "GSTLAL_SVD_BANK_%d" % i, segments.segment(0, 0), "file://localhost%s" % (svd_bank_name,)))
......
......@@ -120,7 +120,7 @@ if __name__ == '__main__':
cumulative_segments = os.path.join(os.path.join(options.directory, d), 'H1L1-0000_SEGMENTS-%s00000-100000.xml.gz' % (d,))
noninjdball = os.path.join(os.path.join(options.directory, d), 'H1L1-ALL_LLOID-%s00000-100000.sqlite' % (d,))
for injection_file in inj_file_bins:
injdball[injection_file] = os.path.join(os.path.join(options.directory, d), inspiral_pipe.T050017_filename(instruments, "ALL_LLOID_%s" % injtag(injection_file) , int('%s00000' % (d,)), int('%d00000' % (int(d)+1,)), '.sqlite'))
injdball[injection_file] = os.path.join(os.path.join(options.directory, d), inspiral_pipe.T050017_filename(instruments, "ALL_LLOID_%s" % injtag(injection_file), (int(d) * 100000, (int(d) + 1) * 100000), '.sqlite'))
if float(now()) - float("%s00000" % d) > 125000 and all([os.path.exists(f) for f in injdball.values()]+[os.path.exists(noninjdball), os.path.exists(cumulative_segments)]):
print >> sys.stderr, "directory is greater than 125000 seconds old and has already been processed...continuing"
......
......@@ -36,9 +36,11 @@
# - In inspiral_pipe.py fix the InspiralJob.__init__: fix the arguments
# - On line 201, fix the comment or explain what the comment is meant to be
import math
import sys, os
import subprocess, socket, tempfile, copy, doctest
from glue import pipeline
from glue import segments
from glue.ligolw import lsctables, ligolw
from glue.ligolw import utils as ligolw_utils
from gstlal import svd_bank
......@@ -359,13 +361,15 @@ def build_bank_groups(cachedict, numbanks = [2], maxjobs = None):
return outstrs
def T050017_filename(instruments, description, start, end, extension, path = None):
def T050017_filename(instruments, description, seg, extension, path = None):
"""!
A function to generate a T050017 filename.
"""
if not isinstance(instruments, basestring):
instruments = "".join(sorted(instruments))
duration = end - start
start, end = seg
start = int(math.floor(start))
duration = int(math.ceil(end)) - start
extension = extension.strip('.')
if path is not None:
return '%s/%s-%s-%d-%d.%s' % (path, instruments, description, start, duration, extension)
......@@ -419,8 +423,7 @@ def group_T050017_filename_from_T050017_files(cache_entries, extension, path = N
split_description = cache_entries[0].description.split('_')
min_bin = [x for x in split_description[:2] if x.isdigit()]
max_bin = [x for x in cache_entries[-1].description.split('_')[:2] if x.isdigit()]
min_seg = min([int(x.segment[0]) for x in cache_entries])
max_seg = max([int(x.segment[1]) for x in cache_entries])
seg = segments.segmentlist(cache_entry.segment for cache_entry in cache_entries).extent()
if min_bin:
min_bin = min_bin[0]
if max_bin:
......@@ -434,14 +437,14 @@ def group_T050017_filename_from_T050017_files(cache_entries, extension, path = N
# all of the DIST_STATS files from a given background bin and
# then CREATE_PRIOR_DIST_STATS files which are not generated
# for specific bins
return T050017_filename(''.join(observatories), cache_entries[0].description, min_seg, max_seg, extension, path = path)
return T050017_filename(''.join(observatories), cache_entries[0].description, seg, extension, path = path)
elif min_bin and max_bin and min_bin != max_bin:
if split_description[1].isdigit():
description_base = split_description[2:]
else:
description_base = split_description[1:]
# Files from different bins, thus segments must be same
return T050017_filename(''.join(observatories), '_'.join([min_bin, max_bin] + description_base), min_seg, max_seg, extension, path = path)
return T050017_filename(''.join(observatories), '_'.join([min_bin, max_bin] + description_base), seg, extension, path = path)
else:
print >>sys.stderr, "ERROR: first and last file of cache file do not match known pattern, cannot name group file under T050017 convention. \nFile 1: %s\nFile 2: %s" % (cache_entries[0].path, cache_entries[-1].path)
raise ValueError
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment