Commit bb9d415d authored by Patrick Godwin's avatar Patrick Godwin

gstlal_inspiral_rerank_pipe, inspiral_pipe.py: fix remaining issues with rerank dag

parent 2c0d7c12
Pipeline #78230 passed with stages
in 21 minutes and 22 seconds
......@@ -43,6 +43,7 @@ import numpy
from lal.utils import CacheEntry
from ligo import segments
from ligo.lw import ligolw
from ligo.lw import lsctables
import ligo.lw.utils as ligolw_utils
......@@ -66,9 +67,8 @@ lsctables.use_in(LIGOLWContentHandler)
def parse_command_line():
parser = OptionParser(description = __doc__)
# reference_psd
parser.add_option("--reference-psd", help = "Set the reference PSD. Required.")
parser.add_option("--gps-start-time", metavar = "seconds", help = "Set the start time of the segment to analyze in GPS seconds. Required unless --data-source=lvshm")
parser.add_option("--gps-end-time", metavar = "seconds", help = "Set the end time of the segment to analyze in GPS seconds. Required unless --data-source=lvshm")
# mass model options
parser.add_option("--template-bank", metavar = "filename", help = "Set the template bank xml file.")
......@@ -94,6 +94,8 @@ def parse_command_line():
# FIXME: uhhhhh... yeah
parser.add_option("--injections", action = "append", help = "append injection files to analyze. Must prepend filename with X:Y:, where X and Y are floats, e.g. 1.2:3.1:filename, so that the injections are only searched for in regions of the template bank with X <= chirp mass < Y.")
parser.add_option("--analysis-path", metavar = "path", help = "Set the path to the analysis you want to rerank.")
# caches
parser.add_option("--dist-stats-cache", metavar = "filename", help = "Set the cache file for dist stats")
parser.add_option("--lloid-cache", metavar = "filename", help = "Set the cache file for LLOID")
......@@ -171,14 +173,22 @@ def set_up_jobs(options):
inj_snr_condor_opts['+MemoryUsage'] = "( 2000 ) * 2 / 3"
inj_snr_condor_opts['request_cpus'] = "2"
marg_condor_opts = default_condor_opts.copy()
marg_condor_opts['+MemoryUsage'] = "( 2000 ) * 2 / 3"
# set condor commands
base_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, default_condor_opts)
calc_rank_pdf_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, calc_rank_pdf_condor_opts)
inj_snr_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, inj_snr_condor_opts)
marg_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, marg_condor_opts)
sh_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, {"want_graceful_removal":"True", "kill_sig":"15"})
# set up rest of jobs
# jobs only needed to find paths
# NOTE: find a better way to do this?
jobs['svd'] = dagparts.DAGJob("gstlal_svd_bank", condor_commands = base_condor_commands)
jobs['medianPSD'] = dagparts.DAGJob("gstlal_median_of_psds", condor_commands = base_condor_commands)
# set up rest of jobs
jobs['model'] = dagparts.DAGJob("gstlal_inspiral_mass_model", condor_commands = base_condor_commands)
jobs['modelAdd'] = dagparts.DAGJob("gstlal_inspiral_add_mass_models", condor_commands = base_condor_commands)
jobs['horizon'] = dagparts.DAGJob("gstlal_plot_psd_horizon", condor_commands = base_condor_commands)
......@@ -186,7 +196,7 @@ def set_up_jobs(options):
jobs['calcRankPDFs'] = dagparts.DAGJob("gstlal_inspiral_calc_rank_pdfs", condor_commands = calc_rank_pdf_condor_commands)
jobs['calcRankPDFsWithZerolag'] = dagparts.DAGJob("gstlal_inspiral_calc_rank_pdfs", tag_base="gstlal_inspiral_calc_rank_pdfs_with_zerolag", condor_commands=calc_rank_pdf_condor_commands)
jobs['calcLikelihood'] = dagparts.DAGJob("gstlal_inspiral_calc_likelihood", condor_commands = base_condor_commands)
jobs['marginalize'] = dagparts.DAGJob("gstlal_inspiral_marginalize_likelihood", condor_commands = base_condor_commands)
jobs['marginalize'] = dagparts.DAGJob("gstlal_inspiral_marginalize_likelihood", condor_commands = marg_condor_commands)
jobs['marginalizeWithZerolag'] = dagparts.DAGJob("gstlal_inspiral_marginalize_likelihood", tag_base="gstlal_inspiral_marginalize_likelihood_with_zerolag", condor_commands=base_condor_commands)
jobs['injSplitter'] = dagparts.DAGJob("gstlal_injsplitter", tag_base="gstlal_injsplitter", condor_commands = base_condor_commands)
......@@ -217,11 +227,18 @@ def set_up_jobs(options):
if __name__ == '__main__':
options, filenames = parse_command_line()
jobs = set_up_jobs(options)
# load analysis output from run
lloid_output, lloid_diststats, svd_dtdphi_map, instrument_set, boundary_seg = inspiral_pipe.load_analysis_output(options)
lloid_output, lloid_diststats, svd_dtdphi_map, instrument_set = inspiral_pipe.load_analysis_output(options)
instruments = "".join(sorted(instrument_set))
# load reference psd
boundary_seg = segments.segment(int(options.gps_start_time), int(options.gps_end_time))
gpsmod5 = str(int(boundary_seg[0]))[:5]
ref_psd_path = os.path.join(options.analysis_path, inspiral_pipe.subdir_path([jobs['medianPSD'].output_path, gpsmod5]))
reference_psd = dagparts.T050017_filename(instruments, "REFERENCE_PSD", boundary_seg, '.xml.gz', path = ref_psd_path)
# output directories
output_dir = "plots"
if not os.path.exists("logs"):
......@@ -232,16 +249,15 @@ if __name__ == '__main__':
#
dag = dagparts.DAG("trigger_rerank_pipe")
jobs = set_up_jobs(options)
# generate xml integrity checker (if requested) and pre-script to back up data
#inspiral_pipe.set_up_scripts(options)
# mass model job
model_node, model_file = inspiral_pipe.mass_model_layer(dag, jobs, [], instruments, options, boundary_seg, options.reference_psd)
model_node, model_file = inspiral_pipe.mass_model_layer(dag, jobs, [], instruments, options, boundary_seg, reference_psd)
# marginalize jobs
marg_nodes = inspiral_pipe.marginalize_layer(dag, jobs, [], lloid_output, lloid_diststats, options, boundary_seg, instrument_set, model_node, model_file, options.reference_psd, svd_dtdphi_map)
marg_nodes = inspiral_pipe.marginalize_layer(dag, jobs, [], lloid_output, lloid_diststats, options, boundary_seg, instrument_set, model_node, model_file, reference_psd, svd_dtdphi_map)
# calc rank PDF jobs
rankpdf_nodes, rankpdf_zerolag_nodes = inspiral_pipe.calc_rank_pdf_layer(dag, jobs, marg_nodes, options, boundary_seg, instrument_set)
......
......@@ -620,7 +620,8 @@ def marginalize_layer(dag, jobs, svd_nodes, lloid_output, lloid_diststats, optio
svd_file = one_ifo_svd_nodes[bin_key].output_files["write-svd"]
else:
parent_nodes = model_node
svd_file = dagparts.T050017_filename(instrument_set[0], '%s_SVD' % bin_key, boundary_seg, '.xml.gz', path = jobs['svd'].output_path)
svd_path = os.path.join(options.analysis_path, jobs['svd'].output_path)
svd_file = dagparts.T050017_filename(instrument_set[0], '%s_SVD' % bin_key, boundary_seg, '.xml.gz', path = svd_path)
# FIXME we keep this here in case we someday want to have a
# mass bin dependent prior, but it really doesn't matter for
......@@ -1046,12 +1047,9 @@ def load_analysis_output(options):
# load dist stats
lloid_diststats = {}
boundary_seg = None
for ce in map(CacheEntry, open(options.dist_stats_cache)):
if 'DIST_STATS' in ce.description:
lloid_diststats[ce.description.split("_")[0]] = [ce.path]
if not boundary_seg:
boundary_seg = ce.segment
if 'DIST_STATS' in ce.description and not 'CREATE_PRIOR' in ce.description:
lloid_diststats.setdefault(ce.description.split("_")[0], []).append(ce.path)
# load svd dtdphi map
svd_dtdphi_map = {}
......@@ -1067,7 +1065,7 @@ def load_analysis_output(options):
# FIXME: don't do this, find a cleaner way of handling this generally
options.injections = [inj.split(':')[-1] for inj in options.injections]
return bgbin_lloid_map, lloid_diststats, svd_dtdphi_map, instrument_set, boundary_seg
return bgbin_lloid_map, lloid_diststats, svd_dtdphi_map, instrument_set
def get_threshold_values(template_mchirp_dict, bgbin_indices, svd_bank_strings, options):
......@@ -1217,7 +1215,7 @@ def get_svd_bank_params(svd_bank_cache, online = False):
def sim_tag_from_inj_file(injections):
    """Derive a simulation (injection) tag from an injection file path.

    Parameters
    ----------
    injections : str or None
        Path to an injection XML file (possibly gzipped). May be None when
        no injections are being analyzed.

    Returns
    -------
    str or None
        The tag derived from the file name, or None if *injections* is None.
    """
    if injections is None:
        return None
    # Use only the basename so directory components never leak into the tag,
    # then strip the .xml/.gz extensions and replace '-' with '_' so the tag
    # is safe to embed in T050017-style output file names (which reserve '-').
    return os.path.basename(injections).replace('.xml', '').replace('.gz', '').replace('-', '_')
def load_bank_cache(options):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment