Commit 3b98654d authored by Hiroaki Ohta's avatar Hiroaki Ohta
Browse files

gstlal_inspiral_pipe: add VT estimation jobs

parent 4e6d420c
Pipeline #214757 passed with stages
in 87 minutes and 40 seconds
......@@ -139,7 +139,10 @@ def parse_command_line():
parser.add_option("--marginalized-likelihood-file", metavar = "filename", help = "Set the marginalized likelihood file (required iff running injection-only analysis)")
parser.add_option("--marginalized-likelihood-with-zerolag-file", metavar = "filename", help = "Set the marginalized likelihood with zerolag file (required iff running injection-only analysis)")
# Analytic VT options
parser.add_option("--num-split-inj-files", metavar = "count", default = 100, type = "int", help = "How many files to split the injections into for expected SNR and V*t estimation. Default is 100.")
parser.add_option("--ngroup", metavar = "count", default = 10, type = "int", help = "How many likelihood files to divide for analysis. Default is 10.")
# Condor commands
parser.add_option("--condor-command", action = "append", default = [], metavar = "command=value", help = "set condor commands of the form command=value; can be given multiple times")
parser.add_option("--max-inspiral-jobs", type="int", metavar = "jobs", help = "Set the maximum number of gstlal_inspiral jobs to run simultaneously, default no constraint.")
......@@ -310,6 +313,10 @@ def set_up_jobs(options):
jobs['ligolwAdd'] = dagparts.DAGJob("ligolw_add", condor_commands = base_condor_commands)
jobs['calcLikelihoodInj'] = dagparts.DAGJob("gstlal_inspiral_calc_likelihood", tag_base='gstlal_inspiral_calc_likelihood_inj', condor_commands=base_condor_commands)
jobs['ComputeFarFromSnrChisqHistograms'] = dagparts.DAGJob("gstlal_compute_far_from_snr_chisq_histograms", condor_commands = base_condor_commands)
jobs['injTmpltMatch'] = dagparts.DAGJob("gstlal_inspiral_injection_template_match", condor_commands = base_condor_commands)
jobs['lnlrcdfSignal'] = dagparts.DAGJob("gstlal_inspiral_lnlrcdf_signal", condor_commands = inspiral_1ifo_condor_opts)
jobs['makeMcVtplot'] = dagparts.DAGJob("gstlal_inspiral_make_mc_vtplot", condor_commands = base_condor_commands)
jobs['makeMcVtplotCheck'] = dagparts.DAGJob("gstlal_inspiral_make_mc_vtplot", tag_base = "gstlal_inspiral_make_mc_vtplot_check", condor_commands = base_condor_commands)
jobs['ligolwInspinjFind'] = dagparts.DAGJob("lalapps_inspinjfind", condor_commands = base_condor_commands)
jobs['toSqlite'] = dagparts.DAGJob("ligolw_sqlite", tag_base = "ligolw_sqlite_from_xml", condor_commands = base_condor_commands)
jobs['toSqliteNoCache'] = dagparts.DAGJob("ligolw_sqlite", tag_base = "ligolw_sqlite_from_xml_final", condor_commands = base_condor_commands)
......@@ -438,6 +445,12 @@ if __name__ == '__main__':
# Compute FAR
farnode = inspiral_pipe.compute_far_layer(dag, jobs, final_marg_nodes, injdbs, noninjdb, final_sqlite_nodes, options)
# analytic VT job
inj_tmplt_match_nodes = inspiral_pipe.injection_template_match_layer(dag, jobs, [], options, instruments)
lnlrcdf_signal_nodes = inspiral_pipe.lnlrcdf_signal_layer(dag, jobs, marg_nodes, inj_tmplt_match_nodes, options, boundary_seg, instrument_set)
inspiral_pipe.make_mc_vtplot_layer(dag, jobs, lnlrcdf_signal_nodes, farnode, options, instrument_set, output_dir)
inspiral_pipe.make_mc_vtplot_layer(dag, jobs, lnlrcdf_signal_nodes, farnode, options, instrument_set, output_dir, injdbs)
# make summary plots
plotnodes = inspiral_pipe.summary_plot_layer(dag, jobs, farnode, options, injdbs, noninjdb, output_dir)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment