From 1e3f733e263663c1f843057cebc8282faf954ec3 Mon Sep 17 00:00:00 2001
From: Chad Hanna <crh184@psu.edu>
Date: Mon, 5 Sep 2016 13:27:10 -0400
Subject: [PATCH] gstlal_ll_inspiral_pipe: add aggregator job

---
 gstlal-inspiral/bin/gstlal_ll_inspiral_pipe | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/gstlal-inspiral/bin/gstlal_ll_inspiral_pipe b/gstlal-inspiral/bin/gstlal_ll_inspiral_pipe
index 3b10e5d9ce..90dfd92985 100755
--- a/gstlal-inspiral/bin/gstlal_ll_inspiral_pipe
+++ b/gstlal-inspiral/bin/gstlal_ll_inspiral_pipe
@@ -236,8 +236,6 @@ def parse_command_line():
 	parser.add_option("--state-backup-destination", metavar = "URL", help = "Location to back state up to, e.g. gstlalcbc@ldas-pcdev1.ligo.caltech.edu.")
 	parser.add_option("--web-dir", help = "set the output path to write the ''offline'' style web page to")
 
-
-
 	options, filenames = parser.parse_args()
 
 	fail = ""
@@ -300,17 +298,25 @@ else:
 	gstlalInspiralJob = inspiral_pipe.generic_job('gstlal_inspiral', condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.inspiral_condor_command, {"want_graceful_removal":"True", "kill_sig":"15"}))
 	if inj_channel_dict:
 		gstlalInspiralInjJob = inspiral_pipe.generic_job('gstlal_inspiral', tag_base = "gstlal_inspiral_inj", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.inspiral_condor_command, {"want_graceful_removal":"True", "kill_sig":"15"}))
+
 # A local universe job that will run in a loop marginalizing all of the likelihoods
 margJob = inspiral_pipe.generic_job('gstlal_inspiral_marginalize_likelihoods_online', universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
+
 # an lvalert_listen job
 listenJob = lvalert_listen_job("lvalert_listen", gracedb_service_url = options.gracedb_service_url, gracedb_group = options.gracedb_group, gracedb_search = options.gracedb_search, gracedb_pipeline = options.gracedb_pipeline, progs = options.lvalert_listener_program, inj_progs = options.inj_lvalert_listener_program, condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command), inj_gracedb_service_url = options.inj_gracedb_service_url, inj_gracedb_group = options.inj_gracedb_group, inj_gracedb_search = options.inj_gracedb_search, inj_gracedb_pipeline = options.inj_gracedb_pipeline, injections = True if inj_channel_dict else False)
+
 # get urls job
 urlsJob = inspiral_pipe.generic_job("gstlal_ll_inspiral_get_urls", universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
+
+# aggregator job
+aggJob = inspiral_pipe.generic_job("gstlal_ll_inspiral_aggregator", universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
+
 if options.injection_file:
 	# Summary page job
 	pageJob = inspiral_pipe.generic_job("gstlal_ll_inspiral_daily_page_online", universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
 	# Sensitivity plots job
 	sensJob = inspiral_pipe.generic_job("gstlal_ll_inspiral_calculate_range", universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
+
 if options.state_backup_destination:
 	# State saving job
 	stateJob = inspiral_pipe.generic_job("gstlal_ll_inspiral_save_state", universe = "local", condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.non_inspiral_condor_command))
@@ -411,6 +417,11 @@ def groups(l, n):
 for g in groups(jobTags, 10):
 	urlsNode = inspiral_pipe.generic_node(urlsJob, dag, [], opts = {}, input_files = {"":" ".join(g)}, output_files = {})
 margNode = inspiral_pipe.generic_node(margJob, dag, [], opts = {}, input_files = {"":[options.marginalized_likelihood_file] + ["%s_registry.txt" % r for r in jobTags]}, output_files = {})
+
+# FIXME by default the inspiral jobs advertise the current directory as their
+# job tag, but this should be made to be more flexible
+aggNode = inspiral_pipe.generic_node(aggJob, dag, [], opts = {"dump-period": 1, "job-tag": os.getcwd()})
+
 if options.injection_file:
 	for g in groups(inj_jobTags, 10):
 		urlsNode = inspiral_pipe.generic_node(urlsJob, dag, [], opts = {}, input_files = {"":" ".join(g)}, output_files = {})
@@ -419,6 +430,7 @@ if options.injection_file:
 
 if options.state_backup_destination:
 	stateNode = inspiral_pipe.generic_node(stateJob, dag, [], opts = {}, input_files = {"":[options.state_backup_destination, options.marginalized_likelihood_file] + options.likelihood_files}, output_files = {})
+
 #
 # Write out the dag and other flies
 #
-- 
GitLab
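
For context: the new aggNode hands opts = {"dump-period": 1, "job-tag": os.getcwd()} to inspiral_pipe.generic_node, which is assumed here to forward each key/value pair to the gstlal_ll_inspiral_aggregator executable as a "--key value" argument. The sketch below only illustrates that assumed mapping; format_opts is a hypothetical stand-in written for this note, not gstlal code.

import os

def format_opts(opts):
	# Hypothetical helper: render an opts dict as "--key value" tokens, the
	# convention assumed for generic_node's opts argument in this patch.
	args = []
	for key, value in sorted(opts.items()):
		args.append("--%s" % key)
		args.append(str(value))
	return args

if __name__ == "__main__":
	# Mirrors the aggNode opts from the patch; prints something like
	# ['gstlal_ll_inspiral_aggregator', '--dump-period', '1', '--job-tag', '<cwd>']
	opts = {"dump-period": 1, "job-tag": os.getcwd()}
	print(["gstlal_ll_inspiral_aggregator"] + format_opts(opts))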