
Commit 5ea4c156 authored by Patrick Godwin

gstlal_snax_dag_online(offline): modify job names to reflect name change

parent c6797126
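
The executable renames applied by this commit follow a single old-to-new mapping, collected below from the hunks that follow. The short sketch is illustrative only and is not part of the commit: it assumes a local installation where the renamed gstlal_snax_* entry points should already be on PATH, and simply reports whether each one can be found before a DAG is regenerated.

# Illustrative sketch, not part of the commit: the mapping is read off the diff
# below; the PATH check assumes the renamed executables are installed locally.
import shutil

JOB_RENAMES = {
	"gstlal_feature_extractor":    "gstlal_snax_extract",
	"gstlal_feature_combiner":     "gstlal_snax_combine",
	"gstlal_feature_synchronizer": "gstlal_snax_synchronize",
	"gstlal_feature_hdf5_sink":    "gstlal_snax_sink",
	"gstlal_feature_monitor":      "gstlal_snax_monitor",
	"gstlal_feature_aggregator":   "gstlal_snax_aggregate",
}

for old_name, new_name in JOB_RENAMES.items():
	# The DAG scripts reference jobs by executable name (see the hunks below),
	# so the renamed binaries need to be discoverable when the DAG is generated.
	status = "found" if shutil.which(new_name) else "missing"
	print("%s -> %s: %s" % (old_name, new_name, status))
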
@@ -221,12 +221,12 @@ dag = dagparts.DAG("feature_extractor_pipe")
 condor_options = {"request_memory": options.request_memory, "request_cpus": options.request_cpu, "want_graceful_removal": "True", "kill_sig": "15"}
 condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, condor_options)
-feature_extractor_job = dagparts.DAGJob("gstlal_feature_extractor", condor_commands = condor_commands)
+feature_extractor_job = dagparts.DAGJob("gstlal_snax_extract", condor_commands = condor_commands)
 segsdict = analysis_segments(ifo, data_source_info.frame_segments, data_source_info.seg, options.segment_length, options.psd_drop_time, max_template_length=max_template_length)
 combiner_condor_options = {"request_memory": "4GB", "request_cpus": 1, "want_graceful_removal": "True", "kill_sig": "15"}
 combiner_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, combiner_condor_options)
-feature_combiner_job = dagparts.DAGJob("gstlal_feature_combiner", condor_commands = combiner_condor_commands)
+feature_combiner_job = dagparts.DAGJob("gstlal_snax_combine", condor_commands = combiner_condor_commands)
 #
 # set up jobs
@@ -236,7 +236,7 @@ feature_extractor_nodes = feature_extractor_node_gen(feature_extractor_job, dag,
 feature_combiner_options = {
 	"verbose": options.verbose,
-	"rootdir": os.path.join(options.out_path, "gstlal_feature_extractor"),
+	"rootdir": os.path.join(options.out_path, "gstlal_snax_extract"),
 	"basename": options.description,
 	"instrument": ifo,
 	"tag": "offline",
@@ -229,7 +229,7 @@ if options.condor_universe == 'local':
 else:
 	condor_options = {"request_memory":options.request_memory, "request_cpus":options.request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
 	condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, condor_options)
-	feature_extractor_job = dagparts.DAGJob("gstlal_feature_extractor", condor_commands = condor_commands, universe = options.condor_universe)
+	feature_extractor_job = dagparts.DAGJob("gstlal_snax_extract", condor_commands = condor_commands, universe = options.condor_universe)
 feature_extractor_nodes, num_channels = feature_extractor_node_gen(feature_extractor_job, dag, [], ifo, options, data_source_info)
 # auxiliary jobs
@@ -239,13 +239,13 @@ if options.save_format == 'kafka':
 	else:
 		auxiliary_condor_options = {"request_memory":options.auxiliary_request_memory, "request_cpus":options.auxiliary_request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
 	auxiliary_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, auxiliary_condor_options)
-	synchronizer_job = dagparts.DAGJob("gstlal_feature_synchronizer", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
-	hdf5_sink_job = dagparts.DAGJob("gstlal_feature_hdf5_sink", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
-	monitor_job = dagparts.DAGJob("gstlal_feature_monitor", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
+	synchronizer_job = dagparts.DAGJob("gstlal_snax_synchronize", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
+	hdf5_sink_job = dagparts.DAGJob("gstlal_snax_sink", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
+	monitor_job = dagparts.DAGJob("gstlal_snax_monitor", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
 	# aggregator jobs
 	if not options.disable_agg_jobs:
-		aggregator_job = dagparts.DAGJob("gstlal_feature_aggregator", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
+		aggregator_job = dagparts.DAGJob("gstlal_snax_aggregate", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
 #
 # set up options for auxiliary jobs
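
For reference, the construction pattern these scripts use is the plain dagparts API visible in the hunks above. The sketch below mirrors that pattern with the post-rename job names; it is a minimal, hedged example rather than an excerpt of either script: it assumes a working gstlal installation that provides gstlal.dagparts and the gstlal_snax_* executables on PATH, and it uses placeholder resource values in place of the scripts' command-line options.

# Minimal sketch mirroring the calls shown in the diff; not an excerpt of the scripts.
# Assumes gstlal is installed (providing gstlal.dagparts) and that the renamed
# gstlal_snax_* executables are on PATH; resource values below are placeholders.
from gstlal import dagparts

dag = dagparts.DAG("feature_extractor_pipe")

condor_options = {
	"request_memory": "2GB",	# placeholder for options.request_memory
	"request_cpus": 2,		# placeholder for options.request_cpu
	"want_graceful_removal": "True",
	"kill_sig": "15",
}
# No extra per-job condor command overrides in this sketch, hence the empty list.
condor_commands = dagparts.condor_command_dict_from_opts([], condor_options)

# Post-rename job names introduced by this commit:
extract_job = dagparts.DAGJob("gstlal_snax_extract", condor_commands = condor_commands)
combine_job = dagparts.DAGJob("gstlal_snax_combine", condor_commands = condor_commands)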