Skip to content
Snippets Groups Projects
Commit 6909b00e authored by Patrick Godwin's avatar Patrick Godwin
Browse files

gstlal_ll_feature_extractor_pipe: tidied up code and cleaned up import...

gstlal_ll_feature_extractor_pipe: tidied up code and cleaned up import namespace for maintainability, change name of exec to match what is done in inspiral
parent 0a654895
No related branches found
No related tags found
No related merge requests found
......@@ -17,46 +17,32 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This program makes a dag to run a series of gstlal_feature_extractor jobs online
"""
__author__ = 'Duncan Meacher <duncan.meacher@ligo.org>, Patrick Godwin <patrick.godwin@ligo.org>'
##############################################################################
# import standard modules and append the lalapps prefix to the python path
import itertools
import math
import optparse
import os
import stat
import sys
from optparse import OptionParser

import numpy

##############################################################################
# import the modules we need to build the pipeline
import lal
import lal.series
from lal.utils import CacheEntry
from glue import pipeline
from glue.lal import Cache
from glue.ligolw import ligolw
from glue.ligolw import lsctables
import glue.ligolw.utils as ligolw_utils
import glue.ligolw.utils.segments as ligolw_segments
from ligo import segments
from gstlal import inspiral, inspiral_pipe
from gstlal import dagparts as gstlaldagparts
from gstlal import datasource
from gstlal.fxtools import feature_extractor
from gstlal.fxtools import multichannel_datasource
from gstlal.fxtools import multirate_datasource
from gstlal.fxtools import utils

# =============================
#
# preamble
#
# =============================
# Minimal LIGO_LW XML content handler; lsctables.use_in() registers the
# lsctables row/table classes on it so parsed documents expose typed tables.
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
    pass
lsctables.use_in(LIGOLWContentHandler)
# =============================
#
# get a dictionary of all the channels per gstlal_feature_extractor job
# functions
#
# =============================
def feature_extractor_node_gen(gstlalFeatureExtractorJob, dag, parent_nodes, ifo, options, data_source_info):
feature_extractor_nodes = {}
......@@ -96,80 +82,72 @@ def feature_extractor_node_gen(gstlalFeatureExtractorJob, dag, parent_nodes, ifo
return feature_extractor_nodes
# =============================
#
# Main
# command line parser
#
# =============================
def parse_command_line():
    """
    Parse the command line for the online feature-extractor DAG generator.

    Data-source and feature-extraction options are appended by the
    multichannel_datasource and feature_extractor modules; HTCondor resource
    options live in their own option group.

    Returns:
        (options, filenames) as produced by optparse.OptionParser.parse_args().
    """
    parser = optparse.OptionParser(description = __doc__)

    # generic data source and feature extraction options
    multichannel_datasource.append_options(parser)
    # NOTE(review): assumes `feature_extractor` (gstlal.fxtools) is imported at
    # module level — confirm the import block includes it.
    feature_extractor.append_options(parser)

    # Condor commands — added to the group (not the bare parser) so the
    # group actually carries its options in --help output.
    group = optparse.OptionGroup(parser, "Condor Options", "Adjust parameters used for HTCondor")
    group.add_option("--condor-command", action = "append", default = [], metavar = "command=value", help = "set condor commands of the form command=value; can be given multiple times")
    group.add_option("--request-cpu", default = "2", metavar = "integer", help = "set the requested node CPU count, default = 2")
    group.add_option("--request-memory", default = "8GB", metavar = "integer", help = "set the requested node memory, default = 8GB")
    parser.add_option_group(group)

    options, filenames = parser.parse_args()

    return options, filenames
# =============================
#
# main
#
# =============================

#
# parse command line and set up core structures
#

options, filenames = parse_command_line()

data_source_info = multichannel_datasource.DataSourceInfo(options)
ifo = data_source_info.instrument
channels = data_source_info.channel_dict.keys()

#
# create directories if needed
#

listdir = os.path.join(options.out_path, "gstlal_feature_extractor/channel_lists")
for dirname in (listdir, "logs"):
    # create-if-missing; avoids the silent bare try/except around mkdir
    if not os.path.exists(dirname):
        os.makedirs(dirname)

#
# set up dag and job classes
#

dag = inspiral_pipe.DAG("feature_extractor_pipe")

condor_options = {"request_memory": options.request_memory, "request_cpus": options.request_cpu, "want_graceful_removal": "True", "kill_sig": "15"}
condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, condor_options)
gstlalFeatureExtractorJob = inspiral_pipe.generic_job("gstlal_feature_extractor", condor_commands = condor_commands)

#
# set up feature extractor jobs
#

feature_extractor_nodes = feature_extractor_node_gen(gstlalFeatureExtractorJob, dag, [], ifo, options, data_source_info)

#
# write out dag and sub files
#

dag.write_sub_files()
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment