Commit cc82fbe5 authored by Patrick Godwin, committed by Kipp Cannon

consolidate DAG classes from inspiral_pipe.py + dagparts.py (e.g....

consolidate DAG classes from inspiral_pipe.py + dagparts.py (e.g. generic_node) into dagparts.py, move non-specific inspiral functions from inspiral_pipe.py to dagparts.py, changes reflected in several dag generation execs
parent b644f5dc
......@@ -35,8 +35,7 @@ import lal
from ligo import segments
from gstlal import aggregator
from gstlal import inspiral_pipe
from gstlal import dagparts as gstlaldagparts
from gstlal import dagparts
from gstlal.fxtools import feature_extractor
from gstlal.fxtools import multichannel_datasource
......@@ -107,7 +106,7 @@ def feature_extractor_node_gen(feature_extractor_job, dag, parent_nodes, segsdic
feature_end_time = min(int(seg[1]), options.gps_end_time)
feature_extractor_nodes[(ii, seg)] = \
inspiral_pipe.generic_node(feature_extractor_job, dag, parent_nodes = dep_nodes,
dagparts.DAGNode(feature_extractor_job, dag, parent_nodes = dep_nodes,
opts = {"gps-start-time":gps_start_time,
"gps-end-time":feature_end_time,
"feature-start-time":feature_start_time,
......@@ -207,11 +206,11 @@ aggregator.makedir("logs")
# set up dag and job classes
#
dag = inspiral_pipe.DAG("feature_extractor_pipe")
dag = dagparts.DAG("feature_extractor_pipe")
condor_options = {"request_memory":options.request_memory, "request_cpus":options.request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, condor_options)
feature_extractor_job = inspiral_pipe.generic_job("gstlal_feature_extractor", condor_commands = condor_commands)
condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, condor_options)
feature_extractor_job = dagparts.DAGJob("gstlal_feature_extractor", condor_commands = condor_commands)
segsdict = analysis_segments(ifo, data_source_info.frame_segments, data_source_info.seg, options.segment_length, max_template_length=max_template_length)
#
......
......@@ -33,7 +33,6 @@ import os
from gstlal import aggregator
from gstlal import dagparts
from gstlal import inspiral_pipe
from gstlal.fxtools import feature_extractor
from gstlal.fxtools import multichannel_datasource
......@@ -130,7 +129,7 @@ def feature_extractor_node_gen(feature_extractor_job, dag, parent_nodes, ifo, op
subset_options.update(command_line_options)
feature_extractor_nodes[ii] = \
inspiral_pipe.generic_node(feature_extractor_job, dag, parent_nodes = parent_nodes,
dagparts.DAGNode(feature_extractor_job, dag, parent_nodes = parent_nodes,
opts = subset_options,
output_files = {"out-path": os.path.join(options.out_path, "gstlal_feature_extractor")}
)
......@@ -153,14 +152,14 @@ def feature_extractor_node_gen(feature_extractor_job, dag, parent_nodes, ifo, op
# =============================
class zookeeper_job(inspiral_pipe.generic_job):
class zookeeper_job(dagparts.DAGJob):
"""
A zookeeper job
"""
def __init__(self, program = "zookeeper-server-start.sh", datadir = os.path.join(dagparts.log_path(), "zookeeper"), port = 2271, maxclients = 0, condor_commands = {}):
"""
"""
inspiral_pipe.generic_job.__init__(self, program, universe = "local", condor_commands = condor_commands)
dagparts.DAGJob.__init__(self, program, universe = "local", condor_commands = condor_commands)
try:
os.mkdir(datadir)
......@@ -179,14 +178,14 @@ maxClientCnxns=%d
f.close()
class kafka_job(inspiral_pipe.generic_job):
class kafka_job(dagparts.DAGJob):
"""
A kafka job
"""
def __init__(self, program = "kafka-server-start.sh", logdir = os.path.join(dagparts.log_path(), "kafka"), host = "10.14.0.112:9182", zookeeperaddr = "localhost:2271", condor_commands = {}):
"""
"""
inspiral_pipe.generic_job.__init__(self, program, universe = "local", condor_commands = condor_commands)
dagparts.DAGJob.__init__(self, program, universe = "local", condor_commands = condor_commands)
try:
os.mkdir(logdir)
......@@ -290,15 +289,15 @@ aggregator.makedir("logs")
# set up dag and job classes
#
dag = inspiral_pipe.DAG("feature_extractor_pipe")
dag = dagparts.DAG("feature_extractor_pipe")
# feature extractor job
if options.condor_universe == 'local':
condor_options = {"want_graceful_removal":"True", "kill_sig":"15"}
else:
condor_options = {"request_memory":options.request_memory, "request_cpus":options.request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, condor_options)
feature_extractor_job = inspiral_pipe.generic_job("gstlal_feature_extractor", condor_commands = condor_commands, universe = options.condor_universe)
condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, condor_options)
feature_extractor_job = dagparts.DAGJob("gstlal_feature_extractor", condor_commands = condor_commands, universe = options.condor_universe)
feature_extractor_nodes, num_channels = feature_extractor_node_gen(feature_extractor_job, dag, [], ifo, options, data_source_info)
# auxiliary jobs
......@@ -307,15 +306,15 @@ if options.save_format == 'kafka':
auxiliary_condor_options = {"want_graceful_removal":"True", "kill_sig":"15"}
else:
auxiliary_condor_options = {"request_memory":options.auxiliary_request_memory, "request_cpus":options.auxiliary_request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
auxiliary_condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, auxiliary_condor_options)
synchronizer_job = inspiral_pipe.generic_job("gstlal_feature_synchronizer", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
hdf5_sink_job = inspiral_pipe.generic_job("gstlal_feature_hdf5_sink", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
listener_job = inspiral_pipe.generic_job("gstlal_feature_listener", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
auxiliary_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, auxiliary_condor_options)
synchronizer_job = dagparts.DAGJob("gstlal_feature_synchronizer", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
hdf5_sink_job = dagparts.DAGJob("gstlal_feature_hdf5_sink", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
listener_job = dagparts.DAGJob("gstlal_feature_listener", condor_commands = auxiliary_condor_commands, universe = options.condor_universe)
if not options.disable_kafka_jobs:
# kafka/zookeeper jobs
local_condor_options = {"want_graceful_removal":"True", "kill_sig":"15"}
local_condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, local_condor_options)
local_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, local_condor_options)
zoo_job = zookeeper_job(condor_commands = local_condor_commands)
kafka_job = kafka_job(condor_commands = local_condor_commands, host = options.kafka_server)
......@@ -380,13 +379,13 @@ if options.save_format == 'kafka':
if options.save_format == 'kafka':
synchronizer_options.update({"num-topics": len(feature_extractor_nodes)})
synchronizer_node = inspiral_pipe.generic_node(synchronizer_job, dag, [], opts = synchronizer_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_synchronizer")})
hdf5_sink_node = inspiral_pipe.generic_node(hdf5_sink_job, dag, [], opts = hdf5_sink_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_hdf5_sink")})
listener_node = inspiral_pipe.generic_node(listener_job, dag, [], opts = listener_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_listener")})
synchronizer_node = dagparts.DAGNode(synchronizer_job, dag, [], opts = synchronizer_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_synchronizer")})
hdf5_sink_node = dagparts.DAGNode(hdf5_sink_job, dag, [], opts = hdf5_sink_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_hdf5_sink")})
listener_node = dagparts.DAGNode(listener_job, dag, [], opts = listener_options, output_files = {"rootdir": os.path.join(options.out_path, "gstlal_feature_listener")})
if not options.disable_kafka_jobs:
zoo_node = inspiral_pipe.generic_node(zoo_job, dag, [], opts = {"":"zookeeper.properties"})
kafka_node = inspiral_pipe.generic_node(kafka_job, dag, [], opts = {"":"kafka.properties"})
zoo_node = dagparts.DAGNode(zoo_job, dag, [], opts = {"":"zookeeper.properties"})
kafka_node = dagparts.DAGNode(kafka_job, dag, [], opts = {"":"kafka.properties"})
#
# write out dag and sub files
......
......@@ -29,6 +29,7 @@ from glue.ligolw import utils as ligolw_utils
from glue.ligolw.utils import process as ligolw_process
from gstlal import templates
from gstlal import inspiral_pipe
from gstlal import dagparts
from gstlal import chirptime
import lal
from lal.utils import CacheEntry
......@@ -216,7 +217,7 @@ with open(options.output_cache, "w") as output_cache_file:
row.mtotal = row.mass1 + row.mass2
sngl_inspiral_table[:] = rows
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_GSTLAL_SPLIT_BANK" % bank_count, (0, 0), ".xml.gz", path = options.output_path)
output = dagparts.T050017_filename(options.instrument, "%04d_GSTLAL_SPLIT_BANK" % bank_count, (0, 0), ".xml.gz", path = options.output_path)
if not options.write_svd_caches:
output_cache_file.write("%s\n" % CacheEntry.from_T050017("file://localhost%s" % os.path.abspath(output)))
else:
......@@ -232,7 +233,7 @@ with open(options.output_cache, "w") as output_cache_file:
pass
for svd_cache_group in inspiral_pipe.group(svd_caches, options.num_banks):
output = inspiral_pipe.T050017_filename(options.instrument, "%04d_%04d_GSTLAL_SPLIT_BANK" % (svd_cache_group[0][0], svd_cache_group[-1][0]), (0, 0), ".cache", path = cache_path)
output = dagparts.T050017_filename(options.instrument, "%04d_%04d_GSTLAL_SPLIT_BANK" % (svd_cache_group[0][0], svd_cache_group[-1][0]), (0, 0), ".cache", path = cache_path)
with open(output, 'w') as svd_cache:
for bank_count, split_bank_cache_entry in svd_cache_group:
svd_cache.write(split_bank_cache_entry)
......
......@@ -173,7 +173,6 @@ from gstlal import far
from gstlal import httpinterface
from gstlal import hoftcache
from gstlal import inspiral
from gstlal import inspiral_pipe
from gstlal import lloidhandler
from gstlal import lloidparts
from gstlal import pipeparts
......
......@@ -18,27 +18,26 @@
import os
from glue import pipeline
from gstlal import inspiral_pipe
from gstlal import dagparts as gstlaldagparts
from gstlal import dagparts
try:
os.mkdir("logs")
except:
pass
dag = inspiral_pipe.DAG("dt_dphi")
dag = dagparts.DAG("dt_dphi")
margJob = inspiral_pipe.generic_job("gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"8GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o2.cbc.uber.gstlaloffline"})
addJob = inspiral_pipe.generic_job("gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"4GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o2.cbc.uber.gstlaloffline"})
margJob = dagparts.DAGJob("gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"8GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o2.cbc.uber.gstlaloffline"})
addJob = dagparts.DAGJob("gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"4GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o2.cbc.uber.gstlaloffline"})
num = 1000
margnodes = []
# FIXME dont hardcode 3345408, it comes from number of tiles in TimePhaseSNR
for start in range(0, 3345408, num):
stop = start + num
margnodes.append(inspiral_pipe.generic_node(margJob, dag, parent_nodes = [], opts = {"start":str(start), "stop":str(stop)}, output_files = {"output":"%s/inspiral_dtdphi_pdf_%d_%d.h5" % (margJob.output_path, start, stop)}))
margnodes.append(dagparts.DAGNode(margJob, dag, parent_nodes = [], opts = {"start":str(start), "stop":str(stop)}, output_files = {"output":"%s/inspiral_dtdphi_pdf_%d_%d.h5" % (margJob.output_path, start, stop)}))
addnode = inspiral_pipe.generic_node(addJob, dag, parent_nodes = margnodes, input_files = {"": [n.output_files["output"] for n in margnodes]})
addnode = dagparts.DAGNode(addJob, dag, parent_nodes = margnodes, input_files = {"": [n.output_files["output"] for n in margnodes]})
dag.write_sub_files()
dag.write_dag()
......
This diff is collapsed.
......@@ -61,7 +61,7 @@ from ligo.segments import utils as segmentsUtils
from gstlal import far
from gstlal import plotfar
from gstlal import inspiral_pipe
from gstlal import dagparts
class SnrChiColourNorm(matplotlib.colors.Normalize):
......@@ -334,14 +334,14 @@ for bin_index, rankingstat in enumerate(sorted(rankingstats.values(), key = lamb
fig = plotfar.plot_snr_chi_pdf(rankingstat, instrument, snr_chi_type, options.max_snr, sngls = sngls)
if fig is None:
continue
plotname = inspiral_pipe.T050017_filename(instrument, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%04d_%s_SNRCHI2" % (options.user_tag, bin_index, snr_chi_type.upper()), seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename(instrument, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%04d_%s_SNRCHI2" % (options.user_tag, bin_index, snr_chi_type.upper()), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
# candidate rates
fig = plotfar.plot_rates(rankingstat)
plotname = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%04d_RATES" % (options.user_tag, bin_index), seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%04d_RATES" % (options.user_tag, bin_index), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -358,7 +358,7 @@ if options.plot_snr_snr_pdfs:
horizon_distances = rankingstat.numerator.SNRPDF.quantized_horizon_distances(horizon_distances)
fig = plotfar.plot_snr_joint_pdf(rankingstat.numerator.SNRPDF, instruments, horizon_distances, rankingstat.min_instruments, options.max_snr, sngls = sngls)
if fig is not None:
plotname = inspiral_pipe.T050017_filename(instruments, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_SNR_PDF_%s" % (options.user_tag, "_".join(["%s_%s" % (k, horizon_distances[k]) for k in sorted(horizon_distances)]) ), seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename(instruments, "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_SNR_PDF_%s" % (options.user_tag, "_".join(["%s_%s" % (k, horizon_distances[k]) for k in sorted(horizon_distances)]) ), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -368,7 +368,7 @@ if options.plot_snr_snr_pdfs:
if rankingstatpdf is not None:
for Title, which, NAME in (("Noise", "noise", "NOISE"), ("Signal", "signal", "SIGNAL")):
fig = plotfar.plot_likelihood_ratio_pdf(rankingstatpdf, (options.min_log_lambda, options.max_log_lambda), Title, which = which)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%s_LIKELIHOOD_RATIO_PDF" % (options.user_tag, NAME), seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_%s_LIKELIHOOD_RATIO_PDF" % (options.user_tag, NAME), seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -382,13 +382,13 @@ if rankingstatpdf is not None and rankingstatpdf.zero_lag_lr_lnpdf.array.any():
else:
xhi = options.max_log_lambda
fig = plotfar.plot_likelihood_ratio_ccdf(fapfar, (options.min_log_lambda, xhi), observed_ln_likelihood_ratios = zerolag_ln_lr, is_open_box = True)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_openbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_openbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
fig = plotfar.plot_likelihood_ratio_ccdf(fapfar, (options.min_log_lambda, xhi), observed_ln_likelihood_ratios = timeslide_ln_lr, is_open_box = False)
plotname = inspiral_pipe.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_closedbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
plotname = dagparts.T050017_filename("COMBINED", "GSTLAL_INSPIRAL_PLOT_BACKGROUND_%s_NOISE_LIKELIHOOD_RATIO_CCDF_closedbox" % options.user_tag, seg, options.output_format, path = options.output_dir)
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
fig.savefig(plotname)
......@@ -55,7 +55,7 @@ from glue.ligolw import utils as ligolw_utils
from glue.lal import CacheEntry
from gstlal import svd_bank
from gstlal import inspiral_pipe
from gstlal import dagparts
from gstlal.plotutil import golden_ratio
#
......@@ -348,7 +348,7 @@ lsctables.use_in(LIGOLWContentHandler)
options, filenames = parse_command_line()
filename_template = inspiral_pipe.T050017_filename('H1L1V1', 'GSTLAL_INSPIRAL_PLOTBANKS_%s', (0, 0), '.png', path = options.output_dir)
filename_template = dagparts.T050017_filename('H1L1V1', 'GSTLAL_INSPIRAL_PLOTBANKS_%s', (0, 0), '.png', path = options.output_dir)
# Make svd bank plots
if options.plot_svd_bank:
......
......@@ -32,7 +32,7 @@ import sys
from optparse import OptionParser
import itertools
from gstlal import inspiral_pipe
from gstlal import dagparts
from gstlal import plotutil
from glue.ligolw import ligolw
......@@ -711,19 +711,19 @@ for bin_type in opts.bin_types:
# save and close figures
ifostr = "%s_%s" % (UL.searched_instruments, "".join(sorted(instr)))
tag = inspiral_pipe.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
tag = dagparts.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_far.savefig(tag)
pyplot.close(fig_far)
tag = inspiral_pipe.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_RANGE_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
tag = dagparts.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_RANGE_VS_FAR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_far_range.savefig(tag)
pyplot.close(fig_far_range)
tag = inspiral_pipe.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_SNR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
tag = dagparts.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_VOLUME_VS_SNR_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_snr.savefig(tag)
pyplot.close(fig_snr)
tag = inspiral_pipe.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_EFFICIENCY_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
tag = dagparts.T050017_filename(ifostr, "GSTLAL_INSPIRAL_PLOT_SENSITIVITY_%s_EFFICIENCY_BINNED_BY_%s" % (opts.user_tag, bin_type.upper()), (UL.start_time, UL.end_time), ".png", path = opts.output_dir)
fig_eff.savefig(tag)
pyplot.close(fig_eff)
......
......@@ -63,7 +63,7 @@ from glue.ligolw import dbtables
from glue.ligolw import lsctables
from glue.ligolw.utils import segments as ligolw_segments
from gstlal import far
from gstlal import inspiral_pipe
from gstlal import dagparts
from gstlal import gviz_api
from gstlal.plotutil import golden_ratio
......@@ -1796,7 +1796,7 @@ seg_class.finish()
#
filename_template = inspiral_pipe.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOTSUMMARY_%s_%02d_%s_%s", contents.seglists.extent_all(), "%s", path = options.output_dir)
filename_template = dagparts.T050017_filename("H1L1V1", "GSTLAL_INSPIRAL_PLOTSUMMARY_%s_%02d_%s_%s", contents.seglists.extent_all(), "%s", path = options.output_dir)
while len(plots):
plot_group, plot = plots.pop(0)
for fig, filename_fragment, is_open_box in plot.finish():
......
......@@ -34,6 +34,7 @@ from optparse import OptionParser
#
from ligo import segments
from gstlal import dagparts
from gstlal import inspiral_pipe
from gstlal import far
from lal.utils import CacheEntry
......@@ -140,8 +141,8 @@ try:
os.mkdir("logs")
except:
pass
dag = inspiral_pipe.DAG(options.output_name)
svdJob = inspiral_pipe.generic_job("gstlal_svd_bank", tag_base = "gstlal_svd_bank_%s" % ifo, condor_commands = inspiral_pipe.condor_command_dict_from_opts(options.condor_command, {"want_graceful_removal":"True", "kill_sig":"15"}))
dag = dagparts.DAG(options.output_name)
svdJob = dagparts.DAGJob("gstlal_svd_bank", tag_base = "gstlal_svd_bank_%s" % ifo, condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, {"want_graceful_removal":"True", "kill_sig":"15"}))
# Assumes cache is sorted by chirpmass or whatever the SVD sorting algorithm that was chosen
files = [CacheEntry(line).path for line in open(options.bank_cache)]
......@@ -157,11 +158,11 @@ for i, f in enumerate(groups):
clipleft = [options.overlap / 2] * len(f) # overlap must be even
clipright = [options.overlap / 2] * len(f) # overlap must be even
bank_ids = range(bank_ids[-1] + 1, bank_ids[-1] + 1 + len(f))
svd_bank_name = inspiral_pipe.T050017_filename(ifo, "GSTLAL_SVD_BANK_%d" % i, (0, 0), ".xml.gz", path = svdJob.output_path)
svd_bank_name = dagparts.T050017_filename(ifo, "GSTLAL_SVD_BANK_%d" % i, (0, 0), ".xml.gz", path = svdJob.output_path)
svd_bank_name = os.path.join(os.getcwd(), svd_bank_name)
dag.output_cache.append(CacheEntry(ifo, "GSTLAL_SVD_BANK_%d" % i, segments.segment(0, 0), "file://localhost%s" % (svd_bank_name,)))
svdNode = inspiral_pipe.generic_node(svdJob, dag, [],
svdNode = dagparts.DAGNode(svdJob, dag, [],
opts = {"flow":options.flow,
"svd-tolerance":options.tolerance,
"ortho-gate-fap":0.5,
......
......@@ -28,7 +28,7 @@ from gstlal import aggregator
import lal
from lal import LIGOTimeGPS
from multiprocessing import Pool
from gstlal import inspiral_pipe
from gstlal import dagparts
from copy import copy
def now():
......@@ -119,7 +119,7 @@ if __name__ == '__main__':
noninjdball = os.path.join(os.path.join(options.directory, d), 'H1L1-ALL_LLOID-%s00000-100000.sqlite' % (d,))
for injection_file in inj_file_bins:
injdball[injection_file] = os.path.join(os.path.join(options.directory, d), inspiral_pipe.T050017_filename(instruments, "ALL_LLOID_%s" % injtag(injection_file), (int(d) * 100000, (int(d) + 1) * 100000), '.sqlite'))
injdball[injection_file] = os.path.join(os.path.join(options.directory, d), dagparts.T050017_filename(instruments, "ALL_LLOID_%s" % injtag(injection_file), (int(d) * 100000, (int(d) + 1) * 100000), '.sqlite'))
if float(now()) - float("%s00000" % d) > 125000 and all([os.path.exists(f) for f in injdball.values()]+[os.path.exists(noninjdball)]):
print >> sys.stderr, "directory is %s %s greater than 125000 seconds old and has already been processed...continuing" % (n,d)
......
This diff is collapsed.
This diff is collapsed.
......@@ -9,8 +9,8 @@ from glue.ligolw import ilwd
from glue.ligolw.utils import process as ligolw_process
from gstlal import metric as metric_module
import os,sys,argparse
from gstlal import inspiral, inspiral_pipe
from gstlal import dagparts as gstlaldagparts
from gstlal import inspiral
from gstlal import dagparts
# Read command line options
def parse_command_line():
......@@ -79,8 +79,8 @@ def parse_command_line():
return args
args = parse_command_line()
dag = inspiral_pipe.DAG("treebank")
treeJob = inspiral_pipe.generic_job("gstlal_inspiral_treebank")
dag = dagparts.DAG("treebank")
treeJob = dagparts.DAGJob("gstlal_inspiral_treebank")
argsdict = dict((key.replace("_","-"), value) for key,value in vars(args).items())
mass2 = numpy.logspace(math.log(args.min_mass2, 8/3.), math.log(args.max_mass2, 8/3.), args.num_jobs, base=8/3.)
del argsdict["num-jobs"]
......@@ -91,7 +91,7 @@ for minmass2,maxmass2 in zip(mass2[:-1],mass2[1:]):
argsdict["max-mass2"] = maxmass2
argsdict["user-tag"] = cnt
cnt+=1
pnodes.append(inspiral_pipe.generic_node(treeJob, dag, parent_nodes = [], input_files = {}, output_files = {}, opts = argsdict))
pnodes.append(dagparts.DAGNode(treeJob, dag, parent_nodes = [], input_files = {}, output_files = {}, opts = argsdict))
#FIXME add a ligolw_add job to the end of the dag
try:
......
......@@ -294,7 +294,7 @@ try:
except:
pass
dag = dagparts.CondorDAG("gstlal_fake_frames_pipe")
dag = dagparts.DAG("gstlal_fake_frames_pipe")
seglists = ligolw_segments.segmenttable_get_by_name(ligolw_utils.load_filename(options.frame_segments_file, verbose = options.verbose, contenthandler = ligolw_segments.LIGOLWContentHandler), options.frame_segments_name).coalesce()
choosesegs(seglists, options.min_segment_length)
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment