Commit 917be231 authored by Chad Hanna

various tweaks

parent 5cb24001
 #!/usr/bin/python
 """
-This program makes a dag for the S5 sub solar mass search
+This program makes a dag for ER1
 """
 __author__ = 'Chad Hanna <channa@caltech.edu>'
@@ -23,6 +23,7 @@ import glue.ligolw.utils.segments as ligolw_segments
 from optparse import OptionParser
 from gstlal.svd_bank import read_bank
 from gstlal import inspiral
+import numpy
 ###############################################################################
 # environment utilities
@@ -112,7 +113,7 @@ class gstlal_inspiral_node(pipeline.CondorDAGNode):
     A gstlal_inspiral node
     """
     #FIXME add frame segments, name and veto segments name
-    def __init__(self, job, dag, channel_dict, reference_psd, svd_bank, tmp_space=log_path(), ht_gate_thresh=10.0, control_peak_time = 4, fir_stride = 4, p_node=[]):
+    def __init__(self, job, dag, channel_dict, reference_psd, svd_bank, tmp_space=log_path(), ht_gate_thresh=10.0, control_peak_time = 8, fir_stride = 8, p_node=[]):
         pipeline.CondorDAGNode.__init__(self,job)
         self.add_var_opt("channel-name", inspiral.pipeline_channel_list_from_channel_dict(channel_dict))
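
For context on the channel_dict passed to the node above: the --channel option (see the parse_command_line hunk further down) takes repeated IFO=CHANNEL-NAME overrides, with "LSC-STRAIN" as the default for every detector. Below is a minimal illustrative sketch of that mapping; the helper names are invented here, and gstlal.inspiral provides the real implementations, which may differ in detail.

# Hypothetical helpers for illustration only; not the gstlal.inspiral implementation.
def channel_dict_from_options(channel_opts, ifos=("H1", "H2", "L1", "V1"), default="LSC-STRAIN"):
    # Start every detector on the default channel, then apply IFO=CHANNEL-NAME overrides.
    channel_dict = dict((ifo, default) for ifo in ifos)
    for opt in channel_opts:  # e.g. ["H1=FAKE-STRAIN", "L1=FAKE-STRAIN"]
        ifo, name = opt.split("=", 1)
        channel_dict[ifo] = name
    return channel_dict

def channel_list_from_dict(channel_dict):
    # Flatten to a comma-separated IFO=NAME string, as passed to the job's --channel-name option.
    return ",".join("%s=%s" % (ifo, name) for ifo, name in sorted(channel_dict.items()))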
@@ -168,23 +169,31 @@ def num_bank_files(cachedict):
         f.close()
     return cnt
-def build_bank_string(cachedict, numbanks = 2):
+def build_bank_string(cachedict, numbanks = [2], maxjobs = None):
     numfiles = num_bank_files(cachedict)
     filedict = {}
     cnt = 0
+    job = 0
     for ifo in cachedict:
         filedict[ifo] = open(cachedict[ifo],'r')
-    for a in range(0, numfiles, numbanks):
-        c = ""
-        for b in range(numbanks):
+    loop = True
+    while cnt < numfiles:
+        job += 1
+        if maxjobs is not None and job > maxjobs:
+            break
+        position = int(float(cnt) / numfiles * len(numbanks))
+        c = ''
+        for i in range(numbanks[position]):
             cnt += 1
             for ifo, f in filedict.items():
                 if cnt < numfiles:
                     c += '%s:%s,' % (ifo, lal.CacheEntry(f.readline()).path())
                 else:
                     break
        c = c.strip(',')
        yield c
 def cache_to_dict(cachefile):
     out = {}
     for l in open(cachefile):
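
To make the new grouping logic in build_bank_string easier to follow, here is a self-contained sketch of the same idea with the cache-file and lal.CacheEntry handling stripped out; the function name and file names below are invented for illustration.

def group_banks(files, numbanks=[1, 2], maxjobs=None):
    # Same idea as build_bank_string: walk the bank list, choosing the group size
    # from numbanks according to how far through the list we are, and optionally
    # stop after maxjobs groups.
    numfiles = len(files)
    cnt = 0
    job = 0
    while cnt < numfiles:
        job += 1
        if maxjobs is not None and job > maxjobs:
            break
        position = int(float(cnt) / numfiles * len(numbanks))
        group = files[cnt:cnt + numbanks[position]]
        cnt += numbanks[position]
        yield ",".join(group)

# Example: six banks, single banks over the first half of the list, pairs over the second:
# list(group_banks(["bank%d" % i for i in range(6)], numbanks=[1, 2]))
# -> ['bank0', 'bank1', 'bank2', 'bank3,bank4', 'bank5']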
@@ -198,9 +207,11 @@ def parse_command_line():
     parser.add_option("--bank-cache", metavar = "filenames", help = "Set the bank cache files in format H1=H1.cache,H2=H2.cache, etc..")
     parser.add_option("--channel", metavar = "name", default=[], action = "append", help = "Set the name of the channel to process (optional). The default is \"LSC-STRAIN\" for all detectors. Override with IFO=CHANNEL-NAME can be given multiple times")
     parser.add_option("--ht-gate-threshold", metavar = "float", help = "Set the h(t) gate threshold to reject glitches", type="float")
-    parser.add_option("--num-banks", metavar = "int", type = "int", help = "the number of banks per job")
+    parser.add_option("--num-banks", metavar = "str", help = "the number of banks per job. can be given as a list like 1,2,3,4 then it will split up the bank cache into N groups with M banks each.")
+    parser.add_option("--max-jobs", metavar = "num", type = "int", help = "stop parsing the cache after reaching a certain number of jobs")
     options, filenames = parser.parse_args()
+    options.num_banks = [int(v) for v in options.num_banks.split(",")]
     fail = ""
     for option in ("bank_cache",):
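
As a quick stand-alone illustration of the new option handling (the argument values below are invented): --num-banks is split on commas into a list of group sizes, and --max-jobs caps how many jobs the cache is parsed into.

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--num-banks", metavar = "str")
parser.add_option("--max-jobs", metavar = "num", type = "int")
opts, args = parser.parse_args(["--num-banks", "1,2,3,4", "--max-jobs", "100"])
num_banks = [int(v) for v in opts.num_banks.split(",")]
# num_banks == [1, 2, 3, 4]: the bank cache is split into four regions, with jobs
# in region i taking num_banks[i] banks each; at most 100 jobs are generated.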
@@ -237,7 +248,7 @@ gstlalInspiralJob = gstlal_inspiral_job()
 trials_factor = 0
-for s in build_bank_string(bank_cache, options.num_banks):
+for s in build_bank_string(bank_cache, options.num_banks, options.max_jobs):
     gstlal_inspiral_node(gstlalInspiralJob, dag, channel_dict, reference_psd=options.reference_psd, svd_bank=s, ht_gate_thresh = options.ht_gate_threshold)