
# DAG Class definitions for LALInference Pipeline
# (C) 2012 John Veitch, Vivien Raymond, Kiersten Ruisard, Kan Wang

import itertools
import glue
from glue import pipeline,segmentsUtils,segments
import os
import socket
from lalapps import inspiralutils
import uuid
import ast
import pdb
import string
from math import floor,ceil,log,pow
import sys
import random
from itertools import permutations
import shutil
import numpy as np

# We use the GLUE pipeline utilities to construct classes for each
# type of job. Each class has inputs and outputs, which are used to
# join together types of jobs into a DAG.

class Event():
  """
  Represents a unique event to run on
  """
  new_id=itertools.count().next
  def __init__(self,trig_time=None,SimInspiral=None,SimBurst=None,SnglInspiral=None,CoincInspiral=None,event_id=None,timeslide_dict=None,GID=None,ifos=None, duration=None,srate=None,trigSNR=None,fhigh=None):
    self.trig_time=trig_time
    self.injection=SimInspiral
    self.burstinjection=SimBurst
    self.sngltrigger=SnglInspiral
    if timeslide_dict is None:
      self.timeslides={}
    else:
      self.timeslides=timeslide_dict
    self.GID=GID
    self.coinctrigger=CoincInspiral
    if ifos is None:
      self.ifos = []
    else:
      self.ifos = ifos
    self.duration = duration
    self.srate = srate
    self.trigSNR = trigSNR
    self.fhigh = fhigh
    if event_id is not None:
        self.event_id=event_id
    else:
        self.event_id=Event.new_id()
    if self.injection is not None:
        self.trig_time=self.injection.get_end()
        if event_id is None: self.event_id=int(str(self.injection.simulation_id).split(':')[2])
    if self.burstinjection is not None:
        self.trig_time=self.burstinjection.get_end()
        if event_id is None: self.event_id=int(str(self.burstinjection.simulation_id).split(':')[2])
    if self.sngltrigger is not None:
        self.trig_time=self.sngltrigger.get_end()
        self.event_id=int(str(self.sngltrigger.event_id).split(':')[2])
    if self.coinctrigger is not None:
        self.trig_time=self.coinctrigger.end_time + 1.0e-9 * self.coinctrigger.end_time_ns
    if self.GID is not None:
        self.event_id=int(''.join(i for i in self.GID if i.isdigit()))
    self.engine_opts={}
  def set_engine_option(self,opt,val):
    """
    Can set event-specific options for the engine nodes
    using this method, e.g. ev.set_engine_option('time-min','1083759273')
    """
    self.engine_opts[opt]=val
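
# Illustrative sketch (not part of the original module): an Event can be built by hand
# for a known GPS time, and per-event engine options attached with set_engine_option().
# The trigger time, detector list and option values below are placeholders.
#   ev = Event(trig_time=1126259462.0, ifos=['H1','L1'])
#   ev.set_engine_option('srate','2048')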

dummyCacheNames=['LALLIGO','LALVirgo','LALAdLIGO','LALAdVirgo']

def readLValert(SNRthreshold=0,gid=None,flow=40.0,gracedb="gracedb",basepath="./",downloadpsd=True):
  """
  Parse LV alert file, containing coinc, sngl, coinc_event_map,
  and create a list of Events as input for the pipeline.
  Based on Chris Pankow's script.
  """
  output=[]
  from glue.ligolw import utils
  from glue.ligolw import lsctables
  from glue.ligolw import ligolw
  class PSDContentHandler(ligolw.LIGOLWContentHandler):
    pass
  lsctables.use_in(PSDContentHandler)
  from glue.ligolw import param
  from glue.ligolw import array
  import subprocess
  from lal import series as lalseries
  from subprocess import Popen, PIPE
  cwd=os.getcwd()
  os.chdir(basepath)
  print "%s download %s coinc.xml"%(gracedb,gid)
  subprocess.call([gracedb,"download", gid ,"coinc.xml"])
  xmldoc=utils.load_filename("coinc.xml",contenthandler = PSDContentHandler)
  coinctable = lsctables.CoincInspiralTable.get_table(xmldoc)
  coinc_events = [event for event in coinctable]
  sngltable = lsctables.SnglInspiralTable.get_table(xmldoc)
  sngl_events = [event for event in sngltable]
  #Issue: identify IFOs with good data that did not produce a trigger
  #search_summary = lsctables.getTablesByType(xmldoc, lsctables.SearchSummaryTable)[0]
  #ifos = search_summary[0].ifos.split(",")
  #coinc_table = lsctables.getTablesByType(xmldoc, lsctables.CoincTable)[0]
  #ifos = coinc_table[0].instruments.split(",")
  trigSNR = 2.0*coinctable[0].snr #The factor of 2.0 is because detection pipelines recover SNR lower than PE can recover.
  # Parse PSD
  srate_psdfile=16384
  ifos=None
  if downloadpsd:
    print "gracedb download %s psd.xml.gz" % gid
    subprocess.call([gracedb,"download", gid ,"psd.xml.gz"])
    xmlpsd = lalseries.read_psd_xmldoc(utils.load_filename('psd.xml.gz',contenthandler = lalseries.PSDContentHandler))
    # Note: This finds the active IFOs by looking for available PSDs
    # Is there another way of getting this info?
    ifos = xmlpsd.keys()
  psdasciidic=None
  fhigh=None
  if os.path.exists("psd.xml.gz"):
    psdasciidic=get_xml_psds(os.path.realpath("./psd.xml.gz"),ifos,os.path.realpath('./PSDs'),end_time=None)
    combine=np.loadtxt(psdasciidic[psdasciidic.keys()[0]])
    srate_psdfile = pow(2.0, ceil( log(float(combine[-1][0]), 2) ) ) * 2
  else:
    print "Failed to gracedb download %s psd.xml.gz. lalinference will estimate the psd itself." % gid
  # Logic for template duration and sample rate disabled
  coinc_map = lsctables.CoincMapTable.get_table(xmldoc)
  for coinc in coinc_events:
    these_sngls = [e for e in sngl_events if e.event_id in [c.event_id for c in coinc_map if c.coinc_event_id == coinc.coinc_event_id] ]
    dur=[]
    srate=[]
    for e in these_sngls:
      # Review: Replace this with a call to LALSimulation function at some point
      p=Popen(["lalapps_chirplen","--flow",str(flow),"-m1",str(e.mass1),"-m2",str(e.mass2)],stdout=PIPE, stderr=PIPE, stdin=PIPE)
      strlen = p.stdout.read()
      dur.append(pow(2.0, ceil( log(max(8.0,float(strlen.splitlines()[2].split()[5]) + 2.0), 2) ) ) )
      srate.append(pow(2.0, ceil( log(float(strlen.splitlines()[1].split()[5]), 2) ) ) * 2 )
    if max(srate)<srate_psdfile:
      srate = max(srate)
    else:
      srate = srate_psdfile
      fhigh = srate_psdfile/2.0 * 0.95 # Because of the drop-off near Nyquist of the PSD from gstlal
    ev=Event(CoincInspiral=coinc, GID=gid, ifos = ifos, duration = max(dur), srate = srate, trigSNR = trigSNR, fhigh = fhigh)
    if(coinc.snr>SNRthreshold): output.append(ev)

  print "Found %d coinc events in table." % len(coinc_events)
  os.chdir(cwd)
  return output
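
# Illustrative usage sketch (the GraceDB id below is a placeholder): build Event objects
# for a low-latency follow-up directly from a GraceDB entry, downloading coinc.xml and
# the online PSD into the working directory.
#   events = readLValert(gid='G12345', gracedb='gracedb', basepath='./', downloadpsd=True)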

def open_pipedown_database(database_filename,tmp_space):
    """
    Open the connection to the pipedown database
    """
    if not os.access(database_filename,os.R_OK):
        raise Exception('Unable to open input file: %s'%(database_filename))
    from glue.ligolw import dbtables
    import sqlite3
    working_filename=dbtables.get_connection_filename(database_filename,tmp_path=tmp_space)
    connection = sqlite3.connect(working_filename)
    if tmp_space:
        dbtables.set_temp_store_directory(connection,tmp_space)
    #dbtables.DBTable_set_connection(connection)
    return (connection,working_filename)

def get_zerolag_lloid(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1, min_cfar=-1):
	"""
	Returns a list of Event objects
	from the pipedown database. Can dump some stats to dumpfile if given,
	and filter by gpsstart and gpsend to reduce the number, or specify
	max_cfar to select by combined FAR
	"""
	output={}
	if gpsstart is not None: gpsstart=float(gpsstart)
	if gpsend is not None: gpsend=float(gpsend)
	# Get coincs
	get_coincs = "SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
		FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name=='sngl_inspiral' and coinc_event_map.event_id ==\
		sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) \
		join coinc_inspiral on (coinc_event.coinc_event_id==coinc_inspiral.coinc_event_id) \
        WHERE coinc_event.time_slide_id=='time_slide:time_slide_id:1'\
		"
	if gpsstart is not None:
		get_coincs=get_coincs+' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 > %f'%(gpsstart)
	if gpsend is not None:
		get_coincs=get_coincs+' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 < %f'%(gpsend)
	if max_cfar !=-1:
		get_coincs=get_coincs+' and coinc_inspiral.combined_far < %f'%(max_cfar)
	if min_cfar != -1:
		get_coincs=get_coincs+' and coinc_inspiral.combined_far > %f'%(min_cfar)
	db_out=database_connection.cursor().execute(get_coincs)
	extra={}
	for (sngl_time, ifo, coinc_id, snr, chisq, cfar) in db_out:
		coinc_id=int(coinc_id.split(":")[-1])
		if not coinc_id in output.keys():
			output[coinc_id]=Event(trig_time=sngl_time,timeslide_dict={},event_id=int(coinc_id))
			extra[coinc_id]={}
		output[coinc_id].timeslides[ifo]=0
		output[coinc_id].ifos.append(ifo)
		extra[coinc_id][ifo]={'snr':snr,'chisq':chisq,'cfar':cfar}
	if dumpfile is not None:
		fh=open(dumpfile,'w')
		for co in output.keys():
			for ifo in output[co].ifos:
				fh.write('%s %s %s %s %s %s %s\n'%(str(co),ifo,str(output[co].trig_time),str(output[co].timeslides[ifo]),str(extra[co][ifo]['snr']),str(extra[co][ifo]['chisq']),str(extra[co][ifo]['cfar'])))
		fh.close()
	return output.values()


def get_zerolag_pipedown(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1, min_cfar=-1):
	"""
	Returns a list of Event objects
	from the pipedown database. Can dump some stats to dumpfile if given,
	and filter by gpsstart and gpsend to reduce the number, or specify
	max_cfar to select by combined FAR
	"""
	output={}
	if gpsstart is not None: gpsstart=float(gpsstart)
	if gpsend is not None: gpsend=float(gpsend)
	# Get coincs
	get_coincs = "SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
		FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name=='sngl_inspiral' and coinc_event_map.event_id ==\
		sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) \
		join coinc_inspiral on (coinc_event.coinc_event_id==coinc_inspiral.coinc_event_id) \
		WHERE coinc_event.time_slide_id=='time_slide:time_slide_id:10049'\
		"
	if gpsstart is not None:
		get_coincs=get_coincs+' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 > %f'%(gpsstart)
	if gpsend is not None:
		get_coincs=get_coincs+' and coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1.0e-9 < %f'%(gpsend)
	if max_cfar !=-1:
		get_coincs=get_coincs+' and coinc_inspiral.combined_far < %f'%(max_cfar)
	if min_cfar != -1:
		get_coincs=get_coincs+' and coinc_inspiral.combined_far > %f'%(min_cfar)
	db_out=database_connection.cursor().execute(get_coincs)
	extra={}
	for (sngl_time, ifo, coinc_id, snr, chisq, cfar) in db_out:
		coinc_id=int(coinc_id.split(":")[-1])
		if not coinc_id in output.keys():
			output[coinc_id]=Event(trig_time=sngl_time,timeslide_dict={},event_id=int(coinc_id))
			extra[coinc_id]={}
		output[coinc_id].timeslides[ifo]=0
		output[coinc_id].ifos.append(ifo)
		extra[coinc_id][ifo]={'snr':snr,'chisq':chisq,'cfar':cfar}
	if dumpfile is not None:
		fh=open(dumpfile,'w')
		for co in output.keys():
			for ifo in output[co].ifos:
				fh.write('%s %s %s %s %s %s %s\n'%(str(co),ifo,str(output[co].trig_time),str(output[co].timeslides[ifo]),str(extra[co][ifo]['snr']),str(extra[co][ifo]['chisq']),str(extra[co][ifo]['cfar'])))
		fh.close()
	return output.values()
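
# Illustrative usage sketch (file name, GPS range and threshold are placeholders):
# open a pipedown sqlite database and extract the zero-lag coincidences as Event objects.
#   connection,working_filename = open_pipedown_database('pipedown.sqlite','/tmp')
#   events = get_zerolag_pipedown(connection, gpsstart=966384015, gpsend=971568015, max_cfar=0.01)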


def get_timeslides_pipedown(database_connection, dumpfile=None, gpsstart=None, gpsend=None, max_cfar=-1):
	"""
	Returns a list of Event objects
	with times and timeslide offsets
	"""
	output={}
	if gpsstart is not None: gpsstart=float(gpsstart)
	if gpsend is not None: gpsend=float(gpsend)
	db_segments=[]
	sql_seg_query="SELECT search_summary.out_start_time, search_summary.out_end_time from search_summary join process on process.process_id==search_summary.process_id where process.program=='thinca'"
	db_out = database_connection.cursor().execute(sql_seg_query)
	for d in db_out:
		if d not in db_segments:
			db_segments.append(d)
	seglist=segments.segmentlist([segments.segment(d[0],d[1]) for d in db_segments])
	db_out_saved=[]
	# Get coincidences
	get_coincs="SELECT sngl_inspiral.end_time+sngl_inspiral.end_time_ns*1e-9,time_slide.offset,sngl_inspiral.ifo,coinc_event.coinc_event_id,sngl_inspiral.snr,sngl_inspiral.chisq,coinc_inspiral.combined_far \
		    FROM sngl_inspiral join coinc_event_map on (coinc_event_map.table_name == 'sngl_inspiral' and coinc_event_map.event_id \
		    == sngl_inspiral.event_id) join coinc_event on (coinc_event.coinc_event_id==coinc_event_map.coinc_event_id) join time_slide\
		    on (time_slide.time_slide_id == coinc_event.time_slide_id and time_slide.instrument==sngl_inspiral.ifo)\
		    join coinc_inspiral on (coinc_inspiral.coinc_event_id==coinc_event.coinc_event_id) where coinc_event.time_slide_id!='time_slide:time_slide_id:10049'"
	joinstr = ' and '
	if gpsstart is not None:
		get_coincs=get_coincs+ joinstr + ' coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1e-9 > %f'%(gpsstart)
	if gpsend is not None:
		get_coincs=get_coincs+ joinstr+' coinc_inspiral.end_time+coinc_inspiral.end_time_ns*1e-9 <%f'%(gpsend)
	if max_cfar!=-1:
		get_coincs=get_coincs+joinstr+' coinc_inspiral.combined_far < %f'%(max_cfar)
	db_out=database_connection.cursor().execute(get_coincs)
	from pylal import SnglInspiralUtils
	extra={}
	for (sngl_time, slide, ifo, coinc_id, snr, chisq, cfar) in db_out:
		coinc_id=int(coinc_id.split(":")[-1])
		seg=filter(lambda seg:sngl_time in seg,seglist)[0]
		slid_time = SnglInspiralUtils.slideTimeOnRing(sngl_time,slide,seg)
		if not coinc_id in output.keys():
			output[coinc_id]=Event(trig_time=slid_time,timeslide_dict={},event_id=int(coinc_id))
			extra[coinc_id]={}
		output[coinc_id].timeslides[ifo]=slid_time-sngl_time
		output[coinc_id].ifos.append(ifo)
		extra[coinc_id][ifo]={'snr':snr,'chisq':chisq,'cfar':cfar}
	if dumpfile is not None:
		fh=open(dumpfile,'w')
		for co in output.keys():
			for ifo in output[co].ifos:
				fh.write('%s %s %s %s %s %s %s\n'%(str(co),ifo,str(output[co].trig_time),str(output[co].timeslides[ifo]),str(extra[co][ifo]['snr']),str(extra[co][ifo]['chisq']),str(extra[co][ifo]['cfar'])))
		fh.close()
	return output.values()

def mkdirs(path):
  """
  Helper function. Make the given directory, creating intermediate
  dirs if necessary, and don't complain about it already existing.
  """
  if os.access(path,os.W_OK) and os.path.isdir(path): return
  else: os.makedirs(path)

def chooseEngineNode(name):
  if name=='lalinferencenest':
    return LALInferenceNestNode
  if name=='lalinferenceburst':
    return LALInferenceBurstNode
  if name=='lalinferencemcmc':
    return LALInferenceMCMCNode
  if name=='lalinferencebambi' or name=='lalinferencebambimpi':
    return LALInferenceBAMBINode
  if name=='lalinferencedatadump':
    return LALInferenceDataDumpNode
  return EngineNode

def get_engine_name(cp):
    name=cp.get('analysis','engine')
    if name=='random':
        engine_list=['lalinferencenest','lalinferencemcmc','lalinferencebambimpi']
        if cp.has_option('input','gid'):
            gid=cp.get('input','gid')
            engine_number=int(''.join(i for i in gid if i.isdigit())) % 2
        else:
            engine_number=random.randint(0,1)
        return engine_list[engine_number]
    else:
        return name


def scan_timefile(timefile):
    import re
    p=re.compile('[\d.]+')
    times=[]
    timefilehandle=open(timefile,'r')
    for time in timefilehandle:
      if not p.match(time):
        continue
      if float(time) in times:
        print 'Skipping duplicate time %s'%(time)
        continue
      print 'Read time %s'%(time)
      times.append(float(time))
    timefilehandle.close()
    return times
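
# scan_timefile expects a plain ASCII file with one GPS time per line; lines that do not
# start with a digit or '.' are skipped and duplicate times are ignored. Illustrative
# contents of such a file (the times are placeholders):
#   1126259462.39
#   1187008882.43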

def get_xml_psds(psdxml,ifos,outpath,end_time=None):
  """
  Get a psd.xml.gz file and:
  1) Reads it
  2) Converts PSD (10e-44) -> ASD ( ~10e-22)
  3) Checks the psd file contains all the IFOs we want to analyze
  4) Writes down the ASDs into an ascii file for each IFO in psd.xml.gz. The name of the file contains the trigtime (if given) and the ifo name.
  Input:
    psdxml: psd.xml.gz file
    ifos: list of ifos used for the analysis
    outpath: path where the ascii ASD will be written to
    (end_time): trigtime for this event. Will be used as part of the ASD file name
  """
  lal=1
  from glue.ligolw import utils
  try:
    #from pylal import series
    from lal import series as series
    lal=0
  except ImportError:
    print "ERROR, cannot import pylal.series in bppu/get_xml_psds()\n"
    exit(1)

  out={}
  if not os.path.isdir(outpath):
    os.makedirs(outpath)
  if end_time is not None:
    time=repr(float(end_time))
  else:
    time=''
  #check we don't already have ALL the psd files #
  got_all=1
  for ifo in ifos:
    path_to_ascii_psd=os.path.join(outpath,ifo+'_psd_'+time+'.txt')
    # Check we don't already have that ascii (e.g. because we are running parallel runs of the same event)
    if os.path.isfile(path_to_ascii_psd):
      got_all*=1
    else:
      got_all*=0
  if got_all==1:
    #print "Already have PSD files. Nothing to do...\n"
    for ifo in ifos:
      out[ifo]=os.path.join(outpath,ifo+'_psd_'+time+'.txt')
    return out

  # We need to convert the PSD for one or more IFOs. Open the file
  if not os.path.isfile(psdxml):
    print "ERROR: impossible to open the psd file %s. Exiting...\n"%psdxml
    sys.exit(1)
  xmlpsd =  series.read_psd_xmldoc(utils.load_filename(psdxml,contenthandler = series.PSDContentHandler))
  # Check the psd file contains all the IFOs we want to analyze
  for ifo in ifos:
    if not ifo in [i.encode('ascii') for i in xmlpsd.keys()]:
      print "ERROR. The PSD for the ifo %s does not seem to be contained in %s\n"%(ifo,psdxml)
      sys.exit(1)
  #loop over ifos in psd xml file
  for instrument in xmlpsd.keys():
    #name of the ascii file we are going to write the PSD into
    path_to_ascii_psd=os.path.join(outpath,instrument.encode('ascii')+'_psd_'+time+'.txt')
    # Check we don't already have that ascii (e.g. because we are running parallel runs of the same event)
    if os.path.isfile(path_to_ascii_psd):
      continue
    # get data for the IFO
    ifodata=xmlpsd[instrument]
    #check data is not empty
    if ifodata is None:
      continue
    # we have data. Get psd array
    if lal==0:
      #pylal stores the series in ifodata.data
      data=ifodata
    else:
      # lal stores it in ifodata.data.data
      data=ifodata.data
    # Fill a two columns array of (freq, psd) and save it in the ascii file
    f0=ifodata.f0
    deltaF=ifodata.deltaF

    combine=[]

    for i in np.arange(len(data.data.data)) :
      combine.append([f0+i*deltaF,np.sqrt(data.data.data[i])])
    np.savetxt(path_to_ascii_psd,combine)
    ifo=instrument.encode('ascii')
    # set node.psds dictionary with the path to the ascii files
    out[ifo]=os.path.join(outpath,ifo+'_psd_'+time+'.txt')
  return out
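
# Illustrative usage sketch (file and directory names are placeholders): convert a
# downloaded psd.xml.gz into per-IFO ASCII ASD files and get back a dict of paths
# keyed by IFO name, e.g.
#   asd_paths = get_xml_psds('psd.xml.gz',['H1','L1'],'PSDs',end_time=None)
#   # asd_paths['H1'] -> 'PSDs/H1_psd_.txt'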

def get_roq_mchirp_priors(path, roq_paths, roq_params, key):

  mc_priors = {}

  for roq in roq_paths:
    params=os.path.join(path,roq,'params.dat')
    roq_params[roq]=np.genfromtxt(params,names=True)
    mc_priors[roq]=[float(roq_params[roq]['chirpmassmin']),float(roq_params[roq]['chirpmassmax'])]
  ordered_roq_paths=[item[0] for item in sorted(roq_params.items(), key=key)][::-1]
  i=0
  for roq in ordered_roq_paths:
    if i>0:
      # change min, just set to the max of the previous one since we have already aligned it in the previous iteration of this loop
      #mc_priors[roq][0]+= (mc_priors[roq_lengths[i-1]][1]-mc_priors[roq][0])/2.
      mc_priors[roq][0]=mc_priors[ordered_roq_paths[i-1]][1]
    if i<len(roq_paths)-1:
      mc_priors[roq][1]-= (mc_priors[roq][1]- mc_priors[ordered_roq_paths[i+1]][0])/2.
    i+=1
  
  return mc_priors
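
# Each ROQ basis directory is expected to contain a params.dat readable by
# np.genfromtxt(names=True), i.e. a whitespace-separated table with a header row that
# includes at least the chirpmassmin and chirpmassmax columns used above. Illustrative
# layout (the values are placeholders):
#   chirpmassmin chirpmassmax
#   1.4          2.6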

def create_pfn_tuple(filename,protocol='file://',site='local'):
    return( (os.path.basename(filename),protocol+os.path.abspath(filename),site) )
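
# Illustrative example (the path is a placeholder) of the (LFN, PFN, site) tuple returned above:
#   create_pfn_tuple('/home/albert/inj.xml')
#   -> ('inj.xml', 'file:///home/albert/inj.xml', 'local')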

class LALInferencePipelineDAG(pipeline.CondorDAG):
  def __init__(self,cp,dax=False,first_dag=True,previous_dag=None,site='local'):
    self.subfiles=[]
    self.config=cp
    self.engine=get_engine_name(cp)
    self.EngineNode=chooseEngineNode(self.engine)
    self.site=site
    if cp.has_option('paths','basedir'):
      self.basepath=cp.get('paths','basedir')
    else:
      self.basepath=os.getcwd()
      print 'No basepath specified, using current directory: %s'%(self.basepath)
    mkdirs(self.basepath)
    if dax:
        os.chdir(self.basepath)
    self.posteriorpath=os.path.join(self.basepath,'posterior_samples')
    mkdirs(self.posteriorpath)
    if first_dag:
      daglogdir=cp.get('paths','daglogdir')
      mkdirs(daglogdir)
      self.daglogfile=os.path.join(daglogdir,'lalinference_pipeline-'+str(uuid.uuid1())+'.log')
      pipeline.CondorDAG.__init__(self,self.daglogfile,dax=dax)
    elif not first_dag and previous_dag is not None:
      daglogdir=cp.get('paths','daglogdir')
      mkdirs(daglogdir)
      self.daglogfile=os.path.join(daglogdir,'lalinference_pipeline-'+str(uuid.uuid1())+'.log')
      pipeline.CondorDAG.__init__(self,self.daglogfile,dax=dax)
      for node in previous_dag.get_nodes():
        self.add_node(node)
    if cp.has_option('paths','cachedir'):
      self.cachepath=cp.get('paths','cachedir')
    else:
      self.cachepath=os.path.join(self.basepath,'caches')
    mkdirs(self.cachepath)
    if cp.has_option('paths','logdir'):
      self.logpath=cp.get('paths','logdir')
    else:
      self.logpath=os.path.join(self.basepath,'log')
    mkdirs(self.logpath)
    if cp.has_option('analysis','ifos'):
      self.ifos=ast.literal_eval(cp.get('analysis','ifos'))
    else:
      self.ifos=['H1','L1','V1']
    self.segments={}
    if cp.has_option('datafind','veto-categories'):
      self.veto_categories=cp.get('datafind','veto-categories')
    else: self.veto_categories=[]
    for ifo in self.ifos:
      self.segments[ifo]=[]
    self.computeroqweightsnodes={}
    self.bayeslinenodes={}
    self.dq={}
    self.frtypes=ast.literal_eval(cp.get('datafind','types'))
    self.channels=ast.literal_eval(cp.get('data','channels'))
    self.use_available_data=False
    self.webdir=cp.get('paths','webdir')
    if cp.has_option('analysis','dataseed'):
      self.dataseed=cp.getint('analysis','dataseed')
    else:
      self.dataseed=None
    # Set up necessary job files.
    self.prenodes={}
    self.datafind_job = pipeline.LSCDataFindJob(self.cachepath,self.logpath,self.config,dax=self.is_dax())
    self.datafind_job.add_opt('url-type','file')
    if cp.has_option('analysis','accounting_group'):
      self.datafind_job.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
    if cp.has_option('analysis','accounting_group_user'):
      self.datafind_job.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
    self.datafind_job.set_sub_file(os.path.abspath(os.path.join(self.basepath,'datafind.sub')))
    self.preengine_job = EngineJob(self.config, os.path.join(self.basepath,'prelalinference.sub'),self.logpath,engine='lalinferencedatadump',ispreengine=True,dax=self.is_dax())
    self.preengine_job.set_grid_site('local')
    self.preengine_job.set_universe('vanilla')
    if self.config.has_option('condor','computeroqweights'):
      self.computeroqweights_job = ROMJob(self.config,os.path.join(self.basepath,'computeroqweights.sub'),self.logpath,dax=self.is_dax())
      self.computeroqweights_job.set_grid_site('local')
    if self.config.has_option('condor','bayesline'):
      self.bayesline_job = BayesLineJob(self.config,os.path.join(self.basepath,'bayesline.sub'),self.logpath,dax=self.is_dax())
      self.bayesline_job.set_grid_site('local')
    # Need to create a job file for each IFO combination
    self.engine_jobs={}
    ifocombos=[]
    for N in range(1,len(self.ifos)+1):
        for a in permutations(self.ifos,N):
            ifocombos.append(a)
    for ifos in ifocombos:
        self.engine_jobs[ifos] = EngineJob(self.config, os.path.join(self.basepath,'engine_%s.sub'%(reduce(lambda x,y:x+y, map(str,ifos)))),self.logpath,engine=self.engine,dax=self.is_dax(), site=site)
    self.results_page_job = ResultsPageJob(self.config,os.path.join(self.basepath,'resultspage.sub'),self.logpath,dax=self.is_dax())
    self.results_page_job.set_grid_site('local')
    self.cotest_results_page_job = ResultsPageJob(self.config,os.path.join(self.basepath,'resultspagecoherent.sub'),self.logpath,dax=self.is_dax())
    self.cotest_results_page_job.set_grid_site('local')
    self.merge_job = MergeNSJob(self.config,os.path.join(self.basepath,'merge_runs.sub'),self.logpath,dax=self.is_dax())
    self.merge_job.set_grid_site('local')
    self.coherence_test_job = CoherenceTestJob(self.config,os.path.join(self.basepath,'coherence_test.sub'),self.logpath,dax=self.is_dax())
    self.coherence_test_job.set_grid_site('local')
    self.gracedbjob = GraceDBJob(self.config,os.path.join(self.basepath,'gracedb.sub'),self.logpath,dax=self.is_dax())
    self.gracedbjob.set_grid_site('local')
    # Process the input to build list of analyses to do
    self.events=self.setup_from_inputs()

    # Sanity checking
    if len(self.events)==0:
      print 'No input events found, please check your config if you expect some events'
    self.times=[e.trig_time for e in self.events]

    # Set up the segments
    if not (self.config.has_option('input','gps-start-time') and self.config.has_option('input','gps-end-time')) and len(self.times)>0:
      (mintime,maxtime)=self.get_required_data(self.times)
      if not self.config.has_option('input','gps-start-time'):
        self.config.set('input','gps-start-time',str(int(floor(mintime))))
      if not self.config.has_option('input','gps-end-time'):
        self.config.set('input','gps-end-time',str(int(ceil(maxtime))))
    self.add_science_segments()

    # Save the final configuration that is being used
    # first to the run dir
    conffilename=os.path.join(self.basepath,'config.ini')
    with open(conffilename,'wb') as conffile:
      self.config.write(conffile)
    if self.config.has_option('paths','webdir'):
      mkdirs(self.config.get('paths','webdir'))
      with open(os.path.join(self.config.get('paths','webdir'),'config.ini'),'wb') as conffile:
        self.config.write(conffile)

    # Generate the DAG according to the config given
    for event in self.events: self.add_full_analysis(event)
    self.add_skyarea_followup()
    if self.config.has_option('analysis','upload-to-gracedb'):
      if self.config.getboolean('analysis','upload-to-gracedb'):
        self.add_gracedb_FITSskymap_upload(self.events[0],engine=self.engine)
    self.dagfilename="lalinference_%s-%s"%(self.config.get('input','gps-start-time'),self.config.get('input','gps-end-time'))
    self.set_dag_file(self.dagfilename)
    if self.is_dax():
      self.set_dax_file(self.dagfilename)

  def add_skyarea_followup(self):
    # Add skyarea jobs if the executable is given
    # Do one for each results page for now
    if self.config.has_option('condor','skyarea'):
      self.skyareajob=SkyAreaJob(self.config,os.path.join(self.basepath,'skyarea.sub'),self.logpath,dax=self.is_dax())
      respagenodes=filter(lambda x: isinstance(x,ResultsPageNode) ,self.get_nodes())
      if self.engine=='lalinferenceburst':
          prefix='LIB_'
      else:
          prefix='LALInference_'
      for p in respagenodes:
          skyareanode=SkyAreaNode(self.skyareajob,prefix=prefix)
          skyareanode.add_resultspage_parent(p)
          skyareanode.set_ifos(p.ifos)
          self.add_node(skyareanode)

  def add_full_analysis(self,event):
    if self.engine=='lalinferencenest' or  self.engine=='lalinferenceburst':
      result=self.add_full_analysis_lalinferencenest(event)
    elif self.engine=='lalinferencemcmc':
      result=self.add_full_analysis_lalinferencemcmc(event)
    elif self.engine=='lalinferencebambi' or self.engine=='lalinferencebambimpi':
      result=self.add_full_analysis_lalinferencebambi(event)
    return result


  def create_frame_pfn_file(self):
    """
    Create a pegasus cache file name, uses inspiralutils
    """
    import inspiralutils as iu
    gpsstart=self.config.get('input','gps-start-time')
    gpsend=self.config.get('input','gps-end-time')
    pfnfile=iu.create_frame_pfn_file(self.frtypes,gpsstart,gpsend)
    return pfnfile

  def get_required_data(self,times):
    """
    Calculate the data that will be needed to process all events
    """
    #psdlength = self.config.getint('input','max-psd-length')
    padding=self.config.getint('input','padding')
    if self.config.has_option('engine','seglen') or self.config.has_option('lalinference','seglen'):
      if self.config.has_option('engine','seglen'):
        seglen = self.config.getint('engine','seglen')
      if self.config.has_option('lalinference','seglen'):
        seglen = self.config.getint('lalinference','seglen')

      if os.path.isfile(os.path.join(self.basepath,'psd.xml.gz')):
        psdlength = 0
      else:
        psdlength = 32*seglen
    else:
      seglen = max(e.duration for e in self.events)
      if os.path.isfile(os.path.join(self.basepath,'psd.xml.gz')):
        psdlength = 0
      else:
        psdlength = 32*seglen
    # Assume that the data interval is (end_time - seglen -padding , end_time + psdlength +padding )
    # -> change to (trig_time - seglen - padding - psdlength + 2 , trig_time + padding + 2) to estimate the psd before the trigger for online follow-up.
    # Also require padding before start time
    return (min(times)-padding-seglen-psdlength+2,max(times)+padding+2)
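
  # Worked example (numbers are placeholders): with seglen=32, padding=16 and hence
  # psdlength=32*seglen=1024 (when no psd.xml.gz is present), a single trigger at GPS
  # time t is covered by the interval (t-16-32-1024+2, t+16+2), i.e. the PSD segment
  # lies before the trigger as described in the comments above.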

  def setup_from_times(self,times):
    """
    Generate a DAG from a list of times
    """
    for time in self.times:
      self.add_full_analysis(Event(trig_time=time))

  def select_events(self):
    """
    Read events from the config parser. Understands both ranges and comma separated events, or combinations
    e.g. events=[0,1,5:10,21] adds to the analysis the events: 0,1,5,6,7,8,9,10 and 21
    """
    events=[]
    times=[]
    raw_events=self.config.get('input','events').replace('[','').replace(']','').split(',')
    for raw_event in raw_events:
        if ':' in raw_event:
            limits=raw_event.split(':')
            if len(limits) != 2:
                print "Error: in event config option; ':' must separate two numbers."
                exit(0)
            low=int(limits[0])
            high=int(limits[1])
            if low>high:
                events.extend(range(int(high),int(low)+1))
            elif high>low:
                events.extend(range(int(low),int(high)+1))
        else:
            events.append(int(raw_event))
    return events

  def setup_from_inputs(self):
    """
    Scan the list of inputs, i.e.
    gps-time-file, injection-file, sngl-inspiral-file, coinc-inspiral-file, pipedown-database
    in the [input] section of the ini file,
    and process the events found therein.
    """
    events=[]
    gpsstart=None
    gpsend=None
    if self.config.has_option('input','gps-start-time'):
      gpsstart=self.config.getfloat('input','gps-start-time')
    if self.config.has_option('input','gps-end-time'):
      gpsend=self.config.getfloat('input','gps-end-time')
    inputnames=['gps-time-file','burst-injection-file','injection-file','sngl-inspiral-file','coinc-inspiral-file','pipedown-db','gid','gstlal-db']
    ReadInputFromList=sum([ 1 if self.config.has_option('input',name) else 0 for name in inputnames])
    # If no input events given, just return an empty list (e.g. for PP pipeline)
    if ReadInputFromList!=1 and (gpsstart is None or gpsend is None):
        return []
    # Review: Clean up this section
    if self.config.has_option('input','events'):
      selected_events=self.config.get('input','events')
      print 'Selected events %s'%(str(selected_events))

      if selected_events=='all':
          selected_events=None
      else:
          selected_events=self.select_events()
    else:
        selected_events=None

    # No input file given, analyse the entire time stretch between gpsstart and gpsend
    if self.config.has_option('input','analyse-all-time') and self.config.getboolean('input','analyse-all-time')==True:
        print 'Setting up for analysis of continuous time stretch %f - %f'%(gpsstart,gpsend)
        seglen=self.config.getfloat('engine','seglen')
        if(self.config.has_option('input','segment-overlap')):
          overlap=self.config.getfloat('input','segment-overlap')
        else:
          overlap=32.
        if(overlap>seglen):
          print 'ERROR: segment-overlap is greater than seglen'
          sys.exit(1)
        # Now divide gpsstart - gpsend into jobs of seglen - overlap length
        t=gpsstart
        events=[]
        while(t<gpsend):
            ev=Event(trig_time=t+seglen-2)
            ev.set_engine_option('segment-start',str(t-overlap))
            ev.set_engine_option('time-min',str(t))
            tMax=t + seglen - overlap
            if tMax>=gpsend:
                tMax=gpsend
            ev.set_engine_option('time-max',str(tMax))
            events.append(ev)
            t=tMax
        return events

    # ASCII list of GPS times
    if self.config.has_option('input','gps-time-file'):
      times=scan_timefile(self.config.get('input','gps-time-file'))
      events=[Event(trig_time=time) for time in times]
    # Siminspiral Table
    if self.config.has_option('input','injection-file'):
      from pylal import SimInspiralUtils
      injTable=SimInspiralUtils.ReadSimInspiralFromFiles([self.config.get('input','injection-file')])
      events=[Event(SimInspiral=inj) for inj in injTable]
      self.add_pfn_cache([create_pfn_tuple(self.config.get('input','injection-file'))])
    # SimBurst Table
    if self.config.has_option('input','burst-injection-file'):
      #from pylal import SimBurstUtils
      from glue.ligolw import lsctables
      from glue.ligolw import utils
      from glue.ligolw import ligolw
      injfile=self.config.get('input','burst-injection-file')
      class PSDContentHandler(ligolw.LIGOLWContentHandler):
        pass
      lsctables.use_in(PSDContentHandler)
      injTable=lsctables.SimBurstTable.get_table(utils.load_filename(injfile,contenthandler = PSDContentHandler))
      events=[Event(SimBurst=inj) for inj in injTable]
      self.add_pfn_cache([create_pfn_tuple(self.config.get('input','burst-injection-file'))])
    # SnglInspiral Table
    if self.config.has_option('input','sngl-inspiral-file'):
      from pylal import SnglInspiralUtils
      trigTable=SnglInspiralUtils.ReadSnglInspiralFromFiles([self.config.get('input','sngl-inspiral-file')])
      events=[Event(SnglInspiral=trig) for trig in trigTable]
      self.add_pfn_cache([create_pfn_tuple(self.config.get('input','sngl-inspiral-file'))])
    if self.config.has_option('input','coinc-inspiral-file'):
      from pylal import CoincInspiralUtils
      coincTable = CoincInspiralUtils.readCoincInspiralFromFiles([self.config.get('input','coinc-inspiral-file')])