Commit f52af4d8 authored by Jonah Kanner

Adding plot script for ECC

git-svn-id: https://svn.ligo.caltech.edu/svn/bayeswave/branches/ecchack@85 c56465c9-8126-4a4f-9d7d-ac845eff4865
parent ab6a79cf
Running BayesWaveBurst with condor
For this example, the top-level script is condor_setup.sh. It takes a single
argument: the GPS time of your trigger.
./condor_setup.sh 1076894532.3
But all the action happens in doit.sh. So, before you run condor_setup.sh,
open doit.sh in your favorite editor and edit the BayesWaveBurst command and
frame type as needed.
I think that's it!
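A typical session, assuming you set up a few triggers and then submit the
accumulated DAG (the GPS times here are made up), might look like:
./condor_setup.sh 1076894532.3
./condor_setup.sh 1076894601.0
condor_submit_dag submit.dag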
To check on your jobs while they are running, you can run:
condor_q USERNAME
You can also check the global state of condor by running the same command without a username:
condor_q
You can also monitor your jobs via the condor log, output, and error files
that appear in the job's output directory, e.g.
tail -f <path-to-your-job>/condorOut.txt
#! /bin/sh
#-- condor_setup.sh (assumed name, per the notes above): build a job directory,
#-- fill in the submit template, and append the job to submit.dag
gps=$1
dirname=job_${gps}
rundir=${PWD}/${dirname}
sedstr=s/GPS/${gps}/g
sedstr2=s+TOP+${rundir}+g
mkdir ${dirname}
sed $sedstr submit.template > ${dirname}/submit_temp.txt
sed $sedstr2 ${dirname}/submit_temp.txt > ${dirname}/submit.txt
rm ${dirname}/submit_temp.txt
sed $sedstr2 doit.sh > ${dirname}/doit.sh
#-- Can also submit job right here!
# cd $dirname
# condor_submit submit.txt
# -- Add a line to the DAG File
echo "JOB $dirname ${PWD}/${dirname}/submit.txt" >> submit.dag
echo "RETRY $dirname 1" >> submit.dag
echo "" >> submit.dag
#! /bin/sh
#gps=$1
#intgps=${gps%.*}
#start=`expr $intgps - 100`
#end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
# ligo_data_find --observatory H --type H1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > H.cache
# ligo_data_find --observatory L --type L1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > L.cache
# ligo_data_find --observatory V --type V1Online -s $start -e $end --lal-cache | grep file > V.cache
# Quick example
# ~/bayeswave/trunk/src/BayesWaveBurst --ifo H1 --H1-flow 40 --H1-cache LALAdLIGO --H1-channel LALAdLIGO --trigtime 900000000.00 --srate 512 --seglen 4 --PSDstart 900000000 --PSDlength 4 --dataseed 1234 --gnuplot
/home/francesco.pannarale/bayeswave/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 64 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 64 --L1-channel L1:LDAS-STRAIN \
--H1-cache /home/francesco.pannarale/BayesWaveTest/H1.cache \
--L1-cache /home/francesco.pannarale/BayesWaveTest/L1.cache \
--L1-timeslide -26 \
--trigtime 965060683.311300 --srate 512 --seglen 8 \
--bayesLine --PSDstart 965060683.311300 --PSDlength 8 \
--Niter 2000000 --NCmin 15 --NCmax 50 --runName cluster \
--gnuplot
# /home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
# --ifo H1 --H1-flow 40 --H1-channel H1:FAKE-STRAIN \
# --ifo L1 --L1-flow 40 --L1-channel L1:FAKE-STRAIN \
# --H1-cache H.cache \
# --L1-cache L.cache \
# --trigtime $gps --srate 1024 --seglen 8 \
# --bayesLine --PSDstart $gps --PSDlength 8 \
# --Niter 1000000 --NCmin 15 \
# --bayesLine --gnuplot \
# --MDC-channel [H1:FAKE-STRAIN_BURST,L1:FAKE-STRAIN_BURST] \
# --MDC-cache [H.cache,L.cache] \
# --MDC-prefactor 1 \
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
#! /bin/sh
gps=$1
intgps=${gps%.*}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > H.cache
ligo_data_find --observatory L --type L1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > L.cache
ligo_data_find --observatory V --type V1Online -s $start -e $end --lal-cache | grep file > V.cache
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:FAKE-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:FAKE-STRAIN \
--H1-cache H.cache \
--L1-cache L.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--gnuplot \
--MDC-channel [H1:FAKE-STRAIN_BURST,L1:FAKE-STRAIN_BURST] \
--MDC-cache [H.cache,L.cache] \
--MDC-prefactor 1
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
#! /bin/sh
gps=$1
intgps=${gps%.*}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > H.cache
ligo_data_find --observatory H --type BRST_S6 -s $start -e $end --lal-cache | grep file > Hmdc.cache
ligo_data_find --observatory L --type L1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > L.cache
#ligo_data_find --observatory L --type BRST_S6 -s $start -e $end --lal-cache | grep file > Lmdc.cache
ligo_data_find --observatory V --type HrecOnline -s $start -e $end --lal-cache | grep file > V.cache
#ligo_data_find --observatory V --type BRST_S6 -s $start -e $end --lal-cache | grep file > Vmdc.cache
# --V1-channel V1:h_16384Hz
# --MDC-channel V1:GW-H16K
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:LDAS-STRAIN \
--H1-cache H.cache \
--L1-cache L.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H] \
--MDC-cache [H.cache,L.cache] \
--MDC-prefactor 1
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
#! /bin/sh
cd TOP
gps=$1
intgps=${gps%.*}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > H.cache
# ligo_data_find --observatory H --type BRST_S6 -s $start -e $end --lal-cache | grep file > Hmdc.cache
ligo_data_find --observatory L --type L1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > L.cache
#ligo_data_find --observatory L --type BRST_S6 -s $start -e $end --lal-cache | grep file > Lmdc.cache
# ligo_data_find --observatory V --type HrecOnline -s $start -e $end --lal-cache | grep file > V.cache
#ligo_data_find --observatory V --type BRST_S6 -s $start -e $end --lal-cache | grep file > Vmdc.cache
# --V1-channel V1:h_16384Hz
# --MDC-channel V1:GW-H16K
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:LDAS-STRAIN \
--H1-cache H.cache \
--L1-cache L.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H] \
--MDC-cache [/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache,/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache] \
--MDC-prefactor 1
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
#! /bin/sh
cd TOP
gps=$1
intgps=${gps%.*}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > H.cache
# ligo_data_find --observatory H --type BRST_S6 -s $start -e $end --lal-cache | grep file > Hmdc.cache
ligo_data_find --observatory L --type L1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > L.cache
#ligo_data_find --observatory L --type BRST_S6 -s $start -e $end --lal-cache | grep file > Lmdc.cache
# ligo_data_find --observatory V --type HrecOnline -s $start -e $end --lal-cache | grep file > V.cache
#ligo_data_find --observatory V --type BRST_S6 -s $start -e $end --lal-cache | grep file > Vmdc.cache
# --V1-channel V1:h_16384Hz
# --MDC-channel V1:GW-H16K
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:LDAS-STRAIN \
--H1-cache H.cache \
--L1-cache L.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H] \
--MDC-cache [/home/jkanner/baysewave/svn/trunk/burstinj/s6/elptc_s6.cache,/home/jkanner/baysewave/svn/trunk/burstinj/s6/elptc_s6.cache] \
--MDC-prefactor 1
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
#! /bin/sh
#-- Simpler setup variant: fills in the template and submits the job directly
#-- instead of adding it to a DAG
gps=$1
dirname=job_${gps}
sedstr=s/GPS/${gps}/g
mkdir ${dirname}
sed $sedstr submit.template > ${dirname}/submit.txt
cp doit.sh ${dirname}/doit.sh
#-- Submit the job right away
cd $dirname
condor_submit submit.txt
"""
This script sets up a dag file to launch multiple BayesWaveBurst runs on condor.
Running this script requires five command-line arguments:
1) period (S6A, S6B, S6C, or S6D)
2) IFO1 (=H1, L1, or V1)
3) IFO2 (=H1, L1, or V1)
4) first trigger to be analyzed (positive integer)
5) last trigger to be analyzed (positive integer)
Convention: the second IFO in the columns of the triggers file is used as the primary IFO in the call to BayesWaveBurst.
Keep in mind the column ordering in the triggers files: L1 H1, H1 V1, and L1 V1.
This script was tested on ldas-pcdev1.ligo.caltech.edu only.
"""
__author__ = "Francesco Pannarale, Maxime Fays"
__email__ = "francesco.pannarale@ligo.org, maxime.fays@ligo.org"
__version__ = "1.6"
__date__ = ""
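######################################################################################################################
# Example invocation
######################################################################################################################
# The script's file name is not given in this section; "setup_runs.py" below is a
# placeholder. This would run background triggers 1 through 25 of the S6D H1-L1 set:
#
#   python setup_runs.py S6D H1 L1 1 25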
######################################################################################################################
# Import modules
######################################################################################################################
import argparse
import math
import os
import pwd
import subprocess
import sys
######################################################################################################################
# Parse the arguments
######################################################################################################################
parser = argparse.ArgumentParser(description='Set up a dag file to launch multiple BayesWaveBurst runs on condor.')
parser.add_argument('period', type=str, default='S6D', help='Data period: S6A, S6B, S6C, or S6D')
parser.add_argument('IFO1', type=str, default='H1', help='Interferometer 1: H1, L1, or V1')
parser.add_argument('IFO2', type=str, default='L1', help='Interferometer 2: H1, L1, or V1')
parser.add_argument('nFirst', type=int, default=1, help='Number of first trigger to run')
parser.add_argument('nLast', type=int, default=25, help='Number of last trigger to run')
args = parser.parse_args()
######################################################################################################################
# Function that writes out the command line in the shell-script file to be called by the condor submission script
######################################################################################################################
def WriteJobScriptFile(jobDirName, IFOlist, PWD, IFO2TriggerTime, IFOs, indexTriggers):
jobScriptFile = open(jobDirName+"/doit.sh", "w")
jobScriptFile.write("#! /bin/sh\n\n")
jobScriptFile.write("cd %s/%s\n\n" %(PWD, jobDirName))
jobScriptFile.write(BayesWaveBurst+" \\\n")
if not IFOlist[1] == "V1":
jobScriptFile.write("--ifo %s --%s-flow 64 --%s-channel %s:LDAS-STRAIN \\\n" % (IFOlist[1], IFOlist[1], IFOlist[1], IFOlist[1]))
else:
jobScriptFile.write("--ifo %s --%s-flow 64 --%s-channel %s:h_16384Hz \\\n" % (IFOlist[1], IFOlist[1], IFOlist[1], IFOlist[1]))
jobScriptFile.write("--ifo %s --%s-flow 64 --%s-channel %s:LDAS-STRAIN \\\n" % (IFOlist[0], IFOlist[0], IFOlist[0], IFOlist[0]))
jobScriptFile.write("--%s-cache %s/%s/%s.cache \\\n" %(IFOlist[1], PWD, jobDirName, IFOlist[1]))
jobScriptFile.write("--%s-cache %s/%s/%s.cache \\\n" %(IFOlist[0], PWD, jobDirName, IFOlist[0]))
jobScriptFile.write("--%s-timeslide %i \\\n" % (IFOlist[0], lag))
jobScriptFile.write("--trigtime %.4f --srate 512 --seglen 8 \\\n" % IFO2TriggerTime)
jobScriptFile.write("--bayesLine --PSDstart %.4f --PSDlength 8 \\\n" % IFO2TriggerTime)
jobScriptFile.write("--Niter 2000000 \\\n")
jobScriptFile.write("--NCmin 15 --NCmax 50 --runName %s_trig%i \\\n" % (IFOs, indexTriggers))
jobScriptFile.write("--gnuplot\n\n")
jobScriptFile.write("cd ..\n")
jobScriptFile.close()
######################################################################################################################
# Function that writes the condor submission script
######################################################################################################################
def WriteSub(jobDirName, PWD, gpsTriggerTime):
path=PWD+'/'+jobDirName+'/'
SubFile = open(path+'submit.sub', 'w')
SubFile.write('executable='+path+'doit.sh\n')
SubFile.write('universe=vanilla\n')
SubFile.write('arguments='+str(gpsTriggerTime)+'\n')
SubFile.write('output='+path+'condorOut.txt\n')
SubFile.write('error='+path+'condorError.txt\n')
SubFile.write('log='+path+'condorLog.txt\n')
SubFile.write('notification=never\n')
SubFile.write('should_transfer_files=YES\n')
SubFile.write('when_to_transfer_output=ON_EXIT\n')
SubFile.write('stream_error=True\n')
SubFile.write('stream_output=True\n')
SubFile.write('queue 1\n')
SubFile.close()
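# For reference: with a hypothetical jobDirName of "job_H1L1_01", PWD of
# "/home/user/runs", and gpsTriggerTime of 965060683.25, the submit.sub
# written above would read
#   executable=/home/user/runs/job_H1L1_01/doit.sh
#   universe=vanilla
#   arguments=965060683.25
#   output=/home/user/runs/job_H1L1_01/condorOut.txt
#   error=/home/user/runs/job_H1L1_01/condorError.txt
#   log=/home/user/runs/job_H1L1_01/condorLog.txt
#   notification=never
#   should_transfer_files=YES
#   when_to_transfer_output=ON_EXIT
#   stream_error=True
#   stream_output=True
#   queue 1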
######################################################################################################################
# Function that generates a single dag file for multiple BayesWaveBurst runs
######################################################################################################################
def WriteDagFile(jobDirName, PWD):
    #Might need to make sure the file exists instead of appending (append mode creates it if missing)
DagFile = open('submit.dag', 'a')
DagFile.write('JOB '+jobDirName+' '+PWD+'/'+jobDirName+'/submit.sub\n')
DagFile.write('RETRY '+jobDirName+' 1\n\n')
DagFile.close()
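# submit.dag accumulates one pair of lines per job; e.g. for the hypothetical
# job_H1L1_01 under /home/user/runs it would gain:
#   JOB job_H1L1_01 /home/user/runs/job_H1L1_01/submit.sub
#   RETRY job_H1L1_01 1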
######################################################################################################################
# Function that establishes the IFOs and the necessary S6 VSR2-3 background trigger files.
# [https://wiki.ligo.org/viewauth/Bursts/AllSkyS6VSR2-3]
######################################################################################################################
def DetermineIFOsAndTriggersFile(period, IFOs, IFOlist):
if period == "S6A":
# LH - S6A [64-200] Hz
if IFOs=="H1L1":
IFOsChoice = "Running with Hanford and Livingston IFOs on S6A data"
triggersFileName = "EVENTS_LF_H1L1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6a/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6A_L1H1_run101r_LF_CC_65_RHO_74_320_vED_40_FREQSEP_200/S6A_R1_BKG_L1H1_run101r_LF_La_R4_R4cat3LH_hvetoLH_R4cat2LH_excLH/EVENTS_LF.txt " + triggersFileName
IFOlist.sort(reverse=True) # H1 and L1 appear in reversed alphabetical order in their triggers file
# HV - S6A [64-200] Hz
elif IFOs=="H1V1":
IFOsChoice = "Running with Hanford and Virgo IFOs on S6A data"
triggersFileName = "EVENTS_LF_H1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6a/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6A_H1V1_run101r_LF_CC_65_RHO_52_102_vED_40_FREQSEP_200/S6A_R1_BKG_H1V1_run101r_LF_La_R2multi_R4cat3HV_hvetoHV_R4cat2HV_excHV/EVENTS_LF.txt " + triggersFileName
# LV - S6A [64-200] Hz
elif IFOs=="L1V1":
IFOsChoice = "Running with Livingston and Virgo IFOs on S6A data"
triggersFileName = "EVENTS_LF_L1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6a/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6A_L1V1_run101r_LF_CC_65_RHO_60_168_vED_40_FREQSEP_200/S6A_R1_BKG_L1V1_run101r_LF_La_R2_R4cat3LV_hvetoLV_R4cat2LV_excLV/EVENTS_LF.txt " + triggersFileName
# Sorry: just three options are possible so far with S6A
else:
sys.exit("Interferometer list not valid!\n\n****************************************************************************************************************")
elif period == "S6B":
# LH - S6B [64-200] Hz
if IFOs=="H1L1":
IFOsChoice = "Running with Hanford and Livingston IFOs on S6B data"
triggersFileName = "EVENTS_LF_H1L1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6b/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6B_R8_BKG_L1H1_run84r_LF_afterf2f_CC_60_RHO_55_85_vED_40_FREQSEP_200/S6B_R8_BKG_L1H1_run84r_LF_R4I_R4set_cat3LH_hpveto_LH/EVENTS_LF.txt " + triggersFileName
IFOlist.sort(reverse=True) # H1 and L1 appear in reversed alphabetical order in their triggers file
# HV - S6B [64-200] Hz
elif IFOs=="H1V1":
IFOsChoice = "Running with Hanford and Virgo IFOs on S6B data"
triggersFileName = "EVENTS_LF_H1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6b/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6B_R8_BKG_H1V1_run84r_LF_afterf2f_CC_60_RHO_52_63_vED_40_FREQSEP_200_CUT_08Ott/S6B_R8_BKG_H1V1_run84r_LF_R6_no161_cat3HV_hpveto_HV/EVENTS_LF.txt " + triggersFileName
# LV - S6B [64-200] Hz
elif IFOs=="L1V1":
IFOsChoice = "Running with Livingston and Virgo IFOs on S6B data"
triggersFileName = "EVENTS_LF_L1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6b/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/S6B_R8_BKG_L1V1_run84r_LF_afterf2f_CC_60_RHO_64_58_vED_40_FREQSEP_200_CUT_08Ott/S6B_R8_BKG_L1V1_run84r_LF_R5incR2exc_cat3LV_hpveto_LV/EVENTS_LF.txt " + triggersFileName
# Sorry: just three options are possible so far with S6B
else:
sys.exit("Interferometer list not valid!\n\n****************************************************************************************************************")
elif period == "S6C":
# LH - S6C [64-200] Hz
if IFOs=="H1L1":
IFOsChoice = "Running with Hanford and Livingston IFOs on S6C data"
triggersFileName = "EVENTS_LF_H1L1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6c/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/OUTPUT_S6C_R3_L1H1_run90r_LF_CC_60_RHO_40_vED_40/S6C_R5_BKG_L1H1_run90r_LF_R2_cat3LH_newvetoLH/EVENTS_LF.txt " + triggersFileName
IFOlist.sort(reverse=True) # H1 and L1 appear in reversed alphabetical order in their triggers file
        # Sorry: just one option is possible so far with S6C
else:
sys.exit("Interferometer list not valid!\n\n****************************************************************************************************************")
elif period == "S6D":
# LH - S6D [64-200] Hz
if IFOs=="H1L1":
IFOsChoice = "Running with Hanford and Livingston IFOs on S6D data"
triggersFileName = "EVENTS_LF_H1L1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6d/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/OUTPUT_S6D_R10_BKG_L1H1_run100r_LF_La_lt200_CC_60_RHO_141_vED_40/S6D_R10_BKG_L1H1_run100r_LF_R3multi_cat3LH_hvetoLH_excLH_lt200/EVENTS.txt " + triggersFileName
IFOlist.sort(reverse=True) # H1 and L1 appear in reversed alphabetical order in their triggers file
# HV - S6D [64-200] Hz
elif IFOs=="H1V1":
IFOsChoice = "Running with Hanford and Virgo IFOs on S6D data"
triggersFileName = "EVENTS_LF_H1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6d/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/OUTPUT_S6D_R9_BKG_H1V1_run103r_LF_La_DQ1d51_CC_60_RHO_61_91_vED_40/S6D_R9_BKG_H1V1_run103r_LF_R4multi_newcat3HV_hvetoHV_newcat2HV_excHV/EVENTS_LF.txt " + triggersFileName
# LV - S6D [64-200] Hz
elif IFOs=="L1V1":
IFOsChoice = "Running with Livingston and Virgo IFOs on S6D data"
triggersFileName = "EVENTS_LF_L1V1.txt"
fileCopyCommand = "gsiscp atlas3.atlas.aei.uni-hannover.de:/home/waveburst/S6d/coherent/offline/POSTPRODUCTION/OPEN_BOX/plot/OUTPUT_S6D_R9_BKG_L1V1_run103r_LF_La_DQ1d51_CC_60_RHO_103_170_vED_40/S6D_R9_BKG_L1V1_run103r_LF_R3multi_newcat3LV_hvetoLV_newcat2LV_excLV/EVENTS_LF.txt " + triggersFileName
# Sorry: just three options are possible so far with S6D
else:
sys.exit("Interferometer list not valid!\n\n****************************************************************************************************************")
    # Sorry: just the S6A, S6B, S6C, and S6D data periods are possible so far
else:
sys.exit("Data period not valid!\n\n************************************************************************************************************************")
return [IFOsChoice, triggersFileName, fileCopyCommand]
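# For example, DetermineIFOsAndTriggersFile("S6D", "H1L1", ["H1", "L1"]) returns the
# S6D Hanford-Livingston message, "EVENTS_LF_H1L1.txt", and the gsiscp command for
# that triggers file, and (as a side effect) reverses IFOlist to ["L1", "H1"].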
######################################################################################################################
# Main
######################################################################################################################
# Find out the path to the BayesWaveBurst executable
BayesWaveBurst = ""  # fall back to an empty path if "which" finds nothing
p = subprocess.Popen(["which","BayesWaveBurst"],stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
for line in iter(p.stdout.readline, b''):
    BayesWaveBurst = line.rstrip()
if not os.path.isfile(BayesWaveBurst):
print "****************************************************************************************************************"
sys.exit("\nMake sure the BayesWaveBurst command is in your path before running this script!\n\n****************************************************************************************************************")
# Determine what directory this is and drop the program name from the list of arguments
sys.argv.pop(0)
username = pwd.getpwuid(os.getuid())[0]
pathname = os.getcwd().split(username)[1]
pathname = '/home/'+username+pathname
PWD = os.path.abspath(pathname)
# Determine the data period to be considered
period = args.period
# Sort the interferometer names in alphabetical order
IFOlist = sorted([args.IFO1, args.IFO2])
IFOs = IFOlist[0]+IFOlist[1]
# Determine the IFOs requested by the user and the background triggers file
print "****************************************************************************************************************"
print "\nFinding out if the triggers file must be copied over\n"
[IFOsChoice, triggersFileName, fileCopyCommand] = DetermineIFOsAndTriggersFile(period, IFOs, IFOlist)
# If necessary, copy the file with the background triggers
if not os.path.isfile(triggersFileName):
os.system(fileCopyCommand)
# Open the triggers file and tell the user what IFOs and triggers were requested
triggersFile = open(triggersFileName, "r")
print IFOsChoice
print "Running triggers from %i to %i" % (args.nFirst, args.nLast)
print "\n****************************************************************************************************************\n"
# Prepare to elaborate only triggers specified by the user
indexTriggers = 0
nFirst = args.nFirst - 1
nLast = args.nLast + 1
# Keep track of the number of triggers ending up in the dagfile
countTriggers = 0
# Trigger files with (M)IFAR in the header require the trick of adding an extra column...
correctionToCols = 0
for line in triggersFile:
trigger = line.lstrip()
    # If this is a header line, figure out which columns hold the trigger times
if trigger.startswith("#"):
if "IFAR" in trigger:
correctionToCols = 1
if "time for "+IFOlist[0]+" detector" in trigger:
IFO1column = trigger.split("-")[0].split("#")[1]
IFO1column = int(IFO1column) + correctionToCols
if "time for "+IFOlist[1]+" detector" in trigger:
IFO2column = trigger.split("-")[0].split("#")[1]
IFO2column = int(IFO2column) + correctionToCols
# If not header line or commented entry, prepare to run the analysis
else:
# This is not a comment line ==> it is a trigger ==> update trigger index
indexTriggers = indexTriggers + 1
if indexTriggers > nFirst and indexTriggers < nLast:
# This is a trigger requested by the user ==> update trigger counter
countTriggers = countTriggers + 1
# Trigger times for the two IFOs: the trigger time of IFO2 is later used as the trigger time in the call to BayesWaveBurst
entries = trigger.split()
IFO1TriggerTime = float(entries[IFO1column])
IFO2TriggerTime = float(entries[IFO2column])
# Lag time
lag = IFO2TriggerTime-IFO1TriggerTime
            # Round the lag time to the nearest integer
            if abs(lag - int(lag)) < 0.5:
                lag = int(lag)
            else:
                lag = int(lag) + 1 if lag > 0.0 else int(lag) - 1
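            # Worked example of the rounding above: lag = -26.3 truncates to -26
            # (fractional part 0.3 < 0.5), while lag = 3.7 rounds away from zero
            # to 4 (fractional part 0.7 >= 0.5)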
# Output information about these three times
print "Trigger %i: using trigger times %.4f and %.4f and lagtime %i" % (indexTriggers, IFO1TriggerTime, IFO2TriggerTime, lag)
# Create a directory for the condor job
if indexTriggers > 9:
indexTriggersStr = str(indexTriggers)
else:
indexTriggersStr = "0"+str(indexTriggers)
jobDirName = "job_"+IFOs+"_"+indexTriggersStr
os.system("mkdir "+jobDirName)
# Generate cache files for both IFOs
countIFO = -1
for ifo in IFOlist:
countIFO = countIFO + 1
cacheFileName=ifo+".cache"
# 2 seconds after the trigger time and 6 seconds before it are required
                # Include an extra 2+2 seconds of buffer to compensate for the effects of rounding off
[startTime, endTime] = sorted([IFO1TriggerTime, IFO2TriggerTime])
startTime = math.floor(startTime) - 6 - 2
endTime = math.ceil(endTime) + 2 + 2
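                # e.g. trigger times 965060682.1 and 965060683.3 give startTime = 965060674
                # and endTime = 965060688: both triggers are covered, plus the buffers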
if not ifo == "V1":
channel=ifo+"_LDAS_C02_L2"
else:
channel="HrecOnline" #HrecV2, HrecV3
print " * Producing cache file for IFO="+ifo+" using CHANNEL="+channel+", START TIME="+str(startTime)+" and END TIME="+str(endTime)
os.system("ligo_data_find -s "+str(startTime)+" -e "+str(endTime)+" -o "+ifo[0]+" -t "+channel+" -u file -l > "+cacheFileName)
if not os.path.getsize(cacheFileName):
print "Could not obtain cache file for IFO="+ifo+"!"
print ""
# Place the cache files in the job directory
os.system("mv *.cache "+jobDirName+"/.")
# Write out the shell-script file to be called by condor
WriteJobScriptFile(jobDirName, IFOlist, PWD, IFO2TriggerTime, IFOs, indexTriggers)
# Write the condor submission script to the job directory
WriteSub(jobDirName, PWD, IFO2TriggerTime)
# Write the dag file containing all jobs
WriteDagFile(jobDirName, PWD)
# All done! Remind the user of what to do...
print "****************************************************************************************************************"
print "\n\nTriggers found:", countTriggers
print "\n\n****************************************************************************************************************"
if countTriggers == 0:
sys.exit("\nPlese check your triggers file!\n")
print "\n\nTo run the condor jobs, type\n condor_submit_dag submit.dag"
print "\nTo monitor a job\'s output type\n tail -f <job-directory>/condorOutput.txt"
print "\nTo monitor a job\'s errors type\n tail -f <job-directory>/condorError.txt\n"
print "****************************************************************************************************************"
# ...and clean-up
triggersFile.close()
######################################################################################################################
# End Main
######################################################################################################################