Commit e6020bdf authored by Jonah Kanner

Added HLV script

git-svn-id: https://svn.ligo.caltech.edu/svn/bayeswave/trunk@95 c56465c9-8126-4a4f-9d7d-ac845eff4865
parent 291b9a3d
import os
import numpy as np
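# Build an HTCondor DAG of BayesWaveBurst jobs for the H1-L1-V1 (HLV) network:
# for each burst MDC waveform, injection scale, and trigger time, write a job
# directory holding a worker script (doit.sh) and a Condor submit file, then
# tie all the jobs together in submit.dag.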
# -- Define templates for the per-job worker script and Condor submit file
doit_str = """#! /bin/sh
source /home/jkanner/.bashrc.mine
cd {top}
gps=$1
intgps=${{gps%.*}}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > H.cache
# ligo_data_find --observatory H --type BRST_S6 -s $start -e $end --lal-cache | grep file > Hmdc.cache
ligo_data_find --observatory L --type L1_LDAS_C02_L2 -s $start -e $end --lal-cache | grep file > L.cache
#ligo_data_find --observatory L --type BRST_S6 -s $start -e $end --lal-cache | grep file > Lmdc.cache
ligo_data_find --observatory V --type HrecOnline -s $start -e $end --lal-cache | grep file > V.cache
#ligo_data_find --observatory V --type BRST_S6 -s $start -e $end --lal-cache | grep file > Vmdc.cache
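# The cache files above list frame-file URLs for each detector; they are
# handed to BayesWaveBurst below via --H1-cache, --L1-cache, and --V1-cache.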
# --V1-channel V1:h_16384Hz
# --MDC-channel V1:GW-H16K
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:LDAS-STRAIN \
--ifo V1 --V1-flow 40 --V1-channel V1:h_16384Hz \
--H1-cache H.cache \
--L1-cache L.cache \
--V1-cache V.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H,V1:GW-H16K] \
--MDC-cache [{cachefile},{cachefile},{cachefile}] \
--MDC-prefactor {scale}
python /home/jkanner/baysewave/svn/trunk/postprocess/skymap/skyview.py --mdc {mdclog}
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
"""
submit_str = """
executable={top}/doit.sh
universe=vanilla
arguments={gps}
output={top}/condorOut.txt
error={top}/condorError.txt
log={top}/condorLog.txt
notification=never
should_transfer_files=YES
when_to_transfer_output = ON_EXIT
stream_error=True
stream_output=True
queue 1
"""
# -----------------
# Begin Main Script
# -----------------
# -- Set parameters
topdir = '/home/jkanner/baysewave/PEC/mdc_v3'
# topdir = '/home/jkanner/baysewave/test'
mdcdir = '/home/jkanner/baysewave/svn/trunk/burstinj/s6'
waveformList = ['sg153']
scaleList = ['0.3']
mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
mdclog = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/BurstMDC-BRST_S6-Log.txt'
# -- Alternate parameters
# waveformList = ['sg153', 'lwnb', 'qwnb']
# mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
# scaleList = ['0.15', '0.3', '0.6', '1.2']
dagfile = open(os.path.join(topdir, 'submit.dag'), 'w')
for waveform in waveformList:
    trigfile = os.path.join(mdcdir, "trigs_{0}.txt".format(waveform))
    trigList = np.loadtxt(trigfile)
    wavedir = os.path.join(topdir, waveform)
    if not os.path.exists(wavedir): os.makedirs(wavedir)
    for scale in scaleList:
        scaledir = os.path.join(wavedir, scale)
        if not os.path.exists(scaledir): os.makedirs(scaledir)
        for trig in trigList:
            trig = str(trig)
            trigdir = os.path.join(scaledir, 'job_' + trig)
            if not os.path.exists(trigdir): os.makedirs(trigdir)
            # Write the per-job Condor submit file
            submitname = os.path.join(trigdir, 'submit.txt')
            submitfile = open(submitname, 'w')
            submitfile.write(submit_str.format(top=trigdir, gps=trig))
            submitfile.close()
            # Write the per-job worker script
            doitfile = open(os.path.join(trigdir, 'doit.sh'), 'w')
            doitfile.write(doit_str.format(top=trigdir, cachefile=mdccache, scale=scale, mdclog=mdclog))
            doitfile.close()
            # Add the job to the DAG, with one retry on failure
            jobname = "{0}_{1}_{2}".format(waveform, scale, int(float(trig)))
            dagfile.write("JOB {0} {1} \n".format(jobname, submitname))
            dagfile.write("RETRY {0} 1\n\n".format(jobname))
dagfile.close()
print("Thank you, come again")