Commit a45c94a9 authored by Jonah Kanner

Adding bwb-0.3

git-svn-id: https://svn.ligo.caltech.edu/svn/bayeswave/tags/bwb-0.3@322 c56465c9-8126-4a4f-9d7d-ac845eff4865
Running BayesWaveBurst with condor
----------------------------------
For this example, the top-level script is condor_setup.sh. It takes a single
argument: the GPS time of your trigger.
./condor_setup.sh 1076894532.3
All the action, however, happens in doit.sh. So before you run condor_setup.sh,
open doit.sh in your favorite editor and edit the BayesWaveBurst command and
frame types as needed.
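For reference, the pieces you will usually need to change are the frame type in each
ligo_data_find call, e.g.
ligo_data_find --observatory H --type H1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > H.cache
and the channel arguments passed to BayesWaveBurst, e.g.
--ifo H1 --H1-flow 40 --H1-channel H1:FAKE-STRAIN \
(both lines are taken from the example doit.sh shown later in this commit).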
I think that's it!
To check on your jobs while they are running, you can run:
condor_q USERNAME
You can also check the global state of condor by running the same command with no arguments:
condor_q
You can also keep an eye on your jobs by following the condor log, output, and error
files that appear in the output directory, e.g.
tail -f <path-to-your-job>/condorOut.txt
setup_CBC_injection_runs_xml.py
-------------------------------
This script sets up a dag file to launch multiple BayesWaveBurst and
BayesWavePost runs on condor. It generates the necessary BBH waveform
injections in an XML file using lalapps_inspinj.
Example:
python <path to BayesWave>/trunk/condor/setup_CBC_injection_runs_xml.py 5 5 15 4096 960000000 960002000 100 4 10 30 IMRPhenomBthreePointFivePN 200000 1234
For more information on the arguments, see
python <path to BayesWave>/trunk/condor/setup_CBC_injection_runs_xml.py --help
setup_CBC_injection_runs_trigtimes.py
-------------------------------------
This script sets up a dag file to launch multiple BayesWaveBurst and
BayesWavePost runs on condor. It picks the BBH injection trigger times
from a file provided by the user.
Example:
python <path to BayesWave>/trunk/condor/setup_CBC_injection_runs_trigtimes.py <name of file containing a list of triggers> 41 80
For more information on the arguments, see
python <path to BayesWave>/trunk/condor/setup_CBC_injection_runs_trigtimes.py --help
setup_cWB_background_runs.py
----------------------------
This script sets up a dag file to launch multiple BayesWaveBurst and
BayesWavePost runs on condor in order to analyze cWB background triggers.
Example:
python <path to BayesWave>/trunk/condor/setup_cWB_background_runs.py S6D H1 V1 11 20
For more information on the arguments, see
python <path to BayesWave>/trunk/condor/setup_cWB_background_runs.py --help
#! /bin/sh
gps=$1
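# Strip the fractional part of the GPS trigger time and pad it by 100 s on each
# side to define the window for the cache queries below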
intgps=${gps%.*}
start=`expr $intgps - 100`
end=`expr $intgps + 100`
export LIGO_DATAFIND_SERVER=10.14.20.73:80
# Create Cache files
ligo_data_find --observatory H --type H1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > H.cache
ligo_data_find --observatory L --type L1_ER_C00_L1 -s $start -e $end --lal-cache | grep file > L.cache
ligo_data_find --observatory V --type V1Online -s $start -e $end --lal-cache | grep file > V.cache
/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst \
--ifo H1 --H1-flow 40 --H1-channel H1:FAKE-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:FAKE-STRAIN \
--H1-cache H.cache \
--L1-cache L.cache \
--trigtime $gps --srate 1024 --seglen 8 \
--bayesLine --PSDstart $gps --PSDlength 8 \
--Niter 1000000 --NCmin 15 \
--bayesLine --gnuplot \
--MDC-channel [H1:FAKE-STRAIN_BURST,L1:FAKE-STRAIN_BURST] \
--MDC-cache [H.cache,L.cache] \
--MDC-prefactor 1
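# Bundle up the output directories (chains, waveforms, snr) left by the run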
tar -cf chains.tar chains
tar -cf waveforms.tar waveforms
tar -cf snr.tar snr
"""
This script sets up the serial postprocessing of all BayesWaveBurst run results in the directory in which it is
launched. It assumes that the individual run directories are labelled job_*. Once the script is done, the
postprocessing may be completed by submitting the dagfile it produces to condor.
This script takes the optional boolean flag --radec_flag, which enables looking up the injected right ascension
and declination values so that they can be plotted in the skymap. These values are read from an XML injection file,
assuming the naming convention m1_m2.xml. The flag is off (False) by default.
This script was tested on ldas-pcdev1.ligo.caltech.edu only.
"""
__author__ = "Francesco Pannarale, Ronaldas Macas"
__email__ = "francesco.pannarale@ligo.org, ronaldas.macas@gmail.com"
__version__ = "1.2"
__date__ = "27.08.2014"
######################################################################################################################
# Import modules
######################################################################################################################
import argparse
import math
import os
import pwd
import subprocess
import sys
######################################################################################################################
# Parse the arguments
######################################################################################################################
parser = argparse.ArgumentParser(description='Set up a dag file to launch multiple BayesWaveBurst postprocessing\n runs on condor.')
parser.add_argument('--radec_flag', action='store_true', default=False, dest="radec", help="enable plot of injection locations in skymaps")
#parser.add_argument('--match', action='store_true', default=False, dest="match", help="Add matches and injected waveforms to the webpage")
parser.add_argument('--match', default=False, help="Add matches and injected waveforms to the webpage")
args = parser.parse_args()
######################################################################################################################
# Function that copies many_plots_1job.sh in all run directories to analyze
######################################################################################################################
def ObtainScript(jobDirName, PWD, many_plots_script):
    destination_path=PWD+'/'+jobDirName+'/'
    os.system("cp "+many_plots_script+" "+destination_path)
######################################################################################################################
# Function that writes the condor submission script
######################################################################################################################
def WriteSub(jobDirName, PWD, ra, dec, match):
    path=PWD+'/'+jobDirName+'/'
    SubFile = open(path+'submitWebpage.sub', 'w')
    SubFile.write('executable='+path+'many_plots_1job.sh\n')
    if str(ra)+str(dec)=="":
        if str(match)=="False":
            SubFile.write('arguments='+path+'\n')
        else:
            SubFile.write('arguments='+path+' '+str(match)+'\n')
    else:
        if str(match)=="False":
            SubFile.write('arguments='+path+' '+str(ra)+' '+str(dec)+'\n')
        else:
            SubFile.write('arguments='+path+' '+str(ra)+' '+str(dec)+' '+str(match)+'\n')
    SubFile.write('universe=vanilla\n')
    SubFile.write('getenv=True\n')
    SubFile.write('output='+path+'condorManyPlotsOut.txt\n')
    SubFile.write('error='+path+'condorManyPlotsError.txt\n')
    SubFile.write('log='+path+'condorManyPlotsLog.txt\n')
    SubFile.write('notification=never\n')
    SubFile.write('should_transfer_files=YES\n')
    SubFile.write('when_to_transfer_output=ON_EXIT\n')
    SubFile.write('stream_error=True\n')
    SubFile.write('stream_output=True\n')
    SubFile.write('queue 1\n')
    SubFile.close()
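# For reference: with the default (empty) ra/dec and match=False, the generated
# submitWebpage.sub reads roughly as follows (the job path is purely illustrative):
#   executable=/home/albert.einstein/runs/job_10_20_3/many_plots_1job.sh
#   arguments=/home/albert.einstein/runs/job_10_20_3/
#   universe=vanilla
#   getenv=True
#   output=/home/albert.einstein/runs/job_10_20_3/condorManyPlotsOut.txt
#   ...
#   queue 1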
######################################################################################################################
# Function that generates a single dag file for multiple postprocessing runs with many_plots_1job.sh
######################################################################################################################
def WriteDagFile(jobDirName, PWD):
    # Might need to make sure the file exists instead of appending (but the + should take care of that)
    DagFile = open('submitWebpages.dag', 'a')
    DagFile.write('JOB '+jobDirName+' '+PWD+'/'+jobDirName+'/submitWebpage.sub\n')
    DagFile.write('RETRY '+jobDirName+' 1\n\n')
    DagFile.close()
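# Each job directory adds a two-line entry to submitWebpages.dag, e.g. (names illustrative):
#   JOB job_10_20_3 /home/albert.einstein/runs/job_10_20_3/submitWebpage.sub
#   RETRY job_10_20_3 1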
######################################################################################################################
# Function that reads the sky location of an injected event from the XML injection file and expresses it in degrees
######################################################################################################################
def GetRADec(jobDirName, PWD):
    # Find XML file name assuming the format "m1_m2.xml"
    m1 = subprocess.Popen(["echo "+jobDirName+" | cut -d _ -f 2"], stdout=subprocess.PIPE, shell=True).communicate()[0].rstrip()
    m2 = subprocess.Popen(["echo "+jobDirName+" | cut -d _ -f 3"], stdout=subprocess.PIPE, shell=True).communicate()[0].rstrip()
    job_index = subprocess.Popen(["echo "+jobDirName+" | cut -d _ -f 4"], stdout=subprocess.PIPE, shell=True).communicate()[0].rstrip()
    job_index = int(job_index) + 1 # adding +1 because command 'tail' (used below) requires non-zero index
    longitude = subprocess.Popen(["lwtprint "+str(m1)+"_"+str(m2)+".xml -t sim_inspiralgroup:sim_inspiral -c longitude | head -n"+str(job_index)+" | tail -n1"], stdout=subprocess.PIPE, shell=True).communicate()[0].rstrip()
    latitude = subprocess.Popen(["lwtprint "+str(m1)+"_"+str(m2)+".xml -t sim_inspiralgroup:sim_inspiral -c latitude | head -n"+str(job_index)+" | tail -n1"], stdout=subprocess.PIPE, shell=True).communicate()[0].rstrip()
    # Converting from radians to degrees
    longitude_deg = float(longitude)*180./math.pi
    if longitude_deg > 180:
        ra = longitude_deg - 360.
    else:
        ra = longitude_deg
    dec = float(latitude)*180./math.pi
    return (ra, dec)
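# Example of the naming convention assumed above: a run directory called job_10_20_3
# maps to the injection file 10_20.xml and, after the +1 offset, to row 4 of its
# sim_inspiral table.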
######################################################################################################################
# Main
######################################################################################################################
# Find out the path to the BayesWaveBurst executable
p = subprocess.Popen(["which","BayesWaveBurst"],stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
BayesWaveBurst = ""  # stays empty if 'which' finds nothing, so the check below still triggers
for line in iter(p.stdout.readline, b''):
    BayesWaveBurst = line.rstrip()
if not os.path.isfile(BayesWaveBurst):
    print "****************************************************************************************************************"
    sys.exit("\nMake sure the BayesWaveBurst command is in your path before running this script!\n\n****************************************************************************************************************")
# Store the absolute path to the post-processing script many_plots_1job.sh
many_plots_script = BayesWaveBurst.split("src")[0]+"postprocess/many_plots_1job.sh"
# Determine what directory this script was called from and drop the program name from the list of arguments
sys.argv.pop(0)
username = pwd.getpwuid(os.getuid())[0]
pathname = os.getcwd().split(username)[1]
pathname = '/home/'+username+pathname
PWD = os.path.abspath(pathname)
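# Rebuilding the path as /home/<username>/... normalizes any automount prefix in
# os.getcwd(); note this assumes the run directories live somewhere under the user's
# home directory.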
# Get a list of all the content of the PWD
PWD_ls = subprocess.Popen(["ls",PWD],stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
job_list = []
jobs_number = 0
#TODO: CHANGE THIS!!
# get match argument which should be passed to many_plots_1job.sh
match = args.match
#if match :
# match = 500
# For each element in the PWD...
for line in iter(PWD_ls.stdout.readline, b''):
    jobDirName = line.rstrip()
    # ...check if it is a BayesWaveBurst job directory. If it is:
    if "job_" in jobDirName:
        # 1) store its name (may be useful in the future)
        job_list.append(jobDirName)
        # 2) copy the post-processing script to the job directory
        ObtainScript(jobDirName, PWD, many_plots_script)
        # 3) if asked by the user get the right ascension and declination of the injected event for the skymap
        radec = args.radec
        if radec :
            ra, dec = GetRADec(jobDirName, PWD)
        else:
            ra, dec = "",""
        # 4) write a submit script in it to call the post processing script
        WriteSub(jobDirName, PWD, ra, dec, match)
        # 5) tell the dag file to postprocess this job
        WriteDagFile(jobDirName, PWD)
        # 6) keep count of the jobs found
        jobs_number = jobs_number + 1
# All done! Report to the user
print "****************************************************************************************************************"
print "\n\nJobs to analyze created:", jobs_number
print "\n\n****************************************************************************************************************"
print "\n\nTo run the condor jobs, type\n condor_submit_dag submitWebpages.dag"
print "\nTo monitor a job\'s output type\n tail -f <job-directory>/condorManyPlotsOutput.txt"
print "\nTo monitor a job\'s errors type\n tail -f <job-directory>/condorManyPlotsError.txt"
print "\n To move the postprocessing results in order to make them visible. Run:"
print "\n rsync -av --progress "+PWD+" <path to the desired directory in the public_html> --exclude '*/*snr' --exclude '*/waveforms' --exclude '*/chains' --include='*/' --include '*html' --include '*png' --include condorOut_BWB.txt --exclude='*'\n\n"
print "****************************************************************************************************************"
######################################################################################################################
######################################################################################################################
# -----------------------------------
# Example Condor setup script for BWB
# 2014, Jonah Kanner
# ----------------------------------
import os
import numpy as np
import subprocess
# ------------------
# -- Set parameters
# ------------------
topdir = '/home/jkanner/baysewave/PEC/mdc_v4'
mdcdir = '/home/jkanner/baysewave/svn/trunk/burstinj/s6'
# waveformList = ['sg153', 'qwnb', 'lwnb']
waveformList = ['sg153']
scaleList = ['0.15', '0.3', '0.6', '1.2']
mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
bwb = '/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst'
ifoList = ['H1', 'L1']
frtypeList = ['H1_LDAS_C02_L2', 'L1_LDAS_C02_L2']
bwbargsfmt = """--ifo H1 --H1-flow 16 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 16 --L1-channel L1:LDAS-STRAIN \
--H1-cache {wavedir}/H1.cache \
--L1-cache {wavedir}/L1.cache \
--trigtime {gps} --srate 1024 --seglen 4 \
--bayesLine --PSDstart {gps} --PSDlength 4 \
--Niter 2000000 --NCmin 35 --NCmax 35 \
--bayesLine --gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H] \
--MDC-cache [{cachefile},{cachefile}] \
--MDC-prefactor {scale} \
--Qmax 40 --Dmax 60 \
--noAdaptTemperature \
--tempSpacing 1.3 --clusterProposal
"""
# -- Alternate parameters
# waveformList = ['sg153', 'lwnb', 'qwnb']
# mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
# scaleList = ['0.15', '0.3', '0.6', '1.2']
# topdir = '/home/jkanner/baysewave/test'
# ---------------------------------
# -- Define templates for run files
# ---------------------------------
submit_str = """
executable={bwb}
universe=standard
arguments={bwbargs}
output={top}/condorOut.txt
error={top}/condorError.txt
log={top}/condorLog.txt
notification=never
should_transfer_files=YES
when_to_transfer_output = ON_EXIT
stream_error=True
stream_output=True
queue 1
"""
# -----------------
# Begin Main Script
# -----------------
dagfile = open( os.path.join(topdir, 'submit_all.sh'), 'w')
for waveform in waveformList:
    trigfile = os.path.join(mdcdir, "trigs_{0}.txt".format(waveform))
    trigList = np.loadtxt(trigfile)
    wavedir = os.path.join(topdir, waveform)
    if not os.path.exists(wavedir): os.makedirs(wavedir)
    # ------------------------------------------------
    # Call LIGO Data find for whole waveform directory
    # ------------------------------------------------
    start = int(trigList.min()) - 50
    end = int(trigList.max()) + 50
    for ifo, frtype in zip(ifoList,frtypeList):
        cachefilefmt = os.path.join(wavedir, '{0}.cache')
        ldfcmd = "ligo_data_find --observatory {o} --type {frtype} -s {start} -e {end} --lal-cache | grep file > {cachefile}".format(o=ifo[0], frtype=frtype, cachefile = cachefilefmt.format(ifo), start=start, end=end)
        print "Calling LIGO data find ..."
        print ldfcmd
        subprocess.call(ldfcmd, shell=True)
    for scale in scaleList:
        scaledir = os.path.join(wavedir, scale)
        if not os.path.exists(scaledir): os.makedirs(scaledir)
        for trig in trigList:
            trig = str(trig)
            trigdir = os.path.join(scaledir, 'job_'+trig)
            if not os.path.exists(trigdir): os.makedirs(trigdir)
            bwbargs = bwbargsfmt.format(gps=trig, wavedir=wavedir, cachefile=mdccache, scale=scale)
            submitname = os.path.join(trigdir, 'submit.txt')
            submitfile = open( submitname, 'w' )
            submitfile.write(submit_str.format(top=trigdir, bwb=bwb, bwbargs=bwbargs))
            submitfile.close()
            dagfile.write("cd {0}\n".format(trigdir))
            dagfile.write("condor_submit submit.txt\n\n")
dagfile.close()
print "Thank you, come again"
# -----------------------------------
# Example Condor setup script for BWB
# 2014, Jonah Kanner
# ----------------------------------
import os
import numpy as np
import subprocess
# ------------------
# -- Set parameters
# ------------------
topdir = '/home/jkanner/baysewave/PEC/mdc_v3'
# topdir = '/home/jkanner/baysewave/test'
mdcdir = '/home/jkanner/baysewave/svn/branches/condor_compile/burstinj/s6'
waveformList = ['elpsg153']
scaleList = ['0.15' , '0.3', '0.6', '1.2']
mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/elptc_s6.cache'
mdclog = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/BurstMDC-ELPTC_S6-Log.txt'
bwb = '/home/jkanner/baysewave/svn/branches/condor_compile/src/BayesWaveBurst'
ifoList = ['H1', 'L1', 'V1']
frtypeList = ['H1_LDAS_C02_L2', 'L1_LDAS_C02_L2', 'HrecV2']
bwbargsfmt = """--ifo H1 --H1-flow 40 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 40 --L1-channel L1:LDAS-STRAIN \
--ifo V1 --V1-flow 40 --V1-channel V1:h_16384Hz \
--H1-cache {wavedir}/H1.cache \
--L1-cache {wavedir}/L1.cache \
--V1-cache {wavedir}/V1.cache \
--trigtime {gps} --srate 1024 --seglen 4 \
--bayesLine --PSDstart {gps} --PSDlength 4 \
--Niter 2000000 --NCmin 15 \
--bayesLine --gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H,V1:GW-16K] \
--MDC-cache [{cachefile},{cachefile},{cachefile}] \
--MDC-prefactor {scale} \
"""
# -- Alternate parameters
# waveformList = ['sg153', 'lwnb', 'qwnb']
# mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
# scaleList = ['0.15', '0.3', '0.6', '1.2']
# topdir = '/home/jkanner/baysewave/test'
# ---------------------------------
# -- Define templates for run files
# ---------------------------------
submit_str = """
executable={bwb}
universe=standard
arguments={bwbargs}
output={top}/condorOut.txt
error={top}/condorError.txt
log={top}/condorLog.txt
notification=never
should_transfer_files=YES
when_to_transfer_output = ON_EXIT
stream_error=True
stream_output=True
queue 1
"""
# -----------------
# Begin Main Script
# -----------------
dagfile = open( os.path.join(topdir, 'submit_all.sh'), 'w')
for waveform in waveformList:
    trigfile = os.path.join(mdcdir, "trigs_{0}.txt".format(waveform))
    trigList = np.loadtxt(trigfile)
    wavedir = os.path.join(topdir, waveform)
    if not os.path.exists(wavedir): os.makedirs(wavedir)
    # ------------------------------------------------
    # Call LIGO Data find for whole waveform directory
    # ------------------------------------------------
    start = int(trigList.min()) - 50
    end = int(trigList.max()) + 50
    for ifo, frtype in zip(ifoList,frtypeList):
        cachefilefmt = os.path.join(wavedir, '{0}.cache')
        ldfcmd = "ligo_data_find --observatory {o} --type {frtype} -s {start} -e {end} --lal-cache | grep file > {cachefile}".format(o=ifo[0], frtype=frtype, cachefile = cachefilefmt.format(ifo), start=start, end=end)
        print "Calling LIGO data find ..."
        print ldfcmd
        subprocess.call(ldfcmd, shell=True)
    for scale in scaleList:
        scaledir = os.path.join(wavedir, scale)
        if not os.path.exists(scaledir): os.makedirs(scaledir)
        for trig in trigList:
            trig = str(trig)
            trigdir = os.path.join(scaledir, 'job_'+trig)
            if not os.path.exists(trigdir): os.makedirs(trigdir)
            bwbargs = bwbargsfmt.format(gps=trig, wavedir=wavedir, cachefile=mdccache, scale=scale)
            submitname = os.path.join(trigdir, 'submit.txt')
            submitfile = open( submitname, 'w' )
            submitfile.write(submit_str.format(top=trigdir, bwb=bwb, bwbargs=bwbargs))
            submitfile.close()
            dagfile.write("cd {0}\n".format(trigdir))
            dagfile.write("condor_submit submit.txt\n\n")
dagfile.close()
print "Thank you, come again"
# -----------------------------------
# Example Condor setup script for BWB
# 2014, Jonah Kanner
# ----------------------------------
import os
import numpy as np
import subprocess
# ------------------
# -- Set parameters
# ------------------
topdir = '/home/jkanner/baysewave/PEC/mdc_v4'
mdcdir = '/home/jkanner/baysewave/svn/trunk/burstinj/s6'
# waveformList = ['sg153', 'qwnb', 'lwnb']
waveformList = ['elpsg153']
scaleList = ['0.15', '0.3', '0.6', '1.2']
mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/elptc_s6.cache'
bwb = '/home/jkanner/baysewave/svn/trunk/src/BayesWaveBurst'
ifoList = ['H1', 'L1']
frtypeList = ['H1_LDAS_C02_L2', 'L1_LDAS_C02_L2']
bwbargsfmt = """--ifo H1 --H1-flow 16 --H1-channel H1:LDAS-STRAIN \
--ifo L1 --L1-flow 16 --L1-channel L1:LDAS-STRAIN \
--H1-cache {wavedir}/H1.cache \
--L1-cache {wavedir}/L1.cache \
--trigtime {gps} --srate 1024 --seglen 4 \
--bayesLine --PSDstart {gps} --PSDlength 4 \
--Niter 2000000 --NCmin 35 --NCmax 35 \
--bayesLine --gnuplot \
--MDC-channel [H1:GW-H,L1:GW-H] \
--MDC-cache [{cachefile},{cachefile}] \
--MDC-prefactor {scale} \
--Qmax 40 --Dmax 60 \
--noAdaptTemperature \
--tempSpacing 1.3 --clusterProposal
"""
# -- Alternate parameters
# waveformList = ['sg153', 'lwnb', 'qwnb']
# mdccache = '/home/jkanner/baysewave/svn/trunk/burstinj/s6/brst_s6.cache'
# scaleList = ['0.15', '0.3', '0.6', '1.2']
# topdir = '/home/jkanner/baysewave/test'
# ---------------------------------
# -- Define templates for run files
# ---------------------------------
submit_str = """
executable={bwb}
universe=standard
arguments={bwbargs}
output={top}/condorOut.txt
error={top}/condorError.txt
log={top}/condorLog.txt
notification=never
should_transfer_files=YES
when_to_transfer_output = ON_EXIT
stream_error=True
stream_output=True
queue 1
"""
# -----------------
# Begin Main Script
# -----------------
dagfile = open( os.path.join(topdir, 'submit_all.sh'), 'w')
for waveform in waveformList:
    trigfile = os.path.join(mdcdir, "trigs_{0}.txt".format(waveform))
    trigList = np.loadtxt(trigfile)
    wavedir = os.path.join(topdir, waveform)
    if not os.path.exists(wavedir): os.makedirs(wavedir)
    # ------------------------------------------------
    # Call LIGO Data find for whole waveform directory
    # ------------------------------------------------
    start = int(trigList.min()) - 50
    end = int(trigList.max()) + 50
    for ifo, frtype in zip(ifoList,frtypeList):
        cachefilefmt = os.path.join(wavedir, '{0}.cache')
        ldfcmd = "ligo_data_find --observatory {o} --type {frtype} -s {start} -e {end} --lal-cache | grep file > {cachefile}".format(o=ifo[0], frtype=frtype, cachefile = cachefilefmt.format(ifo), start=start, end=end)
        print "Calling LIGO data find ..."
        print ldfcmd
        subprocess.call(ldfcmd, shell=True)
    for scale in scaleList:
        scaledir = os.path.join(wavedir, scale)
        if not os.path.exists(scaledir): os.makedirs(scaledir)
        for trig in trigList:
            trig = str(trig)
            trigdir = os.path.join(scaledir, 'job_'+trig)
            if not os.path.exists(trigdir): os.makedirs(trigdir)
            bwbargs = bwbargsfmt.format(gps=trig, wavedir=wavedir, cachefile=mdccache, scale=scale)
            submitname = os.path.join(trigdir, 'submit.txt')
            submitfile = open( submitname, 'w' )
            submitfile.write(submit_str.format(top=trigdir, bwb=bwb, bwbargs=bwbargs))
            submitfile.close()
            dagfile.write("cd {0}\n".format(trigdir))
            dagfile.write("condor_submit submit.txt\n\n")
dagfile.close()
print "Thank you, come again"
executable=TOP/doit.sh
universe=vanilla
arguments=GPS
output=TOP/condorOut.txt
error=TOP/condorError.txt
log=TOP/condorLog.txt
notification=never
should_transfer_files=YES
when_to_transfer_output = ON_EXIT
stream_error=True
stream_output=True
queue 1