Commit 0950a1dc authored by Vivien Raymond
Browse files

Moved accounting_group to [condor] section

Original: 78f9e20a4cadd63a2b42fe6317b81d7a63ef69ae
parent 6de5b444
......@@ -8,20 +8,6 @@
# IFOs to use for this analysis
ifos=['H1','L1','V1']
# Analysis type for accounting for computing use
# See https://ldas-gridmon.ligo.caltech.edu/accounting/condor_groups/determine_condor_account_group.html
# !!! If omitted, jobs may not run on all LDG clusters !!!
# Common choices are:
# ligo.dev.o1.cbc.pe.lalinferenceonline - day-latency followup for O1 development
# ligo.dev.o1.cbc.pe.lalinference - follow up for detection candidates for O1 development
# ligo.dev.o1.cbc.noise.lalinference - noise modelling for O1 development
# ligo.dev.o1.cbc.testgr.tiger - TIGER for O1 development
#
# Replace dev with sim or prod for simulation or production runs
# Replace o1 with s6,o2,o3 for other runs
#
accounting_group=ligo.dev.o2.cbc.pe.lalinference
# select the engine to use, lalinferencenest, lalinferencemcmc or lalinferencebambimpi
# A comma separated list can be provided. In that case a dag for each sampler will be created, along with a top level dag to run them all, e.g.
# engine=lalinferencenest,lalinferencemcmc
......@@ -174,6 +160,21 @@ pos_to_sim_inspiral=/home/albert.einstein/bin/cbcBayesPosToSimInspiral.py
# this will use the email address from the [resultspage] section
#notification=Complete
# Analysis type for accounting for computing use
# See https://ldas-gridmon.ligo.caltech.edu/accounting/condor_groups/determine_condor_account_group.html
# !!! If omitted, jobs may not run on all LDG clusters !!!
# Common choices are:
# ligo.dev.o1.cbc.pe.lalinferenceonline - day-latency followup for O1 development
# ligo.dev.o1.cbc.pe.lalinference - follow up for detection candidates for O1 development
# ligo.dev.o1.cbc.noise.lalinference - noise modelling for O1 development
# ligo.dev.o1.cbc.testgr.tiger - TIGER for O1 development
#
# Replace dev with sim or prod for simulation or production runs
# Replace o1 with s6,o2,o3 for other runs
#
accounting_group=ligo.dev.o2.cbc.pe.lalinference
#####################################################################################
# Section used by the datafind jobs (not used with simulated noise)
[datafind]
......
......@@ -651,10 +651,10 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
self.prenodes={}
self.datafind_job = pipeline.LSCDataFindJob(self.cachepath,self.logpath,self.config,dax=self.is_dax())
self.datafind_job.add_opt('url-type','file')
if cp.has_option('analysis','accounting_group'):
self.datafind_job.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.datafind_job.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.datafind_job.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.datafind_job.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
self.datafind_job.set_sub_file(os.path.abspath(os.path.join(self.basepath,'datafind.sub')))
self.preengine_job = EngineJob(self.config, os.path.join(self.basepath,'prelalinference.sub'),self.logpath,engine='lalinferencedatadump',ispreengine=True,dax=self.is_dax())
self.preengine_job.set_grid_site('local')
......@@ -1755,10 +1755,10 @@ class EngineJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
pipeline.CondorDAGJob.__init__(self,universe,exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
try:
hostname=socket.gethostbyaddr(socket.gethostname())[0]
except:
......@@ -2177,10 +2177,10 @@ class BayesWavePSDJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_section('bayeswave'):
self.add_ini_opts(cp,'bayeswave')
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2211,10 +2211,10 @@ class ResultsPageJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','resultspage')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax) # Job always runs locally
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2327,10 +2327,10 @@ class CoherenceTestJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','coherencetest')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2398,10 +2398,10 @@ class MergeJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','mergeNSscript')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2463,10 +2463,10 @@ class CombineMCMCJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','combinePTMCMCh5script')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
self.set_sub_file(os.path.abspath(submitFile))
self.set_stdout_file(os.path.join(logdir,'combine-$(cluster)-$(process).out'))
self.set_stderr_file(os.path.join(logdir,'combine-$(cluster)-$(process).err'))
......@@ -2507,10 +2507,10 @@ class GraceDBJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
#pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.CondorDAGJob.__init__(self,"scheduler",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
self.set_sub_file(os.path.abspath(submitFile))
self.set_stdout_file(os.path.join(logdir,'gracedb-$(cluster)-$(process).out'))
self.set_stderr_file(os.path.join(logdir,'gracedb-$(cluster)-$(process).err'))
......@@ -2577,10 +2577,10 @@ class ROMJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','computeroqweights')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2638,10 +2638,10 @@ class BayesLineJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','bayesline')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......@@ -2721,10 +2721,10 @@ class SkyAreaJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
exe=cp.get('condor','skyarea')
pipeline.CondorDAGJob.__init__(self,"vanilla",exe)
pipeline.AnalysisJob.__init__(self,cp,dax=dax)
if cp.has_option('analysis','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('analysis','accounting_group'))
if cp.has_option('analysis','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('analysis','accounting_group_user'))
if cp.has_option('condor','accounting_group'):
self.add_condor_cmd('accounting_group',cp.get('condor','accounting_group'))
if cp.has_option('condor','accounting_group_user'):
self.add_condor_cmd('accounting_group_user',cp.get('condor','accounting_group_user'))
requirements=''
if cp.has_option('condor','queue'):
self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
......
......@@ -326,22 +326,22 @@ for sampler in samps:
with open('pegasus.properties','w') as fout:
for line in lines:
fout.write(line)
if cp.has_option('analysis','accounting_group'):
if cp.has_option('condor','accounting_group'):
lines=[]
with open('sites.xml') as fin:
for line in fin:
if '<profile namespace="condor" key="getenv">True</profile>' in line:
line=line+' <profile namespace="condor" key="accounting_group">'+cp.get('analysis','accounting_group')+'</profile>\n'
line=line+' <profile namespace="condor" key="accounting_group">'+cp.get('condor','accounting_group')+'</profile>\n'
lines.append(line)
with open('sites.xml','w') as fout:
for line in lines:
fout.write(line)
if cp.has_option('analysis','accounting_group_user'):
if cp.has_option('condor','accounting_group_user'):
lines=[]
with open('sites.xml') as fin:
for line in fin:
if '<profile namespace="condor" key="getenv">True</profile>' in line:
line=line+' <profile namespace="condor" key="accounting_group_user">'+cp.get('analysis','accounting_group_user')+'</profile>\n'
line=line+' <profile namespace="condor" key="accounting_group_user">'+cp.get('condor','accounting_group_user')+'</profile>\n'
lines.append(line)
with open('sites.xml','w') as fout:
for line in lines:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment