Polarization

40 files changed: +8274 -3524
@@ -885,18 +885,6 @@ def condor_job_config(job_type, condor_job, config_parser):
     requires=[]
-    # If the process exits successfully after receiving this signal, intermediate file transfer will occur
-    # "success" occurs when the process exits on signal CheckpointExitSignal
-    # Otherwise, "success" occurs when the process exits with code CheckpointExitCode
-    condor_job.add_condor_cmd('+CheckpointExitBySignal', False)
-    condor_job.add_condor_cmd('+CheckpointExitSignal', '"SIGTERM"')
-    condor_job.add_condor_cmd('+CheckpointExitCode', 130)
-    condor_job.add_condor_cmd('+SuccessCheckpointExitBySignal', False)
-    condor_job.add_condor_cmd('+SuccessCheckpointExitSignal', '"SIGTERM"')
-    condor_job.add_condor_cmd('+SuccessCheckpointExitCode', 130)
-    condor_job.add_condor_cmd('+WantFTOnCheckpoint', True)
-    condor_job.add_condor_cmd('+CheckpointSig', 130)
-    #condor_job.add_condor_cmd('+SpoolOnEvict', False)
     #
     # Singularity configuration
@@ -933,8 +921,32 @@ def condor_job_config(job_type, condor_job, config_parser):
print("Configuring file transfers for {}".format(job_type), file=sys.stdout)
condor_job.add_condor_cmd('should_transfer_files', 'YES')
condor_job.add_condor_cmd("transfer_executable", False)
# Only checkpoitn bayeswave jobs
if job_type == 'bayeswave':
condor_job.add_condor_cmd('when_to_transfer_output', 'ON_EXIT_OR_EVICT')
#
# Checkpoint configuration
#
# See:
# https://htcondor-wiki.cs.wisc.edu/index.cgi/wiki?p=HowToRunSelfCheckpointingJobs
# Using +SuccessCheckpointExitCode (recommended approach)
if job_type == 'bayeswave':
condor_job.add_condor_cmd('+SuccessCheckpointExitCode', 77)
condor_job.add_condor_cmd('+WantFTOnCheckpoint', True)
# "Working Around The Assumptions"
#
# condor_job.add_condor_cmd('+SuccessCheckpointExitBySignal', True) #
# condor_job.add_condor_cmd('+SuccessCheckpointExitSignal', '"SIGTERM"')
# condor_job.add_condor_cmd('+CheckpointExitSignal', '"SIGTERM"')
# "Delayed Transfers" (strongly discouraged)
# condor_job.add_condor_cmd('+WantCheckpointSignal', True)
# condor_job.add_condor_cmd('+CheckpointSig', '"SIGINT"')
# condor_job.add_condor_cmd('when_to_transfer_output', 'ON_EXIT_OR_EVICT')
else:
condor_job.add_condor_cmd('when_to_transfer_output', 'ON_EXIT')
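The +SuccessCheckpointExitCode = 77 / +WantFTOnCheckpoint pair added above follows the self-checkpointing recipe on the HTCondor wiki page cited in the comments: the executable is expected to exit with the designated code after writing a checkpoint, HTCondor then transfers the sandbox and restarts the job, and the restarted job resumes from the transferred checkpoint. Below is a minimal sketch of that job-side contract; the checkpoint file name, interval, and work loop are placeholders, not BayesWave's actual checkpointing code.

```python
#!/usr/bin/env python
# Sketch of a self-checkpointing executable matching +SuccessCheckpointExitCode = 77.
# The file name, interval, and workload below are placeholders.
import json
import os
import sys

CHECKPOINT = "checkpoint.json"   # hypothetical checkpoint file
INTERVAL = 1000                  # iterations between checkpoints
TOTAL = 10000                    # total iterations of (placeholder) work

# Resume from a checkpoint transferred back into the sandbox, if present
start = 0
if os.path.exists(CHECKPOINT):
    with open(CHECKPOINT) as f:
        start = json.load(f)["iteration"]

for i in range(start, TOTAL):
    # ... real work would happen here ...
    if (i + 1) % INTERVAL == 0 and (i + 1) < TOTAL:
        with open(CHECKPOINT, "w") as f:
            json.dump({"iteration": i + 1}, f)
        sys.exit(77)             # tells HTCondor: checkpoint taken, transfer files and restart me

sys.exit(0)                      # any other exit code means the job finished (or failed)
```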
@@ -966,6 +978,11 @@ def condor_job_config(job_type, condor_job, config_parser):
                 config_parser.get('condor','desired-sites'))
     except configparser.NoOptionError:
         pass
+    try:
+        condor_job.add_condor_cmd('+UNDESIRED_Sites',
+                config_parser.get('condor','undesired-sites'))
+    except configparser.NoOptionError:
+        pass
     # Ensure LIGO data is present
     if not config_parser.getboolean('datafind','sim-data'):
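The new +UNDESIRED_Sites block mirrors the existing +DESIRED_Sites handling: the value is read from the [condor] section of the workflow config and silently skipped when the option is absent. A small standalone sketch of that lookup pattern, using placeholder site names rather than any real configuration:

```python
# Sketch of how the optional [condor] undesired-sites entry is consumed;
# the section contents are placeholders, not recommended values.
import configparser

cp = configparser.ConfigParser()
cp.read_string("""
[condor]
desired-sites = SiteA,SiteB
undesired-sites = SiteC
""")

try:
    undesired = cp.get('condor', 'undesired-sites')   # -> "SiteC"
except configparser.NoOptionError:
    undesired = None   # option omitted: no +UNDESIRED_Sites attribute is added to the job
```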
@@ -1027,7 +1044,8 @@ def condor_job_config(job_type, condor_job, config_parser):
 class bayeswaveJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
-    def __init__(self, cp, cacheFiles, injfile=None, numrel_data=None, dax=False):
+    def __init__(self, cp, cacheFiles, injfile=None, numrel_data=None,
+                 dax=False):
         #
         # [condor]: Common workflow configuration
@@ -1042,7 +1060,7 @@ class bayeswaveJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
             'BayesWave_$(macrooutputDir)-$(cluster)-$(process).out'))
         self.set_stderr_file(os.path.join(workdir, 'logs',
             'BayesWave_$(macrooutputDir)-$(cluster)-$(process).err'))
-        self.set_log_file(os.path.join('workdir', 'logs',
+        self.set_log_file(os.path.join(workdir, 'logs',
             'BayesWave_$(macrooutputDir)-$(cluster)-$(process).log'))
         if cp.has_option('condor','arch'):
@@ -1077,7 +1095,7 @@ class bayeswaveJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
self.add_condor_cmd("+PreArgs", '"$(macrooutputDir) bayeswave"')
# Configure file transfers
transferstring='setupdirs.py,datafind'
transferstring='datafind,setupdirs.py'
if cp.getboolean('condor','copy-frames'):
transferstring += ',$(macroframes)'
@@ -1265,8 +1283,8 @@ class bayeswave_postJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
             'BayesWavePost_$(macrooutputDir)-$(cluster)-$(process).out'))
         self.set_stderr_file(os.path.join(workdir, 'logs',
             'BayesWavePost_$(macrooutputDir)-$(cluster)-$(process).err'))
-        self.set_log_file(os.path.join('workdir', 'logs',
-            'BayesWave_$(macrooutputDir)-$(cluster)-$(process).log'))
+        self.set_log_file(os.path.join(workdir, 'logs',
+            'BayesWavePost_$(macrooutputDir)-$(cluster)-$(process).log'))
         if cp.has_option('condor','arch'):
             self.add_condor_cmd('+arch',cp.get('condor','arch'))
@@ -1576,7 +1594,7 @@ class megaskyJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
             'megasky_$(macrooutputDir)-$(cluster)-$(process).out'))
         self.set_stderr_file(os.path.join(workdir, 'logs',
             'megasky_$(macrooutputDir)-$(cluster)-$(process).err'))
-        self.set_log_file(os.path.join('workdir', 'logs',
+        self.set_log_file(os.path.join(workdir, 'logs',
             'megasky_$(macrooutputDir)-$(cluster)-$(process).log'))
         #
@@ -1623,7 +1641,7 @@ class megaplotJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
             'megaplot_$(macrooutputDir)-$(cluster)-$(process).out'))
         self.set_stderr_file(os.path.join(workdir, 'logs',
             'megaplot_$(macrooutputDir)-$(cluster)-$(process).err'))
-        self.set_log_file(os.path.join('workdir', 'logs',
+        self.set_log_file(os.path.join(workdir, 'logs',
             'megaplot_$(macrooutputDir)-$(cluster)-$(process).log'))
         #
@@ -1666,6 +1684,8 @@ class submitToGraceDB(pipeline.CondorDAGJob,pipeline.AnalysisJob):
         # --- Allow desired sites
         if cp.has_option('condor','desired-sites'):
             self.add_condor_cmd('+DESIRED_Sites',cp.get('condor','desired-sites'))
+        if cp.has_option('condor','undesired-sites'):
+            self.add_condor_cmd('+UNDESIRED_Sites',cp.get('condor','undesired-sites'))
         if cp.has_option('condor', 'accounting-group'):
             self.add_condor_cmd('accounting_group', cp.get('condor', 'accounting-group'))