Compare revisions
Commits on Source (3)
@@ -173,13 +173,19 @@ def x509_manipulation(workdir):
     """
     Copy x509 proxy cert to the working directory and return its location
     """
+    print("---------------------------------------------------------\n")
+    print("Warning!!\n")
+    print("It looks like you're still using an X509 proxy.\n")
+    print("Please consider using scitokens instead\n")
+    print("see: https://computing.docs.ligo.org/guide/auth/scitokens/")
+    print("---------------------------------------------------------\n")
     try:
         print("Trying to get X509 location from igwn_auth_utils")
         x509 = igwn_auth_utils.find_x509_credentials(timeleft=600)
     except:
         traceback.print_exc(file=sys.stderr)
-        print("Warning: No X509 proxy found, please run ligo-proxy-init")
+        print("Warning: No X509 proxy found, please run ligo-proxy-init...")

     # Copy X509 to workdir and return the new path
@@ -298,6 +304,21 @@ class eventTrigger:
         self.hv_time_lag = hv_time_lag
         self.lv_time_lag = lv_time_lag
         self.total_time_lag = total_time_lag
+
+        # Keep track of actual GPS times in each IFO
+        ifo_list = ast.literal_eval(cp.get('input', 'ifo-list'))
+        self.perIFO_trigtime = {}
+        if 'H1' in ifo_list:
+            self.perIFO_trigtime['H1'] = trigger_time - total_time_lag
+        if 'L1' in ifo_list:
+            self.perIFO_trigtime['L1'] = trigger_time - hl_time_lag
+        if 'V1' in ifo_list:
+            if 'H1' in ifo_list:
+                self.perIFO_trigtime['V1'] = trigger_time - hv_time_lag
+            elif 'L1' in ifo_list:
+                self.perIFO_trigtime['V1'] = trigger_time - lv_time_lag
+
         self.trigger_frequency = trigger_frequency

         # Variable sample rate / window length [fixed TF volume]
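Note: the new attribute just shifts the nominal trigger time by each detector's slide. A minimal sketch with made-up numbers (illustrative only; lag conventions as in the hunk above):

    # Sketch, not source code: hypothetical GPS time and lags
    trigger_time   = 1187008882.4   # nominal trigger time
    total_time_lag = 10.0           # H1 slide
    hl_time_lag    = 10.0           # L1 slide
    hv_time_lag    = 5.0            # V1 slide when H1 is in the network

    perIFO_trigtime = {
        'H1': trigger_time - total_time_lag,  # 1187008872.4
        'L1': trigger_time - hl_time_lag,     # 1187008872.4
        'V1': trigger_time - hv_time_lag,     # 1187008877.4
    }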
@@ -1074,8 +1095,13 @@ def condor_job_config(job_type, condor_job, config_parser):
     # Accounting configurations
     #
     workdir = os.getcwd()
-    x509 = x509_manipulation(os.getcwd())
-    condor_job.add_condor_cmd('x509userproxy', x509)
+    if not config_parser.getboolean('condor','scitoken-auth'):
+        x509 = x509_manipulation(os.getcwd())
+        condor_job.add_condor_cmd('x509userproxy', x509)
+    else: # add commands for using scitokens
+        condor_job.add_condor_cmd('use_oauth_services','igwn') # TODO -- would this ever need to be anything else?
+        condor_job.add_condor_cmd('igwn_oauth_permissions','read:/ligo')

     try:
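For reference, the scitokens branch above should come out in the generated submit files as lines of the form below, assuming add_condor_cmd(key, value) emits "key = value" as in the pipeline module used here:

    use_oauth_services = igwn
    igwn_oauth_permissions = read:/ligo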
@@ -1192,7 +1218,11 @@ class bayeswaveJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
         #
         if cp.getboolean('condor', 'transfer-files'):
-            transferstring='datafind,$(macrooutputDir)'
+            # See if this works for individual caches
+            if cp.getboolean('condor','copy-frames'):
+                transferstring = '$(macrooutputDir)/datafind,$(macrooutputDir)'
+            else:
+                transferstring = 'datafind,$(macrooutputDir)'

             # Generate a script for PreCmd to setup directory structure
             if condor_precommand:
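With copy-frames enabled, the new transfer string presumably feeds the job's transfer_input_files, so each job now carries its own per-trigger datafind directory instead of the shared top-level one:

    transfer_input_files = $(macrooutputDir)/datafind,$(macrooutputDir)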
@@ -1363,10 +1393,12 @@ class bayeswaveNode(pipeline.CondorDAGNode, pipeline.AnalysisNode):
             self.frames=[]
             for ifo in framedict.keys():
                 for frame in framedict[ifo]:
                     self.frames.append(frame)
             self.frames = ",".join(self.frames)

         self.add_macro('macroframes', self.frames)

+    # TODO: would it be cleaner to have a single time-slide variable as a dictionary?
     def set_L1_timeslide(self, L1_timeslide):
         self.add_var_opt('L1-timeslide', L1_timeslide)
         self.L1_timeslide = L1_timeslide
...
@@ -219,6 +219,7 @@ def parser():
     parser.add_argument("--transfer-files", default=True, action="store_true")
     parser.add_argument("--shared-filesystem", default=False, action="store_true")
     parser.add_argument("--singularity", default=None)
+    parser.add_argument("--scitoken-auth", default=False, action="store_true")

     opts = parser.parse_args()
@@ -375,9 +376,17 @@ try:
     cp.getboolean('condor', 'osg-deploy')
 except:
     cp.set('condor', 'osg-deploy', str(opts.osg_deploy))

+try:
+    cp.getboolean('condor', 'scitoken-auth')
+except:
+    cp.set('condor', 'scitoken-auth', str(opts.scitoken_auth))
+
 if cp.getboolean('condor', 'osg-deploy'):
-    opts.server="datafind.ligo.org:443"
+    if cp.getboolean('condor','scitoken-auth'):
+        opts.server = "datafind.igwn.org"
+    else:
+        opts.server = "datafind.ligo.org:443"
     cp.set('condor', 'transfer-files', str(True))

 # try:
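Either route works: pass --scitoken-auth on the command line, or set the option in the ini file directly. A minimal [condor] stanza exercising the new behaviour might look like this (a sketch; other required keys omitted):

    [condor]
    osg-deploy = True
    scitoken-auth = True
    transfer-files = True
    copy-frames = True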
@@ -565,14 +574,14 @@ seglens = [trigger.seglen for trigger in trigger_list.triggers]
 if cp.has_option('input','gps-start-time'):
     gps_start_time = cp.getint('input','gps-start-time')
-else:
+else: # time slides
     trigtime = min(trigger_times) - (max(np.absolute(hl_lag_times))+25.0)
     seg, _ = job_times(trigtime, max(seglens), psdlen, padding)
     gps_start_time = seg[0]

 if cp.has_option('input','gps-end-time'):
     gps_end_time = cp.getint('input','gps-end-time')
-else:
+else: # time slides
     trigtime = max(trigger_times) + (max(np.absolute(hl_lag_times))+25.0)
     seg,_ = job_times(trigtime, max(seglens), psdlen, padding)
     gps_end_time = seg[1]
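The (max |lag| + 25 s) buffer simply widens the requested data span so every time-slid trigger still fits inside it before job_times rounds the segment out. A worked sketch with made-up numbers:

    import numpy as np

    # Illustrative values only
    trigger_times = [1187008882.4, 1187008912.4]
    hl_lag_times = np.array([-10.0, 0.0, 10.0])

    buffer = max(np.absolute(hl_lag_times)) + 25.0   # 35.0 s
    print(min(trigger_times) - buffer)               # 1187008847.4, candidate start
    print(max(trigger_times) + buffer)               # 1187008947.4, candidate end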
@@ -972,12 +981,17 @@ for t,trigger in enumerate(trigger_list.triggers):
     #----------------------------------------
     # Check job times fall within available data
-    job_segment, psd_start = job_times(trigger.trigger_time, trigger.seglen,
-            psdlen, padding)
+    # Define the job segment per IFO for time slides;
+    # this makes data transfer easier
+    job_segment = {}
+    psd_start = {}
+    for ifo in ifo_list:
+        job_segment[ifo], psd_start[ifo] = job_times(trigger.perIFO_trigtime[ifo], trigger.seglen,
+                psdlen, padding)

     for ifo in ifo_list:
-        job_in_segments = [seg.__contains__(job_segment) \
+        job_in_segments = [seg.__contains__(job_segment[ifo]) \
                 for seg in segmentList[ifo]]

         if not any(job_in_segments):
@@ -988,7 +1002,7 @@ for t,trigger in enumerate(trigger_list.triggers):
             bad_job['seglen']=trigger.seglen
             bad_job['psdlen']=psdlen
             bad_job['padding']=padding
-            bad_job['job_segment']=job_segment
+            bad_job['job_segment']=job_segment[ifo]
             bad_job['data_segments']=segmentList[ifo]

             unanalyzeable_jobs.append(bad_job)
@@ -998,6 +1012,7 @@ for t,trigger in enumerate(trigger_list.triggers):
             print(bad_job, file=sys.stderr)
             break

+    # MM: is this an orphan else?
     else:

         if 'H1' in ifo_list:
@@ -1014,13 +1029,22 @@ for t,trigger in enumerate(trigger_list.triggers):
     if not cp.getboolean('datafind','sim-data'):
         #
         # Identify frames associated with this job
         if opts.copy_frames:
+            custom_cache={}
             for ifo in ifo_list:
-                frame_idx = [seg.intersects(job_segment) for seg in frameSegs[ifo]]
-                transferFrames[ifo] = [frame for f,frame in
-                        enumerate(framePaths[ifo]) if frame_idx[f]]
+                cache_file = os.path.join(datafind_dir,
+                        '{ifo}.cache'.format(ifo=ifo))
+                with open(cache_file) as cache_entries:
+                    cache_entries = cache_entries.readlines()
+                frame_idx = [seg.intersects(job_segment[ifo]) for seg in frameSegs[ifo]]
+                if cp.getboolean('condor','scitoken-auth'):
+                    transferFrames[ifo] = [f"igwn+{frame}" for f,frame in
+                            enumerate(framePaths[ifo]) if frame_idx[f]] # required format for OSDF file transfer
+                else:
+                    transferFrames[ifo] = [frame for f,frame in
+                            enumerate(framePaths[ifo]) if frame_idx[f]]
+                custom_cache[ifo] = [e for ii,e in enumerate(cache_entries) if frame_idx[ii]] # per-trigger cache keeps file transfers manageable

     # Make output directory for this trigger
     outputDir = 'trigtime_' + str('%.9f'%trigger.trigger_time) + '_' + \
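To make the scitoken branch concrete: each surviving frame path gets an igwn+ prefix so that HTCondor's OSDF file-transfer plugin authenticates the download with the igwn token. A sketch with a hypothetical URL (the real entries come from framePaths[ifo]):

    # Hypothetical frame URL, for illustration only
    frame = "osdf:///igwn/ligo/frames/O3/H-H1_HOFT_C01-1239000000-4096.gwf"
    transfer_entry = f"igwn+{frame}"
    # -> "igwn+osdf:///igwn/ligo/frames/O3/H-H1_HOFT_C01-1239000000-4096.gwf"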
@@ -1029,7 +1053,21 @@ for t,trigger in enumerate(trigger_list.triggers):
             #str(float(trigger.hv_time_lag)) #+ str(uuid.uuid4())
     outputDir = os.path.join(outputDir)
     if not os.path.exists(outputDir): os.makedirs(outputDir)
+    if not os.path.exists(os.path.join(outputDir,'datafind')): os.makedirs(os.path.join(outputDir,'datafind'))

     dump_job_info(outputDir, trigger)

+    # Write the custom cache files into each trigger directory
+    # (again, keeps file transfers manageable)
+    if opts.copy_frames:
+        for ifo in ifo_list:
+            cachefile = os.path.join(outputDir,'datafind','{ifo}.cache'.format(ifo=ifo))
+            with open(cachefile, 'w') as new_cache:
+                for entry in custom_cache[ifo]:
+                    new_cache.write(entry)
+
+    # A little hacky, but we need a single psd_start for the BW command line
+    dummy, psd_start = job_times(trigger.trigger_time, trigger.seglen,
+            psdlen, padding)

     # ------------------------------------------------------------------
     # BAYESLINE MEDIAN PSD NODES
@@ -1198,6 +1236,9 @@ for t,trigger in enumerate(trigger_list.triggers):
         elif 'L1' in ifo_list and 'V1' in ifo_list:
             bayeswave_node.set_V1_timeslide(trigger.lv_time_lag)
             bayeswave_post_node.set_V1_timeslide(trigger.lv_time_lag)
+        # bayeswave_node.set_perIFO_trigtime(ifo_list, trigger.trigger_time, trigger.total_time_lag, trigger.hl_time_lag, trigger.hv_time_lag)
+        # print("per IFO time=", bayeswave_node.perIFO_trigtime)

         if cp.has_option('bayeswave_options','BW-inject'):
             bayeswave_post_node.set_BW_event(trigger.BW_event)
...