Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • iain.morton/locklost
  • oli.patane/locklost-olifork
  • timothy.ohanlon/locklost
  • benjaminrobert.mannix/locklost
  • austin.jennings/locklost
  • camilla.compton/locklost
  • arnaud.pele/locklost
  • yamamoto/locklost
  • marc.lormand/locklost
  • saravanan.tiruppatturrajamanikkam/locklost
  • nikhil-mukund/locklost
  • patrick.godwin/locklost
  • yannick.lecoeuche/locklost
  • jameson.rollins/locklost
14 results
Show changes
Showing
with 548 additions and 102 deletions
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.patches as mpatches
from gwpy.segments import Segment
from .. import logger
from .. import config
from .. import data
from .. import plotutils
##############################################

# Legend patch handles for the test-mass color scheme used in the violin
# plots below; the hex colors match the mapping in plot_color().
itmx = mpatches.Patch(color='#F05039', label='ITMX')
itmy = mpatches.Patch(color='#EEBAB4', label='ITMY')
etmx = mpatches.Patch(color='#1F449C', label='ETMX')
etmy = mpatches.Patch(color='#7CA1CC', label='ETMY')
# black patch representing the saturation-threshold line in the legend
tag = mpatches.Patch(color='k', label='Tag Threshold')
def plot_color(string):
    """Return the trace color (hex code) for the test mass named in *string*.

    Checks for the optic name as a substring, in the same order as the
    legend palette; anything that matches none of ITMX/ITMY/ETMX falls
    back to the ETMY color.
    """
    palette = (
        ('ITMX', '#F05039'),
        ('ITMY', '#EEBAB4'),
        ('ETMX', '#1F449C'),
    )
    for optic, color in palette:
        if optic in string:
            return color
    return '#7CA1CC'
def check_violin(event):
    """Check 1st, 2nd, and 3rd order violin modes around a lock loss.

    Fetches the violin-mode monitor channels for a window around the event
    GPS time, tags the event 'VIOLIN' (L1 only) when a mode exceeded the
    saturation threshold before the lock loss, and saves a three-panel
    monitor plot ('VIOLIN_monitor.png') into the event directory.
    """
    # NOTE(review): H1 handling is not implemented; the function logs a
    # notice and then proceeds with the generic fetch/plot path anyway.
    if config.IFO == 'H1':
        logger.info('NOT SET UP FOR LHO')
    plotutils.set_rcparams()
    mod_window = [config.VIOLIN_SEARCH_WINDOW[0], config.VIOLIN_SEARCH_WINDOW[1]]
    segment = Segment(mod_window).shift(int(event.gps))
    VIOLIN_channels = data.fetch(config.VIOLIN_CHANNELS, segment)
    # first pass: look for any threshold crossing in the pre-lockloss data
    saturating = False
    for buf in VIOLIN_channels:
        srate = buf.sample_rate
        t = np.arange(segment[0], segment[1], 1/srate)
        # only consider samples more than 2 s before the lock loss
        idxs = ((t-event.gps) < -2)
        if np.max(buf.data[idxs]) > config.VIOLIN_SAT_THRESH:
            saturating = True
    if saturating:
        # ADD H1 code here
        # tag only if the lock loss came from a guardian state >= 1020
        # (presumably a low-noise state -- TODO confirm against the L1
        # guardian state index table)
        if config.IFO == 'L1':
            if event.transition_index[0] >= 1020:
                event.add_tag('VIOLIN')
    else:
        if config.IFO == 'L1':
            logger.info('VIOLINS are ok')
    # second pass: three-panel monitor figure -- 1st-order modes (top),
    # 2nd-order (bottom left), 3rd-order (bottom right)
    gs = gridspec.GridSpec(2, 4)
    gs.update(wspace=0.5)
    fig = plt.figure(figsize=(22*3, 16*3))
    ax1 = fig.add_subplot(gs[0, :2])
    ax2 = fig.add_subplot(gs[1, :2])
    ax3 = fig.add_subplot(gs[1, 2:])
    for idx, buf in enumerate(VIOLIN_channels):
        srate = buf.sample_rate
        t = np.arange(segment[0], segment[1], 1/srate)
        # channel routing by mode number: MODE3-6 -> 1st order,
        # MODE11-14 -> 2nd order, everything else -> 3rd order
        if any(substring in buf.channel for substring in ['MODE3', 'MODE4', 'MODE5', 'MODE6']):
            ax1.plot(
                t-event.gps,
                buf.data,
                label=buf.channel,
                alpha=0.8,
                lw=8,
                color=plot_color(buf.channel),
            )
        elif any(substring in buf.channel for substring in ['MODE11', 'MODE12', 'MODE13', 'MODE14']):
            ax2.plot(
                t-event.gps,
                buf.data,
                label=buf.channel,
                alpha=0.8,
                lw=8,
                color=plot_color(buf.channel),
            )
        else:
            ax3.plot(
                t-event.gps,
                buf.data,
                label=buf.channel,
                alpha=0.8,
                lw=8,
                color=plot_color(buf.channel),
            )
    # threshold reference line (L1 threshold only)
    if config.IFO == 'L1':
        ax1.axhline(
            config.VIOLIN_SAT_THRESH,
            linestyle='--',
            color='black',
            label='Violin threshold',
            lw=5,
        )
        ax2.axhline(
            config.VIOLIN_SAT_THRESH,
            linestyle='--',
            color='black',
            label='Violin threshold',
            lw=5,
        )
        ax3.axhline(
            config.VIOLIN_SAT_THRESH,
            linestyle='--',
            color='black',
            label='Violin threshold',
            lw=5,
        )
    ax1.set_xlabel('Time [s] since lock loss at {}'.format(event.gps), labelpad=10)
    ax2.set_xlabel('Time [s] since lock loss at {}'.format(event.gps), labelpad=10)
    ax3.set_xlabel('Time [s] since lock loss at {}'.format(event.gps), labelpad=10)
    ax1.set_ylabel('Violin Magnitude [log]')
    ax2.set_ylabel('Violin Magnitude [log]')
    ax3.set_ylabel('Violin Magnitude [log]')
    # NOTE(review): `t` here is left over from the last channel in the
    # loop above; assumes all channels share the same sample rate/span.
    ax1.set_xlim(t[0]-event.gps, t[-1]-event.gps)
    ax2.set_xlim(t[0]-event.gps, t[-1]-event.gps)
    ax3.set_xlim(t[0]-event.gps, t[-1]-event.gps)
    '''
    if config.IFO == 'L1':
    ax1.set_ylim(-18, -14)
    ax2.set_ylim(-18, -14)
    ax3.set_ylim(-18, -14)
    '''
    ax1.legend(handles=[itmx, itmy, etmx, etmy, tag], loc='best')
    ax2.legend(handles=[itmx, itmy, etmx, etmy, tag], loc='best')
    ax3.legend(handles=[itmx, itmy, etmx, etmy, tag], loc='best')
    # ax1.legend(loc='best')
    ax1.set_title('1st Order Violins')
    ax2.set_title('2nd Order Violins')
    ax3.set_title('3rd Order Violins')
    ax1.grid()
    ax2.grid()
    ax3.grid()
    outfile_plot = 'VIOLIN_monitor.png'
    outpath_plot = event.path(outfile_plot)
    fig.savefig(outpath_plot, bbox_inches='tight')
import os
import logging
import numpy as np
import matplotlib.pyplot as plt
from gwpy.segments import Segment
from .. import logger
from .. import config
from .. import data
from .. import plotutils
##############################################
def check_wind(event):
"""Checks for elevated windspeed.
......@@ -40,9 +41,9 @@ def check_wind(event):
if windy:
event.add_tag('WINDY')
else:
logging.info("wind speed below threshold")
logger.info("wind speed below threshold")
fig, ax = plt.subplots(1, figsize=(22,16))
fig, ax = plt.subplots(1, figsize=(22, 16))
for buf in wind_channels:
srate = buf.sample_rate
t = np.arange(segment[0], segment[1], 1/srate)
......@@ -66,7 +67,7 @@ def check_wind(event):
ax.grid()
ax.set_xlabel('Time [s] since lock loss at {}'.format(event.gps), labelpad=10)
ax.set_ylabel('Velocity [mph]')
ax.set_ylim(0, max_windspeed+1)
# ax.set_ylim(0, max_windspeed+1)
ax.set_xlim(t[0]-event.gps, t[-1]-event.gps)
ax.legend(loc='best')
ax.set_title('End/corner station wind speeds', y=1.04)
......
import os
import argparse
import logging
import importlib
from gwpy.segments import Segment, SegmentList
from . import set_signal_handlers
from . import config_logger
from . import logger
from . import config
from . import data
from .event import LocklossEvent
from . import segments
from . import condor
##################################################
def search_buf(buf, previous=None, event_callback=None):
"""Search for lock lock events in buffer
......@@ -27,18 +30,18 @@ def search_buf(buf, previous=None, event_callback=None):
nevents = 0
for time, pval, val in data.gen_transitions(buf, previous):
trans = (int(pval), int(val))
logging.debug("transition: {:0.3f} {}->{}".format(time, trans[0], trans[1]))
logger.debug("transition: {:0.3f} {}->{}".format(time, trans[0], trans[1]))
if val not in lockloss_indices:
continue
logging.info("lockloss found: {} {}->{}".format(time, *trans))
logger.info("lockloss found: {} {}->{}".format(time, *trans))
nevents += 1
try:
event = LocklossEvent.create(time, trans)
except OSError as e:
logging.info(e)
logger.info(e)
event = LocklossEvent(time)
if event_callback:
logging.info("executing event callback: {}({})".format(
logger.info("executing event callback: {}({})".format(
event_callback.__name__, event.id))
event_callback(event)
return nevents
......@@ -48,29 +51,32 @@ def search(segment):
"""Search segment for events
"""
logging.debug("searching segment {}...".format(segment))
logger.debug("searching segment {}...".format(segment))
channel = config.GRD_STATE_N_CHANNEL
buf = data.fetch([channel], segment)[0]
nevents = search_buf(buf)
segments.write_segments(config.SEG_DIR, [(int(buf.gps_start), int(buf.gps_stop))])
logging.info("{} events found".format(nevents))
logger.info("{} events found".format(nevents))
def search_iterate(segment=None, event_callback=None, stat_file=None):
"""Iterative search for events (NDS-only)
"""
channel = config.GRD_STATE_N_CHANNEL
if segment:
logging.info("searching segment {}...".format(segment))
logger.info(f"searching channel {channel} segment {segment}...")
else:
logging.info("searching online...")
logger.info(f"searching channel {channel} online...")
previous = None
nevents = 0
nbufs = 0
progress = None
channel = config.GRD_STATE_N_CHANNEL
for bufs in data.nds_iterate([channel], start_end=segment):
buf = bufs[0]
if previous is None:
logger.info("initial state: {} @{:0.3f}".format(
buf.data[0], buf.gps_start))
nevents += search_buf(
buf,
previous=previous,
......@@ -93,11 +99,12 @@ def search_iterate(segment=None, event_callback=None, stat_file=None):
if segment is None:
raise RuntimeError("NDS iteration returned unexpectedly.")
else:
logging.info("{} events found".format(nevents))
logger.info("{} events found".format(nevents))
# segments.compress_segdir(config.SEG_DIR)
##################################################
def _parser_add_arguments(parser):
from .util import GPSTimeParseAction
parser.add_argument('start', action=GPSTimeParseAction,
......@@ -137,9 +144,9 @@ def main(args=None):
completed_segs = segments.load_segments(config.SEG_DIR)
reduced_segs = SegmentList([full_seg]) - completed_segs
if not reduced_segs:
logging.info("All segments analyzed")
logger.info("All segments analyzed")
raise SystemExit()
logging.debug("segments: {}".format(reduced_segs))
logger.debug("segments: {}".format(reduced_segs))
def condor_args_gen():
for s in segments.slice_segments(
......@@ -177,10 +184,7 @@ def main(args=None):
# direct execution of this module intended for condor jobs
if __name__ == '__main__':
set_signal_handlers()
logging.basicConfig(
level='DEBUG',
format=config.LOG_FMT,
)
config_logger()
parser = argparse.ArgumentParser()
parser.add_argument('start', type=int,
help="search start time")
......
import os
import shutil
import logging
from gwpy.segments import Segment, SegmentList
from . import logger
def load_segments(segdir, intersect=None):
"""Load segments from directory
......@@ -16,7 +17,7 @@ def load_segments(segdir, intersect=None):
for i, sf in enumerate(os.listdir(segdir)):
s, e = map(int, sf.split('-'))
seglist.append(Segment(s, e))
logging.debug("{} segments loaded".format(i+1))
logger.debug("{} segments loaded".format(i+1))
return seglist.coalesce()
......@@ -66,16 +67,14 @@ def compress_segdir(segdir):
"""
if not os.path.exists(segdir):
#raise RuntimeError("segdir does not exist")
logging.warning("segdir does not exist")
logger.warning("segdir does not exist")
return
tmpdir = segdir+'.tmp'
if os.path.exists(tmpdir):
#raise RuntimeError("segdir compression in progress")
logging.warning("segdir compression in progress, skipping")
logger.warning("segdir compression in progress, skipping")
return
logging.info("compressing segdir: {}".format(segdir))
logger.info("compressing segdir: {}".format(segdir))
shutil.move(segdir, tmpdir)
n = write_segments(segdir, load_segments(tmpdir))
logging.debug("{} segments written".format(n))
logger.debug("{} segments written".format(n))
shutil.rmtree(tmpdir)
import os
import argparse
import numpy as np
from collections import defaultdict
import pytz
import numpy as np
import matplotlib.pyplot as plt
from collections import defaultdict
from gpstime import gpsnow, gpstime
import pytz
import logging
from . import logger
from . import config
from .event import find_events
from . import plotutils
EPOCHS = {
'run': config.O3_GPS_START,
'run': config.O4_GPS_START,
'month': int(gpsnow()) - 30*24*3600,
'week': int(gpsnow()) - 7*24*3600,
}
......@@ -24,23 +25,22 @@ if config.IFO == 'H1':
if config.IFO == 'L1':
local_tz = pytz.timezone('US/Eastern')
def grab_data(gps):
"""Returns relevant lockloss summary data within three time ranges.
Looks through O3 lockloss data and returns counts for the specificed check
Looks through O4 lockloss data and returns counts for the specificed check
functions. Does this for the run, the last 30 days, and the last week.
"""
shift_times = {
'H1': [np.arange(0, 8), np.arange(8, 16), np.arange(16, 24)],
'L1': [np.concatenate(([23], np.arange(0, 8))), np.arange(8, 12), np.arange(12, 22)]
'L1': [np.concatenate(([22, 23], np.arange(0, 8))), np.arange(8, 12), np.arange(12, 22)]
}
shift_names = ['owl', 'day', 'eve']
shifts = {
shift_names[x]: {
'time': shift_times[config.IFO][x],
'counts': 0
} for x in range(3)
}
shifts = defaultdict(lambda: defaultdict(int))
for x in range(3):
for time in shift_times[config.IFO][x]:
shifts[shift_names[x]][time] = 0
transitions = defaultdict(int)
observe_durations = []
saturations = {
......@@ -49,11 +49,11 @@ def grab_data(gps):
'LOCKING_ALS': defaultdict(int),
'ACQUIRE_DRM1_1F': defaultdict(int),
'Observe': defaultdict(int),
},
},
'L1':
{
'Observe': defaultdict(int)
},
},
}
five_sats = []
tag_count = defaultdict(int)
......@@ -67,6 +67,9 @@ def grab_data(gps):
'ANTHROPOGENIC',
'WINDY',
'Unknown',
'PI_MONITOR',
'VIOLIN',
'ISS',
]
event_count = 0
......@@ -79,7 +82,7 @@ def grab_data(gps):
tag_count = check_tags(event, tags, tag_count)
event_count += 1
logging.info("Events analyzed: {}".format(event_count))
logger.info("Events analyzed: {}".format(event_count))
return transitions, observe_durations, saturations, five_sats, shifts, tag_count
......@@ -102,18 +105,18 @@ def check_tags(event, tags, tag_count):
def check_shift(event, shifts):
"""Checks which operating shift event happened during.
"""Checks which operating shift and hour event happened during.
Checks which operator shift the lockloss gps happened during and increments
a counter for that shift (for locklosses from Observe).
Checks which operator shift and hour the lockloss gps happened during and
increments a counter for that hour (for locklosses from Observe).
"""
if not event.has_tag('OBSERVE'):
return shifts
gt = gpstime.fromgps(event.gps)
gt = gt.astimezone(local_tz)
for key in shifts:
if gt.hour in shifts[key]['time']:
shifts[key]['counts'] += 1
for shift_name in shifts:
if gt.hour in shifts[shift_name]:
shifts[shift_name][gt.hour] += 1
break
return shifts
......@@ -175,7 +178,6 @@ def get_five_sats(sat_path):
for sat in all_sats:
# create shortened channel name (excluding IFO, quadrant, characters)
sat_123 = sat.split('-')
sat1 = sat_123[0]
sat2 = sat_123[1].split('_')[0]
sat3 = sat_123[1].split('_')[1]
channel_shorthand = '%s %s' % (sat2, sat3)
......@@ -210,7 +212,7 @@ def plot_summary(path, epoch):
sort_keys = list(transitions.keys())
sort_keys.sort()
sort_values = [transitions[x] for x in sort_keys if bool(sort_keys)]
fig, ax = plt.subplots(1, figsize=(22,16))
fig, ax = plt.subplots(1, figsize=(22, 16))
ax.bar(
state_position,
sort_values,
......@@ -218,7 +220,7 @@ def plot_summary(path, epoch):
)
ax.set_xlabel('State from which lockloss has occurred', labelpad=10)
ax.set_ylabel('Number of locklosses')
ax.set_title('O3 lockloss occurences by final state: %s' % (epoch))
ax.set_title('O4 lockloss occurences by final state: %s' % (epoch))
ax.set_xticks(state_position)
ax.tick_params(axis='x', which='major', labelsize=18)
ax.set_xticklabels(sort_keys, rotation=45, ha='right')
......@@ -260,7 +262,7 @@ def plot_summary(path, epoch):
sort_keys = list(sat_dict.keys())
sort_keys.sort()
sort_values = [sat_dict[x] for x in sort_keys if bool(sort_keys)]
fig, ax = plt.subplots(1, figsize=(22,16))
fig, ax = plt.subplots(1, figsize=(22, 16))
ax.bar(
sat_position,
sort_values,
......@@ -280,31 +282,39 @@ def plot_summary(path, epoch):
fig.savefig(outpath_plot, bbox_inches='tight')
plt.close()
# Lockloss shift plot
counts = [x['counts'] for x in shifts.values()]
shifts = shifts.keys()
# Lockloss shift hour plot
colors = ['#1f77b4', '#dbcb2b', '#b41f2d']
times = []
counts = []
shift_total = []
for shift_name in shifts:
times.append(shifts[shift_name].keys())
counts.append(shifts[shift_name].values())
shift_total.append(sum(shifts[shift_name].values()))
fig, ax = plt.subplots(1, figsize=(22, 16))
shift_x = np.array([0, 1, 2])
ax.bar(
shift_x,
counts,
align='center',
)
ax.set_xlabel('Operating shift', labelpad=10)
for time, count, shift, total, color in zip(times, counts, shifts.keys(), shift_total, colors):
ax.bar(
time,
count,
color=color,
label='{} total count: {}'.format(shift.upper(), total),
align='center',
)
ax.set_xlabel('Hour lockloss occurred ({})'.format(local_tz.zone), labelpad=10)
ax.set_ylabel('Number of locklosses')
ax.set_title('Number of locklosses per shift: %s' % (epoch))
ax.set_xticks(shift_x)
ax.set_xticklabels(shifts, rotation=45, ha='right')
ax.set_xlim([-1, shift_x.size])
ax.set_title('Number of locklosses from observing by hour: {}'.format(epoch))
ax.set_xlim([-0.9, 23.9])
ax.grid()
plt.legend(loc='upper left')
plt.gcf().text(0.02, 0.02, "Created: {}".format(gpsnow()), fontsize=16)
fig.tight_layout()
outpath_plot = os.path.join(epoch_path, 'Lockloss_by_shift')
outpath_plot = os.path.join(epoch_path, 'Lockloss_by_hour')
fig.savefig(outpath_plot, bbox_inches='tight')
plt.close()
# Associated tag plot
fig, ax = plt.subplots(1, figsize=(22,16))
fig, ax = plt.subplots(1, figsize=(22, 16))
shift_x = np.arange(0, len(tag_count))
ax.bar(
shift_x,
......@@ -324,7 +334,7 @@ def plot_summary(path, epoch):
fig.savefig(outpath_plot, bbox_inches='tight')
plt.close()
#saturating suspension grid plot
# saturating suspension grid plot
y_sats = sorted(list(set([i[0] for i in five_sats])))
x_sats = sorted(list(set([item for sublist in five_sats for item in sublist[1:]])))
sat_grid = np.zeros((len(y_sats), len(x_sats)))
......@@ -335,15 +345,15 @@ def plot_summary(path, epoch):
if xval in sat_set[1:]:
sat_grid[y_idx][x_idx] += 1
fig, ax = plt.subplots(1, figsize=(22,16))
fig, ax = plt.subplots(1, figsize=(22, 16))
img = ax.imshow(sat_grid, interpolation='nearest',)
ax.set_xlabel('2nd-5th saturating suspension', labelpad=10)
ax.tick_params(width=5)
ax.set_ylabel('First saturating suspension')
ax.set_title('O3 suspension correlations: %s' % (epoch))
ax.set_title('O4 suspension correlations: %s' % (epoch))
ax.set_xticks(np.arange(len(x_sats)))
ax.set_yticks(np.arange(len(y_sats)))
ax.set_xticklabels(x_sats, rotation = 45, ha = 'right')
ax.set_xticklabels(x_sats, rotation=45, ha='right')
ax.set_yticklabels(y_sats)
fig.tight_layout()
cbar = fig.colorbar(img)
......@@ -355,6 +365,7 @@ def plot_summary(path, epoch):
######################################################
def _parser_add_arguments(parser):
parser.add_argument(
'--path', '-p', default=config.SUMMARY_ROOT, type=str,
......@@ -366,6 +377,7 @@ def _parser_add_arguments(parser):
def main(args=None):
"""Generate lockloss summary plots."""
if not args:
parser = argparse.ArgumentParser()
_parser_add_arguments(parser)
......@@ -387,7 +399,3 @@ def main(args=None):
for epoch in args.epoch:
print('summarizing locklosses epoch {}'.format(epoch))
plot_summary(args.path, epoch)
if __name__ == '__main__':
main()
......@@ -17,6 +17,7 @@ WEB_SCRIPT = config.WEB_ROOT + '/index.cgi'
##################################################
def query_parse(query):
"""parse query and return dictionary
......@@ -46,8 +47,7 @@ def query_parse(query):
if not val:
continue
if key == 'tag':
val = [tag for tag in bottle.request.query.getall('tag') \
if isinstance(tag, str)]
val = [tag for tag in bottle.request.query.getall('tag') if isinstance(tag, str)]
try:
out_query[key] = val_conv[key](val)
except ValueError:
......@@ -61,7 +61,7 @@ def online_status():
"""HTML-formatted online status information
"""
stat_file = os.path.join(config.CONDOR_ONLINE_DIR, 'stat')
stat_file = config.ONLINE_STAT_FILE
if not os.path.exists(stat_file):
return '<span style="color:red">ONLINE ANALYSIS NOT RUNNING</span>'
stat = os.stat(stat_file)
......@@ -73,18 +73,18 @@ def online_status():
color = 'red'
else:
color = 'green'
wcroot = os.path.join(config.WEB_ROOT, 'events', '.condor_online')
return '<span style="color: {}">online last update: {:0.1f} min ago ({}) [<a href="{}">log</a>]</span>'.format(
return '<span style="color: {}">online last update: {:0.1f} min ago ({})</span>'.format(
color,
tsecs/60,
dt,
os.path.join(wcroot, 'out'),
)
##################################################
app = bottle.Bottle()
@app.route("/")
@app.route("/tag/<tag>")
def index(tag='all'):
......@@ -94,7 +94,7 @@ def index(tag='all'):
gps = bottle.request.query.get('event')
try:
event = LocklossEvent(gps)
except (ValueError, OSError) as e:
except (ValueError, OSError):
bottle.abort(404, {'error': {'code': 404, 'message': 'Unknown event: {}'.format(gps)}})
return event.to_dict()
......@@ -109,7 +109,7 @@ def index(tag='all'):
gps = bottle.request.query.get('event')
try:
event = LocklossEvent(gps)
except (ValueError, OSError) as e:
except (ValueError, OSError):
bottle.abort(404, 'Unknown event: {}'.format(gps))
sat_channels = []
......@@ -143,6 +143,7 @@ def index(tag='all'):
query=query,
)
@app.route("/summary")
@app.route("/summary/<epoch>")
def summary_route(epoch=None):
......@@ -157,6 +158,7 @@ def summary_route(epoch=None):
epoch=epoch,
)
@app.route("/event/<gps>")
def event_route(gps):
bottle.redirect('{}?event={}'.format(WEB_SCRIPT, gps))
......@@ -181,5 +183,6 @@ def json_route(tag='all'):
##################################################
if __name__ == '__main__':
bottle.run(app, server='cgi', debug=True)
......@@ -89,20 +89,87 @@
</div>
</div>
% else:
<p>LPY plots not created due to lack of saturating suspension channels.</p>
<hr />
<div class="container">
<div class="row">
<h3>Length-Pitch-Yaw Plots</h3>
<br />
<p>Length, pitch, and yaw drives reconstructed from osem DAC counts for ETMX L3 since no suspension stages saturated before lockloss time.</p>
% include('plots.tpl', plots=event_plot_urls(event, 'lpy'), size=6)
</div>
</div>
% end
<!-- DARM -->
% if os.path.exists(event.path('darm.png')):
<hr />
<div class="container">
<br />
<div class="row">
% darm_plot = [event.url('darm.png')]
% include('collapsed_plots.tpl', title ='DARM', id='darm', plots=darm_plot, size=18, expand=False, section='main')
</div>
</div>
% end
<!-- FSS oscillations -->
<!-- PI Monitor -->
<hr />
<div class="container">
<br />
<div class="row">
% has_tag = event.has_tag('PI_MONITOR')
% PI_plot = [event.url('PI_monitor.png')]
% include('collapsed_plots.tpl', title ='PI Monitor', id='pi', plots=PI_plot, size=18, expand=has_tag, section='main')
</div>
</div>
<!-- VIOLIN Modes -->
<hr />
<div class="container">
<br />
<div class="row">
% has_tag = event.has_tag('VIOLIN')
% VIOLIN_plot = [event.url('VIOLIN_monitor.png')]
% include('collapsed_plots.tpl', title ='Violin Mode', id='violin', plots=VIOLIN_plot, size=18, expand=has_tag, section='main')
</div>
</div>
<!-- PSL (FSS and ISS) -->
<hr />
<div class="container">
<br />
<div class="row">
% has_tag = event.has_tag('FSS_OSCILLATION')
% board_plot = [event.url('fss.png')]
% include('collapsed_plots.tpl', title ='FSS oscillation plot', id='fss', plots=board_plot, size=5, expand=has_tag, section='main')
% has_tag = event.has_tag('FSS_OSCILLATION') or event.has_tag('ISS')
% board_plot = [event.url('fss.png'), event.url('iss.png')]
% include('collapsed_plots.tpl', title ='PSL plots', id='psl', plots=board_plot, size=5, expand=has_tag, section='main')
</div>
</div>
<!-- ETM Glitch -->
% if os.path.exists(event.path('ETM_GLITCH.png')):
<hr />
<div class="container">
<br />
<div class="row">
% has_tag = event.has_tag('ETM_GLITCH')
% etm_glitch_plots = [event.url('ETM_GLITCH.png'), event.url('ETM_GLITCH_HP.png')]
% include('collapsed_plots.tpl', title ='ETM Glitch plots', id='ETM_Glitch', plots=etm_glitch_plots, size=5, expand=has_tag, section='main')
</div>
</div>
% end
<!-- HAM6 power -->
% if os.path.exists(event.path('HAM6_power.png')):
<hr />
<div class="container">
<br />
<div class="row">
% ham6_plot = [event.url('HAM6_power.png')]
% include('collapsed_plots.tpl', title ='HAM6 power plot', id='ham6', plots=ham6_plot, size=18, expand=False, section='main')
</div>
</div>
% end
<!-- BRS glitch -->
<hr />
<div class="container">
......
import os
from datetime import datetime
from .. import config
from ..event import find_events
##################################################
def event_gen(query):
"""generate events from query
......
[flake8]
ignore = E226,E501
exclude = locklost/plugins/__init__.py
......@@ -9,21 +9,21 @@ setup(
'write_to': 'locklost/version.py',
},
name = 'locklost',
description = 'LIGO lock loss tracking and analysis',
author = 'Jameson Graef Rollins',
author_email = 'jameson.rollins@ligo.org',
url = 'https://git.ligo.org/jameson.rollins/locklost.git',
license = 'GPLv3+',
name='locklost',
description='LIGO lock loss tracking and analysis',
author='Jameson Graef Rollins',
author_email='jameson.rollins@ligo.org',
url='https://git.ligo.org/jameson.rollins/locklost.git',
license='GPLv3+',
packages = [
packages=[
'locklost',
'locklost.plugins',
'locklost.web',
'locklost.web.templates',
],
package_data = {
package_data={
'locklost.web.templates': ['*.tpl'],
},
......
#!/usr/bin/env python3
import os
import sys
import glob
import subprocess
import signal
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
##################################################
def dag_find_failed(rescue_file):
    """Return the set of job ids that were retried but never completed.

    Parses a condor DAG rescue file: job ids appearing on RETRY lines
    but not on DONE lines are considered failed.
    """
    done = set()
    retry = set()
    with open(rescue_file, 'r') as f:
        for raw in f:
            fields = raw.split()
            # skip blank or malformed lines
            if len(fields) < 2:
                continue
            keyword, jobid = fields[0], fields[1]
            if keyword == 'DONE':
                done.add(jobid)
            elif keyword == 'RETRY':
                retry.add(jobid)
    return retry - done
def dag_failed_vars(dag_file, failed_jobs):
    """Collect the JOB and VARS fields for each failed job in a DAG file.

    Parameters
    ----------
    dag_file : str
        Path to the condor DAG file.
    failed_jobs : set of str
        Job id strings, e.g. as returned by dag_find_failed().

    Returns
    -------
    dict mapping int job id -> list of the tokens following the job id on
    its JOB line, extended with the tokens from its VARS line(s).
    """
    jobs = {}
    with open(dag_file, 'r') as f:
        for line in f:
            line = line.split()
            # guard against blank or malformed lines; the original indexed
            # line[1] outside the try and raised IndexError on a line with
            # a keyword but no job id
            if len(line) < 2 or line[0] not in ('JOB', 'VARS'):
                continue
            if line[1] not in failed_jobs:
                continue
            jobid = int(line[1])
            if line[0] == 'JOB':
                jobs[jobid] = line[2:]
            else:
                # VARS: tolerate VARS appearing before its JOB line
                # (the original raised KeyError in that case)
                jobs.setdefault(jobid, [])
                jobs[jobid] += line[2:]
    return jobs
def submit_extract_cmd(sub_file):
    """Extract the executable and arguments from a condor submit file.

    Parameters
    ----------
    sub_file : str
        Path to the condor submit file.

    Returns
    -------
    tuple (executable, arguments); an element is None if the
    corresponding key is not present in the file.
    """
    # FIX: the original opened the undefined global `dag_file` instead of
    # `sub_file`, indexed line[2] past the end of the '=' split (the value
    # is at index 1), never matched the keys because the split left
    # trailing whitespace on them, and returned unbound names when a key
    # was missing.
    exec_str = None
    arg_str = None
    with open(sub_file, 'r') as f:
        for line in f:
            # split on the first '=' only, so values may contain '='
            key, sep, value = line.partition('=')
            if not sep:
                continue
            key = key.strip()
            if key == 'executable':
                exec_str = value.strip()
            elif key == 'arguments':
                arg_str = value.strip()
    return exec_str, arg_str
def cat_job_log(jobid):
    """Print the paths and contents of the condor log files for *jobid*.

    NOTE(review): the events directory is hard-coded to the 'lockloss'
    account home -- confirm before running under another account.
    """
    logs = glob.glob('/home/lockloss/events/.condor_analyze/logs/{}_*'.format(jobid))
    print(logs)
    subprocess.call(['cat'] + logs)
##################################################
def main():
    """Report failed condor jobs for the DAG file given on the command line."""
    dag_file = sys.argv[1]
    # the highest-numbered rescue file reflects the most recent DAG run
    rescue_files = sorted(glob.glob(dag_file + '.rescue*'))
    failed_jobs = dag_find_failed(rescue_files[-1])
    failed = dag_failed_vars(dag_file, failed_jobs)
    for jobid, jobvars in sorted(failed.items()):
        print('{} {}'.format(jobid, ' '.join(jobvars)))
# script entry point
if __name__ == '__main__':
    main()
#!/bin/bash
# Follow the condor online-search log and output files, prefix each line
# with a timestamp (`ts` from moreutils -- assumed installed; confirm),
# and append the stream to ~/online.log while also printing it to stdout.
tail -F ~/events/.condor_online/{log,out} | \
ts %Y-%m-%d_%H:%M:%S | \
tee --append ~/online.log
[Unit]
Description=Locklost analyze backfill
[Service]
Type=oneshot
# use system environment settings, for e.g. NDSSERVER, if available
EnvironmentFile=-/etc/sysconfig/ligo
EnvironmentFile=%h/config
Environment=LOG_FMT_NOTIME=t
ExecStart=%h/opt/locklost/bin/locklost analyze --condor '2 weeks ago' now
TimeoutSec=300
LogLevelMax=emerg
[Install]
WantedBy=default.target
[Unit]
Description=Locklost analyze backfill timer
# FIXME: LHO systems are not recognizing the FQDN
#ConditionHost=detchar.ligo-*.caltech.edu
ConditionHost=|detchar.ligo-la.caltech.edu
ConditionHost=|detchar
[Timer]
OnCalendar=Wed *-*-* 2:00:00
Persistent=true
[Install]
WantedBy=timers.target
[Unit]
Description=Locklost segment compress
[Service]
Type=oneshot
# use system environment settings, for e.g. NDSSERVER, if available
EnvironmentFile=-/etc/sysconfig/ligo
EnvironmentFile=%h/config
Environment=LOG_FMT_NOTIME=t
ExecStart=%h/opt/locklost/bin/locklost compress
TimeoutSec=900
LogLevelMax=emerg
[Install]
WantedBy=default.target
[Unit]
Description=Locklost segment compress timer
# FIXME: LHO systems are not recognizing the FQDN
#ConditionHost=detchar.ligo-*.caltech.edu
ConditionHost=|detchar.ligo-la.caltech.edu
ConditionHost=|detchar
[Timer]
OnCalendar=hourly
Persistent=true
[Install]
WantedBy=timers.target
[Unit]
Description=Locklost online search
[Service]
Type=exec
# use system environment settings, for e.g. NDSSERVER, if available
EnvironmentFile=-/etc/sysconfig/ligo
# local config
EnvironmentFile=%h/config
# turn off time logging, since systemd already handles that
Environment=LOG_FMT_NOTIME=t
# always want debug log level for the online search
Environment=LOG_LEVEL=DEBUG
ExecStart=%h/opt/locklost/bin/locklost online exec --analyze
Restart=always
[Install]
WantedBy=default.target
[Unit]
Description=Locklost history plot
[Service]
Type=oneshot
# use system environment settings, for e.g. NDSSERVER, if available
EnvironmentFile=-/etc/sysconfig/ligo
EnvironmentFile=%h/config
Environment=LOG_FMT_NOTIME=t
ExecStart=%h/opt/locklost/bin/locklost plot-history %h/public_html/history.tmp.svg -l '1 week ago'
ExecStartPost=/bin/mv -f %h/public_html/history.tmp.svg %h/public_html/history.svg
TimeoutSec=300
LogLevelMax=emerg
[Install]
WantedBy=default.target
[Unit]
Description=Locklost history plot timer
# FIXME: LHO systems are not recognizing the FQDN
#ConditionHost=detchar.ligo-*.caltech.edu
ConditionHost=|detchar.ligo-la.caltech.edu
ConditionHost=|detchar
[Timer]
OnCalendar=minutely
Persistent=true
[Install]
WantedBy=timers.target
[Unit]
Description=Locklost search backfill
[Service]
Type=oneshot
# use system environment settings, for e.g. NDSSERVER, if available
EnvironmentFile=-/etc/sysconfig/ligo
EnvironmentFile=%h/config
Environment=LOG_FMT_NOTIME=t
ExecStart=%h/opt/locklost/bin/locklost search --condor '2 weeks ago' now
TimeoutSec=300
LogLevelMax=emerg
[Install]
WantedBy=default.target