Commit 9bb8ed30 authored by Patrick Godwin

remove cgi-based low latency summary functionality

parent 20cc9f84
@@ -56,12 +56,12 @@ dist_bin_SCRIPTS = \
gstlal_inspiral_rerank_pipe \
gstlal_inspiral_reset_likelihood \
gstlal_inspiral_reset_zerolag_counts \
gstlal_inspiral_summary_page \
gstlal_inspiral_summary_page_lite \
gstlal_inspiral_combine_injection_sets \
gstlal_inspiral_svd_bank \
gstlal_inspiral_svd_bank_pipe \
gstlal_ll_inspiral_calculate_range \
gstlal_ll_inspiral_daily_page \
gstlal_ll_inspiral_daily_page_online \
gstlal_ll_inspiral_event_plotter \
gstlal_ll_inspiral_event_uploader \
gstlal_ll_inspiral_get_urls \
@@ -71,10 +71,5 @@ dist_bin_SCRIPTS = \
gstlal_ll_inspiral_plot_sensitivity \
gstlal_ll_inspiral_save_state \
gstlal_ll_inspiral_trigger_counter \
gstlal_inspiral_summary_page \
gstlal_inspiral_summary_page_lite \
gstlalcbcnode \
gstlalcbcsummary \
gstlal_svd_bank
gstlal_svd_bank \
gstlal_svd_bank_calc_psd
deleted file: gstlal_ll_inspiral_daily_page

#!/usr/bin/env python3
#
# Copyright (C) 2015 Kipp Cannon, Chad Hanna
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
### A program to generate offline style web pages for an online analysis
"""Meta program to generate offline style summary pages from online runs."""
import sys, os, subprocess, re, time, glob, shutil
from optparse import OptionParser
from glue.text_progress_bar import ProgressBar
from gstlal import aggregator
import lal
from lal import LIGOTimeGPS
from multiprocessing import Pool
from gstlal import dagparts
from copy import copy
def now():
return LIGOTimeGPS(lal.UTCToGPS(time.gmtime()))
def process_mass_bin(args):
	massbin, result_dirs, n, d, options, tag, typecode, cluster_file = args
	print("processing mass bin %s, tag %s in directory %d of %d: %s" % (massbin, tag, n + 1, len(result_dirs), d), file = sys.stderr)
	# FIXME don't hard code H1L1
	# FIXME assumes 10 digit GPS
	db = os.path.join(options.directory, d, 'H1L1-%s_%s-%s00000-100000.sqlite.tmp' % (massbin, tag, d))
	if os.path.exists(db):
		os.remove(db)
	dbfinal = db.replace(".tmp", "")
	# FIXME we really have to change this hacky convention for injections to start at 1000
	pattern = re.compile(r'.*-%d%s_LLOID-.*\.xml\.gz' % (typecode, massbin[1:]))
	# First do non injections
	files = sorted(os.path.join(options.directory, d, xml) for xml in os.listdir(os.path.join(options.directory, d)) if pattern.match(xml) and "~" not in xml)
	for f in files:
		try:
			subprocess.check_call(["gstlal_inspiral_merge_and_reduce", "--sql-file", cluster_file, "--tmp-space", "/dev/shm", "--database", db, f])
		except subprocess.CalledProcessError:
			print("couldn't process %s" % f, file = sys.stderr)
			continue
	# rename files
	if os.path.exists(db):
		os.rename(db, dbfinal)
	return dbfinal

def injtag(injection_file):
	return os.path.split(injection_file)[1].split('.')[0].replace("-", "_")
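# A worked example of the tag convention (illustrative filename, not from the
# source): injtag("/analysis/BNS-Injections.xml") drops the directory and the
# extension and maps "-" to "_", returning "BNS_Injections", so the tag can be
# embedded safely in the T050017-style database filenames built below.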
if __name__ == '__main__':
#
# Some data reduction happens in parallel
#
pool = Pool(8)
#
# Parse command line
#
parser = OptionParser()
parser.add_option("--directory", default = ".", help = "This option is disabled it will be ignored. It is set to '.' ")
parser.add_option("--injection-file", default = [], action = "append", help = "The injection xml files that corresponds to the low latency injections for given mass bins. 0000:0002:Injection_1.xml, 0002:0004:Injection_2.xml")
parser.add_option("--web-dir", help = "set the output path to write the ''offline'' style web page to")
options, massbins = parser.parse_args()
options.directory = "." # only relative paths are supported at the moment.
# FIXME hardcoded instruments
instruments = "H1L1"
# FIXME should be more clever than this
# Match 5 digit directories
dir_pattern = re.compile('[0-9]{5}')
noninj_files_to_merge = []
inj_files_to_merge = {}
inj_file_bins = {}
injdball = {}
for injection_file in options.injection_file:
inj_file_split = injection_file.split(':')
for massbin in range(int(inj_file_split[0]),int(inj_file_split[1])):
inj_file_bins.setdefault(inj_file_split[2],[]).append(str(massbin).zfill(4))
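# For example, --injection-file 0000:0002:Injection_1.xml produces
# inj_file_bins == {"Injection_1.xml": ["0000", "0001"]}: the stop bin is
# exclusive and bin numbers are zero-padded to 4 digits.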
# FIXME assume that the correct low latency cluster file is in the working
# directory. Perhaps this should be a command line argument.
cluster_file = os.path.join(options.directory, "ll_simplify_and_cluster.sql")
# FIXME presently not used, should it be?
simplify_file = os.path.join(options.directory, "ll_simplify.sql")
#
# Parallel process data within each result directory (every 100,000 seconds)
#
result_dirs = sorted([d for d in os.listdir(options.directory) if dir_pattern.match(d)])
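# Each 5-digit directory name is the leading GPS digits of a 100000 s span,
# e.g. directory "12345" holds results for GPS times 1234500000-1234600000.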
for n, d in enumerate(result_dirs):
noninjdball = os.path.join(os.path.join(options.directory, d), 'H1L1-ALL_LLOID-%s00000-100000.sqlite' % (d,))
for injection_file in inj_file_bins:
injdball[injection_file] = os.path.join(os.path.join(options.directory, d), dagparts.T050017_filename(instruments, "ALL_LLOID_%s" % injtag(injection_file), (int(d) * 100000, (int(d) + 1) * 100000), '.sqlite'))
if float(now()) - float("%s00000" % d) > 125000 and all([os.path.exists(f) for f in injdball.values()]+[os.path.exists(noninjdball)]):
print >> sys.stderr, "directory is %s %s greater than 125000 seconds old and has already been processed...continuing" % (n,d)
noninj_files_to_merge.append(noninjdball)
for injection_file in inj_file_bins:
inj_files_to_merge.setdefault(injection_file,[]).append(injdball[injection_file])
continue
# Parallel process the data reduction
args = ([massbin, result_dirs, n, d, options, "ALL_LLOID", 0, cluster_file] for massbin in massbins)
# Merge the files of this directory
subprocess.check_call(["gstlal_inspiral_merge_and_reduce", "--sql-file", cluster_file, "--tmp-space", "/dev/shm", "--replace", "--verbose", "--database", "%s" % noninjdball] + list(pool.map(process_mass_bin, args)))
noninj_files_to_merge.append(noninjdball)
for injection_file in inj_file_bins:
args = ([massbin, result_dirs, n, d, options, injtag(injection_file), 1, cluster_file] for massbin in inj_file_bins[injection_file])
subprocess.check_call(["gstlal_inspiral_merge_and_reduce", "--sql-file", cluster_file, "--tmp-space", "/dev/shm", "--replace", "--verbose", "--database", "%s" % injdball[injection_file]] + list(pool.map(process_mass_bin, args)))
inj_files_to_merge.setdefault(injection_file,[]).append(injdball[injection_file])
#
# Do top level data reduction
#
# FIXME only add *new* files
noninjdb = os.path.join(options.directory, 'H1L1-ALL_LLOID-0-2000000000.sqlite.tmp')
injdb = {}
for injection_file in inj_file_bins:
injdb[injection_file] = os.path.join(options.directory, 'H1L1-ALL_LLOID_%s-0-2000000000.sqlite.tmp' % (injtag(injection_file)))
if os.path.exists(noninjdb):
os.remove(noninjdb)
for injection_file in injdb:
if os.path.exists(injdb[injection_file]):
os.remove(injdb[injection_file])
progressbar = ProgressBar("Merge noninjection files", len(noninj_files_to_merge))
for f in noninj_files_to_merge:
# NOTE the online analysis doesn't do a global clustering stage, so events in the final database will be undercounted
subprocess.check_call(["gstlal_inspiral_merge_and_reduce", "--sql-file", cluster_file, "--tmp-space", "/dev/shm", "--verbose", "--database", "%s" % noninjdb, "%s" % f])
progressbar.increment()
del progressbar
for injection_file in inj_file_bins:
	progressbar = ProgressBar("Merge injection files", len(inj_files_to_merge[injection_file]))
	for f in inj_files_to_merge[injection_file]:
		# NOTE the online analysis doesn't do a global clustering stage, so events in the final database will be undercounted
		subprocess.check_call(["gstlal_inspiral_merge_and_reduce", "--sql-file", cluster_file, "--tmp-space", "/dev/shm", "--verbose", "--database", injdb[injection_file], f])
		progressbar.increment()
	del progressbar

	# Find injections (per injection file, inside the loop above)
	progressbar = ProgressBar("Find injections", 4)
	subprocess.check_call(["ligolw_sqlite", "--tmp-space", os.environ["TMPDIR"], "--verbose", "--database", injdb[injection_file], injection_file])
	progressbar.increment()
	subprocess.check_call(["ligolw_sqlite", "--tmp-space", os.environ["TMPDIR"], "--verbose", "--database", injdb[injection_file], "--extract", "%s.xml" % injdb[injection_file]])
	progressbar.increment()
	subprocess.check_call(["lalapps_inspinjfind", "--verbose", "%s.xml" % injdb[injection_file]])
	progressbar.increment()
	subprocess.check_call(["ligolw_sqlite", "--tmp-space", os.environ["TMPDIR"], "--verbose", "--database", injdb[injection_file], "--replace", "%s.xml" % injdb[injection_file]])
	progressbar.increment()
#
# Make plots and such
#
plot_dir = os.path.join(options.directory, "plots")
if os.path.exists(plot_dir):
os.rename(plot_dir, "%s.%s" % (plot_dir, str(now())))
os.mkdir(plot_dir)
pattern = re.compile("(?P<id>[0-9]{4})_prior.xml.gz")
#for d in os.listdir(options.directory):
# m = pattern.match(d)
# if m:
# subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", plot_dir, "--user-tag", m.group("id"), "--verbose", d])
#
# Plot background
#
# FIXME this is broken for some reason online
# try:
# 	subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", plot_dir, "--user-tag", "ALL", "--database", noninjdb, "--verbose", "rankingstat_pdf.xml.gz"])
# except subprocess.CalledProcessError as plot_error:
# 	print("plotting failed. received error %s... continuing anyway" % plot_error, file = sys.stderr)
#
# Plot summary
#
try:
	if inj_file_bins:
		subprocess.check_call(["gstlal_inspiral_plotsummary", "--verbose", "--likelihood-file", "rankingstat_pdf.xml.gz", "--segments-name", "statevectorsegments", "--user-tag", "ALL_LLOID_COMBINED", "--output-dir", plot_dir, noninjdb] + list(injdb.values()))
	else:
		subprocess.check_call(["gstlal_inspiral_plotsummary", "--verbose", "--likelihood-file", "rankingstat_pdf.xml.gz", "--segments-name", "statevectorsegments", "--user-tag", "ALL_LLOID_COMBINED", "--output-dir", plot_dir, "--plot-group", "0", "--plot-group", "5", noninjdb])
except subprocess.CalledProcessError as plot_error:
	print("plotting failed. received error %s... continuing anyway" % plot_error, file = sys.stderr)
#
# Plot sensitivity
#
if inj_file_bins:
	try:
		subprocess.check_call(["gstlal_inspiral_plot_sensitivity", "--verbose", "--output-dir", plot_dir, "--bin-by-source-type", "--zero-lag-database", noninjdb, "--dist-bins", "200", "--user-tag", "ALL_LLOID_COMBINED", "--data-segments-name", "statevectorsegments"] + list(injdb.values()))
	except subprocess.CalledProcessError as plot_error:
		print("plotting failed. received error %s... continuing anyway" % plot_error, file = sys.stderr)
#
# Summary page
#
try:
	webserverplotdir = os.path.join(options.web_dir, "plots")
	shutil.rmtree(options.web_dir + "_lite", ignore_errors = True)
	if os.path.exists(webserverplotdir):
		newdir = os.path.join(options.web_dir, str(int(now())))
		aggregator.makedir(newdir)
		shutil.move(webserverplotdir, newdir)
		try:
			shutil.move(os.path.join(options.web_dir, "index.html"), newdir)
		except IOError:
			pass
	# FIXME only relative paths are supported, hence the hardcoded "plots" directory
	subprocess.check_call(["gstlal_inspiral_summary_page_lite", "--no-navigation", "--open-box", "--output-user-tag", "ALL_LLOID_COMBINED", "--glob-path", "plots", "--webserver-dir", options.web_dir, "--title", "gstlal-online"])
except subprocess.CalledProcessError as plot_error:
	print("plotting failed. received error %s... continuing anyway" % plot_error, file = sys.stderr)
# copy the working files back
os.rename(noninjdb, noninjdb.replace(".tmp",""))
for injection_file in injdb:
os.rename(injdb[injection_file], injdb[injection_file].replace(".tmp",""))
deleted file: gstlal_ll_inspiral_daily_page_online

#!/bin/bash
#
# Copyright (C) 2015 Kipp Cannon, Chad Hanna
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
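# Regenerate the daily summary pages hourly, forwarding all arguments to
# gstlal_ll_inspiral_daily_page.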
while true ; do
	gstlal_ll_inspiral_daily_page "$@"
	sleep 3600
done
deleted file: gstlalcbcsummary

#!/usr/bin/env python3
#
# Copyright (C) 2011 Chad Hanna
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
import cgi
import cgitb
import os
os.environ["MPLCONFIGDIR"] = "/tmp"
import matplotlib
matplotlib.use('Agg')
import numpy
import matplotlib.pyplot as plt
import time
import io
import base64
from urllib.parse import urlparse
cgitb.enable()
form = cgi.FieldStorage()
### This program will monitor the output of a gstlal inspiral low latency
### analysis; see gstlal_llcbcsummary for help and usage.
###
### This program is designed to be placed in the cgi-bin directory of the user's
### public_html directory on the cluster that is running the gstlal inspiral low
### latency analysis
###
### USAGE:
### ------
###
### This program is never meant to be executed by a user, but rather on a
### webserver via a url such as::
###
### https://hostname.domain/path/to/cgi-bin/gstlal_llcbcsummary?id=\<start\>,\<stop\>&dir=/path/to/analysis/directory
###
### e.g.,::
###
### https://ldas-jobs.ligo.caltech.edu/~gstlalcbc/cgi-bin/gstlal_llcbcsummary?id=0001,0010&dir=/home/gstlalcbc/engineering/5/bns_trigs_40Hz
###
### \<start\> and \<stop\> are the 4 digit id numbers corresponding to the first and
### last job, respectively.
###
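### In addition to "id" and "dir", the script expects an "ifos" query
### parameter (e.g. ifos=H1,L1) naming the detectors whose livetime plots
### should be rendered.
###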
def now():
#FIXME use lal when available
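# 315964785 = the 315964800 s between the Unix epoch (1970-01-01) and the GPS
# epoch (1980-01-06), minus the 15 leap seconds accumulated as of 2009; the
# hardcoded offset goes stale at each new leap second, hence the FIXME above.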
return time.time() - 315964785
def to_png_image():
	f = io.BytesIO()
	plt.savefig(f, format="png")
	print('<img src="data:image/png;base64,%s"></img>' % base64.b64encode(f.getvalue()).decode('ascii'))
	f.close()
def read_registry(dir, dataurl, ids):
nodedict = {}
for id in ids:
url = '%s/%s%s' % (dir, id, dataurl)
try:
tmp = open(url,"r")
nodedict[id] = urlparse(tmp.readline()).netloc
tmp.close()
except IOError:
nodedict[id] = ""
return nodedict
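# For example, with dataurl = "_registry.txt" (as used below), job 0001 is
# looked up in <dir>/0001_registry.txt, whose first line is assumed to hold a
# URL such as http://node123.cluster:80/ (hypothetical); only the host:port
# netloc is kept, to label that job's column in the plots.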
def load_data(directory, idrange, type):
found = {}
missed = {}
for i, id in enumerate(idrange):
fname = "%s/%s/%s.txt" % (directory, id, type)
try:
found[i] = numpy.loadtxt(fname)
if len(found[i].shape) == 1:
found[i] = numpy.array([found[i],])
except (IOError, ValueError):
missed[i] = numpy.array([])
return found, missed
def setup_plot():
fig = plt.figure(figsize=(20,5),)
fig.patch.set_alpha(0.0)
h = fig.add_subplot(111, axisbg = 'k')
plt.subplots_adjust(left = .062, right = 0.98, bottom = 0.3)
return fig, h
def finish_plot(ids, registry, ylim, title=''):
plt.grid(color=(0.1,0.4,0.5), linewidth=2)
ticks = ["%s : %s " % (id, registry[id]) for id in ids]
plt.xticks(numpy.arange(len(ids))+.3, ticks, rotation=90, fontsize = 10)
plt.xlim([0, len(ids)])
plt.ylim(ylim)
tickpoints = numpy.linspace(ylim[0], ylim[1], 8)
ticks = ["%.1e" % (10.**t,) for t in tickpoints]
plt.yticks(tickpoints, ticks, fontsize = 14)
plt.title(title, fontsize = 18)
to_png_image()
#plt.savefig(sys.stdout, format = "svg")
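# All y values handed to these plotting helpers are log10 quantities; the
# tick labels map them back to linear scale via 10**t, which is why the
# callers below take numpy.log10 of latencies, SNRs, livetimes and RAM.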
def plot_latency(found, missed, ids, registry):
fig, h = setup_plot()
found_x = list(found.keys())
latency_y = numpy.log10(numpy.array([found[k][-1,1] for k in found_x]))
time_y = numpy.log10(now() - numpy.array([found[k][-1,0] for k in found_x]))
try:
max_y = max(time_y.max(), latency_y.max())
except ValueError:
max_y = 1
missed_x = list(missed.keys())
missed_y = numpy.ones(len(missed_x)) * max_y
h.bar(missed_x, missed_y, color='r', alpha=0.9, linewidth=2)
h.bar(found_x, latency_y, color='w', alpha=0.9, linewidth=2)
h.bar(found_x, time_y, color='w', alpha=0.7, linewidth=2)
finish_plot(ids, registry, [0, max_y], 'Time (s) since last event (gray) and latency (white)')
def plot_snr(found, missed, ids, registry):
fig, h = setup_plot()
found_x = list(found.keys())
maxsnr_y = numpy.log10(numpy.array([found[k][:,1].max() for k in found_x]))
mediansnr_y = numpy.log10(numpy.array([numpy.median(found[k][:,1]) for k in found_x]))
try:
max_y = max(maxsnr_y)
except ValueError:
max_y = 1
missed_x = list(missed.keys())
missed_y = numpy.ones(len(missed_x)) * max_y
h.bar(missed_x, missed_y, color='r', alpha=0.9, linewidth=2)
h.bar(found_x, mediansnr_y, color='w', alpha=0.9, linewidth=2)
h.bar(found_x, maxsnr_y, color='w', alpha=0.7, linewidth=2)
finish_plot(ids, registry, [numpy.log10(5.5), max_y], 'SNR of last 1000 events: max (gray) and median (white)')
def plot_livetime(found, missed, ids, registry, ifo):
fig, h = setup_plot()
found_x = list(found.keys())
# Handle log of 0 by setting it to max of (actual value, 1)
on_y = numpy.log10(numpy.array([max(found[k][0][1],1) for k in found_x]))
off_y = numpy.log10(numpy.array([max(found[k][0][2],1) for k in found_x]))
gap_y = numpy.log10(numpy.array([max(found[k][0][3],1) for k in found_x]))
# FIXME Hack to adjust for high sample rate L1 and V1 state vector
if ifo != "V1":
on_y -= numpy.log10(16)
off_y -= numpy.log10(16)
gap_y -= numpy.log10(16)
if len(found_x) > 0:
max_y = max(on_y.max(), off_y.max(), gap_y.max())
min_y = min(on_y.min(), off_y.min(), gap_y.min())
else:
max_y = 1
min_y = 0
missed_x = list(missed.keys())
missed_y = numpy.ones(len(missed_x)) * max_y
h.bar(missed_x, missed_y, color='r', alpha=0.9, linewidth=2)
h.bar(found_x, off_y, color='w', alpha=0.7, linewidth=2)
h.bar(found_x, gap_y, color='b', alpha=0.5, linewidth=2)
h.bar(found_x, on_y, color='w', alpha=0.5, linewidth=2)
finish_plot(ids, registry, [min_y*.9, max_y], '%s Up time (gray) Down time (white) Dropped time (blue)' % (ifo,))
def plot_ram(found, missed, ids, registry):
fig, h = setup_plot()
found_x = list(found.keys())
found_y = numpy.log10(numpy.array([found[k][0,1] for k in found_x]))
try:
max_y, min_y = max(found_y), min(found_y)
except ValueError:
max_y, min_y = (1,0)
missed_x = list(missed.keys())
missed_y = numpy.ones(len(missed_x)) * max_y
h.bar(missed_x, missed_y, color='r', alpha=0.9, linewidth=2)
h.bar(found_x, found_y, color='w', alpha=0.9, linewidth=2)
finish_plot(ids, registry, [0.9 * min_y, max_y], 'RAM usage GB')
def plot_single_col(found, missed, ids, registry, col = 0, title = ''):
fig, h = setup_plot()
found_x = list(found.keys())
found_y = numpy.log10(numpy.array([found[k][0][col] for k in found_x]))
try:
max_y, min_y = max(found_y), min(found_y)
except ValueError:
max_y, min_y = (1,0)
missed_x = list(missed.keys())
missed_y = numpy.ones(len(missed_x)) * max_y
h.bar(missed_x, missed_y, color='r', alpha=0.9, linewidth=2)
h.bar(found_x, found_y, color='w', alpha=0.9, linewidth=2)
finish_plot(ids, registry, [0.9 * min_y, max_y], title)
def get_ids(form):
idrange = [int(n) for n in form.getvalue("id").split(",")]
#FIXME relies on 4 digit ids
ids = ['%04d' % (job,) for job in range(idrange[0], idrange[1]+1)]
return ids
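# e.g. id=0001,0010 in the query string expands to
# ids == ["0001", "0002", ..., "0010"] (both endpoints inclusive).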
if "dir" not in form:
raise ValueError("must specify dir")
if "id" not in form:
raise ValueError("must specify id")
ids = get_ids(form)
directory = form.getvalue("dir")
ifos = form.getvalue("ifos").split(",")
reg = read_registry(form.getvalue("dir"), "_registry.txt", ids)
# Header
print('Cache-Control: no-cache, must-revalidate')
print('Expires: Mon, 26 Jul 1997 05:00:00 GMT')
print('Content-type: text/html\r\n')
# HTML preamble
print """
<html>
<head>
<meta http-equiv="Pragma" content="no-cache">
<meta http-equiv="Expires" content="-1">
<meta http-equiv="CACHE-CONTROL" content="NO-CACHE">
<meta http-equiv="refresh" content="300">
<link rel="stylesheet" href="//code.jquery.com/ui/1.10.0/themes/base/jquery-ui.css" />
<script src="//code.jquery.com/jquery-1.8.3.js"></script>
<script src="//code.jquery.com/ui/1.10.0/jquery-ui.js"></script>
<script type="text/javascript"> $(function() {
$("#accordion").accordion({
});
});</script>
</head>
<body>
"""
# title
print """
<font size=10><img src="http://www.lsc-group.phys.uwm.edu/cgit/gstlal/plain/gstlal/doc/gstlal.png">gstlal_inspiral online </font><font size=6 color=#707070><b><right>%s: %d - %s </right></b><br></font><hr><br>
""" % ("".join(sorted(ifos)), int(now()), time.strftime("%a, %d %b %Y %H:%M:%S %Z", time.localtime()))
print '<div id="accordion">'
#
# latency history
#
print "<h1>Latency</h1>"
print "<div id='canvaslatency'>"
found, missed = load_data(directory, ids, "latency_history")
plot_latency(found, missed, ids, reg)
print "</div>"
#
# snr history
#
print "<h1>SNR</h1>"
print "<div id='canvassnr'>"
found, missed = load_data(directory, ids, "snr_history")
plot_snr(found, missed, ids, reg)
print "</div>"
#
# live time by ifo
#
for ifo in ifos:
	print("<h1>Livetime for %s</h1>" % ifo)
	print("<div id='%scanvastime'>" % (ifo,))
	found, missed = load_data(directory, ids, "%s/state_vector_on_off_gap" % (ifo,))
	plot_livetime(found, missed, ids, reg, ifo)
	print("</div>")
#
# Template Duration
#
print "<h1>Template Duration</h1>"
print "<div id='canvasdur'>"
found, missed = load_data(directory, ids, "bank")
plot_single_col(found, missed, ids, reg, 1, "Template Duration (s)")
print "</div>"
#
# Chirp Mass
#
print "<h1>Chirp Mass</h1>"
print "<div id='canvasmchirp'>"
found, missed = load_data(directory, ids, "bank")
plot_single_col(found, missed, ids, reg, 2, "Chirp Mass")
print "</div>"
#
# RAM
#