Commit a6435bcd authored by Tanner Prestegard, committed by gracedb-dev1

Remove Django shell commands which have been fully migrated to the gracedb/scripts repository

parent eff481b2
from django.conf import settings
from django.core.management.base import NoArgsCommand

import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as pyplot
import numpy

from gracedb.models import Event, Pipeline

import os
from datetime import timedelta
from django.utils import timezone

DEST_DIR = settings.LATENCY_REPORT_DEST_DIR
MAX_X = settings.LATENCY_MAXIMUM_CHARTED
WEB_PAGE_FILE_PATH = settings.LATENCY_REPORT_WEB_PAGE_FILE_PATH
URL_PREFIX = settings.REPORT_INFO_URL_PREFIX

# XXX Branson introduced during ER6 to clean things up a bit.
PIPELINE_EXCLUDE_LIST = ['HardwareInjection', 'X', 'Q', 'Omega', 'Ringdown',
                         'LIB', 'SNEWS', 'pycbc', 'CWB2G']


class Command(NoArgsCommand):
    help = "I am the HISTOGRAM MAKER!"

    def handle_noargs(self, **options):
        now = timezone.now()
        start_day = now - timedelta(1)
        start_week = now - timedelta(7)
        start_month = now - timedelta(30)
        time_ranges = [(start_day, "day"), (start_week, "week"), (start_month, "month")]

        annotations = {}

        # Make the histograms, save as png's.
        for pipeline in Pipeline.objects.all():
            if pipeline.name in PIPELINE_EXCLUDE_LIST:
                continue
            pname = pipeline.name
            annotations[pname] = {}
            for start_time, time_range in time_ranges:
                note = {}
                fname = os.path.join(DEST_DIR, "%s-%s.png" % (pname, time_range))
                note['fname'] = fname
                data = Event.objects.filter(pipeline=pipeline,
                            created__range=[start_time, now],
                            gpstime__gt=0) \
                        .exclude(group__name="Test")
                note['count'] = data.count()
                data = [e.reportingLatency() for e in data]
                data = [d for d in data if d <= MAX_X and d > 0]
                note['npoints'] = len(data)
                note['over'] = note['count'] - note['npoints']
                if note['npoints'] <= 0:
                    try:
                        note['fname'] = None
                        os.unlink(fname)
                    except OSError:
                        pass
                else:
                    makePlot(data, pname, maxx=MAX_X).savefig(fname)
                annotations[pname][time_range] = note

        writeIndex(annotations, WEB_PAGE_FILE_PATH)
def writeIndex(notes, fname):
    createdDate = str(timezone.now())
    maxx = MAX_X
    table = '<table border="1" bgcolor="white">'
    table += """<caption>Tables generated: %s<br/>
        Maximum charted latency: %s seconds</caption>""" % (createdDate, maxx)
    table += "<tr><th>&nbsp;</th>"
    for time_range in ['day', 'week', 'month']:
        table += "<th>last %s</th>" % time_range
    table += "</tr>"
    for pipeline in Pipeline.objects.all():
        #for atype, atype_name in Event.ANALYSIS_TYPE_CHOICES:
        if pipeline.name in PIPELINE_EXCLUDE_LIST:
            continue
        pname = pipeline.name
        table += "<tr>"
        table += "<td>%s</td>" % pname
        for time_range in ['day', 'week', 'month']:
            table += '<td align="center" bgcolor="white">'
            n = notes[pname][time_range]
            extra = ""
            if n['fname'] is not None:
                table += '<img width="400" height="300" src="%s"/>' % \
                        (URL_PREFIX + os.path.basename(n['fname']))
                extra = "%d total events" % n['count']
            else:
                extra = "No Applicable Events"
            if n['over'] != 0:
                extra += "<br/>%d events over maximum latency of %s seconds" % (n['over'], MAX_X)
            table += "<br/>%s" % extra
            table += "</td>"
        table += "</tr>"
    table += "</table>"
    f = open(fname, "w")
    f.write(table)
    f.close()
def makePlot(data, title, maxx=1800, facecolor='green'):
    # make sure plot is clear!
    pyplot.close()
    #nbins = maxx / 30
    nbins = numpy.logspace(1.3, numpy.log10(maxx), 50)
    pyplot.xlim([20, maxx])
    fig = pyplot.figure()
    ax = fig.add_axes((.1, .1, .8, .8))
    n, bins, patches = ax.hist(data, nbins, facecolor=facecolor)
    # Pad the y-axis: round the peak bin count up to the next multiple
    # of 10, with a minimum of 10.
    vmax = max(n)
    if vmax <= 10:
        vmax = 10
    elif (vmax % 10) == 0:
        vmax += 10
    else:
        vmax += 10 - (vmax % 10)
    ax.set_xlabel('Seconds', fontsize=20)
    ax.set_ylabel('Number of Events', fontsize=20)
    ax.set_xscale('log')
    ax.axis([20, maxx, 0, vmax])
    ax.grid(True)
    return pyplot
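
Like the other modules removed in this commit, the histogram maker above is an old-style Django management command (NoArgsCommand predates Django 1.8's built-in argument parsing), so it would have been run through manage.py with no arguments. A minimal sketch, assuming a hypothetical command file name latency_histograms.py under a management/commands/ directory (the real file name is not shown in this view):

    python manage.py latency_histograms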
from django.core.management.base import BaseCommand, NoArgsCommand
from django.conf import settings

from gracedb.models import Event
from gracedb.query import parseQuery

import os

import matplotlib
matplotlib.use('Agg')
import numpy
import scipy
import pylab

#pylab.rc('text', usetex = True)

def ifar_none(title, message, filename):
    fig = pylab.figure()
    ax = fig.add_axes((.1, .1, .8, .8))
    ax.set_title(title)
    ax.axis([0, 10, 0, 10])
    ax.text(3, 5, message)
    #pylab.legend(loc="center")
    pylab.savefig(filename)
def ifar_chart(events, title, axis_label, filename):
    ts = []
    fars = []
    for e in events:
        ts.append(e.gpstime)
        fars.append(e.far)
    fars = scipy.array(sorted(fars))
    Ns = scipy.arange(len(fars)) + 1
    T = float(max(ts) - min(ts))
    fig = pylab.figure()
    ax = fig.add_axes((.1, .1, .8, .8))
    ax.loglog(fars, Ns, label=axis_label)
    Ns = scipy.arange(len(fars)*10) / 10.
    ax.loglog(Ns/T, Ns, label="Expected Background")
    ax.fill_between(Ns/T, Ns - Ns**.5, Ns + Ns**.5, color='k', alpha=0.1)
    ax.fill_between(Ns/T, Ns - 2 * Ns**.5, Ns + 2 * Ns**.5, color='k', alpha=0.1)
    ax.invert_xaxis()
    ax.set_ylabel(r"#")
    ax.set_xlabel(r"FAR (Hz)")
    #ax.set_title(r"ER1 FARs from gstlal_ll_inspiral t in [%i, %i)" %(min(ts), max(ts)))
    ax.set_title(title)
    pylab.legend(loc='upper right')
    pylab.ylim([1, Ns[-1] + 2 * Ns[-1]**.5])
    pylab.xlim([fars[-1], fars[0]])
    ax.text(1e-7, 3, r'$t \in [%i, %i)$' % (min(ts), max(ts)))
    pylab.savefig(filename)
    return
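
    # NOTE: everything below this return is unreachable; it duplicates the
    # plotting code above and appears to be left over from an earlier revision.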
    fars = scipy.array(sorted(fars))
    Ns = scipy.arange(len(fars)) + 1
    T = float(max(ts) - min(ts))
    fig = pylab.figure()
    ax = fig.add_axes((.1, .1, .8, .8))
    ax.loglog(fars, Ns, label=axis_label)
    Ns = scipy.arange(len(fars)*10) / 10
    ax.loglog(Ns/T, Ns, label="Expected Background")
    ax.fill_between(Ns/T, Ns - Ns**.5, Ns + Ns**.5, color='k', alpha=0.1)
    ax.fill_between(Ns/T, Ns - 2 * Ns**.5, Ns + 2 * Ns**.5, color='k', alpha=0.1)
    ax.invert_xaxis()
    #ax.set_ylabel(r"$\#$")
    #ax.set_xlabel(r"\textrm{FAR (Hz)")
    #ax.set_title(r"\textrm{ER1 FARs from {\sc gstlal\_ll\_inspiral}}")
    ax.set_ylabel(r"#")
    ax.set_xlabel(r"FAR (Hz)")
    ax.set_title(title)
    ax.text(1e-7, 3, r'$t \in [%i, %i)$' % (min(ts), max(ts)))
    pylab.legend(loc='upper right')
    pylab.ylim([1, Ns[-1] + 2 * Ns[-1]**.5])
    pylab.xlim([fars[-1], fars[0]])
    pylab.savefig(filename)

class Command(NoArgsCommand):
    help = "I am the IFAR MAKER!"

    def handle_noargs(self, **options):
        for (q, label, title, fname) in settings.REPORTS_IFAR:
            query = parseQuery(q)
            events = Event.objects.filter(query).distinct()
            filename = os.path.join(settings.REPORT_IFAR_IMAGE_DIR, fname)
            if events.count() > 0:
                ifar_chart(events, title, label, filename)
            else:
                ifar_none(title, "No Data", filename)
        return
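
        # NOTE: the code below the return above is unreachable; it is the
        # older hard-coded day/week report, superseded by the
        # settings.REPORTS_IFAR loop.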
        # XXX Branson fixing during ER6
        #query = parseQuery("gstlal yesterday .. now")
        query = parseQuery("gstlal now yesterday .. now")
        events = Event.objects.filter(query).distinct()
        if events.count() > 0:
            axis_label = "GraceDB gstlal events"
            # XXX Branson edited during ER6
            #title = r"ER1 FARs from gstlal_ll_inspiral - last day"
            title = r"FARs from gstlal - last day"
            filename = os.path.join(settings.REPORT_IFAR_IMAGE_DIR, "ifar_day.png")
            ifar_chart(events, title, axis_label, filename)
        else:
            print "No day"
            try:
                os.unlink(filename)
            except:
                pass

        query = parseQuery("gstlal a week ago .. now")
        events = Event.objects.filter(query).distinct()
        if events.count() > 0:
            axis_label = "GraceDB gstlal events"
            #title = r"ER1 FARs from gstlal_ll_inspiral - last week"
            title = r"FARs from gstlal - last week"
            filename = os.path.join(settings.REPORT_IFAR_IMAGE_DIR, "ifar_week.png")
            ifar_chart(events, title, axis_label, filename)
        else:
            print "No week"
            try:
                os.unlink(filename)
            except:
                pass
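
The handle_noargs loop above is driven entirely by settings.REPORTS_IFAR, a sequence of (query, axis label, title, output file name) tuples. A plausible sketch of one entry, reconstructed from the superseded hard-coded block (values illustrative, not confirmed by this commit):

    REPORTS_IFAR = [
        ("gstlal yesterday .. now", "GraceDB gstlal events",
         "FARs from gstlal - last day", "ifar_day.png"),
    ]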
import os, shutil

from django.core.management.base import NoArgsCommand

from gracedb.models import Event, Search, Group
from gracedb.models import CoincInspiralEvent, MultiBurstEvent
from guardian.models import GroupObjectPermission
from django.contrib.contenttypes.models import ContentType

import datetime

class Command(NoArgsCommand):
    help = "I kill the MDC events."

    def handle_noargs(self, **options):
        """
        Note! This needs to be run as root. The reason is that these
        directories are owned by www-data, and the 'gracedb' user
        doesn't have sudo.
        """
        MDC = Search.objects.get(name='MDC')
        Test = Group.objects.get(name='Test')
        events = Event.objects.filter(search=MDC).exclude(group=Test)
        for e in events:
            datadir = e.datadir()
            graceid = e.graceid()
            print "Deleting %s, %s" % (graceid, datadir)

            # First we need to clean up the data directory.
            # Note: rmtree will throw OSError if the datadir is a softlink.
            # Even though there are softlinks farther up the chain, the leaf
            # directory in question here can be removed by rmtree.
            if os.path.isdir(datadir):
                shutil.rmtree(datadir)

            # For debugging purposes: did we get it?
            if os.path.isdir(datadir):
                print 'Problem! Have not deleted datadir %s' % datadir
                exit(1)

            # We need to delete the relevant GroupObjectPermission objects as
            # well. Any CoincInspiralEvent for this event? If so, delete its
            # associated GroupObjectPermissions.
            try:
                coinc_event = CoincInspiralEvent.objects.get(id=e.id)
                ctype = ContentType.objects.get(app_label='gracedb', model='coincinspiralevent')
                gops = GroupObjectPermission.objects.filter(object_pk=e.id, content_type=ctype)
                for g in gops:
                    g.delete()
            except:
                pass

            # Any MultiBurstEvent for this event? If so, delete its
            # associated GroupObjectPermissions.
            try:
                coinc_event = MultiBurstEvent.objects.get(id=e.id)
                ctype = ContentType.objects.get(app_label='gracedb', model='multiburstevent')
                gops = GroupObjectPermission.objects.filter(object_pk=e.id, content_type=ctype)
                for g in gops:
                    g.delete()
            except:
                pass

            # Finally, delete the GroupObjectPermissions on the Event itself.
            ctype = ContentType.objects.get(app_label='gracedb', model='event')
            gops = GroupObjectPermission.objects.filter(object_pk=e.id, content_type=ctype)
            for g in gops:
                g.delete()

            # Now, say goodbye to the database entry.
            e.delete()

        # Print information if no events were found.
        if not events:
            print "No MDC events found ({0})".format(datetime.datetime.now())

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from gracedb.models import Event, Pipeline, Search, Group
from datetime import timedelta
from dateutil import parser
from django.utils import timezone
import pytz
import json

#------------------------------------------------------------------------------------
# Utilities
#------------------------------------------------------------------------------------

# get_counts_for_bin
# Takes as input:
#   - the lower bin boundary (a naive datetime object in UTC)
#   - the bin_width in hours
#   - the pipeline we are interested in
# Returns the number of events in that bin, excluding MDC and Test.
MDC = Search.objects.filter(name='MDC').first()
Test = Group.objects.filter(name='Test').first()

def get_counts_for_bin(lbb, bin_width, pipeline):
    ubb = lbb + timedelta(hours=bin_width)
    events = Event.objects.filter(pipeline=pipeline, created__range=(lbb, ubb))
    if MDC:
        events = events.exclude(search=MDC)
    if Test:
        events = events.exclude(group=Test)
    return events.count()
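
# Example usage (pipeline name hypothetical; return value illustrative):
#   from datetime import datetime
#   lbb = datetime(2015, 1, 1, 0, 0)  # naive UTC lower bin boundary
#   get_counts_for_bin(lbb, 6, Pipeline.objects.get(name='gstlal'))  # -> 12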

# Given a date string, parse it and localize to UTC if necessary.
def parse_and_localize(date_string):
    if not date_string:
        return None
    dt = parser.parse(date_string)
    if not dt.tzinfo:
        dt = pytz.utc.localize(dt)
    return dt
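
# e.g. parse_and_localize("2015-01-01T06:00:00") returns
# datetime(2015, 1, 1, 6, 0, tzinfo=<UTC>); a string that already carries
# timezone info keeps its own timezone.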

# Make a list of pipeline objects.
PIPELINES = []
for n in settings.BINNED_COUNT_PIPELINES:
    try:
        PIPELINES.append(Pipeline.objects.get(name=n))
    except:
        pass

OTHER_PIPELINES = []
for p in Pipeline.objects.all():
    # Compare pipeline objects, not names, so the configured pipelines are
    # actually excluded from the 'Other' bucket.
    if p not in PIPELINES:
        OTHER_PIPELINES.append(p)

def get_record(lbb, bin_width):
    bc = lbb + timedelta(hours=bin_width/2)
    r = {'time': bc, 'delta_t': bin_width}
    total = 0
    for p in PIPELINES:
        count = get_counts_for_bin(lbb, bin_width, p)
        total += count
        r[p.name] = count
    other = 0
    for p in OTHER_PIPELINES:
        other += get_counts_for_bin(lbb, bin_width, p)
    r['Other'] = other
    total += other
    r['Total'] = total
    return r

#------------------------------------------------------------------------------------
# Binned counts command
#------------------------------------------------------------------------------------

class Command(BaseCommand):
    help = 'Manage the binned counts file used for plotting rates.'

    def add_arguments(self, parser):
        # start and end should be ISO-8601 strings;
        # they will be interpreted as UTC.
        # delta is an integer number of hours.
        parser.add_argument('start')
        parser.add_argument('end')
        parser.add_argument('delta', type=int)
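
    # Example invocation (the command name is hypothetical; it is determined
    # by the module's file name, which is not shown in this view):
    #   python manage.py <command> 2015-01-01T00:00:00 2015-01-08T00:00:00 6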
    def handle(self, *args, **options):
        # First of all, the bin width had better be an even number of hours.
        bin_width = options['delta']
        if bin_width % 2 != 0:
            raise ValueError("Bin width must be divisible by 2. Sorry.")

        # Turn the desired range into UTC datetime objects.
        start = parse_and_localize(options['start'])
        end = parse_and_localize(options['end'])
        duration = end - start

        # This timedelta has days, seconds, and total seconds. What we want
        # to verify is that it is an integer number of hours; that is, the
        # total seconds should be divisible by 3600.
        hours, r_seconds = divmod(duration.total_seconds(), 3600)
        if r_seconds != 0.0:
            msg = "The start and end times must be separated by an integer number of hours."
            raise ValueError(msg)

        # Now verify that the number of hours is divisible by the bin width.
        bins, r_hours = divmod(hours, bin_width)
        bins = int(bins)
        if r_hours != 0.0:
            msg = "The start and end times must correspond to an integer number of bins."
            raise ValueError(msg)
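
        # e.g. a 7-day span is 168 hours; with delta=6 that gives 28 bins.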

        # Read in the file and interpret it as JSON.
        f = None
        try:
            f = open(settings.BINNED_COUNT_FILE, 'r')
        except:
            pass
        records = []
        if f:
            try:
                records = json.loads(f.read())
            except:
                pass
            f.close()

        # Process the records so that the time is a datetime for all of them.
        # Note that the times here are at the bin centers.
        def dt_record(r):
            r['time'] = parse_and_localize(r['time'])
            return r
        records = [dt_record(r) for r in records]

        # Accumulate the necessary records.
        new_records = []
        for i in range(bins):
            lbb = start + timedelta(hours=i*bin_width)
            bc = lbb + timedelta(hours=bin_width/2)
            # Look for an existing record with the desired bin center and
            # delta; compute a fresh one only if none is found.
            found = False
            for r in records:
                if bc == r['time'] and bin_width == r['delta_t']:
                    found = True
                    new_records.append(r)
            if not found:
                new_records.append(get_record(lbb, bin_width))

        def strtime_record(r):
            r['time'] = r['time'].replace(tzinfo=None).isoformat()
            return r
        new_records = [strtime_record(r) for r in new_records]

        # Write out the file.
        f = open(settings.BINNED_COUNT_FILE, 'w')
        f.write(json.dumps(new_records))
        f.close()
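
Each record written to settings.BINNED_COUNT_FILE is a flat JSON object keyed by pipeline name, with the bin-center time serialized as a naive ISO-8601 string. A sketch of the file's contents, assuming a single configured pipeline named gstlal (counts illustrative):

    [{"time": "2015-01-01T03:00:00", "delta_t": 6,
      "gstlal": 12, "Other": 3, "Total": 15}]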
from django.conf import settings
from django.core.management.base import NoArgsCommand
from gracedb.reports import rate_data
import json

class Command(NoArgsCommand):
    help = "I write down rate data in JSON to disk. That's about it."

    def handle_noargs(self, **options):
        outfile = open(settings.RATE_INFO_FILE, 'w')
        json_data = json.dumps(rate_data())
        outfile.write(json_data)
        outfile.close()

from django.core.management.base import NoArgsCommand
from ligoauth.models import LigoLdapUser, X509Cert, AlternateEmail
from django.contrib.auth.models import User, Group
from django.db.utils import IntegrityError
import ldap

baseDN = "ou=people,dc=ligo,dc=org"
searchScope = ldap.SCOPE_SUBTREE
searchFilter = "(employeeNumber=*)"
retrieveAttributes = ["krbPrincipalName",
                      "gridX509subject",
                      "givenName",
                      "sn",
                      "mail",
                      "isMemberOf",
                      "mailAlternateAddress",
                      "mailForwardingAddress"]

class Command(NoArgsCommand):
    help = "Update ligoauth.models.LigoUser and django.contrib.auth.models.User from LIGO LDAP"

    def handle_noargs(self, **options):
        l = ldap.open("ldap.ligo.org")
        l.protocol_version = ldap.VERSION3
        ldap_result_id = l.search(baseDN, searchScope, searchFilter, retrieveAttributes)
        while 1:
            result_type, result_data = l.result(ldap_result_id, 0)
            if result_data == []:
                break
            if result_type == ldap.RES_SEARCH_ENTRY:
                for (ldap_dn, ldap_result) in result_data:
                    first_name = unicode(ldap_result['givenName'][0], 'utf-8')
                    last_name = unicode(ldap_result['sn'][0], 'utf-8')
                    email = ldap_result['mail'][0]
                    new_dns = set(ldap_result.get('gridX509subject', []))
                    memberships = ldap_result.get('isMemberOf', [])
                    is_active = "Communities:LSCVirgoLIGOGroupMembers" in memberships
                    principal = ldap_result['krbPrincipalName'][0]
                    #mailForwardingAddress = ldap_result.get('mailForwardingAddress', None)
                    try:
                        mailForwardingAddress = unicode(ldap_result['mailForwardingAddress'][0])
                    except:
                        mailForwardingAddress = None
                    mailAlternateAddresses = ldap_result.get('mailAlternateAddress', [])

                    # Update/create the LigoLdapUser entry.
                    # This is breaking. XXX Do we need to pass in default
                    # values for the underlying User object?
                    defaults = {
                        'first_name': first_name,
                        'last_name': last_name,
                        'email': email,
                        'username': principal,
                        'is_active': is_active,
                    }

                    # Sometimes the user will have been created by the auth
                    # middleware as the result of a new user arriving with a
                    # shib session. So we must try to look up the user first.
                    try:
                        user = LigoLdapUser.objects.get(username=defaults['username'])
                        created = False
                    except User.DoesNotExist:
                        try:
                            user, created = LigoLdapUser.objects.get_or_create(ldap_dn=ldap_dn, defaults=defaults)
                        except IntegrityError:
                            # This actually should not happen.
                            print "Problem for %s" % ldap_dn
                            continue

                    # Now we allow for a change in the ldap_dn, since the
                    # ldap_dn will be an empty string if the user was created
                    # from a shib session (which does not have the ldap_dn).
                    changed = created \
                        or (user.ldap_dn != ldap_dn) \
                        or (user.first_name != first_name) \
                        or (user.last_name != last_name) \
                        or (user.email != email) \
                        or (user.username != principal) \
                        or (user.is_active != is_active)
                    if changed:
                        user.ldap_dn = ldap_dn
                        user.first_name = first_name
                        user.last_name = last_name
                        user.email = email
                        user.username = principal
                        user.is_active = is_active
                        # Revoke staff/superuser if not active.
                        user.is_staff = user.is_staff and is_active
                        user.is_superuser = user.is_superuser and is_active
                        try:
                            user.save()
                        except Exception, e:
                            print "Failed to save user '%s'. (%s)" % (ldap_dn, first_name + " " + last_name)
                            print "Reason: %s" % str(e)

                    # Update X509 certs for the user.
                    current_dns = set([cert.subject for cert in user.x509cert_set.all()])
                    if current_dns != new_dns:
                        # XXX Some certs put in by hand were getting blown
                        # away. I don't think this feature is really needed
                        # anyway.
                        #for dn in current_dns - new_dns:
                        #    X509Cert.objects.get(subject=dn).delete()
                        for dn in new_dns - current_dns:
                            cert, created = X509Cert.objects.get_or_create(subject=dn)
                            if created:
                                cert.save()
                            cert.users.add(user)

                    # Update group information. We do this only for groups
                    # that already exist in the GraceDB database.
                    for g in Group.objects.all():
                        if g.name in memberships:
                            # Add the user to the group. First get the User object.
                            u = User.objects.get(username=user.username)
                            print "Adding %s to %s" % (user.username, g.name)
                            g.user_set.add(u)

                    # Finally, deal with alternate emails.
                    if mailForwardingAddress:
                        try:
                            AlternateEmail.objects.get_or_create(user=user,
                                    email=mailForwardingAddress)
                        except:
                            pass
                    if len(mailAlternateAddresses) > 0:
                        for email in mailAlternateAddresses:
                            try:
                                AlternateEmail.objects.get_or_create(user=user,
                                        email=email)
                            except:
                                pass