Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Source

Select target project
No results found

Target

Select target project
  • alexander.pace/server
  • geoffrey.mo/gracedb-server
  • deep.chatterjee/gracedb-server
  • cody.messick/server
  • sushant.sharma-chaudhary/server
  • michael-coughlin/server
  • daniel.wysocki/gracedb-server
  • roberto.depietri/gracedb
  • philippe.grassia/gracedb
  • tri.nguyen/gracedb
  • jonah-kanner/gracedb
  • brandon.piotrzkowski/gracedb
  • joseph-areeda/gracedb
  • duncanmmacleod/gracedb
  • thomas.downes/gracedb
  • tanner.prestegard/gracedb
  • leo-singer/gracedb
  • computing/gracedb/server
18 results
Show changes
Showing 788 additions and 1005 deletions
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest
from django.utils.deprecation import MiddlewareMixin
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import gettext_lazy as _
from packaging import specifiers
import logging
......
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render
from django.urls import resolve
import logging
# Set up logger
logger = logging.getLogger(__name__)
class MaintenanceModeMiddleware(object):
accept_header_name = 'HTTP_ACCEPT'
default_message = 'The site is temporarily down for maintenance.'
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
# Process request -----------------------------------------------------
if settings.MAINTENANCE_MODE is True:
# Check if the view specifies to ignore maintenance mode
ignore_maintenance = \
self.check_for_ignore_maintenance_mode(request)
if not ignore_maintenance:
# Get message to display
maintenance_message = self.get_message()
accept_header = request.META.get(self.accept_header_name, None)
if accept_header and 'text/html' in accept_header:
# Attempt to handle browsers
context = {'message': maintenance_message}
return render(request, 'maintenance.html', context=context,
status=503)
else:
# Anything else (likely client API requests)
return HttpResponse(maintenance_message, status=503)
# Otherwise, get response and return with no further processing -------
response = self.get_response(request)
return response
@staticmethod
def check_for_ignore_maintenance_mode(request):
resolver_match = resolve(request.path)
view_func = resolver_match.func
return view_func.__dict__.get('ignore_maintenance_mode', False)
def get_message(self):
message = settings.MAINTENANCE_MODE_MESSAGE
if message is None:
message = self.default_message
return message
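# Illustrative sketch (not this repo's exact code): the companion decorator
# that sets the flag read by check_for_ignore_maintenance_mode() above. The
# real implementation is imported from a decorators module elsewhere in the
# codebase.
def _example_ignore_maintenance_mode(view_func):
    # Mark the view so MaintenanceModeMiddleware serves it even while
    # settings.MAINTENANCE_MODE is True.
    view_func.ignore_maintenance_mode = True
    return view_func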
......@@ -5,9 +5,14 @@
import sys
import os
import re
import hotshot, hotshot.stats
import tempfile
import StringIO
try:
from StringIO import StringIO
except ImportError: # python >= 3
from io import StringIO
import hotshot, hotshot.stats
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings
......@@ -91,7 +96,7 @@ class ProfileMiddleware(MiddlewareMixin):
if (settings.DEBUG or request.user.is_superuser) and 'prof' in request.GET:
self.prof.close()
out = StringIO.StringIO()
out = StringIO()
old_stdout = sys.stdout
sys.stdout = out
......
......@@ -11,7 +11,7 @@ class XForwardedForMiddleware(object):
def __call__(self, request):
# Process request -----------------------------------------------------
if request.META.has_key('HTTP_X_FORWARDED_FOR'):
if 'HTTP_X_FORWARDED_FOR' in request.META:
request.META['REMOTE_ADDR'] = \
request.META['HTTP_X_FORWARDED_FOR'].split(",")[0].strip()
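# Illustrative behavior (example addresses): X-Forwarded-For lists the
# original client first, then any intervening proxies, so only the first
# entry is kept.
#   'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.1'
#   -> request.META['REMOTE_ADDR'] == '203.0.113.7'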
......
from collections import OrderedDict
import logging
import re
from django.db import models, connection
from django.utils import six
from django.db import models, connection, IntegrityError
from django.db.models import Q, Max
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.db.models import QuerySet
from django.forms.models import model_to_dict
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import gettext_lazy as _
from core.vfile import VersionedFile
# Testing waiting:
from time import sleep
from random import random
DB_SLEEP_FACTOR = settings.DB_SLEEP_FACTOR
# Set up user model
UserModel = get_user_model()
......@@ -41,6 +48,7 @@ class AutoIncrementModel(models.Model):
"""
AUTO_FIELD = None
AUTO_CONSTRAINTS = None
max_retries = 5
class Meta:
abstract = True
......@@ -58,120 +66,87 @@ class AutoIncrementModel(models.Model):
def auto_increment_insert(self, *args, **kwargs):
"""
This custom save method does a SELECT and INSERT in a single raw SQL
query in order to properly handle a quasi-autoincrementing field, which
is used to identify instances associated with a ForeignKey. With this
method, concurrency issues are handled by the database backend.
Ex: EventLog instances associated with an Event should be numbered from
1 to N, based on the order of their submission.
This has been tested with the following classes:
EventLog, EMObservation, EMFootprint, EMBBEventLog, VOEvent
Thorough testing is needed to use this method for a new model. Note
that this method may not work properly for non-MySQL backends.
Requires AUTO_FIELD and AUTO_CONSTRAINTS to be defined.
"""
# Check database type
if connection.vendor != 'mysql':
raise DatabaseError(_('The custom AutoIncrementModel '
'auto_increment_save method is not compatible with non-MySQL '
'backends'))
# Check for the existence of the required fields:
if not self.AUTO_CONSTRAINTS or not self.AUTO_FIELD:
raise TypeError('AUTO_CONSTRAINTS or AUTO_FIELD not set.')
# Check type of self.AUTO_CONSTRAINTS
if not isinstance(self.AUTO_CONSTRAINTS, (tuple, list)):
raise TypeError(_('AUTO_CONSTRAINTS should be a tuple or list'))
# Get some useful information
meta = self.__class__._meta
pk_set = self._get_pk_val() is not None
current_class = self.__class__
# Get model fields, except for primary key field.
fields = [f for f in meta.local_concrete_fields if not
isinstance(f, models.fields.AutoField)]
# Check type of self.AUTO_CONSTRAINTS
if not isinstance(self.AUTO_CONSTRAINTS, (tuple, list)):
raise TypeError(_('AUTO_CONSTRAINTS should be a tuple or list'))
# Check constraint fields
f_names = [f.name for f in fields]
for constraint_field in self.AUTO_CONSTRAINTS:
if constraint_field not in f_names:
raise ValueError(_(('Constraint {0} is not a field for '
'model {1}').format(constraint_field,
self.__class__.__name__)))
current_class.__name__)))
# Check auto field
if self.AUTO_FIELD not in f_names:
raise ValueError(_(('AUTO_FIELD {0} is not a field for '
'model {1}').format(self.auto_field, self.__class__.__name__)))
# Setup for generating base SQL query for doing an INSERT.
query = models.sql.InsertQuery(self.__class__)
query.insert_values(fields, objs=[self])
compiler = query.get_compiler(using=self.__class__._base_manager.db)
compiler.return_id = meta.has_auto_field and not pk_set
# Useful function
qn = compiler.quote_name_unless_alias
# Compile multiple constraints with AND
constraint_fields = map(meta.get_field, self.AUTO_CONSTRAINTS)
constraint_list = ["{0}=%s".format(qn(f.column))
for f in constraint_fields]
constraint_values = [f.get_db_prep_value(getattr(self, f.column),
compiler.connection) for f in constraint_fields]
constraint_str = " AND ".join(constraint_list)
with compiler.connection.cursor() as cursor:
# Get base SQL query as string.
for sql, params in compiler.as_sql():
# Modify SQL string to do an INSERT with SELECT.
# NOTE: it's unlikely that the following will generate
# a functional database query for non-MySQL backends.
# Replace VALUES (%s, %s, ..., %s) with
# SELECT %s, %s, ..., %s
sql = re.sub(r"VALUES \((.*)\)", r"SELECT \1", sql)
# Add table to SELECT from, as well as constraints
sql += " FROM {tbl_name} WHERE {constraints}".format(
tbl_name=qn(meta.db_table),
constraints=constraint_str
)
# Get index corresponding to AUTO_FIELD.
af_idx = [f.attname for f in fields].index(self.AUTO_FIELD)
# Put this directly in the SQL; cursor.execute quotes it
# as a literal, which causes the SQL command to fail.
# We shouldn't have issues with SQL injection because
# AUTO_FIELD should never be a user-defined parameter.
del params[af_idx]
sql = re.sub(r"((%s, ){{{0}}})%s".format(af_idx),
r"\1IFNULL(MAX({af}),0)+1", sql, 1).format(
af=self.AUTO_FIELD)
# Add constraint values to params
params += constraint_values
# Execute SQL command.
cursor.execute(sql, params)
# Get primary key from database and set it in memory.
if compiler.connection.features.can_return_id_from_insert:
id = compiler.connection.ops.fetch_returned_insert_id(cursor)
else:
id = compiler.connection.ops.last_insert_id(cursor,
meta.db_table, meta.pk.column)
self._set_pk_val(id)
# Refresh object in memory in order to get AUTO_FIELD value.
self.refresh_from_db()
# Prevents check for unique primary key - needed to prevent an
# IntegrityError when the object was just created and we try to
# update it while it's still in memory
self._state.adding = False
# Set up Q query for multiple constraints. For instance, superevents'
# base_date_number is constrained by both category and the date. So
# for example, there can exist S123456a, TS123456a, and MS123456a. The
# t_0_date is the same, but the category is different. So they each get
# an "a".
const_query = Q()
for const in self.AUTO_CONSTRAINTS:
const_query = const_query & Q(**{const: getattr(self, const)})
# Set AUTO_FIELD to one plus the maximum value of AUTO_FIELD in the
# constrained set. Note that because some superevents get removed, among
# other circumstances, the max does not always equal the number of
# entries in the set. This caused some db integrity errors in testing.
# Save the object and check constraints, with the ability to retry in
# the event of DB integrity errors.
number_of_tries = 0
success = False
while not success:
try:
setattr(self, self.AUTO_FIELD,
self.updated_autofield_value(current_class, const_query))
super(AutoIncrementModel, self).save(*args, **kwargs)
except (IntegrityError, ValidationError):
logger.warning("Sleeping to stabilize database. try={}, object={}".format(number_of_tries, self))
sleep(DB_SLEEP_FACTOR * random())
number_of_tries += 1
if number_of_tries > 6:
raise
else:
success = True
def updated_autofield_value(self, current_class, query):
# get the queryset:
query_set = current_class.objects.filter(query)
if query_set:
return query_set.aggregate(max_val=Max(self.AUTO_FIELD))['max_val'] + 1
else:
return 1
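# Illustrative sketch (hypothetical model, not part of this diff): how a
# concrete class opts into the quasi-autoincrement scheme above. Per the
# docstring, instances tied to one parent are numbered 1..N in submission
# order, restarting at 1 for each new parent.
class ExampleLog(AutoIncrementModel):
    event = models.ForeignKey('events.Event', on_delete=models.CASCADE)
    N = models.IntegerField(null=True)  # filled in on save by the logic above
    AUTO_FIELD = 'N'
    AUTO_CONSTRAINTS = ('event',)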
def auto_increment_update(self, update_field_name, constraints=[],
allow_update_to_nonnull=False):
......@@ -184,45 +159,33 @@ class AutoIncrementModel(models.Model):
raise ValueError(_(('Attempt to update a non-null constrained auto'
'field for object {0}. Not allowed.').format(self.__str__())))
# Setup for generating base SQL query for doing an update
meta = self._meta
field = meta.get_field(update_field_name)
values = [(field, None, field.pre_save(self, False))]
query = models.sql.UpdateQuery(self.__class__)
query.add_update_fields(values)
compiler = query.get_compiler(using=self.__class__._base_manager.db)
# Useful function
qn = compiler.quote_name_unless_alias
# SQL for doing autoincrement
custom_sql= ("(SELECT N FROM (SELECT IFNULL(MAX({field}),0)+1 AS N "
"FROM {tbl_name}").format(tbl_name=qn(meta.db_table),
field=update_field_name)
# Get current class:
current_class = self.__class__
# Convert list of field names to be used as constraints into database
# column names and their values (retrieved from the instance itself)
constraint_fields = [meta.get_field(f) for f in constraints]
constraint_list = ["{0}=%s".format(qn(f.column)) for f in constraint_fields]
values = [f.get_db_prep_value(getattr(self, f.column),
compiler.connection) for f in constraint_fields]
# Set up query based on the constraints:
query = Q()
for i in constraints:
query = query & Q(**{i: getattr(self, i)})
# Add constraints to custom SQL (if they are provided)
if constraint_list:
custom_sql += (" WHERE " + " AND ".join(constraint_list))
# get the queryset that meets the constraints:
# Add end
custom_sql += (") AS temp) WHERE id={pk};".format(pk=self.pk))
qs = current_class.objects.filter(query)
# Replace NULL in base sql update query
base_sql = compiler.as_sql()[0]
sql = base_sql.replace('NULL', custom_sql)
# Set AUTO_FIELD to one plus the maximum value of AUTO_FIELD in the
# constrained set. Note that because some superevents get removed, among
# other circumstances, the max does not always equal the number of
# entries in the set. This caused some db integrity errors in testing.
# Execute sql
compiler.connection.cursor().execute(sql, values)
# Refresh from database
self.refresh_from_db(fields=[update_field_name])
if qs:
try:
setattr(self, update_field_name,
qs.aggregate(max_val=Max(update_field_name))['max_val'] + 1)
# Catch the case where the update field was never set, so the
# aggregate's max_val is None and the addition raises TypeError:
except TypeError:
setattr(self, update_field_name, 1)
else:
setattr(self, update_field_name, 1)
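# Illustrative helper (hypothetical, not part of this diff) mirroring the
# update logic above: the next value is one plus the current Max of the
# field among rows matching the constraints, or 1 for an empty set.
def _example_next_value(model_cls, field_name, **constraint_values):
    current_max = (model_cls.objects.filter(**constraint_values)
                   .aggregate(max_val=Max(field_name))['max_val'])
    return (current_max or 0) + 1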
class LogBase(models.Model):
......@@ -233,7 +196,7 @@ class LogBase(models.Model):
Used in events.EventLog, superevents.Log
"""
created = models.DateTimeField(auto_now_add=True)
issuer = models.ForeignKey(UserModel, null=False)
issuer = models.ForeignKey(UserModel, null=False, on_delete=models.CASCADE)
filename = models.CharField(max_length=100, default="", blank=True)
file_version = models.IntegerField(null=True, default=None, blank=True)
comment = models.TextField(null=False)
......@@ -242,6 +205,7 @@ class LogBase(models.Model):
class Meta:
abstract = True
ordering = ['-created', '-N']
indexes = [models.Index(fields=['filename', ])]
@property
def versioned_filename(self):
......@@ -275,7 +239,7 @@ class m2mThroughBase(models.Model):
creation time.
"""
creator = models.ForeignKey(UserModel, null=False, related_name=
'%(app_label)s_%(class)s_set')
'%(app_label)s_%(class)s_set', on_delete=models.CASCADE)
created = models.DateTimeField(auto_now_add=True)
class Meta:
......
from django import template
register = template.Library()
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
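# Usage from a Django template (illustrative): looks up a dict entry with
# a variable key, which plain dot-lookup can't do:
#   {{ my_dict|get_item:some_key }}
# returns my_dict.get(some_key), or None if the key is absent.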
......@@ -5,12 +5,15 @@ import shutil
from django.conf import settings
from django.test import TestCase, override_settings
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group, Permission
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from guardian.conf import settings as guardian_settings
from guardian.models import GroupObjectPermission, UserObjectPermission
from events.models import Tag
from ligoauth.models import AuthGroup, AuthorizedLdapMember
# Set up user model
UserModel = get_user_model()
......@@ -60,8 +63,19 @@ class InternalGroupAndUserSetup(TestCase):
super(InternalGroupAndUserSetup, cls).setUpTestData()
# Get or create internal group
cls.internal_group, _ = Group.objects.get_or_create(
cls.internal_group, created = AuthGroup.objects.get_or_create(
name=settings.LVC_GROUP)
if created:
cls.internal_group.ldap_name = 'internal_ldap_group'
cls.internal_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to internal group:
cls.authldapmember, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember')
if created:
cls.authldapmember.ldap_gname='internal_ldap_group'
cls.authldapmember.ldap_authgroup=cls.internal_group
cls.authldapmember.save()
# Get or create user
cls.internal_user, _ = UserModel.objects.get_or_create(
......@@ -104,12 +118,36 @@ class LvemGroupAndUserSetup(TestCase):
super(LvemGroupAndUserSetup, cls).setUpTestData()
# Get or create LV-EM group
cls.lvem_group, _ = Group.objects.get_or_create(
cls.lvem_group, created = AuthGroup.objects.get_or_create(
name=settings.LVEM_GROUP)
if created:
cls.lvem_group.ldap_name = 'lvem_ldap_group'
cls.lvem_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to LV-EM group:
cls.authldapmember_lvem, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember_LVEM')
if created:
cls.authldapmember_lvem.ldap_gname='lvem_ldap_group'
cls.authldapmember_lvem.ldap_authgroup=cls.lvem_group
cls.authldapmember_lvem.save()
# Get or create LV-EM observers group
cls.lvem_obs_group, _ = Group.objects.get_or_create(
lvem_obs_tag, _ = Tag.objects.get_or_create(name='lvem')
cls.lvem_obs_group, created = AuthGroup.objects.get_or_create(
name=settings.LVEM_OBSERVERS_GROUP)
if created:
cls.lvem_obs_group.ldap_name = 'lvem_observers_ldap_group'
cls.lvem_obs_group.tag = lvem_obs_tag
cls.lvem_obs_group.save(update_fields=['ldap_name', 'tag'])
# Create AuthorizedLdapMember, link it to LV-EM observers group:
cls.authldapmember_lvemob, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember_LVEMOB')
if created:
cls.authldapmember_lvemob.ldap_gname='lvem_observers_ldap_group'
cls.authldapmember_lvemob.ldap_authgroup=cls.lvem_group
cls.authldapmember_lvemob.save()
# Get or create user
cls.lvem_user, _ = UserModel.objects.get_or_create(
......@@ -133,7 +171,7 @@ class SupereventManagersGroupAndUserSetup(TestCase):
super(SupereventManagersGroupAndUserSetup, cls).setUpTestData()
# Get or create superevent managers
cls.sm_group, _ = Group.objects.get_or_create(
cls.sm_group, _ = AuthGroup.objects.get_or_create(
name='superevent_managers')
# Get or create user
......@@ -144,8 +182,20 @@ class SupereventManagersGroupAndUserSetup(TestCase):
cls.sm_group.user_set.add(cls.sm_user)
# Also add user to internal group
internal_group, _ = Group.objects.get_or_create(
internal_group, created = AuthGroup.objects.get_or_create(
name=settings.LVC_GROUP)
if created:
internal_group.ldap_name = 'internal_ldap_group'
internal_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to internal group:
authldapmember, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember')
if created:
authldapmember.ldap_gname='internal_ldap_group'
authldapmember.ldap_authgroup=internal_group
authldapmember.save()
internal_group.user_set.add(cls.sm_user)
# Get permissions
......@@ -165,6 +215,56 @@ class SupereventManagersGroupAndUserSetup(TestCase):
codename__in=sm_permissions_codenames)
cls.sm_group.permissions.add(*perms)
class catalog_managers_group_and_user_setup(TestCase):
"""
Base class which sets up the catalog_managers group and user.
These are accessible with self.cm_group and self.cm_user.
Also adds appropriate permissions.
"""
@classmethod
def setUpTestData(cls):
# Run super
super(catalog_managers_group_and_user_setup, cls).setUpTestData()
# Get or create access managers
cls.cm_group, _ = AuthGroup.objects.get_or_create(
name='catalog_managers')
# Get or create user
cls.cm_user, _ = UserModel.objects.get_or_create(
username='catalog.manager')
# Add user to catalog managers group
cls.cm_group.user_set.add(cls.cm_user)
# Also add user to internal group
internal_group, created = AuthGroup.objects.get_or_create(
name=settings.LVC_GROUP)
if created:
internal_group.ldap_name = 'internal_ldap_group'
internal_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to internal group:
authldapmember, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember')
if created:
authldapmember.ldap_gname='internal_ldap_group'
authldapmember.ldap_authgroup=internal_group
authldapmember.save()
internal_group.user_set.add(cls.cm_user)
# Get permissions
cm_permissions_codenames = [
'add_gwtc_catalog',
'delete_gwtc_catalog',
]
perms = Permission.objects.filter(
content_type__app_label='gwtc',
codename__in=cm_permissions_codenames)
cls.cm_group.permissions.add(*perms)
class AccessManagersGroupAndUserSetup(TestCase):
"""
......@@ -179,7 +279,7 @@ class AccessManagersGroupAndUserSetup(TestCase):
super(AccessManagersGroupAndUserSetup, cls).setUpTestData()
# Get or create access managers
cls.am_group, _ = Group.objects.get_or_create(
cls.am_group, _ = AuthGroup.objects.get_or_create(
name='access_managers')
# Get or create user
......@@ -190,8 +290,20 @@ class AccessManagersGroupAndUserSetup(TestCase):
cls.am_group.user_set.add(cls.am_user)
# Also add user to internal group
internal_group, _ = Group.objects.get_or_create(
internal_group, created = AuthGroup.objects.get_or_create(
name=settings.LVC_GROUP)
if created:
internal_group.ldap_name = 'internal_ldap_group'
internal_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to internal group:
authldapmember, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember')
if created:
authldapmember.ldap_gname='internal_ldap_group'
authldapmember.ldap_authgroup=internal_group
authldapmember.save()
internal_group.user_set.add(cls.am_user)
# Get permissions
......@@ -222,14 +334,26 @@ class SignoffGroupsAndUsersSetup(TestCase):
super(SignoffGroupsAndUsersSetup, cls).setUpTestData()
# Internal group, used later
internal_group, _ = Group.objects.get_or_create(
internal_group, created = AuthGroup.objects.get_or_create(
name=settings.LVC_GROUP)
if created:
internal_group.ldap_name = 'internal_ldap_group'
internal_group.save(update_fields=['ldap_name'])
# Create AuthorizedLdapMember, link it to internal group:
cls.authldapmember, created = AuthorizedLdapMember.objects.get_or_create(
name='TestLDAPAuthMember')
if created:
cls.authldapmember.ldap_gname='internal_ldap_group'
cls.authldapmember.ldap_authgroup=internal_group
cls.authldapmember.save()
# Get or create IFO control room groups and users, and add perms
ifos = ['H1', 'L1', 'V1']
for ifo in ifos:
# Create groups and users
g, _ = Group.objects.get_or_create(name=(ifo + '_control_room'))
g, _ = AuthGroup.objects.get_or_create(
name=(ifo + '_control_room'))
user, _ = UserModel.objects.get_or_create(username=(ifo + '.user'))
user.groups.add(g)
setattr(cls, ifo + '_user', user)
......@@ -244,7 +368,10 @@ class SignoffGroupsAndUsersSetup(TestCase):
g.permissions.add(*p)
# Same for em advocates
g, _ = Group.objects.get_or_create(name='em_advocates')
g, created = AuthGroup.objects.get_or_create(name='em_advocates')
if created:
g.ldap_name = 'em_advocates_ldap_group'
g.save(update_fields=['ldap_name'])
user, _ = UserModel.objects.get_or_create(username='em.advocate')
user.groups.add(g)
cls.adv_user = user
......@@ -253,6 +380,10 @@ class SignoffGroupsAndUsersSetup(TestCase):
content_type__app_label='superevents',
codename='do_adv_signoff')
g.permissions.add(*p)
p = Permission.objects.filter(
content_type__app_label='events',
codename='manage_pipeline')
g.permissions.add(*p)
# Also add user to internal group
internal_group.user_set.add(user)
......@@ -265,7 +396,7 @@ class SignoffGroupsAndUsersSetup(TestCase):
'delete_signoff',
]
for grp_name in grps:
group = Group.objects.get(name=grp_name)
group = AuthGroup.objects.get(name=grp_name)
perms = Permission.objects.filter(
content_type__app_label='superevents',
codename__in=permission_codenames)
......@@ -284,8 +415,12 @@ class PublicGroupSetup(TestCase):
super(PublicGroupSetup, cls).setUpTestData()
# Get or create public group
cls.public_group, _ = Group.objects.get_or_create(
public_tag, _ = Tag.objects.get_or_create(name='public')
cls.public_group, created = AuthGroup.objects.get_or_create(
name=settings.PUBLIC_GROUP)
if created:
cls.public_group.tag = public_tag
cls.public_group.save(update_fields=['tag'])
# Create guardian AnonymousUser and add to group
anonymous_user, _ = UserModel.objects.get_or_create(username=
......
......@@ -14,7 +14,7 @@ import calendar
gpsEpoch = calendar.timegm((1980, 1, 6, 0, 0, 0, 0, 0, 0))
leapSeconds = map(calendar.timegm, [
leapSeconds = list(map(calendar.timegm, [
(1981, 7, 0, 0, 0, 0, 0, 0, 0),
(1982, 7, 0, 0, 0, 0, 0, 0, 0),
(1983, 7, 0, 0, 0, 0, 0, 0, 0),
......@@ -33,21 +33,17 @@ leapSeconds = map(calendar.timegm, [
(2012, 7, 0, 0, 0, 0, 0, 0, 0),
(2015, 7, 0, 0, 0, 0, 0, 0, 0),
(2017, 1, 0, 0, 0, 0, 0, 0, 0),
])
]))
def gpsToPosixTime(gpsTime):
if gpsTime is None:
return None
# XXX
# So apparently this gpsTime occasionally comes in as a unicode string.
# I am not sure how this is possible, since the gpstime is a django DecimalField,
# and it should be a python decimal object. I don't want to cast it to a float unless
# absolutely necessary, hence the try/except block.
#t = gpsEpoch + gpsTime
try:
t = gpsEpoch + gpsTime
except:
t = gpsEpoch + float(gpsTime)
# 20221122: some change from python 3.7 --> 3.10 caused
# datetime.datetime.fromtimestamp to stop handling decimal.Decimal
# values as input, so return this value as a float instead.
t = gpsEpoch + float(gpsTime)
for leap in leapSeconds:
if t >= leap:
t = t - 1
......@@ -68,28 +64,10 @@ def gpsToUtc(gpsTime):
t = gpsToPosixTime(gpsTime)
return datetime.datetime.fromtimestamp(t, pytz.utc)
def isoToGps(t):
# The input is a string in ISO time format: 2012-10-28T05:04:31.91
# First strip out whitespace, then split off the factional
# second. We'll add that back later.
if not t:
return None
t=t.strip()
ISOTime = t.split('.')[0]
ISOTime = datetime.datetime.strptime(ISOTime,"%Y-%m-%dT%H:%M:%S")
# Need to set UTC time zone or this is interpreted as local time.
ISOTime = ISOTime.replace(tzinfo=pytz.utc)
sec_substr = t.split('.')[1]
if sec_substr:
fracSec = float('0.' + sec_substr)
else:
fracSec = 0
posixTime = calendar.timegm(ISOTime.utctimetuple()) + fracSec
return int(round(posixToGpsTime(posixTime)))
def isoToGpsFloat(t):
# The input is a string in ISO time format: 2012-10-28T05:04:31.91
# First strip out whitespace, then split off the factional
# First strip out whitespace, then split off the fractional
# second. We'll add that back later.
if not t:
return None
......@@ -105,3 +83,16 @@ def isoToGpsFloat(t):
fracSec = 0
posixTime = calendar.timegm(ISOTime.utctimetuple()) + fracSec
return posixToGpsTime(posixTime)
def isoToGps(t):
return int(round(isoToGpsFloat(t)))
def utc_datetime_to_gps_float(dt):
posix_time = calendar.timegm(dt.timetuple()) + (dt.microsecond * 1e-6)
return posixToGpsTime(posix_time)
def utc_datetime_decimal_seconds(dt):
return dt.strftime('%Y-%m-%d %H:%M:%S') + '.{:02d}'.format(round(dt.microsecond, -4))[:3]
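# Usage sketch for the helpers above (example timestamp, interpreted as
# UTC per the parsing convention noted in isoToGpsFloat):
#   isoToGpsFloat('2012-10-28T05:04:31.91')  -> GPS seconds as a float
#   isoToGps('2012-10-28T05:04:31.91')       -> the same value rounded to int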
......@@ -10,3 +10,16 @@ def build_absolute_uri(relative_uri, request=None):
# Assume HTTPS
domain = Site.objects.get_current().domain
return 'https://' + domain + relative_uri
# A helper function to map superevent URLs from /api/ --> /apiweb/.
# Note that event files are never exposed to the public and so aren't
# subject to the same private api vs public apiweb caching schemes.
def build_apiweb_uri(relative_uri, request=None):
absolute_uri = build_absolute_uri(relative_uri, request=request)
# Return the apiweb url for superevents:
if '/superevents/' in absolute_uri:
absolute_uri = absolute_uri.replace('/api/', '/apiweb/')
return absolute_uri
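# Illustrative behavior (domain comes from the Sites framework; values
# here are examples): only superevent URLs are rewritten to /apiweb/,
# everything else passes through build_absolute_uri() unchanged.
#   build_apiweb_uri('/api/superevents/S190425z/')
#       -> 'https://<domain>/apiweb/superevents/S190425z/'
#   build_apiweb_uri('/api/events/G123456/')
#       -> 'https://<domain>/api/events/G123456/'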
import logging
from pathlib import Path
import re
import string
from django.utils.six import string_types
from django.conf import settings
from django.views.debug import ExceptionReporter
from six import string_types
# Set up logger
logger = logging.getLogger(__name__)
......@@ -11,6 +16,20 @@ ALPHABET = string.ascii_lowercase
BASE = len(ALPHABET)
ASCII_ALPHABET_START = ord('a') - 1
# FAR unit conversions (fixed 2024/3/1, big error in hr_to_yr)
# Note that the conversion error that was just fixed was only
# in the web page display, so there are thankfully no values in the
# database to go back and fix.
far_hr_to_yr = 24*settings.DAYS_PER_YEAR  # hours per year (~8760); converts 1/hr to 1/yr
far_sec_to_year = 60*60*24*settings.DAYS_PER_YEAR  # seconds per year (~3.156e7); converts Hz to 1/yr
far_yr_to_sec = 1.0/far_sec_to_year  # inverse of the above; converts 1/yr to Hz
far_hr_to_sec = far_hr_to_yr * far_yr_to_sec  # converts 1/hr to Hz
# Unit formats:
per_hr_formats = ['1/hr', 'hr^-1', '1/hour', 'hour^-1']
per_yr_formats = ['1/yr', 'yr^-1', '1/year', 'year^-1']
def int_to_letters(num, positive_only=True):
"""
......@@ -19,7 +38,7 @@ def int_to_letters(num, positive_only=True):
"""
# Argument checking
if not isinstance(num, (int, long)):
if not isinstance(num, int):
# Coerce to int
logger.warning('Coercing argument of type {0} to int'.format(
type(num)))
......@@ -57,3 +76,69 @@ def letters_to_int(letters):
enumerate(letters[::-1])])
return num
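# Illustrative mapping for the base-26 helpers above (follows from
# ALPHABET and BASE as defined at the top of this module):
#   int_to_letters(1)    -> 'a'
#   int_to_letters(26)   -> 'z'
#   int_to_letters(27)   -> 'aa'
#   letters_to_int('aa') -> 27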
def display_far_hz_to_yr(display_far):
"""
A helper function to convert a FAR value in Hz to a nicely
formatted per-year value for use in tables and such.
"""
# Determine "human-readable" FAR to display
display_far_hr = display_far
if display_far:
# FAR in units of yr^-1
far_yr = display_far * far_sec_to_year
if (far_yr < 1):
display_far_hr = "1 per {0:0.5g} years".format(1.0/far_yr)
else:
display_far_hr = "{0:0.5g} per year".format(far_yr)
return display_far_hr
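# Example outputs (illustrative; assumes settings.DAYS_PER_YEAR ~ 365.25,
# giving far_sec_to_year ~ 3.156e7):
#   display_far_hz_to_yr(1e-6) -> '31.558 per year' (approximately)
#   display_far_hz_to_yr(1e-9) -> '1 per 31.688 years' (approximately)
#   display_far_hz_to_yr(None) -> None (falsy input is passed through)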
def return_far_in_hz(far_in, units=None):
"""
A helper function that takes a FAR value and a set of units
and returns the value in Hz. Expects /hr or /yr units for now.
Raising errors instead of 400's is probably safe here, since users
never directly call this function from the API.
"""
# Check if units were provided, if not, quit
if not units: raise ValueError('no units input')
# Make sure the units are a string:
if not isinstance(units, str): raise TypeError('units must be a string')
# Change the units' formatting so they match the templates:
# make it lowercase and get rid of any spaces:
units_in = units.lower().replace(' ', '')
# Convert per hour:
if units_in in per_hr_formats:
return far_in * far_hr_to_sec
# Convert per year:
elif units_in in per_yr_formats:
return far_in * far_yr_to_sec
# Log and give up if not recognized:
else:
logger.debug('could not recognize input far units')
return None
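# Usage sketch (unit strings are lowercased and stripped of spaces before
# matching the format lists above):
#   return_far_in_hz(1.0, units='1/yr')  -> far_yr_to_sec (one per year, in Hz)
#   return_far_in_hz(2.0, units='HR^-1') -> 2.0 * far_hr_to_sec
#   return_far_in_hz(1.0, units='1/day') -> None (unrecognized, logged at debug)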
class CustomExceptionReporter(ExceptionReporter):
"""
Custom Django exception reporter, used in DEBUG mode. Overrides default
templates with ones that do not expose our settings.
"""
@property
def html_template_path(self):
return Path(settings.PROJECT_ROOT) / 'templates' / 'technical_500.html'
@property
def text_template_path(self):
return Path(settings.PROJECT_ROOT) / 'templates' / 'technical_500.txt'
......@@ -13,7 +13,18 @@ import logging
logger = logging.getLogger(__name__)
class VersionedFile(file):
class FileVersionError(Exception):
# Problem with file version (likely not an int)
pass
class FileVersionNameError(Exception):
# Problem with filename (likely has an extra comma somewhere in the
# filename)
pass
class VersionedFile(object):
"""
Open a versioned file.
......@@ -63,7 +74,7 @@ class VersionedFile(file):
# one scoped inside of this __init__). But I'm reluctant to mess with
# Brian's code too much.
self.version = version
file.__init__(self, actual_name, *args, **kwargs)
self.file = open(actual_name, *args, **kwargs)
# Otherwise...
......@@ -111,13 +122,13 @@ class VersionedFile(file):
# os.O_EXCL causes the open to fail if the file already exists.
fd = os.open(actual_name,
os.O_WRONLY | os.O_CREAT | os.O_EXCL,
0644)
0o644)
# re-open
file.__init__(self, actual_name, *args, **kwargs)
self.file = open(actual_name, *args, **kwargs)
# lose fd we used to ensure file creation.
os.close(fd)
break
except OSError, e:
except OSError as e:
if e.errno != errno.EEXIST:
raise
version += 1
......@@ -149,13 +160,19 @@ class VersionedFile(file):
if len(result) == 2:
filename = result[0]
version = result[1]
# Version is a string here, try to convert it to an int
try:
version = int(version)
except ValueError as e:
raise FileVersionError('Bad version specifier')
elif len(result) == 1:
filename = result[0]
version = None
else:
err = 'Filename {0} does not match versioning scheme'.format(
versioned_name)
raise ValueError(err)
raise FileVersionNameError(err)
return filename, version
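# Illustrative behavior of the 'name,version' parser above:
#   'skymap.fits,3'  -> ('skymap.fits', 3)
#   'skymap.fits'    -> ('skymap.fits', None)
#   'skymap.fits,x'  -> raises FileVersionError (version is not an int)
#   'sky,map.fits,2' -> raises FileVersionNameError (extra comma)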
......@@ -166,12 +183,24 @@ class VersionedFile(file):
try:
# XXX Another race condition. File will not exist for a very brief time.
os.unlink(self.fullname)
except OSError, e:
except OSError as e:
# Do not care if file does not exist, otherwise raise exception.
if e.errno != errno.ENOENT:
raise
name = os.path.basename(self._name_for_version(last_version))
os.symlink(name, self.fullname)
# Try to safely create the symlink. This try/except block avoids the
# race conditions RAVEN encountered when another process had already
# created a symlink for its version of the file. RAVEN was uploading
# around ten files called "coincidence_far.json" at the same time.
try:
name = os.path.basename(self._name_for_version(last_version))
os.symlink(name, self.fullname)
except OSError:
# We do not care that it could not create the symlink, just move on.
# The original file is still accessible, and the link to the exact file
# is still reflected in the logs.
logger.warning("Symlink for {} exists for file {}".format(name, self.fullname))
return
# XXX This fails when renaming/mv-ing across devices.
......@@ -191,6 +220,13 @@ class VersionedFile(file):
return [int(f.split(',')[1])
for f in os.listdir(d) if f.startswith(name + ',')]
def write(self, s):
self.file.write(s)
@property
def closed(self):
return self.file.closed
def close(self):
if self.writing:
# no need to update symlink if we were only reading.
......@@ -198,17 +234,17 @@ class VersionedFile(file):
# file -- trying to discover the latest version fails
# painfully. (max(known_versions()) => max([]))
self._repoint_symlink()
if not self.closed:
file.close(self)
if not self.file.closed:
self.file.close()
def __del__(self):
# XXX file does not have a __del__ method. Should we?
if not self.closed:
self.close()
if not self.file.closed:
self.file.close()
@staticmethod
def guess_mimetype(filename):
TEXT_EXTENSIONS = ['.log']
TEXT_EXTENSIONS = ['.log', '.out']
filename = VersionedFile.basename(filename)
content_type, encoding = mimetypes.guess_type(filename)
if content_type is None and '.' in filename:
......@@ -226,18 +262,24 @@ class VersionedFile(file):
def create_versioned_file(filename, file_dir, file_contents):
# Get full file path
full_path = os.path.join(file_dir, filename)
# Create file
fdest = VersionedFile(full_path, 'w')
if isinstance(file_contents, six.string_types):
fdest = VersionedFile(full_path, 'w')
fdest.write(file_contents)
elif isinstance(file_contents, bytes):
fdest = VersionedFile(full_path, 'wb')
fdest.write(file_contents)
elif isinstance(file_contents, (UploadedFile, InMemoryUploadedFile,
TemporaryUploadedFile, SimpleUploadedFile)):
fdest = VersionedFile(full_path, 'wb')
for chunk in file_contents.chunks():
fdest.write(chunk)
else:
raise TypeError('Unexpected file contents in '
'core.vfile.create_versioned_file')
fdest.close()
return fdest.version
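# Usage sketch (path and contents are illustrative): each call writes a
# new 'name,<version>' file under file_dir, repoints the bare 'name'
# symlink at it, and returns the assigned version number.
#   version = create_versioned_file('event.log', '/data/G123456',
#                                   'analysis finished\n')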
import logging
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic.edit import FormView
from .decorators import ignore_maintenance_mode
# Set up logger
logger = logging.getLogger(__name__)
def heartbeat(request):
@ignore_maintenance_mode
def heartbeat(request):
# Do something (?) and return 200 response
return HttpResponse()
class MultipleFormView(FormView):
"""Requires forms to inherit from core.forms.MultipleForm"""
form_classes = []
@property
def form_classes(self):
if not hasattr(self, '_form_classes'):
self._form_classes = {f.key: f for f in self.form_class_list}
return self._form_classes
def get_forms(self, form_classes=None):
if (form_classes is None):
form_classes = self.form_classes
return [form(**self.get_form_kwargs(form.key)) for form in
form_classes]
def get_form_kwargs(self, form_key):
kwargs = {
'initial': self.get_initial(),
'prefix': self.get_prefix(),
}
if (self.request.method in ('POST', 'PUT')):
key_field = self.request.POST.get('key_field', None)
if (key_field is not None and key_field == form_key):
kwargs.update({'data': self.request.POST,
'files': self.request.FILES})
return kwargs
def get(self, request, *args, **kwargs):
forms = self.get_forms()
return self.render_to_response(self.get_context_data(forms=forms))
def post(self, request, *args, **kwargs):
form_key = request.POST.get('key_field')
forms = self.get_forms()
return self._process_individual_form(form_key, forms)
def forms_valid(self, form):
form_valid_method = '{key}_form_valid'.format(key=form.key)
if hasattr(self, form_valid_method):
return getattr(self, form_valid_method)(form)
else:
return HttpResponseRedirect(self.success_url)
def forms_invalid(self, forms):
return self.render_to_response(self.get_context_data(forms=forms))
def _get_form_by_key(self, key, forms):
form_keys = [f.key for f in forms]
return forms[form_keys.index(key)]
def _process_individual_form(self, form_key, forms):
form = self._get_form_by_key(form_key, forms)
if form.is_valid():
return self.forms_valid(form)
else:
return self.forms_invalid(forms)
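# Hypothetical subclass (illustrative, not part of this diff) showing the
# expected wiring: QuestionForm and FeedbackForm stand in for forms that
# inherit core.forms.MultipleForm, each defining a unique `key`.
#
#   class ContactView(MultipleFormView):
#       form_class_list = [QuestionForm, FeedbackForm]
#       success_url = '/thanks/'
#
#       def question_form_valid(self, form):
#           # runs when the POSTed 'key_field' matches QuestionForm.key
#           ...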
# Taken from VOEventLib example code, which is:
# Copyright 2010 Roy D. Williams
# then modified
"""
buildVOEvent: Creates a complex VOEvent with tables
See the VOEvent specification for details
http://www.ivoa.net/Documents/latest/VOEvent.html
"""
from VOEventLib.VOEvent import VOEvent, Who, Author, Param, How, What, Group
from VOEventLib.VOEvent import Citations, EventIVORN
#from VOEventLib.VOEvent import Why
#from VOEventLib.Vutil import makeWhereWhen, stringVOEvent
from VOEventLib.Vutil import stringVOEvent
from VOEventLib.VOEvent import AstroCoords, AstroCoordSystem
from VOEventLib.VOEvent import ObservationLocation, ObservatoryLocation
from VOEventLib.VOEvent import ObsDataLocation, WhereWhen
from VOEventLib.VOEvent import Time, TimeInstant
from core.time_utils import gpsToUtc
from core.urls import build_absolute_uri
from django.utils import timezone
from django.conf import settings
from django.urls import reverse
from .models import CoincInspiralEvent, MultiBurstEvent
from .models import VOEvent as GraceDBVOEvent
from .models import LalInferenceBurstEvent
import os
class VOEventBuilderException(Exception):
pass
# Used to create the Packet_Type parameter block
PACKET_TYPES = {
GraceDBVOEvent.VOEVENT_TYPE_PRELIMINARY: (150, 'LVC_PRELIMINARY'),
GraceDBVOEvent.VOEVENT_TYPE_INITIAL: (151, 'LVC_INITIAL'),
GraceDBVOEvent.VOEVENT_TYPE_UPDATE: (152, 'LVC_UPDATE'),
GraceDBVOEvent.VOEVENT_TYPE_RETRACTION: (164, 'LVC_RETRACTION'),
}
VOEVENT_TYPE_DICT = dict(GraceDBVOEvent.VOEVENT_TYPE_CHOICES)
def get_voevent_type(short_name):
for t in GraceDBVOEvent.VOEVENT_TYPE_CHOICES:
if short_name in t:
return t[1]
return None
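# Illustrative lookup, assuming VOEVENT_TYPE_CHOICES holds (short, full)
# pairs along the lines of ('PR', 'preliminary') -- the actual tuples are
# defined in .models:
#   get_voevent_type('PR')      -> 'preliminary'
#   get_voevent_type('nosuch')  -> None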
def buildVOEvent(event, serial_number, voevent_type, request=None, skymap_filename=None,
skymap_type=None, internal=True, open_alert=False, hardware_inj=False,
CoincComment=False, ProbHasNS=None, ProbHasRemnant=None, BNS=None,
NSBH=None, BBH=None, Terrestrial=None, MassGap=None):
# XXX Branson commenting out. Reed's MDC events do not have FAR for some reason.
# if not event.far:
# raise VOEventBuilderException("Cannot build a VOEvent because event has no FAR.")
if not event.gpstime:
raise VOEventBuilderException("Cannot build a VOEvent because event has no gpstime.")
if not voevent_type in VOEVENT_TYPE_DICT.keys():
raise VOEventBuilderException("voevent_type must be preliminary, initial, update, or retraction")
# Let's convert that voevent_type to something nicer looking
default_voevent_type = voevent_type
voevent_type = VOEVENT_TYPE_DICT[voevent_type]
objid = event.graceid
# Now build the IVORN.
type_string = voevent_type.capitalize()
event_id = "%s-%d-%s" % (objid, serial_number, type_string)
ivorn = settings.IVORN_PREFIX + event_id
############ VOEvent header ############################
v = VOEvent(version="2.0")
v.set_ivorn(ivorn)
if event.search and event.search.name == 'MDC':
v.set_role("test")
elif event.group.name == 'Test':
v.set_role("test")
else:
v.set_role("observation")
if voevent_type != 'retraction':
v.set_Description(settings.SKYALERT_DESCRIPTION)
############ Who ############################
w = Who()
a = Author()
a.add_contactName("LIGO Scientific Collaboration and Virgo Collaboration")
#a.add_contactEmail("postmaster@ligo.org")
w.set_Author(a)
w.set_Date(timezone.now().strftime("%Y-%m-%dT%H:%M:%S"))
v.set_Who(w)
############ Why ############################
# Moving this information into the 'How' section.
#if voevent_type != 'retraction':
# y = Why()
# y.add_Description("Candidate gravitational wave event identified by low-latency analysis")
# v.set_Why(y)
############ How ############################
if voevent_type != 'retraction':
h = How()
h.add_Description("Candidate gravitational wave event identified by low-latency analysis")
instruments = event.instruments.split(',')
if 'H1' in instruments:
h.add_Description("H1: LIGO Hanford 4 km gravitational wave detector")
if 'L1' in instruments:
h.add_Description("L1: LIGO Livingston 4 km gravitational wave detector")
if 'V1' in instruments:
h.add_Description("V1: Virgo 3 km gravitational wave detector")
if int(CoincComment) == 1:
h.add_Description("A gravitational wave trigger identified a possible counterpart GRB")
v.set_How(h)
############ What ############################
w = What()
# UCD = Unified Content Descriptors
# http://monet.uni-sw.gwdg.de/twiki/bin/view/VOEvent/UnifiedContentDescriptors
# OR -- (from VOTable document, [21] below)
# http://www.ivoa.net/twiki/bin/view/IVOA/IvoaUCD
# http://cds.u-strasbg.fr/doc/UCD.htx
#
# which somehow gets you to: http://www.ivoa.net/Documents/REC/UCD/UCDlist-20070402.html
# where you might find some actual information.
# Unit / Section 4.3 of [21] which relies on [25]
# [21] http://www.ivoa.net/Documents/latest/VOT.html
# [25] http://vizier.u-strasbg.fr/doc/catstd-3.2.htx
#
# basically, a string that makes sense to humans about what units a value is. eg. "m/s"
# Add Packet_Type for GCNs
w.add_Param(Param(name="Packet_Type",
value=PACKET_TYPES[default_voevent_type][0], dataType="int",
Description=[("The Notice Type number is assigned/used within GCN, eg "
"type={typenum} is an {typedesc} notice").format(
typenum=PACKET_TYPES[default_voevent_type][0],
typedesc=PACKET_TYPES[default_voevent_type][1])]))
# Whether the alert is internal or not
w.add_Param(Param(name="internal", value=int(internal), dataType="int",
Description=['Indicates whether this event should be distributed to LSC/Virgo members only']))
# The serial number
w.add_Param(Param(name="Pkt_Ser_Num", value=serial_number,
Description=["A number that increments by 1 each time a new revision "
"is issued for this event"]))
# The GraceID
w.add_Param(Param(name="GraceID",
dataType="string",
ucd="meta.id",
value=objid,
Description=["Identifier in GraceDB"]))
# Alert type parameter
w.add_Param(Param(name="AlertType",
dataType="string",
ucd="meta.version",
value = voevent_type.capitalize(),
Description=["VOEvent alert type"]))
# Shib protected event page
# Whether the event is a hardware injection or not
w.add_Param(Param(name="HardwareInj",
dataType="int",
ucd="meta.number",
value=int(hardware_inj),
Description=['Indicates that this event is a hardware injection if 1, no if 0']))
w.add_Param(Param(name="OpenAlert",
dataType="int",
ucd="meta.number",
value=int(open_alert),
Description=['Indicates that this event is an open alert if 1, no if 0']))
w.add_Param(Param(name="EventPage",
ucd="meta.ref.url",
value=build_absolute_uri(reverse('view', args=[objid]), request),
Description=["Web page for evolving status of this candidate event"]))
if voevent_type != 'retraction':
# Instruments
w.add_Param(Param(name="Instruments",
dataType="string",
ucd="meta.code",
value=event.instruments,
Description=["List of instruments used in analysis to identify this event"]))
# False alarm rate
if event.far:
w.add_Param(Param(name="FAR",
dataType="float",
ucd="arith.rate;stat.falsealarm",
unit="Hz",
value=float(max(event.far, settings.VOEVENT_FAR_FLOOR)),
Description=["False alarm rate for GW candidates with this strength or greater"]))
# Group
w.add_Param(Param(name="Group",
dataType="string",
ucd="meta.code",
value=event.group.name,
Description=["Data analysis working group"]))
# Pipeline
w.add_Param(Param(name="Pipeline",
dataType="string",
ucd="meta.code",
value=event.pipeline.name,
Description=["Low-latency data analysis pipeline"]))
# Search
if event.search:
w.add_Param(Param(name="Search",
ucd="meta.code",
dataType="string",
value=event.search.name,
Description=["Specific low-latency search"]))
# initial and update VOEvents must have a skymap.
# new feature (10/24/5/2016): preliminary VOEvents can have a skymap,
# but they don't have to.
if (voevent_type in ["initial", "update"] or
(voevent_type == "preliminary" and skymap_filename != None)):
if not skymap_filename:
raise VOEventBuilderException("Skymap filename not provided.")
fits_name = skymap_filename
fits_path = os.path.join(event.datadir, fits_name)
if not os.path.exists(fits_path):
raise VOEventBuilderException("Skymap file does not exist: %s" % skymap_filename)
if not skymap_type:
raise VOEventBuilderException("Skymap type must be provided.")
# Skymaps. Create group and set fits file name
g = Group('GW_SKYMAP', skymap_type)
fits_skymap_url = build_absolute_uri(reverse(
"api:default:events:files", args=[objid, fits_name]), request)
# Add parameters to the skymap group
g.add_Param(Param(name="skymap_fits", dataType="string",
ucd="meta.ref.url", value=fits_skymap_url,
Description=["Sky Map FITS"]))
w.add_Group(g)
# Analysis specific attributes
if voevent_type != 'retraction':
classification_group = Group('Classification', Description=["Source "
"classification: binary neutron star (BNS), neutron star-black "
"hole (NSBH), binary black hole (BBH), MassGap, or terrestrial "
"(noise)"])
properties_group = Group('Properties', Description=["Qualitative "
"properties of the source, conditioned on the assumption that the "
"signal is an astrophysical compact binary merger"])
if isinstance(event,CoincInspiralEvent) and voevent_type != 'retraction':
# get mchirp and mass
mchirp = float(event.mchirp)
mass = float(event.mass)
# calculate eta = (mchirp/total_mass)**(5/3)
eta = pow((mchirp/mass),5.0/3.0)
# EM-Bright mass classifier information for CBC event candidates
if BNS is not None:
classification_group.add_Param(Param(name="BNS",
dataType="float", ucd="stat.probability",
value=BNS, Description=["Probability that the "
"source is a binary neutron star merger (both objects "
"lighter than 3 solar masses)"]))
if NSBH is not None:
classification_group.add_Param(Param(name="NSBH",
dataType="float", ucd="stat.probability",
value=NSBH, Description=["Probability that the "
"source is a neutron star-black hole merger (primary "
"heavier than 5 solar masses, secondary lighter than 3 "
"solar masses)"]))
if BBH is not None:
classification_group.add_Param(Param(name="BBH",
dataType="float", ucd="stat.probability",
value=BBH, Description=["Probability that the "
"source is a binary black hole merger (both objects "
"heavier than 5 solar masses)"]))
if MassGap is not None:
classification_group.add_Param(Param(name="MassGap",
dataType="float", ucd="stat.probability",
value=MassGap, Description=["Probability that the source "
"has at least one object between 3 and 5 solar masses"]))
if Terrestrial is not None:
classification_group.add_Param(Param(name="Terrestrial",
dataType="float", ucd="stat.probability",
value=Terrestrial, Description=["Probability "
"that the source is terrestrial (i.e., a background noise "
"fluctuation or a glitch)"]))
# Add to source properties group
if ProbHasNS is not None:
properties_group.add_Param(Param(name="HasNS",
dataType="float", ucd="stat.probability", value=ProbHasNS,
Description=["Probability that at least one object in the "
"binary has a mass that is less than 2.83 solar masses"]))
if ProbHasRemnant is not None:
properties_group.add_Param(Param(name="HasRemnant",
dataType="float", ucd="stat.probability",
value=ProbHasRemnant, Description=["Probability that a "
"nonzero mass was ejected outside the central remnant "
"object"]))
# build up MaxDistance. event.singleinspiral_set.all()?
# Each detector calculates an effective distance assuming the inspiral is
# optimally oriented. It is the maximum distance at which a source of the
# given parameters would've been seen by that particular detector. To get
# an effective 'maximum distance', we just find the minimum over detectors
max_distance = float('inf')
for obj in event.singleinspiral_set.all():
if obj.eff_distance < max_distance:
max_distance = obj.eff_distance
# if max_distance < float('inf'):
# w.add_Param(Param(name="MaxDistance",
# dataType="float",
# ucd="pos.distance",
# unit="Mpc",
# value=max_distance,
# Description=["Estimated maximum distance for CBC event"]))
elif isinstance(event,MultiBurstEvent):
w.add_Param(Param(name="CentralFreq",
dataType="float",
ucd="gw.frequency",
unit="Hz",
value=float(event.central_freq),
Description=["Central frequency of GW burst signal"]))
w.add_Param(Param(name="Duration",
dataType="float",
ucd="time.duration",
unit="s",
value=float(event.duration),
Description=["Measured duration of GW burst signal"]))
# XXX Calculate the fluence. Unfortunately, this requires parsing the trigger.txt
# file for hrss values. These should probably be pulled into the database.
# But there is no consensus on whether hrss or fluence is meaningful. So I will
# put off changing the schema for now.
try:
# Go find the data file.
log = event.eventlog_set.filter(comment__startswith="Original Data").all()[0]
filename = log.filename
filepath = os.path.join(event.datadir,filename)
if os.path.isfile(filepath):
datafile = open(filepath,"r")
else:
raise Exception("No file found.")
# Now parse the datafile.
# The line we want looks like:
# hrss: 1.752741e-23 2.101590e-23 6.418900e-23
for line in datafile:
if line.startswith('hrss:'):
hrss_values = [float(hrss) for hrss in line.split()[1:]]
max_hrss = max(hrss_values)
# From Min-A Cho: fluence = pi*(c**3)*(freq**2)*(hrss_max**2)*(10**3)/(4*G)
# Note that hrss here actually has units of s^(-1/2)
pi = 3.14159
c = 2.99792E10
G = 6.674E-8
fluence = pi * pow(c,3) * pow(event.central_freq,2)
fluence = fluence * pow(max_hrss,2)
fluence = fluence / (4.0*G)
w.add_Param(Param(name="Fluence",
dataType="float",
ucd="gw.fluence",
unit="erg/cm^2",
value=fluence,
Description=["Estimated fluence of GW burst signal"]))
except Exception:
pass
elif isinstance(event,LalInferenceBurstEvent):
w.add_Param(Param(name="frequency",
dataType="float",
ucd="gw.frequency",
unit="Hz",
value=float(event.frequency_mean),
Description=["Mean frequency of GW burst signal"]))
# Calculate the fluence.
# From Min-A Cho: fluence = pi*(c**3)*(freq**2)*(hrss_max**2)*(10**3)/(4*G)
# Note that hrss here actually has units of s^(-1/2)
# XXX obviously need to refactor here.
try:
pi = 3.14159
c = 2.99792E10
G = 6.674E-8
fluence = pi * pow(c,3) * pow(event.frequency,2)
fluence = fluence * pow(event.hrss,2)
fluence = fluence / (4.0*G)
w.add_Param(Param(name="Fluence",
dataType="float",
ucd="gw.fluence",
unit="erg/cm^2",
value=fluence,
Description=["Estimated fluence of GW burst signal"]))
except Exception:
pass
else:
pass
# Add Groups to What block
w.add_Group(classification_group)
w.add_Group(properties_group)
v.set_What(w)
############ Wherewhen ############################
# The old way of making the WhereWhen section led to a pointless position
# location.
# wwd = {'observatory': 'LIGO Virgo',
# 'coord_system': 'UTC-FK5-GEO',
# # XXX time format
# 'time': str(gpsToUtc(event.gpstime).isoformat())[:-6], #'1918-11-11T11:11:11',
# #'timeError': 1.0,
# 'longitude': 0.0,
# 'latitude': 0.0,
# 'positionalError': 180.0,
# }
#
# ww = makeWhereWhen(wwd)
# if ww: v.set_WhereWhen(ww)
coord_system_id = 'UTC-FK5-GEO'
event_time = str(gpsToUtc(event.gpstime).isoformat())[:-6]
observatory_id = 'LIGO Virgo'
ac = AstroCoords(coord_system_id=coord_system_id)
acs = AstroCoordSystem(id=coord_system_id)
ac.set_Time(Time(TimeInstant = TimeInstant(event_time)))
onl = ObservationLocation(acs, ac)
oyl = ObservatoryLocation(id=observatory_id)
odl = ObsDataLocation(oyl, onl)
ww = WhereWhen()
ww.set_ObsDataLocation(odl)
v.set_WhereWhen(ww)
############ Citation ############################
if event.voevent_set.count()>1:
c = Citations()
for ve in event.voevent_set.all():
# Oh, actually we need to exclude *this* voevent.
if serial_number == ve.N:
continue
if voevent_type == 'initial':
ei = EventIVORN('supersedes', ve.ivorn)
c.set_Description('Initial localization is now available')
elif voevent_type == 'update':
ei = EventIVORN('supersedes', ve.ivorn)
c.set_Description('Updated localization is now available')
elif voevent_type == 'retraction':
ei = EventIVORN('retraction', ve.ivorn)
c.set_Description('Determined to not be a viable GW event candidate')
elif voevent_type == 'preliminary':
# For cases when an additional preliminary VOEvent is sent
# in order to add a preliminary skymap.
ei = EventIVORN('supersedes', ve.ivorn)
c.set_Description('Initial localization is now available (preliminary)')
c.add_EventIVORN(ei)
v.set_Citations(c)
############ output the event ############################
xml = stringVOEvent(v)
#schemaURL = "http://www.ivoa.net/xml/VOEvent/VOEvent-v2.0.xsd")
return xml, ivorn
......@@ -3,7 +3,7 @@ import logging
from django import forms
from django.utils.safestring import mark_safe
from django.utils.html import escape
from .models import Event, Group, Label
from .models import Event, Group, Label, GrbEvent
from .models import Pipeline, Search, Signoff
from django.contrib.auth.models import User
from django.core.exceptions import FieldError
......@@ -49,3 +49,10 @@ class SignoffForm(ModelForm):
class Meta:
model = Signoff
fields = [ 'status', 'comment' ]
class GrbEventUpdateForm(ModelForm):
class Meta:
model = GrbEvent
fields = ['ra', 'dec', 'error_radius', 't90', 'redshift',
'designation']
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from events.models import Event, Label, Labelling, EventLog
from superevents.models import Superevent
try:
from aws_xray_sdk.core import xray_recorder
xray_recorder.begin_segment("remove-test-events-segment")
except ModuleNotFoundError:
print("aws_xray_sdk not found, skipping.")
log_message = 'Duplicate label <span style="color:{color};">{label_name}</span> was ' \
'removed as part of regular database maintenance.'
robot_last_name = 'GraceDB'
robot_user_name = 'gracedb_robot'
class Command(BaseCommand):
help = 'Searches for events that have duplicate labels, removes the label, ' \
'and then writes a log message that the label was removed.'
def add_arguments(self, parser):
parser.add_argument('-q', '--dry-run', action='store_true',
default=False, help='Don\'t actually delete labels.')
def handle(self, *args, **options):
# Get or create a "GraceDB" user (just to show up in log messages):
gracedb_robot, created = User.objects.get_or_create(username=robot_user_name,
last_name=robot_last_name)
if created:
print(f'created {robot_last_name} user for logging operations.')
for lab in Label.objects.all():
print(f'Looking for duplicate labels for {lab.name}')
# The following query returns a queryset of value dictionaries that look
# like:
# {'event': integer row ID of event that has duplicate labels,
# 'id__count': how many instances of repeated labels it has (usually 2)}
repeats = Labelling.objects.filter(label=lab).values('event')\
.annotate(Count('id')).order_by().filter(id__count__gt=1)
if repeats.exists():
print(f'--> {repeats.count()} duplicate labels detected.')
for rep in repeats:
# Get the event for the label duplication:
rep_event = Event.objects.get(id=rep['event'])
# Get the label objects:
rep_labels_for_event = Labelling.objects.filter(event=rep_event,
label=lab)
# Make super sure that they're actually duplicates:
if rep_labels_for_event.count() > 1:
# Actually for real delete them.
if not options['dry_run']:
print(f'----> clearing duplicate labels for {rep_event.graceid}')
for label_to_delete in rep_labels_for_event[1:]:
print(f'----> writing a log message for {rep_event.graceid}')
msg = log_message.format(color=label_to_delete.label.defaultColor,
label_name=label_to_delete.label.name)
EventLog.objects.create(event=label_to_delete.event,
comment=msg,
issuer=gracedb_robot)
label_to_delete.delete()
else:
err_msg = (f'{lab.name} was determined to be a duplicate label for '
f'{rep_event.graceid}, but something went wrong. Quitting.')
raise ValueError(err_msg)
else:
print(f'--> No duplicates found for label {lab.name}')
from django.core.management.base import BaseCommand
from ...models import Event, EMBBEventLog
from ...models import EMGroup
from ...models import AlternateEmail
from django.conf import settings
from django.contrib.auth.models import User
import json
import re
import smtplib
from email.mime.text import MIMEText
from email import message_from_string
from binascii import a2b_qp, a2b_base64
wierdchars = re.compile(u'[\U00010000-\U0010ffff]')
USER_NOT_FOUND_MESSAGE = """
No GraceDB user was found matching email: %s
To proceed, the following actions are recommended:
For LVC users: Please re-send your EEL using your @LIGO.org mail
forwarding address or a LIGO alternate mail address (i.e., an
address from which you can send messages to LIGO mailing lists).
For non-LVC users: If you have not already done so, please log in
to the GraceDB web interface at
https://gracedb.ligo.org
This will have the effect of caching your email address, and
then you can try re-sending your EEL message. We apologize for
the inconvenience.
Also, please use the email address with which you registered
at gw-astronomy.org. If you need to use an alternate email
address, we can add it to the system manually. Just send a
message to uwm-help@ligo.org.
"""
def sendResponse(to, subject, message):
print(message)
msg = MIMEText(message)
# Allow the 'to' argument to contain either a list (for multiple recipients)
# or a string (for a single recipient)
if isinstance(to, list):
msg['To'] = ','.join(to)
to_list = to
else:
msg['To'] = to
to_list = [to]
# Remove any addresses to ignore
to_list = list(set(to_list) - set(settings.EMBB_IGNORE_ADDRESSES))
if not to_list:
return None
from_address = settings.EMBB_MAIL_ADDRESS
msg['From'] = from_address
msg['Subject'] = subject
s = smtplib.SMTP(settings.EMBB_SMTP_SERVER)
s.sendmail(from_address, to_list, msg.as_string())
s.quit()
return None
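# Example usage (the address is illustrative; 'to' may be a single
# address or a list):
#   sendResponse('observer@example.org', 'Re: embb submission',
#                'EEL is successfully saved!')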
#
# Given a string and an encoding, return a unicode string with the
# 6, 9, 66, and 99 characters replaced.
#
def get_unicode_and_fix_quotes(s, encoding):
# Decode to text first if we were handed raw bytes (e.g. from a2b_qp).
if isinstance(s, bytes):
s = s.decode(encoding or 'ascii', errors='replace')
rv = ''
for uchar in s:
if ord(uchar) > 127:
# Fix 6 and 9
if uchar == '\u2018' or uchar == '\u2019':
uchar = "'"
# Fix 66 and 99
if uchar == '\u201c' or uchar == '\u201d':
uchar = '"'
rv += uchar
return rv
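# For example, the decoded body bytes b'\xe2\x80\x9chello\xe2\x80\x9d'
# with encoding 'utf-8' come back as the plain string '"hello"'.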
class Command(BaseCommand):
help = "I am the email ingester!"
def add_arguments(self, parser):
parser.add_argument('filename', nargs='?', default=None,
help='File containing the raw contents of the email.')
def handle(self, *args, **options):
self.transcript = 'Started email ingester\n'
# must provide a filename
filename = options.get('filename')
if not filename:
self.transcript += 'No filename provided'
return sendResponse(settings.EMBB_MAIL_ADMINS, 'embb submission', self.transcript)
# The file is understood to contain the raw contents of the email.
try:
with open(filename, 'r') as f:
data = f.read()
self.transcript += 'Got email with %d characters incl headers\n' % len(data)
except Exception as e:
self.transcript += 'Could not fetch email file\n' + str(e)
return sendResponse(settings.EMBB_MAIL_ADMINS, 'embb submission', self.transcript)
# Try to convert to email object.
email_obj = message_from_string(data)
# Parse the email and find out who it's from.
from_string = email_obj['from']
try:
# XXX Hacky way to get the stuff between the '<' and the '>'
from_address = from_string.split('<')[1].split('>')[0]
except Exception:
try:
from_address = email_obj._unixfrom.split()[1]
except Exception as e:
self.transcript += 'Problem parsing out sender address\n' + str(e)
return sendResponse(settings.EMBB_MAIL_ADMINS, 'embb submission failure', self.transcript)
# find the submitter
# Look up the sender's address.
user = None
try:
user = User.objects.get(email=from_address)
self.transcript += 'Found submitter %s\n' % user.username
except Exception:
pass
if user is None:
try:
alt_email = AlternateEmail.objects.get(email=from_address)
user = alt_email.user
self.transcript += 'Found submitter %s\n' % user.username
except Exception:
pass
if not user:
self.transcript += USER_NOT_FOUND_MESSAGE % from_address
return sendResponse(from_address, 'gracedb user not found', self.transcript)
# Get the subject of the email. Use it in the reply
subject = email_obj.get('Subject', '')
reply_subject = 'Re: ' + subject
# Now we want to get the contents of the email.
# Get the payload and encoding.
encoding = None
if email_obj.is_multipart():
# Let's look for a plain text part. If not, throw an error.
msg = None
for part in email_obj.get_payload():
if part.get_content_type() == 'text/plain':
content_transfer_encoding = part.get('Content-Transfer-Encoding', None)
msg = part.get_payload()
try:
encoding = part.get_content_charset()
except Exception:
pass
if not msg:
self.transcript += 'We cannot parse your email because it is not plain text.\n'
self.transcript += 'Please send plain text emails instead of just HTML.\n'
return sendResponse(from_address, reply_subject, self.transcript)
else:
# not multipart.
msg = email_obj.get_payload()
content_transfer_encoding = email_obj.get('Content-Transfer-Encoding', None)
try:
encoding = email_obj.get_content_charset()
except Exception:
pass
if content_transfer_encoding:
if content_transfer_encoding == 'quoted-printable':
msg = a2b_qp(msg)
elif content_transfer_encoding == 'base64':
msg = a2b_base64(msg)
else:
self.transcript += 'Your message uses an unsupported content transfer encoding.\n'
self.transcript += 'Please use quoted-printable or base64.\n'
return sendResponse(from_address, reply_subject, self.transcript)
# Get a unicode string and fix any quotation marks.
msg = get_unicode_and_fix_quotes(msg, encoding)
# Get the body of the message and convert to lines.
if msg:
lines = msg.split('\n')
else:
lines = []
comment = ''
dict = {}
p = re.compile('[A-Za-z-]+:')
inkey = 0
key = ''
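# The loop below parses a simple KEY: value block out of the body, e.g.:
#   PARAM: depth=21.5
#   JSON: {"graceid": "G12345"}
# An indented line continues the previous key's value; any other line is
# accumulated as free-form comment text. (Values above are illustrative.)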
for line in lines:
if len(line) > 0:
if inkey and line[0].isspace(): # initial space implies continuation
dict[key] += line
continue
m = p.match(line)
if m:
key = line[m.start():m.end()-1]
val = line[m.end():].strip()
if key in dict: # same key again just makes a new line in val
dict[key] += '\n' + val
else:
dict[key] = val
inkey = 1
else:
comment += line
inkey = 0
self.transcript += 'Found %d keys in email\n' % len(dict.keys())
# if not dict.has_key('JSON'):
# self.transcript += 'Error: no JSON key'
# return sendResponse(from_address, dict['SUBJECT'], self.transcript)
def getpop(dict, key, default):
# dict.pop() already supports a default when the key is missing.
return dict.pop(key, default)
def getTextList(dict, key1, key2, default):
val = None
if key1 in dict: val = dict[key1]
if key2 in dict: val = dict[key2]
if val:
if isinstance(val, list):
return json.dumps(val)[1:-1]
else:
return str(val)
else:
return default
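# e.g. getTextList({'ra': [12.5, 13.0]}, 'ra', 'raList', '') returns
# '12.5, 13.0'; a plain (non-list) value is passed through via str().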
# look for the JSON field at the end of the mail
extra_dict = {}
if 'JSON' in dict:
try:
extra_dict = json.loads(dict['JSON'])
self.transcript += 'Found %d keys in JSON\n' % len(extra_dict.keys())
except Exception as e:
self.transcript += 'Error: Cannot parse JSON: %s\n' % dict['JSON']
self.transcript += str(e)
return sendResponse(from_address, reply_subject, self.transcript)
# look for PARAM fields of the form
# PARAM: apple=34.2 or appleList=[2,3,4]
if 'PARAM' in dict:
lines = dict['PARAM'].split('\n')
for line in lines:
tok = line.split('=')
if len(tok) == 2:
key = tok[0].strip()
val = tok[1].strip()
extra_dict[key] = val
# gotta get the Graceid!
graceid = getpop(extra_dict, 'graceid', None) # try to get the graceid from the extra_dict
if not graceid and 'SUBJECT' in dict:
tok = dict['SUBJECT'].split(':') # look for a second colon in the SUBJECT line
graceid = tok[0].strip()
if not graceid:
self.transcript += 'Cannot locate GraceID in SUBJECT, JSON, or PARAM data'
return sendResponse(from_address, reply_subject, self.transcript)
try:
event = Event.getByGraceid(graceid)
self.transcript += 'Found Graceid %s\n' % graceid
except Exception as e:
self.transcript += 'Error: Cannot find Graceid %s\n' % graceid
self.transcript += str(e)
return sendResponse(from_address, reply_subject, self.transcript)
# create a log entry
eel = EMBBEventLog(event=event)
eel.submitter = user
# Assign a group name
group_name = getpop(extra_dict, 'group', None)
try:
group = EMGroup.objects.get(name=group_name)
eel.group = group
self.transcript += 'Found EMGroup %s\n' % group_name
except Exception as e:
self.transcript += 'Error: Cannot find EMGroup =%s=\n' % group_name
self.transcript += str(e)
return sendResponse(from_address, reply_subject, self.transcript)
eel.eel_status = getpop(extra_dict, 'eel_status', 'FO')
eel.obs_status = getpop(extra_dict, 'obs_status', 'TE')
eel.footprintID = getpop(extra_dict, 'footprintID', '')
eel.waveband = getpop(extra_dict, 'waveband', 'em.opt')
eel.raList = getTextList(extra_dict, 'ra', 'raList', '')
eel.decList = getTextList(extra_dict, 'dec', 'decList', '')
eel.raWidthList = getTextList(extra_dict, 'raWidth', 'raWidthList', '')
eel.decWidthList = getTextList(extra_dict, 'decWidth', 'decWidthList', '')
eel.gpstimeList = getTextList(extra_dict, 'gpstime', 'gpstimeList', '')
eel.durationList = getTextList(extra_dict, 'duration', 'durationList', '')
eel.validateMakeRects()
eel.extra_info_dict = json.dumps(extra_dict)
self.transcript += 'Extra_info_dict is %s\n' % eel.extra_info_dict
eel.comment = comment
try:
eel.save()
except Exception as e:
self.transcript += 'Error: Could not save EEL\n'
self.transcript += str(e)
return sendResponse(from_address, reply_subject, self.transcript)
self.transcript += 'EEL is successfully saved!'
return sendResponse(from_address, reply_subject, self.transcript)
# This tool removes the GroupObjectPermission objects for MDC superevent
# log objects and superevents. The reference issue is here:
#
# https://git.ligo.org/computing/gracedb/server/-/issues/302
#
# Note that the actual data remains in place; just the public
# visibility permission is being removed.
from datetime import timedelta
from django.conf import settings
from django.utils.timezone import now
from django.contrib.auth.models import Permission
from django.core.management.base import BaseCommand
from superevents.models import Superevent, Log, LogGroupObjectPermission, \
SupereventGroupObjectPermission
if getattr(settings, 'ENABLE_AWS_XRAY', None):
try:
from aws_xray_sdk.core import xray_recorder
xray_recorder.begin_segment("remove-public-perms-segment")
except ModuleNotFoundError:
print("aws_xray_sdk not found, skipping.")
class Command(BaseCommand):
help="Remove public view permission for old mdc superevents log messages"
def handle(self, *args, **options):
# We're worried about events more than two weeks old.
days_old = 14
t_now = now()
date_cutoff = t_now - timedelta(days=days_old)
# There needs to be a GroupObjectPermission object for a log object
# to be exposed to the public. Internal users don't require that
# permission. So find all Log objects whose parent superevent is MDC,
# older than our date cutoff, and whose groupobjectpermission_set isn't
# null.
public_mdc_log_objects = Log.objects.filter(superevent__category='M',
created__lt=date_cutoff,
loggroupobjectpermission__isnull=False)
# Spit out the number of log objects that meet those criteria, just for
# ha ha's.
num_logs = public_mdc_log_objects.count()
print(f"There are {num_logs} publicly-tagged MDC log objects")
# Also get the list of > 14-day-old, MDC, exposed superevents in order to
# clear their permissions too.
public_mdc_superevents = Superevent.objects.filter(category='M',
supereventgroupobjectpermission__isnull=False,
created__lt=date_cutoff).distinct()
print("Clearing log permissions.")
for l in public_mdc_log_objects:
l.loggroupobjectpermission_set.all().delete()
print("--> Done clearing log permissions.")
# Now change the visibility for the superevents:
num_superevents = public_mdc_superevents.count()
print(f'Clearing visibility for {num_superevents} superevents')
for s in public_mdc_superevents:
s.is_exposed = False
s.supereventgroupobjectpermission_set.all().delete()
s.save()
print('--> Done clearing superevent permissions')
from datetime import timedelta
from django.utils.timezone import now
from django.core.management.base import BaseCommand
from events.models import Event, Group
from superevents.models import Superevent
try:
from aws_xray_sdk.core import xray_recorder
xray_recorder.begin_segment("remove-test-events-segment")
except ModuleNotFoundError:
print("aws_xray_sdk not found, skipping.")
class Command(BaseCommand):
help="Remove test superevents and events older than three weeks."
def handle(self, *args, **options):
days_old = 21
# Get lists of test superevents and events older than 21 days.
test_group = Group.objects.get(name='Test')
t_now = now()
date_cutoff = t_now - timedelta(days=days_old)
superevent_list = Superevent.objects.filter(created__lt=date_cutoff,
category="T")
event_list = Event.objects.filter(created__lt=date_cutoff,
group=test_group)
# Loop over superevents and delete
print("Deleting {0} test superevents older than {1} days"
"({2})".format(superevent_list.count(), days_old, t_now))
for s in superevent_list:
print("\tDeleting superevent {0}".format(s.superevent_id))
# Delete superevent, removing its data and GroupObjectPermissions
s.delete(purge=True)
# Loop over events and delete
print("Deleting {0} test events older than {1} days"
"({2})".format(event_list.count(), days_old, t_now))
for event in event_list:
print("\tDeleting event {0}".format(event.graceid))
# Delete event, removing data, subclasses, and GroupObjectPermissions
# for the event and its subclasses
event.delete(purge=True)
from django.db import models
# Custom managers for the Pipeline model --------------------------------------
class ProductionPipelineManager(models.Manager):
"""Pipelines which are production search pipelines"""
def get_queryset(self):
return super().get_queryset().filter(
pipeline_type=self.model.PIPELINE_TYPE_SEARCH_PRODUCTION
)
class ExternalPipelineManager(models.Manager):
"""Pipelines which correspond to external experiments"""
def get_queryset(self):
return super().get_queryset().filter(
pipeline_type=self.model.PIPELINE_TYPE_EXTERNAL
)
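# A minimal sketch of how these managers would typically be wired into the
# Pipeline model (assuming a pipeline_type field as referenced above):
#
#   class Pipeline(models.Model):
#       ...
#       objects = models.Manager()
#       production = ProductionPipelineManager()
#       external = ExternalPipelineManager()
#
# so that Pipeline.production.all() returns only production search
# pipelines.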