Commit 81b3e066 authored by Alexander Pace's avatar Alexander Pace
Browse files

gracedb-2.11.1

* `auto_increment_insert` updates
* `auto_increment_update` updates
* minor visual tweaks
* enable/disable LVAlert via environment variable
* upstream package updates
* bug fixes and performance improvements
parent d455a18c
Pipeline #204408 passed with stages
in 15 minutes and 33 seconds
......@@ -30,7 +30,7 @@ INFO_BANNER_ENABLED = False
INFO_BANNER_MESSAGE = "TEST MESSAGE"
# Version ---------------------------------------------------------------------
PROJECT_VERSION = '2.11.0-2'
PROJECT_VERSION = '2.11.1'
# Unauthenticated access ------------------------------------------------------
# This variable should eventually control whether unauthenticated access is
......@@ -627,3 +627,12 @@ LOGGING = {
# Turn off debug/error emails when in maintenance mode.
if MAINTENANCE_MODE:
LOGGING['loggers']['django.request']['handlers'].remove('mail_admins')
# Define some words for the instance stub:
ENABLED = {True: "enabled", False: "disabled"}
INSTANCE_STUB = """
<li>Phone alerts (calls/SMS) are {0}</li>
<li>Email alerts are {1}</li>
<li>LVAlert messages to <span class="text-monospace">{2}</span> are {3}</li>
"""
......@@ -28,8 +28,22 @@ INSTALLED_APPS += [
# Add testserver to ALLOWED_HOSTS
ALLOWED_HOSTS += ['testserver']
# Turn on XMPP alerts
SEND_XMPP_ALERTS = True
# Turn LVAlert on/off from the environment. Adding this
# to turn lvalerts on/off from docker compose/update instead
# of having to rebuild containers. If the environment variable
# isn't set, then revert to the hardwired behavior:
xmpp_env_var = get_from_env('SEND_LVALERT_XMPP_ALERTS',
default_value=SEND_XMPP_ALERTS,
fail_if_not_found=False)
# Fix for other boolean values:
if (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['true','t','1']):
SEND_XMPP_ALERTS=True
elif (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['false','f','0']):
SEND_XMPP_ALERTS=False
else:
SEND_XMPP_ALERTS = True
# Enforce that phone and email alerts are off
SEND_PHONE_ALERTS = False
......@@ -72,6 +86,11 @@ if sentry_dsn is not None:
LOGGING['loggers']['django.request']['handlers'] = []
# Home page stuff
INSTANCE_TITLE = 'GraceDB Development VM'
INSTANCE_LIST = INSTANCE_STUB.format(ENABLED[SEND_PHONE_ALERTS],
ENABLED[SEND_EMAIL_ALERTS],
LVALERT_OVERSEER_INSTANCES[0]['lvalert_server'],
ENABLED[SEND_XMPP_ALERTS])
INSTANCE_TITLE = 'GraceDB Development Server'
INSTANCE_INFO = """
<h5>Development Instance</h5>
......@@ -83,11 +102,10 @@ behavior of this instance will mimic the production system at any time.
Events and associated data may change or be removed at any time.
</p>
<ul>
<li>Phone and e-mail alerts are turned off.</li>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
<li>LVAlert messages are sent to lvalert-dev.cgca.uwm.edu.</li>
</ul>
"""
""".format(INSTANCE_LIST)
if AWS_ELASTICACHE_ADDR:
CACHES['default']['KEY_PREFIX'] = '1'
......
......@@ -12,8 +12,22 @@ DEBUG = False
# TP (8 Aug 2017): not sure why?
EMBB_MAIL_ADDRESS = 'gracedb@{fqdn}'.format(fqdn=SERVER_FQDN)
# Turn on XMPP alerts
SEND_XMPP_ALERTS = True
# Turn LVAlert on/off from the environment. Adding this
# to turn lvalerts on/off from docker compose/update instead
# of having to rebuild containers. If the environment variable
# isn't set, then revert to the hardwired behavior:
xmpp_env_var = get_from_env('SEND_LVALERT_XMPP_ALERTS',
default_value=SEND_XMPP_ALERTS,
fail_if_not_found=False)
# Fix for other boolean values:
if (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['true','t','1']):
SEND_XMPP_ALERTS=True
elif (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['false','f','0']):
SEND_XMPP_ALERTS=False
else:
SEND_XMPP_ALERTS = True
# Enforce that phone and email alerts are off
SEND_PHONE_ALERTS = False
......@@ -24,20 +38,24 @@ ALLOWED_HOSTS += ['testserver']
# Home page stuff
INSTANCE_TITLE = 'GraceDB Playground'
INSTANCE_LIST = INSTANCE_STUB.format(ENABLED[SEND_PHONE_ALERTS],
ENABLED[SEND_EMAIL_ALERTS],
LVALERT_OVERSEER_INSTANCES[0]['lvalert_server'],
ENABLED[SEND_XMPP_ALERTS])
INSTANCE_INFO = """
<h5>Playground instance</h5>
<hr>
<p>
This GraceDB instance is designed for users to develop and test their own
applications. It mimics the production instance in all but the following ways:
</p>
<ul>
<li>Phone and e-mail alerts are turned off.</li>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
<li>LVAlert messages are sent to lvalert-playground.cgca.uwm.edu.</li>
<li>Events and associated data will <b>not</b> be preserved indefinitely.
A nightly cron job removes events older than 21 days.</li>
</ul>
"""
""".format(INSTANCE_LIST)
# Safety check on debug mode for playground
if (DEBUG == True):
......
......@@ -46,8 +46,22 @@ INTERNAL_IPS = [
INTERNAL_IP_ADDRESS,
]
# Turn on XMPP alerts
SEND_XMPP_ALERTS = True
# Turn LVAlert on/off from the environment. Adding this
# to turn lvalerts on/off from docker compose/update instead
# of having to rebuild containers. If the environment variable
# isn't set, then revert to the hardwired behavior:
xmpp_env_var = get_from_env('SEND_LVALERT_XMPP_ALERTS',
default_value=SEND_XMPP_ALERTS,
fail_if_not_found=False)
# Fix for other boolean values:
if (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['true','t','1']):
SEND_XMPP_ALERTS=True
elif (isinstance(xmpp_env_var, str) and
xmpp_env_var.lower() in ['false','f','0']):
SEND_XMPP_ALERTS=False
else:
SEND_XMPP_ALERTS = True
# Enforce that phone and email alerts are off
SEND_PHONE_ALERTS = False
......@@ -73,6 +87,10 @@ if sentry_dsn is not None:
# Home page stuff
INSTANCE_TITLE = 'GraceDB Testing Server'
INSTANCE_LIST = INSTANCE_STUB.format(ENABLED[SEND_PHONE_ALERTS],
ENABLED[SEND_EMAIL_ALERTS],
LVALERT_OVERSEER_INSTANCES[0]['lvalert_server'],
ENABLED[SEND_XMPP_ALERTS])
INSTANCE_INFO = """
<h5>Testing Instance</h5>
<hr>
......@@ -83,11 +101,10 @@ Software should meet QA milestones on the test instance before being moved
to Playground or Production. Note, on this GraceDB instance:
</p>
<ul>
<li>Phone and e-mail alerts are turned off.</li>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
<li>LVAlert messages are sent to lvalert-test.cgca.uwm.edu.</li>
</ul>
"""
""".format(INSTANCE_LIST)
if AWS_ELASTICACHE_ADDR:
CACHES['default']['KEY_PREFIX'] = '2'
......
......@@ -48,3 +48,22 @@ if 'silk' in INSTALLED_APPS:
INTERNAL_IPS = [
INTERNAL_IP_ADDRESS,
]
INSTANCE_TITLE = 'GraceDB Development VM'
INSTANCE_LIST = INSTANCE_STUB.format(ENABLED[SEND_PHONE_ALERTS],
ENABLED[SEND_EMAIL_ALERTS],
LVALERT_OVERSEER_INSTANCES[0]['lvalert_server'],
ENABLED[SEND_XMPP_ALERTS])
INSTANCE_INFO = """
<h5>Development Instance</h5>
<hr>
<p>
This GraceDB instance is designed for GraceDB maintainers to develop and
test in the AWS cloud architecture. There is <b>no guarantee</b> that the
behavior of this instance will mimic the production system at any time.
Events and associated data may change or be removed at any time.
</p>
<ul>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
</ul>
""".format(INSTANCE_LIST)
......@@ -93,8 +93,3 @@ require([
{% endblock %}
{% block header %}
<div id="gracedb-nav-header"></div>
{% endblock %}
......@@ -3,6 +3,7 @@ import logging
import re
from django.db import models, connection
from django.db.models import Q
from django.utils import six
from django.contrib.auth import get_user_model
from django.db.models import QuerySet
......@@ -58,120 +59,66 @@ class AutoIncrementModel(models.Model):
def auto_increment_insert(self, *args, **kwargs):
"""
This custom save method does a SELECT and INSERT in a single raw SQL
query in order to properly handle a quasi-autoincrementing field, which
is used to identify instances associated with a ForeignKey. With this
method, concurrency issues are handled by the database backend.
Ex: EventLog instances associated with an Event should be numbered from
1 to N, based on the order of their submission.
This has been tested with the following classes:
EventLog, EMObservation, EMFootprint, EMBBEventLog, VOEvent
Thorough testing is needed to use this method for a new model. Note
that this method may not work properly for non-MySQL backends.
Requires AUTO_FIELD and AUTO_CONSTRAINTS to be defined.
"""
# Check database type
if connection.vendor != 'mysql':
raise DatabaseError(_('The custom AutoIncrementModel '
'auto_increment_save method is not compatible with non-MySQL '
'backends'))
# Check for the existence of the required fields:
if not self.AUTO_CONSTRAINTS or not self.AUTO_FIELD:
raise TypeError('AUTO_CONSTRAINTS or AUTO_FIELD not set.')
# Check type of self.AUTO_CONSTRAINTS
if not isinstance(self.AUTO_CONSTRAINTS, (tuple, list)):
raise TypeError(_('AUTO_CONSTRAINTS should be a tuple or list'))
# Get some useful information
meta = self.__class__._meta
pk_set = self._get_pk_val() is not None
current_class = self.__class__
# Get model fields, except for primary key field.
fields = [f for f in meta.local_concrete_fields if not
isinstance(f, models.fields.AutoField)]
# Check type of self.AUTO_CONSTRAINTS
if not isinstance(self.AUTO_CONSTRAINTS, (tuple, list)):
raise TypeError(_('AUTO_CONSTRAINTS should be a tuple or list'))
# Check constraint fields
f_names = [f.name for f in fields]
for constraint_field in self.AUTO_CONSTRAINTS:
if constraint_field not in f_names:
raise ValueError(_(('Constraint {0} is not a field for '
'model {1}').format(constraint_field,
self.__class__.__name__)))
current_class.__name__)))
# Check auto field
if self.AUTO_FIELD not in f_names:
raise ValueError(_(('AUTO_FIELD {0} is not a field for '
'model {1}').format(self.auto_field, self.__class__.__name__)))
# Setup for generating base SQL query for doing an INSERT.
query = models.sql.InsertQuery(self.__class__)
query.insert_values(fields, objs=[self])
compiler = query.get_compiler(using=self.__class__._base_manager.db)
compiler.return_id = meta.auto_field is not None and not pk_set
# Useful function
qn = compiler.quote_name_unless_alias
# Compile multiple constraints with AND
constraint_fields = list(map(meta.get_field, self.AUTO_CONSTRAINTS))
constraint_list = ["{0}=%s".format(qn(f.column))
for f in constraint_fields]
constraint_values = [f.get_db_prep_value(getattr(self, f.column),
compiler.connection) for f in constraint_fields]
constraint_str = " AND ".join(constraint_list)
with compiler.connection.cursor() as cursor:
# Get base SQL query as string.
for sql, params in compiler.as_sql():
# Modify SQL string to do an INSERT with SELECT.
# NOTE: it's unlikely that the following will generate
# a functional database query for non-MySQL backends.
# Replace VALUES (%s, %s, ..., %s) with
# SELECT %s, %s, ..., %s
sql = re.sub(r"VALUES \((.*)\)", r"SELECT \1", sql)
# Add table to SELECT from, as well as constraints
sql += " FROM {tbl_name} WHERE {constraints}".format(
tbl_name=qn(meta.db_table),
constraints=constraint_str
)
# Get index corresponding to AUTO_FIELD.
af_idx = [f.attname for f in fields].index(self.AUTO_FIELD)
# Put this directly in the SQL; cursor.execute quotes it
# as a literal, which causes the SQL command to fail.
# We shouldn't have issues with SQL injection because
# AUTO_FIELD should never be a user-defined parameter.
del params[af_idx]
sql = re.sub(r"((%s, ){{{0}}})%s".format(af_idx),
r"\1IFNULL(MAX({af}),0)+1", sql, 1).format(
af=self.AUTO_FIELD)
# Add constraint values to params
params += constraint_values
# Execute SQL command.
cursor.execute(sql, params)
# Get primary key from database and set it in memory.
if compiler.connection.features.can_return_id_from_insert:
id = compiler.connection.ops.fetch_returned_insert_id(cursor)
else:
id = compiler.connection.ops.last_insert_id(cursor,
meta.db_table, meta.pk.column)
self._set_pk_val(id)
# Refresh object in memory in order to get AUTO_FIELD value.
self.refresh_from_db()
# Prevents check for unique primary key - needed to prevent an
# IntegrityError when the object was just created and we try to
# update it while it's still in memory
self._state.adding = False
# Get the AUTO_CONSTRAINT object (i.e, superevent or event)
# Note that this assumes that there's one constraint, which appears to
# be the case for all objects that I (Alex) can find. Make this more
# general, if need be. This could be accomplished with Q(...) filters.
auto_const_object = getattr(self, self.AUTO_CONSTRAINTS[0])
# If there is no value for the constrained autofield set, then set it to one
# more than the total number of objects constraint to the auto_constraint.
# Clear enough?
if not getattr(self, self.AUTO_FIELD):
setattr(self, self.AUTO_FIELD,
current_class.objects.filter(**{self.AUTO_CONSTRAINTS[0]: auto_const_object}).count()+1)
else:
num_objects = current_class.objects.filter(**{self.AUTO_CONSTRAINTS[0]: auto_const_object}).count()
setattr(self, self.AUTO_FIELD,
max(num_objects, num_objects + 1))
# Save object and check constraints:
self.full_clean()
super(AutoIncrementModel, self).save(*args, **kwargs)
def auto_increment_update(self, update_field_name, constraints=[],
allow_update_to_nonnull=False):
......@@ -184,45 +131,19 @@ class AutoIncrementModel(models.Model):
raise ValueError(_(('Attempt to update a non-null constrained auto'
'field for object {0}. Not allowed.').format(self.__str__())))
# Setup for generating base SQL query for doing an update
meta = self._meta
field = meta.get_field(update_field_name)
values = [(field, None, field.pre_save(self, False))]
query = models.sql.UpdateQuery(self.__class__)
query.add_update_fields(values)
compiler = query.get_compiler(using=self.__class__._base_manager.db)
# Useful function
qn = compiler.quote_name_unless_alias
# SQL for doing autoincrement
custom_sql= ("(SELECT N FROM (SELECT IFNULL(MAX({field}),0)+1 AS N "
"FROM {tbl_name}").format(tbl_name=qn(meta.db_table),
field=update_field_name)
# Convert list of field names to be used as constraints into database
# column names and their values (retrieved from the instance itself)
constraint_fields = [meta.get_field(f) for f in constraints]
constraint_list = ["{0}=%s".format(qn(f.column)) for f in constraint_fields]
values = [f.get_db_prep_value(getattr(self, f.column),
compiler.connection) for f in constraint_fields]
# Add constraints to custom SQL (if they are provided)
if constraint_list:
custom_sql += (" WHERE " + " AND ".join(constraint_list))
# Add end
custom_sql += (") AS temp) WHERE id={pk};".format(pk=self.pk))
# Replace NULL in base sql update query
base_sql = compiler.as_sql()[0]
sql = base_sql.replace('NULL', custom_sql)
# Execute sql
compiler.connection.cursor().execute(sql, values)
# Refresh from database
self.refresh_from_db(fields=[update_field_name])
# Get current class:
current_class = self.__class__
# Set up query based on the constraints:
query = Q()
for i in constraints:
query = query & Q(**{i: getattr(self, i)})
# Perform query and get number of objects:
num_results = current_class.objects.filter(query).count()
setattr(self, update_field_name, num_results + 1)
class LogBase(models.Model):
......
......@@ -275,7 +275,7 @@ class Superevent(CleanSaveModel, AutoIncrementModel):
self.gw_letter_suffix = int_to_letters(self.gw_date_number).upper()
# Save the fields which have changed
self.save(update_fields=['is_gw', 'gw_letter_suffix'])
self.save(update_fields=['is_gw', 'gw_letter_suffix', 'gw_date_number'])
def get_groups_with_groupobjectpermissions(self):
gops = self.supereventgroupobjectpermission_set.all()
......
......@@ -53,7 +53,7 @@
{% if user %}
{% if user.is_superuser %}
<li class="nav-item">
<a class="nav-link text-light font-weight-light" href="{% url "home" %}admin_docs/">Admin Docs</a>
<a class="nav-link text-light font-weight-light" href="{% url "home" %}admin_docs/" target="_blank">Admin Docs</a>
</li>
{% endif %}
{% endif %}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment