Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • alexander.pace/server
  • geoffrey.mo/gracedb-server
  • deep.chatterjee/gracedb-server
  • cody.messick/server
  • sushant.sharma-chaudhary/server
  • michael-coughlin/server
  • daniel.wysocki/gracedb-server
  • roberto.depietri/gracedb
  • philippe.grassia/gracedb
  • tri.nguyen/gracedb
  • jonah-kanner/gracedb
  • brandon.piotrzkowski/gracedb
  • joseph-areeda/gracedb
  • duncanmmacleod/gracedb
  • thomas.downes/gracedb
  • tanner.prestegard/gracedb
  • leo-singer/gracedb
  • computing/gracedb/server
18 results
Show changes
Showing
with 885 additions and 877 deletions
# Settings for a playground GraceDB instance (for user testing) running
# in a container on AWS. These settings inherit from base.py
# and override or add to them.
from .base import *

# Deployment tier and display name for this configuration.
TIER = "playground"
CONFIG_NAME = "USER TESTING"

# Debug settings: playground mimics production, so debug stays off
# (enforced again by the safety check at the bottom of this module).
DEBUG = False

# Override EMBB email address
# TP (8 Aug 2017): not sure why?
EMBB_MAIL_ADDRESS = 'gracedb@{fqdn}'.format(fqdn=SERVER_FQDN)

# Enforce that phone and email alerts are off XXX: Set by deployment variables!
#SEND_PHONE_ALERTS = False
#SEND_EMAIL_ALERTS = False

# Enable Mattermost alerts
SEND_MATTERMOST_ALERTS = True

# Add testserver to ALLOWED_HOSTS (used by Django's test client)
ALLOWED_HOSTS += ['testserver']

# Set up Sentry for error logging. Only activated when a DSN is supplied
# through the DJANGO_SENTRY_DSN environment variable.
sentry_dsn = get_from_env('DJANGO_SENTRY_DSN', fail_if_not_found=False)
if sentry_dsn is not None:
    USE_SENTRY = True

    # Set up Sentry
    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration
    sentry_sdk.init(
        environment='playground',
        dsn=sentry_dsn,
        integrations=[DjangoIntegration()],
        before_send=before_send,
    )

    # Turn off default admin error emails (Sentry handles error reporting)
    LOGGING['loggers']['django.request']['handlers'] = []

# Home page stuff
INSTANCE_TITLE = 'GraceDB Playground'

# Add sub-bullet with igwn-alert group:
group_sub_bullet = """<ul>
<li> Messages are sent to group: <span class="text-monospace"> {0} </span></li>
</ul>""".format(LVALERT_OVERSEER_INSTANCES[0]['igwn_alert_group'])
INSTANCE_LIST = INSTANCE_LIST + group_sub_bullet

# HTML blurb shown on the home page describing this instance.
INSTANCE_INFO = """
<h5>Playground instance</h5>
<hr>
<p>
This GraceDB instance is designed for users to develop and test their own
applications. It mimics the production instance in all but the following ways:
</p>
<ul>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
<li>Events and associated data will <b>not</b> be preserved indefinitely.
A nightly cron job removes events older than 21 days.</li>
</ul>
""".format(INSTANCE_LIST)
# Safety check: playground must never run with Django debug mode enabled,
# since DEBUG=True exposes settings and stack traces to end users.
# (Idiomatic truthiness test instead of comparing to True with ==.)
if DEBUG:
    raise RuntimeError("Turn off debug mode for playground")
# Settings for a production GraceDB instance running in a container.
# Inherits from base.py and overrides or adds to those settings.
from .base import *

# Deployment tier for this configuration.
TIER = "production"

# Debug must stay off in production (enforced by the safety check below).
DEBUG = False

# Turn on alerts: XXX: Set by deployment variables!
#SEND_PHONE_ALERTS = True
#SEND_EMAIL_ALERTS = True
#SEND_MATTERMOST_ALERTS = True

# TP, March 2019: for now, it looks infeasible to use multiple databases
# since there are many operations which normal LVC users can do that
# do a write and then a read very soon after. And we can't rely on
# the read replica being updated quickly enough for that to work.
# So there are several workflows that need to be redone in order for
# this to be possible, but it's not obvious that they even can be
# reworked properly. I.e. this is a much bigger project than expected
# so we're going to have to revisit it at some point. We'll leave the
# config here for now.
# if not PRIORITY_SERVER:
#     # If not a priority server, we use the read-only replica database
#     # for reads and master for writes.
#     # The username, password, and database name are all replicated
#     # from the production database
#
#     # Set up dict and add to DATABASES setting
#     read_replica = {
#         'NAME': DATABASES['default']['NAME'],
#         'ENGINE': 'django.db.backends.mysql',
#         'USER': DATABASES['default']['USER'],
#         'PASSWORD': DATABASES['default']['PASSWORD'],
#         'HOST': os.environ.get('DJANGO_REPLICA_DB_HOST', ''),
#         'PORT': os.environ.get('DJANGO_REPLICA_DB_PORT', ''),
#         'OPTIONS': {
#             'init_command': 'SET storage_engine=MyISAM',
#         },
#     }
#     DATABASES['read_replica'] = read_replica
#
#     # Set up database router
#     DATABASE_ROUTERS = ['core.db.routers.NonPriorityRouter',]

# Set up Sentry for error logging. Only activated when a DSN is supplied
# through the DJANGO_SENTRY_DSN environment variable.
sentry_dsn = get_from_env('DJANGO_SENTRY_DSN', fail_if_not_found=False)
if sentry_dsn is not None:
    USE_SENTRY = True

    # Set up Sentry
    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration
    sentry_sdk.init(
        environment='production',
        dsn=sentry_dsn,
        integrations=[DjangoIntegration()],
        before_send=before_send,
    )

    # Turn off default admin error emails (Sentry handles error reporting)
    LOGGING['loggers']['django.request']['handlers'] = []

# Home page stuff
INSTANCE_TITLE = 'GraceDB'

# Add sub-bullet with igwn-alert group:
group_sub_bullet = """<ul>
<li> Messages are sent to group: <span class="text-monospace"> {0} </span></li>
</ul>""".format(LVALERT_OVERSEER_INSTANCES[0]['igwn_alert_group'])
INSTANCE_LIST = INSTANCE_LIST + group_sub_bullet

# HTML blurb shown on the home page describing notification mechanisms.
INSTANCE_INFO = """
<h5>GraceDB Notifications</h5>
<hr>
<p>
GraceDB notifies registered users of Gravitational-Wave candidate detections
in real-time during LIGO/Virgo/KAGRA observation periods. Current notifications
mechanisms are:
</p>
<ul>
{}
</ul>
""".format(INSTANCE_LIST)
# Safety check: production must never run with Django debug mode enabled,
# since DEBUG=True exposes settings and stack traces to end users.
# (Idiomatic truthiness test instead of comparing to True with ==.)
if DEBUG:
    raise RuntimeError("Turn off debug mode for production")

# Hardcode pipelines not approved for production:
UNAPPROVED_PIPELINES += ['aframe', 'GWAK']
# Settings for a test/dev GraceDB instance running in a container.
# Inherits from base.py and overrides or adds to those settings.
from .base import *

# Deployment tier and display name for this configuration.
TIER = "test"
CONFIG_NAME = "TEST"

# Debug settings: debug toolbar and verbose errors are allowed here.
DEBUG = True

# Override EMBB email address
# TP (8 Aug 2017): not sure why?
EMBB_MAIL_ADDRESS = 'gracedb@{fqdn}'.format(fqdn=SERVER_FQDN)

# Add middleware (debug toolbar; profiling middleware kept commented
# out for occasional manual use).
debug_middleware = 'debug_toolbar.middleware.DebugToolbarMiddleware'
MIDDLEWARE += [
    debug_middleware,
    #'silk.middleware.SilkyMiddleware',
    #'core.middleware.profiling.ProfileMiddleware',
    #'core.middleware.admin.AdminsOnlyMiddleware',
]

# Add to installed apps
INSTALLED_APPS += [
    'debug_toolbar',
    #'silk'
]

# Add testserver to ALLOWED_HOSTS (used by Django's test client)
ALLOWED_HOSTS += ['testserver']

# Settings for django-silk profiler
SILKY_AUTHENTICATION = True
SILKY_AUTHORISATION = True
if 'silk' in INSTALLED_APPS:
    # Needed to prevent RequestDataTooBig for files > 2.5 MB
    # when silk is being used. This setting is typically used to
    # prevent DOS attacks, so should not be changed in production.
    DATA_UPLOAD_MAX_MEMORY_SIZE = 20*(1024**2)

# Tuple of IPs which are marked as internal, useful for debugging.
# Tanner (5 Dec. 2017): DON'T CHANGE THIS! Django Debug Toolbar exposes
# some headers which we want to keep hidden. So to be safe, we only allow
# it to be used through this server. You need to configure a SOCKS proxy
# on your local machine to use DJDT (see admin docs).
INTERNAL_IPS = [
    INTERNAL_IP_ADDRESS,
]

# Enforce that phone and email alerts are off XXX: Set by deployment variables!
#SEND_PHONE_ALERTS = False
#SEND_EMAIL_ALERTS = False
#SEND_MATTERMOST_ALERTS = True

# Set up Sentry for error logging. Only activated when a DSN is supplied
# through the DJANGO_SENTRY_DSN environment variable.
sentry_dsn = get_from_env('DJANGO_SENTRY_DSN', fail_if_not_found=False)
if sentry_dsn is not None:
    USE_SENTRY = True

    # Set up Sentry
    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration
    sentry_sdk.init(
        environment='test',
        dsn=sentry_dsn,
        integrations=[DjangoIntegration()],
        before_send=before_send,
    )

    # Turn off default admin error emails (Sentry handles error reporting)
    LOGGING['loggers']['django.request']['handlers'] = []

# Home page stuff
INSTANCE_TITLE = 'GraceDB Testing Server'

# Add sub-bullet with igwn-alert group:
group_sub_bullet = """<ul>
<li> Messages are sent to group: <span class="text-monospace"> {0} </span></li>
</ul>""".format(LVALERT_OVERSEER_INSTANCES[0]['igwn_alert_group'])
INSTANCE_LIST = INSTANCE_LIST + group_sub_bullet

# HTML blurb shown on the home page describing this instance.
INSTANCE_INFO = """
<h5>Testing Instance</h5>
<hr>
<p>
This GraceDB instance is designed for Quality Assurance (QA) testing and
validation for GraceDB and electromagnetic follow-up (EMFollow) developers.
Software should meet QA milestones on the test instance before being moved
to Playground or Production. Note, on this GraceDB instance:
</p>
<ul>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
</ul>
""".format(INSTANCE_LIST)
# For running a VM that is provisioned by Puppet with a secret.py file
# for secret settings
from ..base import *
# Get secret settings:
# DB_PASSWORD, SECRET_KEY, TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN
from ..secret import *
import socket

# Nested dict of settings for all databases
DATABASES = {
    'default' : {
        'NAME': 'gracedb',
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'USER': 'gracedb',
        'PASSWORD': DB_PASSWORD,
        'HOST':'127.0.0.1',
        'PORT':'5432',
        # Keep connections alive for an hour to avoid per-request setup.
        'CONN_MAX_AGE': 3600,
    },
}

# Set up allowed hosts from this host's fully-qualified domain name and
# its canonical *.ligo.org alias.
SERVER_FQDN = socket.getfqdn()
SERVER_HOSTNAME = INTERNAL_HOSTNAME
LIGO_FQDN = '{hostname}.ligo.org'.format(hostname=SERVER_HOSTNAME)
ALLOWED_HOSTS += [SERVER_FQDN, LIGO_FQDN]

# Email settings - dependent on server hostname and FQDN ----------------------
EMAIL_HOST = 'localhost'
SERVER_EMAIL = 'GraceDB <gracedb@{fqdn}>'.format(fqdn=SERVER_FQDN)
ALERT_EMAIL_FROM = SERVER_EMAIL
ALERT_EMAIL_TO = []
ALERT_EMAIL_BCC = []
ALERT_TEST_EMAIL_FROM = SERVER_EMAIL
ALERT_TEST_EMAIL_TO = []

# EMBB email settings
EMBB_MAIL_ADDRESS = 'embb@{fqdn}.ligo.org'.format(fqdn=SERVER_FQDN)
EMBB_SMTP_SERVER = 'localhost'
EMBB_MAIL_ADMINS = [admin[1] for admin in ADMINS]
EMBB_IGNORE_ADDRESSES = ['Mailer-Daemon@{fqdn}'.format(fqdn=SERVER_FQDN)]

# Load modified caching middleware. NOTE: middleware order matters;
# UpdateCacheMiddleware must come first and FetchFromCacheMiddleware last:
# https://docs.djangoproject.com/en/2.2/ref/middleware/#middleware-ordering
MIDDLEWARE = [
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.gzip.GZipMiddleware',
    'events.middleware.PerformanceMiddleware',
    'core.middleware.accept.AcceptMiddleware',
    'core.middleware.api.ClientVersionMiddleware',
    'core.middleware.api.CliExceptionMiddleware',
    'core.middleware.proxy.XForwardedForMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'user_sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'core.middleware.maintenance.MaintenanceModeMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'ligoauth.middleware.ShibbolethWebAuthMiddleware',
    'ligoauth.middleware.ControlRoomMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
]

# Set caches:
CACHE_MIDDLEWARE_SECONDS = 5
CACHES = {
    # Default cache: local memcached instance.
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
        'LOCATION': 'localhost:11211',
        'TIMEOUT': 60,
        'KEY_PREFIX': 'NULL',
        'OPTIONS': {
            # Treat memcached outages as cache misses rather than errors.
            'ignore_exc': True,
        }
    },
    # For API throttles
    'throttles': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'api_throttle_cache', # Table name
    },
}

# FIXME: hardwire this for now in the VMs for testing
ENABLE_REDIS_QUEUE = True
if ENABLE_REDIS_QUEUE:
    # For async alert follow-up:
    CACHES.update({"async_followup": {
        "BACKEND": "django_redis.cache.RedisCache",
        "LOCATION": f"redis://{REDIS_QUEUE_ADDRESS}:{REDIS_QUEUE_PORT}/{REDIS_QUEUE_DATABASE}",
        "OPTIONS": {
            "CLIENT_CLASS": "django_redis.client.DefaultClient",
        }
    }})

    # Set queue backend for async django tasks:
    # example django-redis connection
    Q_CLUSTER = {
        'name': Q_CLUSTER_NAME,
        'label': Q_CLUSTER_LABEL,
        'retry': REDIS_QUEUE_RETRY,
        'timeout': REDIS_QUEUE_TIMEOUT,
        'workers': REDIS_QUEUE_WORKERS,
        'recycle': REDIS_QUEUE_RECYCLE,
        # Reuse the redis cache defined above as the task broker.
        'django_redis': 'async_followup'
    }
# DB "cool-down" factor for when a db conflict is detected. This
# factor scales a random number of seconds between zero and one.
DB_SLEEP_FACTOR = get_from_env(
    'DJANGO_DB_SLEEP_FACTOR',
    default_value=1.0,
    fail_if_not_found=False
)

# Coerce the env value (a string) to float; fall back to the default on
# malformed or missing input. Catch only the conversion errors float()
# can raise -- the original bare `except:` also swallowed
# KeyboardInterrupt/SystemExit, which should never be suppressed here.
try:
    DB_SLEEP_FACTOR = float(DB_SLEEP_FACTOR)
except (TypeError, ValueError):
    DB_SLEEP_FACTOR = 1.0
# Show the link to beta reports on this tier.
BETA_REPORTS_LINK = True

## EGAD (External GraceDB Alert Dispatcher) configuration.
# Each channel is toggled independently via an environment variable;
# all default to disabled when the variable is absent.
ENABLE_EGAD_EMAIL = parse_envvar_bool(
    get_from_env('ENABLE_EGAD_EMAIL',
                 fail_if_not_found=False, default_value="false")
)
ENABLE_EGAD_KAFKA = parse_envvar_bool(
    get_from_env('ENABLE_EGAD_KAFKA',
                 fail_if_not_found=False, default_value="false")
)
ENABLE_EGAD_MATTERMOST = parse_envvar_bool(
    get_from_env('ENABLE_EGAD_MATTERMOST',
                 fail_if_not_found=False, default_value="false")
)
ENABLE_EGAD_PHONE = parse_envvar_bool(
    get_from_env('ENABLE_EGAD_PHONE',
                 fail_if_not_found=False, default_value="false")
)
# EGAD as a whole is on if any individual channel is on.
ENABLE_EGAD = (
    ENABLE_EGAD_EMAIL or ENABLE_EGAD_KAFKA
    or ENABLE_EGAD_MATTERMOST or ENABLE_EGAD_PHONE
)

# Pull in remaining (phone/email) alert variables from
# the environment. Default to false.
SEND_PHONE_ALERTS = parse_envvar_bool(get_from_env(
    'SEND_PHONE_ALERTS',
    default_value='False',
    fail_if_not_found=False
))
SEND_EMAIL_ALERTS = parse_envvar_bool(get_from_env(
    'SEND_EMAIL_ALERTS',
    default_value='False',
    fail_if_not_found=False
))
SEND_MATTERMOST_ALERTS = parse_envvar_bool(get_from_env(
    'SEND_MATTERMOST_ALERTS',
    default_value='False',
    fail_if_not_found=False
))

# HTML template for the home-page bullet list of alert statuses; filled
# in below with enabled/disabled text for each alert mechanism.
INSTANCE_STUB = """
<li>Phone alerts (calls/SMS) are {0}</li>
<li>Email alerts are {1}</li>
<li><span class="text-monospace">igwn-alert</span> messages to <span class="text-monospace">{2}</span> are {3}</li>
"""
INSTANCE_LIST = INSTANCE_STUB.format(ENABLED[SEND_PHONE_ALERTS],
                                     ENABLED[SEND_EMAIL_ALERTS],
                                     LVALERT_OVERSEER_INSTANCES[0]['lvalert_server'],
                                     ENABLED[SEND_XMPP_ALERTS])

# When a second overseer instance is configured, list its server too.
if (len(LVALERT_OVERSEER_INSTANCES) == 2):
    IGWN_STUB = '<li><span class="text-monospace">igwn-alert</span> messages to <span class="text-monospace">{0}</span> are {1}</li>'
    IGWN_LIST = IGWN_STUB.format(LVALERT_OVERSEER_INSTANCES[1]['lvalert_server'],
                                 ENABLED[SEND_XMPP_ALERTS])
    INSTANCE_LIST = INSTANCE_LIST + IGWN_LIST

# Set SciToken accepted audience to server FQDN
SCITOKEN_AUDIENCE = ["https://" + SERVER_FQDN, "https://" + LIGO_FQDN]
# Settings for a test/dev GraceDB instance running on a VM with Puppet
# provisioning. Starts with vm.py settings (which inherits from base.py
# settings) and overrides or adds to them.
import socket
from .base import *

# Deployment tier and display name for this configuration.
TIER = "dev"
CONFIG_NAME = "DEV"

# Debug settings: debug toolbar and verbose errors are allowed here.
DEBUG = True
SEND_XMPP_ALERTS=True
SEND_MATTERMOST_ALERTS=True

# Override EMBB email address
# TP (8 Aug 2017): not sure why?
EMBB_MAIL_ADDRESS = 'gracedb@{fqdn}'.format(fqdn=SERVER_FQDN)

# Add middleware (debug toolbar; profiling middleware kept commented
# out for occasional manual use).
debug_middleware = 'debug_toolbar.middleware.DebugToolbarMiddleware'
MIDDLEWARE += [
    debug_middleware,
    #'silk.middleware.SilkyMiddleware',
    #'core.middleware.profiling.ProfileMiddleware',
    #'core.middleware.admin.AdminsOnlyMiddleware',
]

# Add to installed apps
INSTALLED_APPS += [
    'debug_toolbar',
]

# Add testserver to ALLOWED_HOSTS (used by Django's test client)
ALLOWED_HOSTS += ['testserver']

# Settings for django-silk profiler
SILKY_AUTHENTICATION = True
SILKY_AUTHORISATION = True
if 'silk' in INSTALLED_APPS:
    # Needed to prevent RequestDataTooBig for files > 2.5 MB
    # when silk is being used. This setting is typically used to
    # prevent DOS attacks, so should not be changed in production.
    DATA_UPLOAD_MAX_MEMORY_SIZE = 20*(1024**2)

# Tuple of IPs which are marked as internal, useful for debugging.
# Tanner (5 Dec. 2017): DON'T CHANGE THIS! Django Debug Toolbar exposes
# some headers which we want to keep hidden. So to be safe, we only allow
# it to be used through this server. You need to configure a SOCKS proxy
# on your local machine to use DJDT (see admin docs).
INTERNAL_IPS = [
    INTERNAL_IP_ADDRESS,
]

# Home page title for this instance.
INSTANCE_TITLE = 'GraceDB Development VM'

# Add sub-bullet with igwn-alert group:
if (len(LVALERT_OVERSEER_INSTANCES) == 2):
    igwn_alert_group = os.environ.get('IGWN_ALERT_GROUP', 'lvalert-dev')
    group_sub_bullet = """<ul>
<li> Messages are sent to group: <span class="text-monospace"> {0} </span></li>
</ul>""".format(igwn_alert_group)
    INSTANCE_LIST = INSTANCE_LIST + group_sub_bullet

# HTML blurb shown on the home page describing this instance.
INSTANCE_INFO = """
<h5>Development Instance</h5>
<hr>
<p>
This GraceDB instance is designed for GraceDB maintainers to develop and
test in the AWS cloud architecture. There is <b>no guarantee</b> that the
behavior of this instance will mimic the production system at any time.
Events and associated data may change or be removed at any time.
</p>
<ul>
{}
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
</ul>
""".format(INSTANCE_LIST)

# Turn off public page caching for development and testing:
PUBLIC_PAGE_CACHING = 0

# Hardcode pipelines not approved for production (for vm testing)
# UNAPPROVED_PIPELINES += ['aframe', 'GWAK']
# Settings for a playground GraceDB instance (for user testing) running
# on a VM with Puppet provisioning. Starts with vm.py settings (which inherits
# from base.py settings) and overrides or adds to them.
from .base import *

# Deployment tier and display name for this configuration.
TIER = "playground"
CONFIG_NAME = "USER TESTING"

# Debug settings: playground mimics production, so debug stays off
# (enforced again by the safety check at the bottom of this module).
DEBUG = False

# Override EMBB email address
# TP (8 Aug 2017): not sure why?
EMBB_MAIL_ADDRESS = 'gracedb@{fqdn}'.format(fqdn=SERVER_FQDN)

# Turn on XMPP alerts
SEND_XMPP_ALERTS = True

# Turn on Mattermost alerts
SEND_MATTERMOST_ALERTS = True

# Enforce that phone and email alerts are off
SEND_PHONE_ALERTS = False
SEND_EMAIL_ALERTS = False

# Define correct LVAlert settings: playground talks to its own
# dedicated LVAlert server rather than the production one.
LVALERT_OVERSEER_INSTANCES = [
    {
        "lvalert_server": "lvalert-playground.cgca.uwm.edu",
        "listen_port": 8001,
    },
]

# Add testserver to ALLOWED_HOSTS (used by Django's test client)
ALLOWED_HOSTS += ['testserver']

# Home page stuff
INSTANCE_TITLE = 'GraceDB Playground'

# HTML blurb shown on the home page describing this instance.
INSTANCE_INFO = """
<h3>Playground instance</h3>
<p>
This GraceDB instance is designed for users to develop and test their own
applications. It mimics the production instance in all but the following ways:
</p>
<ul>
<li>Phone and e-mail alerts are turned off.</li>
<li>Only LIGO logins are provided (no login via InCommon or Google).</li>
<li>LVAlert messages are sent to lvalert-playground.cgca.uwm.edu.</li>
<li>Events and associated data will <b>not</b> be preserved indefinitely.
A nightly cron job removes events older than 21 days.</li>
</ul>
"""
# Safety check: playground must never run with Django debug mode enabled,
# since DEBUG=True exposes settings and stack traces to end users.
# (Idiomatic truthiness test instead of comparing to True with ==.)
if DEBUG:
    raise RuntimeError("Turn off debug mode for playground")
# Settings for a production GraceDB instance running on a VM with Puppet
# provisioning. Starts with vm.py settings (which inherits from base.py
# settings) and overrides or adds to them.
from .base import *

# Deployment tier for this configuration.
TIER = "production"

# Debug must stay off in production (enforced by the safety check below).
DEBUG = False

# LVAlert Overseer settings: production talks to the main LVAlert server.
LVALERT_OVERSEER_INSTANCES = [
    {
        "lvalert_server": "lvalert.cgca.uwm.edu",
        "listen_port": 8000,
    },
]

# Turn on alerts: all alert mechanisms are live on production.
SEND_XMPP_ALERTS = True
SEND_PHONE_ALERTS = True
SEND_EMAIL_ALERTS = True
SEND_MATTERMOST_ALERTS = True
# Safety check: production must never run with Django debug mode enabled,
# since DEBUG=True exposes settings and stack traces to end users.
# (Idiomatic truthiness test instead of comparing to True with ==.)
if DEBUG:
    raise RuntimeError("Turn off debug mode for production")
# Top-level URL configuration for the GraceDB server. Route order matters:
# Django matches patterns top-to-bottom, and the catch-all user_sessions
# include must stay last.
from django.conf import settings
from django.urls import re_path, include
from django.contrib import admin
from django.contrib.auth.views import LogoutView
from django.views.generic import TemplateView

# Import feeds
import core.views
from events.feeds import EventFeed, feedview
import events.reports
import events.views
from ligoauth.views import (
    manage_password, ShibLoginView, ShibPostLoginView
)
import search.views

# Django admin auto-discover
admin.autodiscover()

# Mapping of feed slugs to feed classes (used by the feeds URLs below).
feeds = {
    'latest' : EventFeed
}

urlpatterns = [
    re_path(r'^$', events.views.index, name="home"),
    re_path(r'^navbar_only$', TemplateView.as_view(
        template_name='navbar_only.html'), name="navbar-only"),
    re_path(r'^SPInfo', TemplateView.as_view(template_name='gracedb/spinfo.html'),
        name="spinfo"),
    re_path(r'^SPPrivacy', TemplateView.as_view(
        template_name='gracedb/spprivacy.html'), name="spprivacy"),
    re_path(r'^DiscoveryService', TemplateView.as_view(
        template_name='discovery.html'), name="discovery"),
    re_path(r'^events/', include('events.urls')),
    re_path(r'^superevents/', include('superevents.urls')),
    re_path(r'^alerts/', include('alerts.urls')),
    re_path(r'^feeds/(?P<url>.*)/$', EventFeed()),
    re_path(r'^feeds/$', feedview, name="feeds"),
    re_path(r'^other/$', TemplateView.as_view(template_name='other.html'),
        name='other'),
    re_path(r'^performance/$', events.views.performance, name="performance"),
    re_path(r'^reports/$', events.reports.reports_page_context, name="reports"),
    re_path(r'^latest/$', search.views.latest, name="latest"),
    #(r'^reports/(?P<path>.+)$', 'django.views.static.serve',
    #    {'document_root': settings.LATENCY_REPORT_DEST_DIR}),
    re_path(r'^search/$', search.views.search, name="mainsearch"),
    # Authentication
    re_path(r'^login/$', ShibLoginView.as_view(), name='login'),
    re_path(r'^post-login/$', ShibPostLoginView.as_view(), name='post-login'),
    re_path(r'^logout/$', LogoutView.as_view(), name='logout'),
    # Password management
    re_path('^manage-password/$', manage_password, name='manage-password'),
    # API URLs
    re_path(r'^api/', include('api.urls')),
    # Legacy API URLs - must be maintained!
    re_path(r'^apibasic/', include('api.urls', namespace='legacy_apibasic')),
    re_path(r'^apiweb/', include('api.urls', namespace='legacy_apiweb')),
    # Heartbeat URL
    re_path(r'^heartbeat/$', core.views.heartbeat, name='heartbeat'),
    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    # (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    re_path(r'^admin/', admin.site.urls),
    # Sessions
    re_path(r'^', include('user_sessions.urls', 'user_sessions')),
]

# We don't require settings.DEBUG for django-silk since running unit tests
# by default sets settings.DEBUG to False, unless you use the
# --debug-mode flag
if ('silk' in settings.INSTALLED_APPS):
    # Add django-silk (prepended so it matches before the catch-all)
    urlpatterns = [
        re_path(r'^silk/', include('silk.urls', namespace='silk'))
    ] + urlpatterns

# Add django-debug-toolbar (prepended so it matches before the catch-all)
if settings.DEBUG and 'debug_toolbar' in settings.INSTALLED_APPS:
    import debug_toolbar
    urlpatterns = [
        re_path(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
# WSGI entry point for the GraceDB server. Statement order matters here:
# sys.path and DJANGO_SETTINGS_MODULE must be configured before the
# Django application object is created at the bottom.
import os
import sys
from os.path import abspath, dirname, join

# Parameters
DEFAULT_SETTINGS_MODULE = 'config.settings.vm.dev'
PROJECT_ROOT_NAME = 'gracedb'

# Set up base dir of repository (parent of the directory holding this file)
BASE_DIR = abspath(join(dirname(__file__), ".."))

# Add the source code directory and project root
sys.path.append(BASE_DIR)
sys.path.append(join(BASE_DIR, PROJECT_ROOT_NAME))

# Set DJANGO_SETTINGS_MODULE environment variable if it's not already set
os.environ.setdefault('DJANGO_SETTINGS_MODULE', DEFAULT_SETTINGS_MODULE)

# Matplotlib config directory (must be writable by the server process)
os.environ['MPLCONFIGDIR'] = '/tmp/'

# Create the WSGI application only after paths and settings are configured.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
File deleted
doc/source/images/annotations_flow_white.png

15.1 KiB

=====================================
Integration with LVAlert
=====================================
Introduction
===============================================
GraceDB uses `LVAlert
<https://www.lsc-group.phys.uwm.edu/daswg/projects/lvalert.html>`__ to send
alerts to listeners within the LVC. The content of the LVAlert message is
designed to convey actionable information about a state change in GraceDB,
whether it involves the creation of a new event, or the updating or labeling of
an existing one.
.. NOTE::
An LVAlert message is sent out for *any* new event or annotation that arrives in the
GraceDB database. This means that
message volumes may be very high under certain circumstances, and
appropriate filtering is required in order for LVAlert to be useful.
Listening to specific event streams
==============================================
By running ``lvalert_listen``, you will receive messages over all **nodes** to
which you are subscribed. The node names consist of at least two elements::
<group_name>_<pipeline_name>
In other words, the (lower-cased) names of the Group and Pipeline separated by an
underscore. For example, the node ``burst_cwb`` would catch all messages relating to
events in the Burst group from the cWB pipeline. One can also specify the search name::
<group_name>_<pipeline_name>_<search_name>
which has the effect of narrowing down the messages to only those related to a specific
search. For example, the node ``burst_cwb_allsky`` will contain messages relating to the
AllSky search, but not the MDC search. GraceDB tries to send a message to all applicable
nodes. Thus, a message sent to the node ``burst_cwb_allsky`` will *also* be sent to the
node ``burst_cwb``. This property allows the user to filter according to search by
specifying different LVAlert processing scripts for different nodes.
To see the names of all available nodes, simply execute::
lvalert_admin -a username -b password -i
LVAlert message contents
================================================
GraceDB sends messages as a JSON-encoded dictionary. The dictionary contains the
following keys:
- ``uid``: the unique ID (a.k.a. ``graceid``) of the relevant event
- ``alert_type``: ``new``, ``update``, or ``label``
- ``description``: a text description (if applicable)
- ``file``: a URL for the relevant file (if applicable)
- ``object``: a dictionary representing the relevant object
For example, when a new event is created, an LVAlert message is created
with alert type ``new``, and the ``object`` is just the JSON representation of
the event provided by the REST interface (see :ref:`searching_for_events`).
Below are examples of the possible types of LVAlert message. These were generated
by creating a new event, adding a couple of log messages, creating a Preliminary
VOEvent, and applying the DQV label (in that order).
New event::
{
"alert_type": "new",
"description": "",
"file": "https://gracedb.ligo.org/events/T129911/files/coinc.xml",
"object": {
"created": "2015-06-17 22:10:43 UTC",
"extra_attributes": {
"CoincInspiral": {
"combined_far": 3.772326334623149e-14,
"end_time": 968929613,
"end_time_ns": 817383681,
"false_alarm_rate": 3.654963804145501e-08,
"ifos": "H1,L1",
"mass": 2.621732950210571,
"mchirp": 1.139938473701477,
"minimum_duration": null,
"snr": 19.73621083881572
}
},
"far": 3.772326334623149e-14,
"gpstime": 968929613.8173836,
"graceid": "T129911",
"group": "Test",
"instruments": "H1,L1",
"labels": {},
"likelihood": 8.33784842725385e+44,
"links": {
"embb": "https://gracedb.ligo.org/api/events/T129911/embb/",
"filemeta": "https://gracedb.ligo.org/api/events/T129911/filemeta/",
"files": "https://gracedb.ligo.org/api/events/T129911/files/",
"labels": "https://gracedb.ligo.org/api/events/T129911/labels/",
"log": "https://gracedb.ligo.org/api/events/T129911/log/",
"neighbors": "https://gracedb.ligo.org/api/events/T129911/neighbors/",
"self": "https://gracedb.ligo.org/api/events/T129911",
"tags": "https://gracedb.ligo.org/api/events/T129911/tag/"
},
"nevents": 2,
"pipeline": "gstlal",
"search": "MDC",
"submitter": "branson.stephens@LIGO.ORG"
},
"uid": "T129911"
}
Log message without file::
{
"alert_type": "update",
"description": "LOG: This is a test.",
"file": "",
"object": {
"N": 4,
"comment": "This is a test.",
"created": "2015-06-17T17:10:43.381117",
"file": null,
"file_version": null,
"filename": "",
"issuer": {
"display_name": "Branson Stephens",
"username": "branson.stephens@LIGO.ORG"
},
"self": "https://gracedb.ligo.org/api/events/T129911/log/4",
"tag_names": [
"analyst_comments"
],
"tags": "https://gracedb.ligo.org/api/events/T129911/log/4/tag/"
},
"uid": "T129911"
}
Log message with a file::
{
"alert_type": "update",
"description": "UPLOAD: bayestar.fits This is a file.",
"file": "bayestar.fits",
"object": {
"N": 6,
"comment": "This is a file.",
"created": "2015-06-17T17:10:43.980188",
"file": "https://gracedb.ligo.org/api/events/T129911/files/bayestar.fits%2C0",
"file_version": 0,
"filename": "bayestar.fits",
"issuer": {
"display_name": "Branson Stephens",
"username": "branson.stephens@LIGO.ORG"
},
"self": "https://gracedb.ligo.org/api/events/T129911/log/6",
"tag_names": [
"sky_loc"
],
"tags": "https://gracedb.ligo.org/api/events/T129911/log/6/tag/"
},
"uid": "T129911"
}
New VOEvent created::
{
"alert_type": "update",
"description": "VOEVENT: T129911-1-Preliminary.xml",
"file": "T129911-1-Preliminary.xml",
"object": {
"N": 1,
"created": "2015-06-17T17:10:44.172876",
"file": "https://gracedb.ligo.org/api/events/T129911/files/T129911-1-Preliminary.xml%2C0",
"file_version": 0,
"filename": "T129911-1-Preliminary.xml",
"issuer": {
"display_name": "Branson Stephens",
"username": "branson.stephens@LIGO.ORG"
},
"ivorn": "ivo://gwnet/gcn_sender#T129911-1-Preliminary",
"self": "https://gracedb.ligo.org/api/events/T129911/voevent/1",
"text": "<voevent text>",
"voevent_type": "PR"
},
"uid": "T129911"
}
New DQV label applied::
{
"alert_type": "label",
"description": "DQV",
"file": "",
"uid": "T129911"
}
.. XXX what kind of alert does a replacement trigger?
Receiving and Parsing LVAlert messages
====================================================
The LVAlert client tools include the ``lvalert_listen`` executable, which can be used to
receive and respond to LVAlert messages::
lvalert_listen -a username -b password -c /path/to/lvalert_config.ini
The ``-c`` (configuration file) option allows you to specify an executable script to be called
each time a message arrives over a particular node. Suppose you are only interested in
events from the burst group, cWB pipeline, and MDC search. Then the
``lvalert_config.ini`` file could look like this::
[burst_cwb_mdc]
executable = /path/to/mdc_event_handler
And the script ``mdc_event_handler`` could be any script that is prepared to receive the
LVAlert message contents through standard input. Here is an example in Python::
#!/usr/bin/env python
import json
from sys import stdin
# Load the LVAlert message contents into a dictionary
streamdata = json.loads(stdin.read())
# Do something with new events having FAR below threshold
alert_type = streamdata['alert_type']
if alert_type == 'new':
# The object is a serialized event. Get the FAR
far = streamdata['object']['far']
if far < 1.e-6:
# Do some interesting processing
pass
Further reading on LVAlert
=====================================================
Further information on using LVAlert can be found on the
`LVAlert Project Page <https://www.lsc-group.phys.uwm.edu/daswg/projects/lvalert.html>`__
and the `LVAlert Howto <https://www.lsc-group.phys.uwm.edu/daswg/docs/howto/lvalert-howto.html>`__.
=============================
Features for EM Collaboration
=============================
On logging in
=============
A successful login is required in order to access GraceDB events and upload
followup information. The login process is the same as for the
`LV-EM wiki <https://gw-astronomy.org/wiki/LV_EM/WebHome>`__: namely,
click "LOGIN" at the upper right and then choose the login method
according to the identity you used for registering for LV-EM membership at
`gw-astronomy.org <https://gw-astronomy.org>`__.
.. NOTE::
Some users may have multiple identities available from the identity providers listed
on the login page. However, only the identity used to register for LV-EM
will work for GraceDB access. For example, even though I have identities from
LIGO, UW-Milwaukee, and Google, only my LIGO login will work for GraceDB since that
is the one I used to register for LV-EM membership. The reason is that
there is no way (at present) to map these different identities to the same
underlying user.
.. _basic_auth_for_lvem:
Scripted access for LV-EM members
============================================
Some processes need to access GraceDB in a *scripted* manner. For example,
an observational group might set up an automated process to listen for GCN
notices for new GW events and download the skymaps for further processing
(see the `tutorial <http://nbviewer.ipython.org/github/lpsinger/ligo-virgo-emfollowup-tutorial/blob/master/ligo-virgo-emfollowup-tutorial.ipynb>`__).
As these alerts could come at any time of the day or night, it is not
generally possible for the user to go through the usual login sequence. Traditionally,
GraceDB has handled scripted access with X509 robot certificates or
robot Kerberos keytabs, but these may not be easily accessible to all
LV-EM group members.
Thus, there is an alternative using basic auth (a simple username-and-password
scheme). First, obtain a robotic
access password by navigating to `this page <https://gracedb.ligo.org/options/manage_password>`__
and clicking "Get me a password!" (or by clicking "OPTIONS" on the navigation
menu and then "Password Manager.") Each time you click the button, you
will get a new basic auth password, and the old one will be lost. (Note
that these passwords only last for 1 year.) The password is a 20 character
random sequence.
.. NOTE::
This robotic password does not affect the way in which you login to
the GraceDB web interface. It is only for use with the REST interface
as described in the examples below. You will need to continue logging
into the web interface using the identity with which you registered for
LV-EM membership.
Once you've obtained a robotic password, the best way to use it is to create
a ``.netrc`` file containing your username and password (with permissions ``0600``
to make sure that only you can read it). The ``.netrc`` file could look like this::
machine gracedb.ligo.org
login myself@institution.edu
password abc123.....
Once that's done, you should be able to access the GraceDB REST API
using any tool that supports basic auth.
For example, you can use the GraceDB Python client in much the same
way as described in :ref:`rest_client_basic_usage`, except that the
client class is specially formulated for basic auth::
from ligo.gracedb.rest import GraceDbBasic, HTTPError
service_url = 'https://gracedb.ligo.org/apibasic/'
client = GraceDbBasic(service_url)
try:
r = client.ping()
except HTTPError, e:
print e.message
print "Response code: %d" % r.status
print "Response content: %s" % r.json()
The only real difference is that the ``GraceDbBasic`` client class is used instead
of the ``GraceDb`` class (which assumes that X509 credentials are available).
If you're not comfortable using Python for scripted access to GraceDB, it is
also possible to use ``curl`` to directly make requests to the server with the
same basic auth credentials. Some examples of using curl are available
`here <https://gw-astronomy.org/wiki/LV_EM/TechInfo>`__.
Downloading a skymap
======================
The GraceDB Python client can be used to download
files from GraceDB or add comments, plots, or observation records (see
the next section). Here, we'll
show an example of downloading a skymap. Suppose we know that a particular
GraceDB event (``T125738``) has a skymap file called ``bayestar.fits.gz``.
This file can be retrieved in the following way::
from ligo.gracedb.rest import GraceDbBasic
grace_id = 'T125738' # identifier for the event
filename = 'bayestar.fits.gz' # filename of desired skymap
# Prepend with grace_id for output filename
out_filename = grace_id + '_' + filename
# Instantiate the GraceDB client
service_url = 'https://gracedb.ligo.org/apibasic/'
client = GraceDbBasic(service_url)
# Grab the file from the server and write it
out_file = open(out_filename, "w")
r = client.files(grace_id, filename)
out_file.write(r.read())
out_file.close()
.. _create_emobservation:
Reporting coordinates of followup observations
===============================================
In the following example, the GraceDB Python client is used to create an
observation record consisting of three separate footprints::
# Define the parameters of the observation to be reported
grace_id = 'M158044' # the event's UID
group = 'CRTS' # the MOU group
comment = 'hello my friend' # free text comment
raList = [123.0,124.0,125.0] # RAs of centers (degrees)
decList = [10.0,11.0,13.0] # Dec of centers (degrees)
startTimeList = [ # beginnings of exposures (UTC)
'2015-05-31T12:45:00',
'2015-05-31T12:49:00',
'2015-05-31T12:53:00']
raWidthList = 10.0 # list (or one for all) of widths in RA (degrees)
decWidthList = 10.0 # list (or one for all) of widths in Dec (degrees)
durationList = 20.0 # list (or one for all) of exposure times in sec
# Instantiate the GraceDB client
client = GraceDbBasic()
# Write the EMObservation record to GraceDB
r = client.writeEMObservation(grace_id, group, raList, raWidthList,
decList, decWidthList, startTimeList, durationList, comment)
if r.status == 201: # 201 means 'Created'
print 'Success!'
For users not familiar with Python, there are several other options available for
uploading observation records:
- by using the webform on each event page (scroll down to the 'EM Observations'
section and click on 'add observation record'). However, this method requires
by-hand data entry.
- by ``curl``-ing directly against the EM observation
resource in the API (`example <https://gw-astronomy.org/wiki/LV_EM/CurlUploadFootprints>`__)
- by coding against the GraceDB REST API
in one's own favorite language. If you choose to go this route, please
consider sending us your script or posting it in the LV-EM wiki Technical Info
page for the benefit of other users. See :ref:`coding_against_api`.
- by email (not yet available, but in the works)
If you discover a mistake in your observation record, the best way to correct
it is to submit a new observation record with corrected values and request that
the old one be deleted. Please send an email to uwm-help@cgca.uwm.edu with
something like "delete GraceDB EMObservation" in the subject line. Tell us
which entry you'd like deleted, and we'll take care of it. In the future, we
are hoping to make these observation records editable by the submitter.
For more on the GraceDB event page and creating EM observation records, see
`this <https://www.youtube.com/watch?v=oIJE4dTISs4>`__ helpful video
by Roy Williams. There is a companion video on the SkymapViewer
`here <https://www.youtube.com/watch?v=ydXUD9KIN98>`__.
==========================
Data models
==========================
What characterizes an event?
=====================================
The different types of events in GraceDB are distinguished by the following parameters:
- ``Group``: the working group responsible for finding the candidate
- values: ``CBC``, ``Burst``, ``External``, ``Test``
- ``Pipeline``: the data analysis software tool used to make the detection
- values: ``MBTAOnline``, ``CWB``, ``gstlal``, ``gstlal-spiir``, ``HardwareInjection``, ``Fermi``, ``Swift``, ``SNEWS``, ``LIB``
- ``Search``: the search activity which led to the detection
- values: ``AllSky``, ``LowMass``, ``HighMass``, ``GRB``, ``Supernova``, ``MDC``
An individual "event stream" is specified by setting the values of these three
parameters. For example, choosing ``Group=CBC, Pipeline=gstlal, and Search=LowMass``
selects the event stream consisting of low-mass inspiral events
detected by the gstlal pipeline from the CBC group. This framework was chosen
in order to avoid situations where events from different sources would overlap in searches
and alerts.
Base event model
====================================
In addition to the three parameters described above, there are additional
common attributes for all events. These are
- ``submitter``: the user who submitted the event
- ``created``: the time at which the event was created
- ``instruments``: the interferometers involved in the detection
- ``far``: the false alarm rate in Hz
- ``gpstime``: the time at which the event occurred (a.k.a. "Event time")
The base event class was created with GW events in mind, so not all of the fields
will be applicable for any given event. (For example, ``instruments`` and ``far``
do not apply to a Swift GRB event.)
Event subclasses
====================================
Most events also have pipeline-specific attributes, and these are reflected in event
subclasses. For example, the ``gstlal`` pipeline produces an estimate for the chirp
mass, which is represented in the ``CoincInspiral`` event subclass. The following table
shows the different subclasses with selected attributes:
.. raw:: html
<div id="subclasses_table"></div>
.. _annotation_models:
Annotations
=======================
*Annotations* are pieces of information about an event that are added
after the event is created. They are often the results of followup processes,
but are sometimes also provided by the same data analysis pipeline that
initially generated the event. The most common type of annotation is an *event
log message* with the following fields:
- ``submitter``: the user who created the log message
- ``created``: the time at which the log message was created
- ``filename``: the name of the attached file (if applicable)
- ``file_version``: the specific version of the file for this message
- ``comment``: the log message text
If the uploaded file is an image, it is displayed along with the comment in the
GraceDB event page. Log messages can also be *tagged* in order to give other
users an idea of the thematic category to which the message belongs. Users can
invent arbitrary tags, but the following set have a special status, as they
affect the display of information in the event page (i.e., they are
*"blessed"*):
- ``analyst_comments``: Analyst Comments
- ``em_follow``: EM Followup
- ``psd``: Noise Curves
- ``data_quality``: Data Quality
- ``sky_loc``: Sky Localization
- ``background``: Background Information
- ``ext_coinc``: External Coincidence
- ``strain``: Strain Data
- ``tfplots``: Time-Frequency Info
- ``pe``: Parameter Estimation
- ``sig_info``: Significance Info
- ``audio``: Sound Files
Other types of annotations are labels, VOEvent objects, and EM observation records.
.. _rest_interface:
==========================
Using the REST interface
==========================
.. _installing_the_client:
Installing the client
====================================
The GraceDB client tools should already be installed at the LVC computing clusters.
However, if you want to interact with GraceDB from your own machine, you will need
to install the client tools yourself. The easiest way is to use ``pip`` to install
it from the `Python Package Index <https://pypi.python.org/pypi>`__::
pip install ligo-gracedb
(See `here <https://pip.pypa.io/en/latest/installing.html>`__ for instructions on
installing ``pip`` if it is not already available on your machine.) Additionally,
packages for Debian (``.deb``) and Scientific Linux (``.rpm``) are available by
pointing your system to the appropriate repositories as described
`here <https://www.lsc-group.phys.uwm.edu/daswg/download/repositories.html>`__.
Then the client tools can be installed via::
apt-get install python-ligo-gracedb
or ::
yum install ligo-gracedb
Alternatively, the client can be built from source. Please note, however, that
the code at the head of the git repository may be volatile or unstable. The
repository may be cloned using LIGO credentials as follows::
git clone albert.einstein@ligo-vcs.phys.uwm.edu:/usr/local/git/gracedb-client.git
.. _rest_client_basic_usage:
Basic usage of the REST client
====================================
The documentation in this section will focus on the use of the Python REST client. The
alternative command-line client, ``gracedb`` is a simple wrapper on the REST client
provided for convenience (see below, :ref:`command_line_client`) and is not as
fully featured.
.. NOTE::
Before using the REST client, credentials for authentication must be available.
Run ligo-proxy-init or, if using a robot certificate, set the environment
variables ``X509_USER_CERT`` and ``X509_USER_KEY`` (for more information, see :ref:`auth`).
.. XXX Probably would be good to actually show people how to set these environment variables.
The REST client is typically used in a script or in the Python interpreter to
accomplish a specific task, such as creating a new event or retrieving information
about events matching a query. The workflow involves importing the client class,
instantiating the client, and then calling the desired method::
from ligo.gracedb.rest import GraceDb, HTTPError
client = GraceDb()
try:
r = client.ping()
except HTTPError, e:
print e.message
print "Response code: %d" % r.status
print "Response content: %s" % r.json()
In the above example, we merely ping the GraceDB server and examine the
response. If there is an error (such as an authentication failure), the
``HTTPError`` exception will be thrown. If not, the response object contains a
status code and a response body in JSON. The ``json`` method on the response
object simply decodes the JSON content. In this particular case, the response
code should be 200 (meaning "OK") and the body contains a large dictionary
of information representing the `API Root resource <https://gracedb.ligo.org/apiweb/>`__.
Most of the examples below will ignore the error handling shown here for the
sake of brevity.
In addition to ``ping``, the most important client methods are:
- ``events`` for accessing a list of events,
- ``files`` for downloading a file or list of files, and ``writeFile`` for uploading,
- ``logs`` for obtaining a list of log entries, and ``writeLog`` to create a new one,
- ``emobservations`` for obtaining a list of EM followup observations, and
``writeEMObservation`` to create a new one,
- ``labels``, ``writeLabel``, and ``removeLabel`` for managing labels,
- ``tags``, ``createTag``, and ``deleteTag`` for managing tags.
Docstrings are available for most of the client methods. To see them, type
``help(client.ping)`` (for example) in the Python interpreter.
.. _searching_for_events:
Searching for events
===================================
Suppose you are working on a script to search for all events matching a
specific query and then to retrieve a piece of information about each event in
the results. For example, the following code retrieves the chirp
mass for each ``gstlal`` event during ER5 with FAR less than 1.0E-4::
from ligo.gracedb.rest import GraceDb
client = GraceDb()
# Retrieve an iterator for events matching a query.
events = client.events('gstlal ER5 far < 1.0e-4')
# For each event in the search results, add the graceid
# and chirp mass to a dictionary.
results = {}
for event in events:
graceid = event['graceid']
mchirp = event['extra_attributes']['CoincInspiral']['mchirp']
results.update({ graceid: mchirp})
Note that the ``events`` method on the client returns an *iterator* on event
dictionaries rather than a list. The chirp mass is an attribute specific to the
inspiral event subclass, hence the different ways of accessing the ``graceid``
and the chirp mass.
But how did I know the structure of the event dictionary so that I could pull
out the chirp mass? The best way is to look at the structure of an example
event in the *browseable* REST API. Here are some example events from the different
subclasses to demonstrate the structure of the event dictionaries.
- `Test gstlal MDC <https://gracedb.ligo.org/apiweb/events/T125738>`__ (a CBC event)
- `Test cWB MDC <https://gracedb.ligo.org/apiweb/events/T153811>`__ (a Burst event)
- `External Swift GRB <https://gracedb.ligo.org/apiweb/events/E160846>`__ (a GRB event)
Creating new events
====================================
To create a new event, use the ``createEvent`` method on the client. In this
example, a new ``gstlal`` event is created in the ``Test`` group from a
file on disk::
from ligo.gracedb.rest import GraceDb
client = GraceDb()
event_file = "/path/to/coinc.xml"
r = client.createEvent("Test","gstlal", event_file, search="LowMass")
event_dict = r.json()
graceid = event_dict["graceid"]
The server response includes a JSON representation of the event, and the
event dictionary can thus be obtained as shown. In this example, the event
dictionary is used to get the ``graceid`` of the new event.
.. NOTE::
In order to create events in a group other than ``Test``, the user must
be explicitly authorized. Thus, events in the ``CBC`` or ``Burst`` groups,
for example, can only be created by authorized users. For more information
on authorization, see :ref:`auth`.
Now suppose that a subsequent analysis has updated the values in the original
``coinc.xml`` file. We can *replace* the original event since we know the
``graceid``::
new_event_file = "/path/to/new_coinc.xml"
r = client.replaceEvent(graceid, new_event_file)
This has the effect of updating the values of the various database fields, but
the original version of the event file is kept and can be found in the full
event log.
Annotating events
======================================
As discussed in the :ref:`annotation_models` section, the term *annotation* refers to pieces of
information added to an event after the time of its creation. Most commonly,
these take the form of event log messages or electromagnetic observation
records (see :ref:`create_emobservation`). The following demonstrates how to
add a log message to an existing event::
from ligo.gracedb.rest import GraceDb
client = GraceDb()
graceid = 'T160779'
message = 'This is a test of the emergency commenting system.'
filename = '/path/to/my_plot.png'
r = client.writeLog(graceid, message, filename, tagname='analyst_comments')
In this example, a plot is uploaded to the log messages of event
``T160779`` with a text comment. The new log message is also tagged with
the ``analyst_comments`` tag, which displays the plot and comment in a
special section for comments from (human) data analysts. The tag name
can actually be anything the user wants, but only a limited list of tags
are *blessed* in the sense that they affect the display of information.
For a reminder of which tags are blessed see :ref:`annotation_models`.
Tags can also be added and removed from existing annotations. See the
docstrings for the client methods ``createTag`` and ``deleteTag``.
Applying labels
====================================
To add a label to an event, the ``graceid`` of the event and the name
of the label must be known::
from ligo.gracedb.rest import GraceDb
client = GraceDb()
graceid = 'T160779'
label_name = 'DQV' # meaning data quality veto
r = client.writeLabel(graceid, label_name)
Care should be taken when applying labels to non-test events, since this
affects the sending of alerts related to potential electromagnetic followup.
.. _command_line_client:
Using the command-line client
=====================================
The GraceDB command-line client, ``gracedb``, is essentially a thin
wrapper on the Python client. The examples above could be repeated
with the command-line client as follows (assuming ``bash``)::
# Create the new event and store the uid
GRACEID="$(gracedb Test gstlal LowMass /path/to/coinc.xml)"
# Replace the event
gracedb replace $GRACEID /path/to/new_coinc.xml
# Annotate an event with a plot
COMMENT="This is a test of the emergency commenting system."
gracedb --tag-name=analyst_comments upload T160779 /path/to/my_plot.png $COMMENT
# Label an event
gracedb label T160779 DQV
Type ``gracedb -h`` for detailed help with the command-line client.
.. _coding_against_api:
Coding against the GraceDB REST API
=======================================================
Some users may wish to code directly against the GraceDB REST API rather
than use the Python or command-line clients. In order to do this, the user
will need to know which resources are exposed by which URLs, and which HTTP
methods those URLs allow. Fortunately, the
`Django REST Framework <http://www.django-rest-framework.org>`__ (on which
the GraceDB API is built) provides
a convenient *browseable* version of the API which serves as a reference.
The root of the API can be found here:
`https://gracedb.ligo.org/apiweb/ <https://gracedb.ligo.org/apiweb/>`__
A glance at the upper-right hand corner shows that this URL supports only
``OPTIONS`` and ``GET``. The body is a collection of JSON information provided
by the root resource, including ``links``. One of these links points to the
event list resource:
`https://gracedb.ligo.org/apiweb/events/ <https://gracedb.ligo.org/apiweb/events/>`__
which also supports ``POST`` (see the bottom of the page). New events are
created by ``POST``-ing to the event list resource. This results in a new
event with a unique URL. If the parameters of the event change, the event
can be replaced by a ``PUT`` request to that same event URL with the replacement
data in the body. In a similar manner,
new log messages are created by ``POST``-ing to the event log list associated
with a particular event. The data expected by these target URLs is not yet
documented here. However, the source code of the GraceDB Python client
can be consulted for examples.
==========================
Using the web interface
==========================
The GraceDB web interface is intended primarily for "read" operations (i.e., searching for
and viewing events), whereas the REST interface (discussed in
:ref:`rest_interface`) is used for both "read" and "write" operations. Thus,
new events are ordinarily created via the REST interface, but viewed with the
web interface. This section focuses on the latter.
Searching for events
==========================
The search form can be found by clicking on "SEARCH" in the top navigation menu.
Many different types of searches are available, and they can be combined with
each other in various ways:
- by event attributes:
- ``instruments = "H1,L1,V1" & far < 1e-7``
- ``coincinspiral.mass < 5 & single_inspiral.eff_distance in 100,300``
- by GPS time or range
- ``gpstime: 999999999``
- ``899999000..999999999``
- by event creation time
- ``created: 2009-10-08 .. 2009-12-04 16:00:00``
- ``yesterday .. now``
- by specific graceid or range
- ``G125700``
- ``G120000 .. G130000``
- by group, pipeline, and search (names are case insensitive)
- ``cbc gstlal lowmass``
- ``group: burst``
- ``test hardwareinjection``
- by label
- ``label: INJ``
- ``EM_READY PE_READY``
- by submitter (note the required straight quotes)
- ``"waveburst"``
- ``submitter: "gracedb.processor"``
- ``submitter: "joss.whedon@ligo.org"``
Notice how pipeline-specific attributes, such as the chirp mass, need to be
qualified with the type of event (e.g., ``coincinspiral.mchirp`` in queries
rather than just ``mchirp``). Keywords in the searches (e.g., ``gpstime``,
``created``, etc.) are usually optional but are sometimes useful for
disambiguation.
Understanding the event page
===============================
Clicking on an event in the search results table leads to an individual event page.
These pages are broken up into several sections (in order from top to bottom):
- **Basic info**: Attributes that are common to all event types, including the graceid (UID),
group, pipeline, and search. A link to the associated data files is also found here.
- **Pipeline-specific attributes**: Tables of attributes associated with a specific search
pipeline (e.g., the chirp mass for a CBC event, or the central frequency for a burst event)
- **Neighbors**: Surrounding events within a specified time window. (Note that the time window is adjustable
by clicking on it.) These events are neighbors in the temporal sense only (i.e., not spatial).
- **Event log messages**: This is the largest section, consisting of annotations broken up into
thematic sections that may be collapsed and expanded.
At the bottom of the Event Log Messages section, there is a pane entitled
"Full Event Log" which (when expanded) shows all of the annotations in reverse
chronological order. These individual entries are sometimes *tagged* as
belonging to a particular category, and these tags are used to group entries
into the thematic sections above. Each entry in the full event log has a log
message number, creation time, and user name to establish provenance. The
existing tags are also shown in the same column as the message itself, as is
the form (which looks like a `+`) to add a new tag. Users are free to create
new tags for their own purposes (e.g., searching through annotations at some
later date), but only a pre-determined list of tags is used to create title
pane sections.
For more on the GraceDB event page, see
`this <https://www.youtube.com/watch?v=oIJE4dTISs4>`_ helpful video
by Roy Williams, which is geared toward LV-EM users. There also is a
`companion video <https://www.youtube.com/watch?v=ydXUD9KIN98>`__ on the SkymapViewer.
Signing up for email alerts (LVC only)
=======================================================
LVC users may set up email notifications for events that come from specific
pipelines and have specific labels. (This feature is available to LVC users
only because the events are not vetted before the alert is sent out. For
non-LVC users, GCN will provide the equivalent functionality. See the LV-EM
`techinfo page <https://gw-astronomy.org/wiki/LV_EM/TechInfo>`__.)
In order to sign up for an email alert, you must first create a contact by
clicking on "OPTIONS" in the navigation menu, and then "Create New Contact."
Add an email address and description (such as "uni email account", or "my cell",
etc.). Note that many mobile carriers allow users to receive emails via
text (e.g., 1234567890@vtext.com).
Next, return to the options page and click "Create New Notification",
which allows you to select a label and pipeline to track, as well as a
contact.
File added
ServerName ${DJANGO_PRIMARY_FQDN}
<VirtualHost *:80>
ServerName https://${DJANGO_PRIMARY_FQDN}:443
UseCanonicalName On
ServerSignature On
ErrorLog /dev/stderr
Transferlog /dev/stdout
ServerAdmin cgca-admins@uwm.edu
## Log format
LogFormat "APACHE | %a %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\""
## Vhost docroot
DocumentRoot "/var/www/html"
## Directories, there should at least be a declaration for /var/www/html
<Directory "/var/www/html">
Options Indexes FollowSymLinks MultiViews
AllowOverride None
Require all granted
</Directory>
# Improve proxy behavior with gunicorn:
# https://serverfault.com/questions/206738/intermittent-error-when-using-mod-proxy-to-do-reverse-proxy-to-soap-service#comment1327184_209006
# https://github.com/benoitc/gunicorn/issues/207
SetEnv force-proxy-request-1.0 1
SetEnv proxy-nokeepalive 1
## Custom fragment
# gUnicorn edits
Alias /shibboleth-ds/idpselect_config.js /etc/shibboleth-ds/idpselect_config.js
Alias /shibboleth-ds/idpselect.js /etc/shibboleth-ds/idpselect.js
Alias /shibboleth-ds/idpselect.css /etc/shibboleth-ds/idpselect.css
Alias /static/ "/app/gracedb_project/static_root/"
# Aliases for docs and admin_docs
Alias /documentation/ "/app/gracedb_project/docs/user_docs/build/"
Alias /admin_docs/ "/app/gracedb_project/docs/admin_docs/build/"
ProxyPreserveHost on
ProxyAddHeaders off
ProxyPass "/robots.txt" "!"
ProxyPass "/shibboleth-ds" "!"
ProxyPass "/Shibboleth.sso" "!"
ProxyPass "/static" "!"
ProxyPass "/documentation" "!"
ProxyPass "/admin_docs" "!"
ProxyPass "/" "http://localhost:8080/" timeout=120
ProxyPassReverse "/" "http://localhost:8080/"
# This section is for apache2 timeout and keepalive tuning parameters.
# https://ioflood.com/blog/2020/02/21/what-is-apache-keepalive-timeout-how-to-optimize-this-critical-setting/
# KeepAlive will... keep a connection alive for subsequent requests.
# Turn this on.
KeepAlive On
# The maximum number of requests served to a client before terminating the connection.
# This can be large, possibly safely unlimited. (0 = unlimited)
MaxKeepAliveRequests 0
# The number of seconds Apache will wait for a subsequent request before closing the
# connection. Once a request has been received, the timeout value specified by the
# Timeout directive applies. Setting KeepAliveTimeout to a high value may cause
# performance problems in heavily loaded servers. The higher the timeout, the more
# server processes will be kept occupied waiting on connections with idle clients
KeepAliveTimeout 5
# Amount of time the server will wait for certain events before failing a
# request. The TimeOut directive defines the length of time Apache will wait for
# I/O (e.g., when reading data from the client, when writing data to the client, etc.)
# Default: 300s. Try setting this lower, then do a test like a long query with the API
# and in the browser and see what happens.
Timeout 60
# Unset certain headers to help prevent spoofing
RequestHeader unset REMOTE_USER
RequestHeader unset ISMEMBEROF
RequestHeader unset X_FORWARDED_FOR
RequestHeader unset REMOTE_ADDR
RequestHeader unset SSL_CLIENT_S_DN
RequestHeader unset SSL_CLIENT_I_DN
RequestHeader unset X_FORWARDED_PROTO
# Get a few of them from the environment
RequestHeader set X_FORWARDED_FOR "%{X_FORWARDED_FOR}e" env=X_FORWARDED_FOR
RequestHeader set REMOTE_ADDR "%{REMOTE_ADDR}e" env=REMOTE_ADDR
# Set X_FORWARDED_PROTO to https
RequestHeader set X_FORWARDED_PROTO "https"
# Increase the max allowable header size:
LimitRequestFieldSize 16384
# Set up mod_xsendfile for serving static event files as directed by Django
XSendFile On
XSendFilePath /app/db_data/
Alias /shibboleth-ds/idpselect_config.js /etc/shibboleth-ds/idpselect_config.js
Alias /shibboleth-ds/idpselect.js /etc/shibboleth-ds/idpselect.js
Alias /shibboleth-ds/idpselect.css /etc/shibboleth-ds/idpselect.css
<Directory /etc/shibboleth-ds>
Require all granted
</Directory>
# Deny access to the DocumentRoot. This makes it possible to upload
# large files. See notes.
<Directory "/var/www/">
Require all denied
</Directory>
<Directory "/app/gracedb_project/static_root/">
AllowOverride None
Options None
Require all granted
</Directory>
Alias /robots.txt /app/gracedb_project/static_root/robots.txt
<Location /Shibboleth.sso>
SetHandler shib
Require all granted
</Location>
<Location /shibboleth-sp>
Require all granted
</Location>
<Location "/post-login/">
AuthType Shibboleth
Require shibboleth
ShibRequestSetting requireSession true
ShibUseHeaders On
# use funky method to get REMOTE_USER variable
RewriteEngine On
RewriteCond %{LA-U:REMOTE_USER} (.+)
RewriteRule . - [E=RU:%1]
RequestHeader set REMOTE_USER %{RU}e
# this way only works with SSLEngine On because REMOTE_USER is secure variable
#RequestHeader set REMOTE_USER %{REMOTE_USER}s
RequestHeader set ISMEMBEROF "%{ISMEMBEROF}e" env=ISMEMBEROF
</Location>
<Directory "/app/gracedb_project/docs/user_docs/build/">
Require all granted
</Directory>
# Restrict access to admin documentation
<Location "/admin_docs/">
AuthType Shibboleth
ShibRequestSetting requireSession true
ShibUseHeaders On
Require shib-user duncan.meacher@ligo.org alexander.pace@ligo.org daniel.wysocki@ligo.org patrick.brady@ligo.org
</Location>
</VirtualHost>
Explanation: shibboleth 3.0 dependencies
Package: init-system-helpers libxerces-c3.2
Pin: release a=stretch-backports
Pin-Priority: 500