diff --git a/.gitignore b/.gitignore
index 19357e968f9a880c9c582f176d91697186b1d749..43855c8f6c9b9c3d516c563431c390533ff89a12 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,4 +8,6 @@ static/rest_framework/
 static/debug_toolbar/
 doc/build/*
 doc/build/.buildinfo
+admin_doc/build/*
+admin_doc/build/.buildinfo
 settings/settings_secret.py
diff --git a/admin_doc/source/_static/gracedb-nav-style.css b/admin_doc/source/_static/gracedb-nav-style.css
new file mode 100644
index 0000000000000000000000000000000000000000..fdef2ac3aa08bbf4e6e58397c12e19666dac47e0
--- /dev/null
+++ b/admin_doc/source/_static/gracedb-nav-style.css
@@ -0,0 +1,152 @@
+
+
+html, body {
+  color: black;
+  background-color: white;
+  margin: 0;
+  padding: 0;
+}
+
+a.link, a, a.active {
+  color: #369;
+}
+
+h1,h2,h3,h4,h5,h6,#getting_started_steps {
+  font-family: "Century Schoolbook L", Georgia, serif;
+  font-weight: bold;
+}
+
+h1.docnav {
+    font-size: 25px;
+}
+
+#getting_started_steps li {
+  font-size: 80%;
+  margin-bottom: 0.5em;
+}
+
+#gracedb-nav-header {
+  color: black;
+  font-size: 127%;  
+  background-color: white;
+  font: x-small "Lucida Grande", "Lucida Sans Unicode", geneva, verdana, sans-serif;
+/*  width: 757px; */
+  width: 95%;
+  margin: 0 auto 0 auto;
+  border: none;
+/*  border-left: 1px solid #aaa; */
+/*  border-right: 1px solid #aaa; */
+  padding: 10px 10px 0px 10px;
+}
+
+
+/* Nav */
+#nav {
+    margin:0;
+    padding:0;
+    background:#eee;    /* Nav base color */
+    float: left;
+    width: 100%;
+    font-size: 13px;
+    border:1px solid #42432d;
+    border-width:1px 1px;
+}
+
+#nav #nav-user
+{
+    color:#000;
+    background:#eee;    /* Nav base color */
+    padding:4px 20px 4px 20px;
+    float: right;
+    width: auto;
+    text-decoration:none;
+    font:bold 1em/1em Arial, Helvetica, sans-serif;
+    text-transform:uppercase;
+    /* text-shadow: 2px 2px 2px #555; */
+}
+
+#nav #nav-login
+{
+    float: right;
+}
+
+#nav li {
+    display:inline;
+    padding:0;
+    margin:0;
+}
+/*
+#nav li:first-child a {
+    border-left:1px solid #42432d;
+}
+*/
+
+#nav a:link,
+#nav a:visited {
+    color:#000;
+    background:#eee;    /* Nav base color */
+    /* padding:20px 40px 4px 10px; */
+    padding:4px 20px 4px 20px;
+    float: left;
+    width: auto;
+    border-right:1px solid #42432d;
+
+    text-decoration:none;
+    font:bold 1em/1em Arial, Helvetica, sans-serif;
+    text-transform:uppercase;
+    /* text-shadow: 2px 2px 2px #555; */
+}
+#nav a:hover {
+    /* color:#fff;  / * Use if bg is dark */
+    background: #dce2ed;  /* Nav hover color */
+}
+
+#home #nav-home a,
+#create #nav-create a,
+#search #nav-search a,
+#doc #nav-doc a,
+#reports #nav-reports a,
+#feeds #nav-feeds a,
+#about #nav-about a,
+#archive #nav-archive a,
+#lab #nav-lab a,
+#reviews #nav-reviews a,
+#userprofile #nav-userprofile a,
+#latest #nav-latest a,
+#contact #nav-contact a {
+    background: #a9b0ba;  /* Nav selected color */
+    /* color:#fff;  / * Use if bg is dark */
+    /* text-shadow:none; */
+}
+#home #nav-home a:hover,
+#create #nav-create a,
+#search #nav-search a,
+#doc #nav-doc a,
+#reports #nav-reports a,
+#feeds #nav-feeds a,
+#about #nav-about a:hover,
+#archive #nav-archive a:hover,
+#lab #nav-lab a:hover,
+#reviews #nav-reviews a:hover,
+#userprofile #nav-userprofile a:hover,
+#latest #nav-latest a:hover,
+#contact #nav-contact a:hover {
+    /* background:#e35a00; */
+    background: #a9b0ba;  /* Nav selected color */
+}
+#nav a:active {
+    /* background:#e35a00; */
+    background: #a9b0ba;  /* Nav selected color */
+    color:#fff;
+}
+
+
+/* The Following is for the subclasses table. */
+table.subclasses_main {
+    width: 100%;
+}
+
+td.subclasses_row {
+    vertical-align: top;
+}
+    
diff --git a/admin_doc/source/_templates/layout.html b/admin_doc/source/_templates/layout.html
new file mode 100644
index 0000000000000000000000000000000000000000..b541e6626840a44ee2e4349641ecf836c76e68b8
--- /dev/null
+++ b/admin_doc/source/_templates/layout.html
@@ -0,0 +1,100 @@
+{% extends "!layout.html" %}
+
+{% block extrahead %}
+
+<link rel="stylesheet" href="_static/gracedb-nav-style.css" />
+<script src="/bower-static/dojo/dojo.js" data-dojo-config="async: true"></script>
+<script>
+
+var getKeys = function(obj){
+   var keys = [];
+   for(var key in obj){
+      keys.push(key);
+   }
+   return keys;
+}
+
+require([
+    'dojo/_base/declare',
+    'dojo/query',
+    'dojo/parser',
+    'put-selector/put',
+    'dojo/dom',
+    'dojo/dom-construct',
+    'dojo/dom-style',
+    'dojo/request',
+    'dojo/NodeList-dom',
+    'dojo/NodeList-traverse',
+    'dojo/domReady!',
+], function(declare, query, parser, put, dom, domConstruct, domStyle, request) {
+
+    parser.parse();
+
+    // The url will look like: base + /admin_docs/...
+    var loc = window.location.href;
+    var ind = loc.indexOf('admin_docs');
+    var url = loc.substring(0,ind);
+    url += 'navbar_only';
+    
+    var header_div = dom.byId("gracedb-nav-header");
+
+    request.get(url).then(
+        function(text) {
+            var node = domConstruct.toDom(text);
+            var nl = query('*', node);
+            var header_content = "";
+            // XXX this should not be necessary. Why can't I just query directly for the node with
+            // id == 'content'?
+            nl.forEach(function(n) {
+                if (n.tagName == 'DIV' && n.id == 'content') {
+                    header_content = n.innerHTML;
+                }
+            });
+            header_div.innerHTML = header_content;
+        },
+        function(error) {
+            console.log("failed to get navbar content.")
+        }
+    );
+
+    // All the rest of this is to get the silly subclass information table in place.
+    {% if pagename == 'models' %}
+        var tableNode = dom.byId("subclasses_table");
+
+        var SubclassInfo = new Object();
+
+        // You know, there is probably a better way of getting at this information.
+        SubclassInfo['CoincInspiralEvent'] = ["ifos","end_time","mass","mchirp","minimum_duration","snr","false_alarm_rate","combined_far"];
+
+        SubclassInfo['MultiBurstEvent'] = ["ifos","start_time","duration","peak_time","central_freq","bandwidth","amplitude","snr","confidence","false_alarm_rate","ligo_axis_ra","ligo_axis_dec","ligo_angle","ligo_angle_sig"];
+
+        SubclassInfo['SimInspiralEvent'] = ["mass1","mass2","eta","mchirp","spin1z","spin2z","amp_order","coa_phase","geocent_end_time","f_lower","f_final","distance","latitude","longitude","polarization","inclination","theta0","phi0","waveform","source_channel","destination_channel"];
+
+        SubclassInfo['GrbEvent'] = ["ivorn","author_ivorn","author_shortname","observatory_location_id","coord_system","ra","dec","error_radius","how_description","how_reference_url","trigger_duration","t90"]
+
+        var mainTable = put(tableNode, 'table.subclasses_main');
+        headerRow = put(mainTable, 'tr');
+        for (var key in SubclassInfo) {
+            put(headerRow, 'th', key);
+        }
+
+        contentsRow = put(mainTable, 'tr');
+        for (var key in SubclassInfo) {
+            var subTable = put(contentsRow, 'td.subclasses_row table');
+            for (var ind in SubclassInfo[key]) {
+                put(subTable, 'tr', SubclassInfo[key][ind]);
+            }
+        }
+    {% endif %}
+
+});
+
+</script>
+
+{% endblock %}
+
+{% block header %}
+
+<div id="gracedb-nav-header"></div>
+
+{% endblock %}
diff --git a/admin_doc/source/client_release.rst b/admin_doc/source/client_release.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d8d7eef7faf08862aea7020c2895142373e27276
--- /dev/null
+++ b/admin_doc/source/client_release.rst
@@ -0,0 +1,9 @@
+================================
+Preparing a new client release
+================================
+
+Disclaimer
+==========
+
+The steps here are only suggestions. You will undoubtedly discover better 
+and/or different ways to go about this.
diff --git a/admin_doc/source/conf.py b/admin_doc/source/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..b263faf11ec6516470f6016efa98950235c3569e
--- /dev/null
+++ b/admin_doc/source/conf.py
@@ -0,0 +1,286 @@
+# -*- coding: utf-8 -*-
+#
+# GraceDB Administration and Development documentation build configuration file, created by
+# sphinx-quickstart on Thu Feb 25 16:37:58 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    'sphinx.ext.todo',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'GraceDB Administration and Development'
+copyright = u'2016, Branson Stephens'
+author = u'Branson Stephens'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '1.20'
+# The full version, including alpha/beta/rc tags.
+release = '1.20'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'alabaster'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+#html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+#html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+#html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'GraceDBAdministrationandDevelopmentdoc'
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+
+# Latex figure (float) alignment
+#'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+  (master_doc, 'GraceDBAdministrationandDevelopment.tex', u'GraceDB Administration and Development Documentation',
+   u'Branson Stephens', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (master_doc, 'gracedbadministrationanddevelopment', u'GraceDB Administration and Development Documentation',
+     [author], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  (master_doc, 'GraceDBAdministrationandDevelopment', u'GraceDB Administration and Development Documentation',
+   author, 'GraceDBAdministrationandDevelopment', 'One line description of project.',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/admin_doc/source/dev.rst b/admin_doc/source/dev.rst
new file mode 100644
index 0000000000000000000000000000000000000000..58cb4ccf37513d9f6f7ed57a604b8049562f1cf5
--- /dev/null
+++ b/admin_doc/source/dev.rst
@@ -0,0 +1,16 @@
+.. GraceDB developer's guide
+
+Developer's Guide
+=================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   new_server_feature
+   new_gracedb_instance
+   new_event_subclass
+   client_release
+   shibbolized_client
+
diff --git a/admin_doc/source/index.rst b/admin_doc/source/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9f2b386c7164bac06a8eda8570ffa8e0338ebdca
--- /dev/null
+++ b/admin_doc/source/index.rst
@@ -0,0 +1,23 @@
+.. GraceDB Administration and Development documentation master file, created by
+   sphinx-quickstart on Thu Feb 25 16:37:58 2016.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+GraceDB Admin and Dev Info
+===========================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   ops
+   dev
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
+
diff --git a/admin_doc/source/new_event_subclass.rst b/admin_doc/source/new_event_subclass.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9af7d6324e78efc8d551462b860e3938d22e10c9
--- /dev/null
+++ b/admin_doc/source/new_event_subclass.rst
@@ -0,0 +1,13 @@
+.. _new_event_subclass:
+
+==================================
+Creating a new event subclass
+==================================
+
+Why?
+==========
+Most events in GraceDB have attributes that go beyond those in the base
+event class. If a new pipeline is developed, and the data analysts wish
+to upload events to GraceDB, these events will often have attributes
+that do not correspond to any of the existing event subclasses. In this
+case, you will need to create a new event subclass. 
diff --git a/admin_doc/source/new_gracedb_instance.rst b/admin_doc/source/new_gracedb_instance.rst
new file mode 100644
index 0000000000000000000000000000000000000000..337dc17566db0996a00f8ee6f3d88314a27aa0a4
--- /dev/null
+++ b/admin_doc/source/new_gracedb_instance.rst
@@ -0,0 +1,9 @@
+==================================
+Standing up a new GraceDB instance
+==================================
+
+Disclaimer
+==========
+
+These instructions will almost certainly not work. Please edit when you find
+something that fails.
diff --git a/admin_doc/source/new_pipeline.rst b/admin_doc/source/new_pipeline.rst
new file mode 100644
index 0000000000000000000000000000000000000000..66505b0b59ac4b3d406769f872e698af41355213
--- /dev/null
+++ b/admin_doc/source/new_pipeline.rst
@@ -0,0 +1,160 @@
+================================
+Adding a new pipeline or search
+================================
+
+Sometimes, users will request that a new ``Pipeline`` be added. Creating
+the pipeline object itself is the easy part. The hard part is figuring out
+what kind of data file the group will be uploading, and how to ingest the values.
+The directions below will focus on the easiest possible case--in which the 
+new pipeline's data files have the same format and information as those 
+of an existing pipeline. (For example, the ``gstlal-spiir`` group uploads
+the same type of data file as the ``gstlal`` group, and this made adding the
+``gstlal-spiir`` pipeline relatively easy.)
+Adding a new ``Search`` is simpler, but the steps relating to LVAlert are similar.
+
+
+.. NOTE::
+    The following suggests performing the necessary database operations
+    in the django console (i.e., a Python interpreter running with the correct
+    environment). These operations could also be done in the web-based django
+    admin interface. However, I never use it myself, so that's not the method
+    I'll show in this documentation. One could also issue raw SQL commands
+    if preferred.
+
+.. NOTE::
+    The database operations here could also be done with 'data migrations'. 
+    This leaves more of a paper trail, and as such might be considered 
+    'the right thing to do.' However, it seems like overkill for relatively
+    small tasks like this.
+
+GraceDB server side steps
+=========================
+
+First, create the new pipeline object. Since the only field in the Pipeline model
+is the name, it's pretty simple. Suppose we are creating a new pipeline called 
+``newpipeline``. We fire up the Django console:: 
+
+    cd /home/gracedb
+    ./manage.py shell
+
+Now we create the pipeline object itself::
+
+    from gracedb.models import Pipeline
+    newpipeline = Pipeline.objects.create(name='newpipeline')
+
+Now that the pipeline exists, one or more users will need to be given
+permission to *populate* the pipeline (i.e., to create new events for that
+pipeline). For more info on permissions, see :ref:`managing_user_permissions`.
+By default, all internal users will have permission to create ``Test``
+events for our new pipeline, but only specific users will be allowed to create
+non-``Test`` events. Let's suppose we want to give access to a human user
+(Albert Einstein) and a robotic user (``newpipeline_robot``)::
+
+    from django.contrib.auth.models import User, Permission
+    from guardian.models import UserObjectPermission
+    from django.contrib.contenttypes.models import ContentType
+
+    # Retrieve the objects we will need
+    p = Permission.objects.get(codename='populate_pipeline')
+    ctype = ContentType.objects.get(app_label='gracedb', model='pipeline')
+    einstein = User.objects.get(username='albert.einstein@LIGO.ORG')
+    robot = User.objects.get(username='newpipeline_robot')
+
+    # Create the new permission
+    UserObjectPermission.objects.create(user=einstein, permission=p, 
+        content_type=ctype, object_pk=newpipeline.id)
+    UserObjectPermission.objects.create(user=robot, permission=p, 
+        content_type=ctype, object_pk=newpipeline.id)
+
+The next step is to figure out how events from the 
+new pipeline will be represented in the database. If the base ``Event`` class
+is sufficient, or if one of the existing subclasses can be used, then 
+no new database tables will be needed. However, if the events coming from the
+pipeline has new attributes, then a new event subclass will be needed to 
+adequately represent it. If the latter, see :ref:`new_event_subclass`. 
+
+For now, let's assume that the attributes of the new pipeline match up
+exactly with those of an existing pipeline, and that the data file can be
+parsed in the same way. Then all we need to do is to edit the utility function
+``_createEventFromForm`` in ``gracedb/view_logic.py`` so that our 
+new pipeline's name appears in the correct list, resulting in the correct
+event class being created. For example, if the events
+of the new pipeline match up with those from Fermi, then we can add it to
+the same list as Fermi, Swift, and SNEWS. 
+
+Next, edit the function ``handle_uploaded_data`` in ``gracedb/translator.py``
+so that, when an event is created for our new pipeline, the data file is
+parsed in the correct way. This function is basically just a huge ``if``
+statement on the pipeline name. So if we want the data file to be parsed
+in the same way as the files for Fermi and Swift, we would just add the name
+of our new pipeline next to Fermi and Swift in the control structure.
+
+Steps for LVAlert
+=================
+
+When a new pipeline is created, the corresponding LVAlert nodes need to be 
+created. Let's suppose our new pipeline is associated with the ``Burst``
+group. That means we will need at least two new LVAlert nodes::
+
+    test_newpipeline
+    burst_newpipeline
+
+If the relevant group (in this case, the burst group) wants to specify one
+or more ``Search`` values for their event, then these nodes need to be 
+created as well::
+
+    test_newpipeline_search1
+    burst_newpipeline_search1
+    test_newpipeline_search2
+    burst_newpipeline_search2
+
+where the names of the searches are ``search1`` and ``search2``. I typically
+use a script such as the one below to create the nodes and add the ``gracedb``
+user as a publisher::
+
+    #!/usr/bin/env python
+
+    import subprocess
+    import time
+
+    nodes = [
+        'test_newpipeline',
+        'burst_newpipeline',
+        'test_newpipeline_search1',
+        'burst_newpipeline_search1',
+        'test_newpipeline_search2',
+        'burst_newpipeline_search2',
+    ]
+
+    username = 'user.name'
+    password = 'passw0rd'
+
+    servers = [
+        'lvalert.cgca.uwm.edu',
+        'lvalert-test.cgca.uwm.edu',
+    ]
+
+    for server in servers:
+        for node in nodes:
+            print "creating node %s for server %s ..." % (node, server)
+            cmd = 'lvalert_admin -a {0} -b {1} -c {2} -d -q {3}'.format(username, 
+                password, server, node)
+            p = subprocess.Popen(cmd, shell=True)
+            out, err = p.communicate()
+
+            if err:
+                print "Error for node %s: %s" % (node, err)
+
+            # add gracedb as publisher
+            # Also serves as a check to whether the node exists if not creating
+            time.sleep(2)
+
+            print "adding gracedb as publisher to node %s for server %s ..." % (node, server)
+
+            cmd = 'lvalert_admin -a {0} -b {1} -c {2} -j gracedb -q {3}'.format(username,
+                password, server, node)
+            p = subprocess.Popen(cmd, shell=True)
+            out, err = p.communicate()
+
+            if err:
+                print "Error for node %s: %s" % (node, err)
diff --git a/admin_doc/source/new_server_feature.rst b/admin_doc/source/new_server_feature.rst
new file mode 100644
index 0000000000000000000000000000000000000000..cd654921ee6fee89dcb03c40f37c44b9e12e11a9
--- /dev/null
+++ b/admin_doc/source/new_server_feature.rst
@@ -0,0 +1,9 @@
+================================
+Adding a new server-side feature
+================================
+
+Disclaimer
+==========
+
+The steps here are only suggestions. You will undoubtedly discover better 
+and/or different ways to go about this.
diff --git a/admin_doc/source/ops.rst b/admin_doc/source/ops.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c4ed4c6bd3ead907d62a888dc1aa1cd3e2cde8fc
--- /dev/null
+++ b/admin_doc/source/ops.rst
@@ -0,0 +1,14 @@
+.. GraceDB operation (admin) tasks
+
+Operational Tasks
+=================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   new_pipeline
+   user_permissions
+   robot_certificate
+
diff --git a/admin_doc/source/robot_certificate.rst b/admin_doc/source/robot_certificate.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9a1217548d8d6c046b8e4e23480c4b785264c8e0
--- /dev/null
+++ b/admin_doc/source/robot_certificate.rst
@@ -0,0 +1,164 @@
+================================
+Creating a robot account
+================================
+
+.. NOTE::
+    You could also do the database operations through the Django admin
+    interface. Instead, I show how to do it with a database migration
+    since that seems easier to me and leaves more of a paper trail. 
+
+General information on robot accounts
+=====================================
+
+The flagship data analysis pipelines are usually operated by groups of 
+users. Thus, it doesn't make much sense if the events are submitted to GraceDB
+via a single user's account. This is also impractical, as an individual
+user's auth tokens expire often, but the pipeline process needs to be running
+all the time. 
+
+Thus, some kind of persistent auth token is required, and this should be
+attached to a GraceDB account that is *not* an individual user's personal
+account. That way, a user running the data analysis pipeline can also 
+comment on the event as him or herself, and the provenance information is 
+clear and consistent. Robot accounts are thus, in effect, shared accounts.
+
+Robot authentication
+====================
+
+At present, most robots authenticate to GraceDB using x509 certificates.
+Users are discouraged from moving cert/key pairs around from machine to machine,
+so the usual recommendation is to ask that the person in charge of the robotic
+process(es) obtain a cert/key pair for each computing cluster as needed. 
+The Common Names will hopefully follow a sensible pattern::
+
+    RobotName/ldas-pcdev1.ligo.caltech.edu
+    RobotName/ldas-pcdev1.ligo-la.caltech.edu
+    RobotName/ldas-pcdev1.ligo-wa.caltech.edu
+    ...
+
+Instructions for obtaining LIGO CA robot certificates can be found 
+`here <https://wiki.ligo.org/AuthProject/LIGOCARobotCertificate>`__.
+
+.. NOTE::
+    LIGO CA robot certs expire after 1 year. The best way of "renewing"
+    is to generate a new Certificate Signing Request (CSR) with the old key, and
+    send that CSR to ``rt-auth``::
+
+        openssl x509 -x509toreq -in currentcert.pem -out robot_cert_req.pem -signkey currentkey.pem
+
+.. NOTE::
+    Neither Apache nor the GraceDB app will check that the domain name
+    in the user's cert DN resolves to the IP from which the user is connecting.
+    (This is in contrast with the latest Globus tools, which do perform this
+    check.) Thus, a user may connect from ``ldas-pcdev2`` at CIT, even if the CN
+    in the cert is ``RobotName/ldas-pcdev1.ligo.caltech.edu``.
+
+Once the user has obtained the certificates, ask him/her to send you the output
+of::
+
+    openssl x509 -subject -noout -in /path/to/robot_cert_file.pem
+
+That way you will know the subject(s) to link with the robotic user when
+you create it.
+
+In the future, it is hoped that robots will authenticate using Shibboleth
+rather than x509. The user would request a robotic keytab and this robotic
+user would have the correct group memberships in the LDAP. This will allow you
+to eliminate the x509 authentication path in GraceDB altogether. See the
+sketch at :ref:`shibbolized_client`.
+
+Creating the robot user
+=======================
+
+These same steps could all be done by hand using the Django console. 
+However, using a migration is encouraged since there is more of a paper trail
+that way. See the Django docs on data migrations.
+
+Create an empty data migration::
+
+    python manage.py makemigrations --empty ligoauth
+
+Rename the resulting file to something sane::
+
+    cd ligoauth/migrations
+    mv 0004_auto_20160229_1541.py 0004_add_robot_RobotName.py
+
+Edit the migration to do what you want it to do. You could use this as a template::
+
+    # -*- coding: utf-8 -*-
+    from __future__ import unicode_literals
+
+    from django.db import migrations, models
+    from django.conf import settings
+
+    ROBOTS = [
+            {
+                'username' : 'NewRobot',
+                'first_name' : '',
+                'last_name' : 'My New Robot',  # Note that the last_name acts as a display
+                'email' : 'albert.einstein@ligo.org',
+                'dns' : [
+                    "/DC=org/DC=ligo/O=LIGO/OU=Services/CN=NewRobot/ldas-pcdev1.ligo.caltech.edu.edu",
+                    "/DC=org/DC=ligo/O=LIGO/OU=Services/CN=NewRobot/ldas-pcdev1.ligo-la.caltech.edu.edu",
+                    "/DC=org/DC=ligo/O=LIGO/OU=Services/CN=NewRobot/ldas-pcdev1.ligo-wa.caltech.edu.edu",
+                ]
+            },
+    ]
+
+    def create_robots(apps, schema_editor):
+        LocalUser = apps.get_model('ligoauth', 'LocalUser')
+        X509Cert = apps.get_model('ligoauth', 'X509Cert')
+        Group = apps.get_model('auth', 'Group')
+        lvc_group = Group.objects.get(name=settings.LVC_GROUP)
+
+        for entry in ROBOTS:
+            user, created = LocalUser.objects.get_or_create(username=entry['username'])
+            if created:
+                user.first_name = entry['first_name']
+                user.last_name = entry['last_name']
+                user.email = entry['email']
+                user.is_active = True
+                user.is_staff = False
+                user.is_superuser = False
+                user.save()
+
+            # Create the cert objects and link them to our user.
+            for dn in entry['dns']:
+                cert, created = X509Cert.objects.get_or_create(subject=dn)
+                if created:
+                    cert.save()
+                cert.users.add(user)
+
+            # Add our user to the LVC group. This permission is required to 
+            # do most things, but may *NOT* always be appropriate. It may
+            # also be necessary to give the robotic user permission to populate
+            # a particular pipeline.
+            lvc_group.user_set.add(user)
+
+    def delete_robots(apps, schema_editor):
+        LocalUser = apps.get_model('ligoauth', 'LocalUser')
+        X509Cert = apps.get_model('ligoauth', 'X509Cert')
+
+        for entry in ROBOTS:
+            for dn in entry['dns']:
+                X509Cert.objects.get(subject=dn).delete()
+            LocalUser.objects.get(username=entry['username']).delete()
+
+    class Migration(migrations.Migration):
+
+        dependencies = [
+            ('ligoauth', '0003_auto_20150819_1201'),
+        ]
+
+        operations = [
+            migrations.RunPython(create_robots, delete_robots),
+        ]
+              
+
+The above could definitely be refactored in some nice way. I'll leave that as
+an exercise for the reader :-) Now apply the migration::
+    
+    python manage.py migrate ligoauth
+    
+
+
diff --git a/admin_doc/source/shibbolized_client.rst b/admin_doc/source/shibbolized_client.rst
new file mode 100644
index 0000000000000000000000000000000000000000..88ee1c747eee99cd91e56434f8e17409e87cecca
--- /dev/null
+++ b/admin_doc/source/shibbolized_client.rst
@@ -0,0 +1,11 @@
+.. _shibbolized_client:
+
+================================
+The Shibbolized Client
+================================
+
+Rationale
+==========
+
+The idea here is to wean users off of using X509 certificates for
+authenticating to services. 
diff --git a/admin_doc/source/user_permissions.rst b/admin_doc/source/user_permissions.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7e0577e430e4c319f98a8ab2040f310451c1d0b6
--- /dev/null
+++ b/admin_doc/source/user_permissions.rst
@@ -0,0 +1,28 @@
+.. _managing_user_permissions: 
+
+================================
+Managing user permissions
+================================
+
+Note
+==========
+
+You can do this stuff through the admin interface too, I think.
+I just don't like it, so I never use it.
+
+General info on the permissions infrastructure
+==============================================
+
+To see which users already have permissions, go to the Django shell and...
+
+Permissions to expose events 
+============================
+
+In effect, these permission objects allow specific users to manipulate
+*other* permission objects.
+
+Permissions to edit GRB events
+==============================
+
+Sometimes the GRB group requests to add another user to the list of users
+allowed to provide supplementary information to GRB events by hand. 
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 170cd93c35e4c25c80ba47c98c0771dc52270dd2..bf7db35d4ae7b7c2a7215fe352bd06c35d93ebbc 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -92,7 +92,7 @@ exclude_patterns = []
 
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-#show_authors = False
+show_authors = True
 
 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 2559af4f323753a10b8ddfbab697ec3b3b1d90bd..43fafd748f1d84a5e218d1cf51b3fe2c287451e7 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -11,13 +11,8 @@ Contents:
 .. toctree::
    :maxdepth: 2
 
-   general
-   models
-   web
-   rest
-   lvalert
-   lvem
-   auth
+   ref_manual 
+   tutorials
 
 
 Indices and tables
diff --git a/doc/source/ref_manual.rst b/doc/source/ref_manual.rst
new file mode 100644
index 0000000000000000000000000000000000000000..ea75a9e05089490f3fd5827a6d91036f632fec8a
--- /dev/null
+++ b/doc/source/ref_manual.rst
@@ -0,0 +1,18 @@
+.. GraceDB documentation reference manual
+
+Reference Manual
+================
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   general
+   models
+   web
+   rest
+   lvalert
+   lvem
+   auth
+
diff --git a/doc/source/responding_to_lvalert.rst b/doc/source/responding_to_lvalert.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f78bcbf17130cc2d4b693bf3a46c1b998c130214
--- /dev/null
+++ b/doc/source/responding_to_lvalert.rst
@@ -0,0 +1,418 @@
+==============================
+Responding to LVAlert Messages
+==============================
+
+.. sectionauthor:: Reed Essick
+
+This tutorial will show you how to
+  * register to receive LVAlerts 
+  * subscribe and unsubscribe from pubsub nodes
+  * instantiate and manage an ``lvalert_listen`` instance 
+  * interact with GraceDB through the Python REST interface in a script that is launched via ``lvalert_listen``
+
+This tutorial assumes that the ``ligo-lvalert`` software package is already installed on
+your machine (this is true on all cluster machines).
+
+While we attempt to be pedagogically complete as much as possible, we would
+like to stress that the existing documentation and help-strings for the
+command-line and Python packages are *very* useful and should be the final
+reference if you have any questions.
+
+Registering to receive LVAlert messages
+=======================================
+
+LSC-Virgo members can activate accounts by simply completing the form 
+`here <https://www.lsc-group.phys.uwm.edu/cgi-bin/jabber-acct.cgi>`__.
+
+If you need to create an account that is not attached to your user.name, you
+can email uwm-help@cgca.uwm.edu and request an account.  Once you have created an
+account, you will be able to subscribe the account to different pubsub nodes
+and receive lvalert messages.
+
+Subscribing to pubsub nodes
+===========================
+
+LVAlert messages are broadcast through pubsub nodes and different messages go
+through different nodes. For instance, all gstlal events created in GraceDB are
+announced through the pubsub node called::
+
+    cbc_gstlal
+
+which includes both highmass and lowmass events. If you instead want to only
+receive lowmass events, these are announced through::
+
+    cbc_gstlal_lowmass
+
+Importantly, if you subscribe to both ``cbc_gstlal`` and ``cbc_gstlal_lowmass``, you
+will receive two alerts for every gstlal lowmass event. The general format of::
+
+    group_pipeline[_search]
+
+is followed by all pubsub nodes used to announce events and annotations to
+those events in GraceDB.
+
+Fill out the form and follow all instructions to create an account attached to
+your "user.name". For the rest of this tutorial, I will refer to the username
+as "user.name" and the password as "passw0rd", but you should replace these
+with your own account's information.
+
+To actually subscribe to a pubsub node, we use ``lvalert_admin``
+which allows you to manage your subscriptions. This includes subscribing to new
+nodes, unsubscribing from nodes and viewing your current subscriptions. We will
+now subscribe your account to ``cbc_gstlal_lowmass``. Run::
+
+    lvalert_admin -a user.name -b passw0rd --subscribe --node cbc_gstlal_lowmass
+
+You can confirm that your account is successfully subscribed to this node by
+running::
+
+    lvalert_admin -a user.name -b passw0rd --subscriptions
+
+which will list your account's subscriptions. You should see
+``cbc_gstlal_lowmass`` in the resulting list.  To unsubscribe from a node, use::
+
+    lvalert_admin -a user.name -b passw0rd --unsubscribe --node cbc_gstlal_lowmass
+
+but for now we'll leave our subscription in place. If you'd like to subscribe
+to other nodes, simply repeat the subscription command and replace
+``cbc_gstlal_lowmass`` with the name of the node to which you'd like to
+subscribe. A complete list of nodes is available by running::
+
+    lvalert_admin -a user.name -b passw0rd --get-nodes
+
+For this tutorial, let's subscribe to another node to show how things scale.  Run::
+
+    lvalert_admin -a user.name -b passw0rd --subscribe --node cbc_gstlal_highmass
+
+Creating an LVAlert node
+========================
+
+Users can create their own LVAlert pubsub nodes as well. Unsurprisingly, this
+is also straightforward. Simply run::
+
+    lvalert_admin -a user.name -b passw0rd --create --node user.name-TestNode
+
+to create a node called ``user.name-TestNode``. Of course, you'll want to change
+"user.name" to your account's name. Go ahead and create this node. If you need
+to delete it at any time, you can with::
+
+    lvalert_admin -a user.name -b passw0rd --delete --node user.name-TestNode
+
+but leave it be for the moment. You now have a node owned by your account to
+which you can publish alerts. We'll come back to this when we test our set-up.
+You will also need to subscribe to this node with::
+
+    lvalert_admin -a user.name -b passw0rd --subscribe --node user.name-TestNode
+
+Run::
+
+    lvalert_admin -a user.name -b passw0rd --subscriptions
+
+and make sure you see::
+
+    cbc_gstlal_lowmass 
+    cbc_gstlal_highmass 
+    user.name-TestNode
+
+in the output.
+
+Starting and managing an ``lvalert_listen`` instance
+====================================================
+
+Now you have an lvalert account and it is subscribed to a few pubsub nodes.
+It's time to set up an ``lvalert_listen`` instance which allows your code to
+receive and react to announcements broadcast through the pubsub nodes. The
+first thing you'll need is a config file. Using your favorite text editor,
+create a file called ``myLVAlertListen.ini`` with the following as its contents::
+
+    [cbc_gstlal_lowmass]
+    executable = /bin/true
+
+    [cbc_gstlal_highmass]
+    executable = /bin/false
+
+    [user.name-TestNode]
+    executable = /bin/true
+
+Now run::
+
+    lvalert_listen -a user.name -b passw0rd -c myLVAlertListen.ini > myLVAlertListen.out &
+
+Congratulations! You've set up an ``lvalert_listen`` instance which reacts to
+announcements published to the ``cbc_gstlal_lowmass``, ``cbc_gstlal_highmass`` and
+``user.name-TestNode`` nodes.
+
+Here's what's happening: ``lvalert_listen`` hears announcements made to any node to
+which the user.name/passw0rd combination is subscribed. When an alert is
+received, it looks in the config file (loaded into memory) for the associated
+section. Importantly, if there is no section in the config file corresponding
+to the pubsub node's name (an exact match is required), ``lvalert_listen`` ignores
+the announcements from that node even if you are subscribed to it. If it finds
+a section, it looks for the "executable" option and attempts to run the
+associated value (in this case ``/bin/true``) via Python's subprocess module. The
+delegation to ``subprocess.Popen`` does *not* split the value so this must be a
+single filename for the executable. If your executable takes in options, we
+recommend wrapping it in a simple shell script and specifying the shell script
+within ``myLVAlertListen.ini``. We'll get to that in a bit.
+
+In this way, you can have multiple ``lvalert_listen`` instances for a single
+account listening to multiple different nodes and doing multiple different
+things. Furthermore, if you provide multiple sections in ``myLVAlertListen.ini``
+you can react to announcements from different pubsub nodes in different ways, all
+within the same ``lvalert_listen`` instance.
+
+Right now your listener (running in the background) isn't doing much. When
+``cbc_gstlal_lowmass`` alerts are received, it forks an instance of ``/bin/true`` and
+when ``cbc_gstlal_highmass`` alerts are received, it forks an instance of
+``/bin/false``. We can improve upon that pretty easily.
+
+Let's start by creating some basic wrapper scripts to print that we've received
+alerts. Again, using your favorite text editor, create the file ``lvalert-run_cbc_gstlal_lowmass.sh``
+and fill it with::
+
+    #!/bin/bash
+    echo "received an alert about a cbc_gstlal_lowmass event!" >> lvalert_cbc_gstlal_lowmass.out
+
+Similarly, create ``lvalert-run_cbc_gstlal_highmass.sh`` and fill it with::
+
+    #!/bin/bash
+    echo "received an alert about a cbc_gstlal_highmass event!" >> lvalert_cbc_gstlal_highmass.out
+
+Finally, create a file for your test node, ``lvalert-run_user.name-TestNode.sh``,
+which contains::
+
+    #!/bin/bash
+    read a
+    echo "received a test alert: ${a}" >> user.name-TestNode.out
+
+Once you've done that, ensure that all three shell scripts are executables (required
+by the delegation through ``subprocess.Popen``) with::
+
+    chmod +x lvalert-run_cbc_gstlal_lowmass.sh
+    chmod +x lvalert-run_cbc_gstlal_highmass.sh
+    chmod +x lvalert-run_user.name-TestNode.sh
+
+and edit myLVAlertListen.ini so it reads::
+
+    [cbc_gstlal_lowmass]
+    executable = lvalert-run_cbc_gstlal_lowmass.sh
+
+    [cbc_gstlal_highmass]
+    executable = lvalert-run_cbc_gstlal_highmass.sh
+
+    [user.name-TestNode]
+    executable = lvalert-run_user.name-TestNode.sh
+
+It is generally a good rule of thumb to provide the full paths to executables
+and output files in both ``myLVAlertListen.ini`` as well as these simple shell
+scripts. However, for the purpose of this tutorial we'll stick with relative
+paths.
+
+Now, because you have modified the ``lvalert_listen.ini`` file, you'll need to
+restart your ``lvalert_listen`` instance. Find the PID in the process table, kill
+the existing process, and restart the listener using the command from above.
+Alternatively, if you simply launch another instance of ``lvalert_listen`` with the
+same command line as before, the existing process will die and this one will
+take its place (with the new config file loading in memory). This is because
+only one listener can exist for any (user.name, passw0rd,
+resource.name) triple *anywhere in the network*. When you launch the second
+process, one of the processes is killed automatically (although which process
+dies may not be deterministic). Thus, I can kill processes running at CIT by
+creating processes at UWM with the same resource name. This can be extremely
+dangerous and annoying, so please be careful. It's generally best to specify a
+resource name for each listener, even if you expect to only have one, to ensure
+that you've thought through this. If you want to have multiple processes
+running under the same (user.name, passw0rd) pair, you will need to specify
+different resource.name options for each instance with the ``-r`` command line
+option. For example::
+
+    lvalert_listen -a user.name -b passw0rd -c myLVAlertListen.ini -r oneInstance &
+    lvalert_listen -a user.name -b passw0rd -c myLVAlertListen.ini -r twoInstance &
+
+will launch two instances of ``lvalert_listen`` (both using the same config file)
+with different resource names. They will both react to alerts and fork
+processes. If each points to a different config file, I can then get multiple
+types of follow-up processes forked for the same announcement through a single
+pubsub node.
+
+When alerts are received, you will see a line printed to the associated files.
+Note, the scripts for the ``cbc_gstlal`` nodes do not report anything about the
+actual alerts received, whereas the script for your test node reads in stdin
+(to a variable called "a") and then echo's that into the output file. This is
+how ``lvalert_listen`` passes the contents of the alert into the forked subprocess,
+via stdin. We'll come back to that later when we interact with GraceDB.
+
+For now, let's test your set-up by publishing a few announcements to your test
+pubsub node. Create a file called ``test.txt`` and fill it with some text like::
+
+    just a test announcement
+
+Then run::
+
+    lvalert_send -a user.name -b passw0rd -n user.name-TestNode --file test.txt
+
+This publishes the contents of test.txt as a string to the node
+``user.name-TestNode``. If your listener is running in the
+background, then you should see a new line in ``user.name-TestNode.out``
+which reads::
+
+    received a test alert: just a test announcement
+
+If you repeat the ``lvalert_send`` command, you should see multiple lines appear,
+one for each time you sent an alert.
+
+Note, each time we change the ``lvalert_listen`` config file (``myLVAlertListen.ini``)
+we have to restart the listener for the changes to take effect.
+However, if the config file points to wrapper script we can modify the contents
+of the wrapper script and have the changes take effect immediately for all
+future events *without* restarting the ``lvalert_listen`` process. This can be
+quite handy, although you should be careful to keep track of what was actually
+run when (version controlling the config file and ``lvalert-run_*sh`` scripts is a
+good idea).
+
+It is worth stressing that you do *not* have to actually use a wrapper script.
+If you have an executable that can be called via subprocess in the same way as
+the wrapper script, then you can simply specify that within myLVAlertListen.ini
+instead of dealing with wrappers at all. This can reduce the number of files
+that have to be managed but because of how ``lvalert_listen`` forks the executable
+through subprocess the executable cannot take in any command line options or
+arguments.
+
+Now, ``lvalert_listen`` is a fairly robust process and is unlikely to throw errors
+or fall over by itself. However, occasionally server-side or local trouble can
+cause your listener to die and you will need to restart it.
+Several solutions exist, although the preferred option is 
+`Monit <https://mmonit.com/monit/>`__ which can automatically restart processes and
+notify you that it did so.
+
+Reacting to GraceDB
+===================
+
+Now that you've got an ``lvalert_listen`` instance running which reacts to a few
+different pubsub nodes, let's really dig into the full potential of this
+system.
+
+So far, we either haven't used the contents of the alert or have simply printed
+them into a file. That's nice, but we can do much better. GraceDB (the main
+publisher of alerts) sends JSON (JavaScript Object Notation) strings through
+lvalert and there are several convenient tools to parse these in Python.
+Similarly, there is an extremely useful RESTful interface to GraceDB
+implemented in Python, although command-line executables also exist.
+
+Let's start by mining the JSON string sent by GraceDB for some information.
+Create a Python executable ``iReact.py`` and fill it with the following::
+
+    #!/usr/bin/python 
+    import json 
+    import sys
+
+    alert = json.loads(sys.stdin.read())
+    print 'uid : '+alert['uid']
+
+Don't forget to give this executable permissions with::
+
+    chmod +x iReact.py
+
+Now, modify your wrapper script for the test node
+(``lvalert-run_user.name-TestNode.sh``) so it reads::
+
+    #!/bin/bash
+    ./iReact.py >> lvalert_user.name-TestNode.out
+
+When we send messages to the test node, it will now delegate to ``iReact.py``. We
+don't have to restart the ``lvalert_listen`` instance because that still points to
+``lvalert-run_user.name-TestNode.sh``, which is nice.
+
+Let's go ahead and send a test message in JSON format. Edit ``test.txt`` so it
+reads::
+
+    {"uid": "G12345"}
+
+and run::
+
+    lvalert_send -a user.name -b passw0rd --node user.name-TestNode --file test.txt
+
+You should see a new line in ``lvalert_user.name-TestNode.out`` which reads::
+
+    uid : G12345
+
+Ta-da! You've now sent, received, parsed, and reacted to a JSON string
+through lvalert. This is the key way all follow-up processes listen for events
+in GraceDB and react accordingly. Note, the ``sys.stdin.read()`` command will
+block until there is something in stdin and this can cause your code to hang if
+you don't specify anything. This should not be the case when it is called from
+within ``lvalert_listen``, but it can sometimes be annoying when debugging your
+follow-up scripts.
+
+Let's do something a bit more concrete with more specific examples of how we
+can interface with GraceDB based off lvalert messages.
+Open ``iReact.py`` and modify it so it reads::
+
+    #!/usr/bin/python 
+    import json 
+    import sys
+
+    from ligo.gracedb.rest import GraceDb
+
+    alert = json.loads(sys.stdin.read())
+    print 'uid : '+alert['uid']
+
+    gdb = GraceDb() ### instantiate a GraceDB object which connects to the default server
+
+    if alert['alert_type'] == 'new': ### the event was just created and this is the first announcement
+        gdb.writeLog( alert['uid'], message="user.name heard an alert about this new event!" )
+
+    elif alert['alert_type'] == 'update': ### something happened in GraceDB for this event and GraceDB is letting everyone know
+        gdb.writeLog( alert['uid'], message="user.name heard an alert about an update for this event!" )
+
+Now, if we modify ``test.txt`` to::
+
+    {"uid": "G12345", "alert_type": "new", "far": 1e-8}
+
+and send it, ``iReact.py`` will try to write a log entry in GraceDB for event
+G12345. It's easy to see that you can filter alerts out (e.g.: only react to
+'new' events) and modify your follow-up processes behavior accordingly. To
+check that this worked, you'll need to look at the associated GraceDB page,
+expand the "full log" section and look for your log message.
+
+**IMPORTANTLY,** I've just made up 'G12345' as an example. If you really want
+to test your script you'll need to choose a real event from GraceDB. However,
+PLEASE DO NOT TEST YOUR SCRIPT WITH IMPORTANT GraceDB EVENTS like G184098 (the
+cWB entry for GW150914). Instead, please pick an event with high FAR that
+you've never heard of before. Chances are, if FAR>1e-5 no one will care about
+it and you can use it to test your script. There are also test instances of
+GraceDB available if you'd prefer to not work with the production server right
+away. Contact uwm-help@cgca.uwm.edu with a descriptive subject line for more
+information.
+
+At this point, you're pretty much ready to go. However, I'll leave you with one
+more example for what ``iReact.py`` might look like::
+
+    #!/usr/bin/python 
+    import json 
+    import sys
+
+    from ligo.gracedb.rest import GraceDb
+
+    FarThr = float(sys.argv[1])
+
+    alert = json.loads(sys.stdin.read()) 
+    print 'uid : '+alert['uid']
+
+    gdb = GraceDb() ### instantiate a GraceDB object which connects to the default server
+
+    if alert['alert_type'] == 'new': ### the event was just created and this is the first announcement
+        if alert['far'] < FarThr:
+            file_obj = open("iReact.txt", "w")
+            print >> file_obj, "wow! this was a rare event!  It had FAR = %.3e < %.3e, which was my threshold"%(alert['far'], FarThr)
+            file_obj.close()
+            gdb.writeLog( alert['uid'], message="user.name heard an alert about this new event!", filename="iReact.txt", tagname=["data_quality"] )
+
+Try to figure out exactly what this version does. If you can
+understand everything within this script you certainly know enough to get your
+follow-up process running! Hint: to get this to run correctly, you'll want to
+modify ``lvalert-run_user.name-TestNode.sh`` so it looks like::
+
+    #!/bin/bash
+    ./iReact.py 1e-8 >> lvalert_user.name-TestNode.out
+
diff --git a/doc/source/tutorials.rst b/doc/source/tutorials.rst
new file mode 100644
index 0000000000000000000000000000000000000000..3a1287dcb0055e6b71b38d0575d841f90941957b
--- /dev/null
+++ b/doc/source/tutorials.rst
@@ -0,0 +1,12 @@
+.. GraceDB documentation tutorials
+
+Tutorials
+=========
+
+Contents:
+
+.. toctree::
+   :maxdepth: 2
+
+   responding_to_lvalert
+
diff --git a/templates/base.html b/templates/base.html
index e361afa8f72d818563ddf1cdb117b68d897c9f39..ba3502038e7a4acf83358115dc1da5b6f175e939 100644
--- a/templates/base.html
+++ b/templates/base.html
@@ -32,46 +32,7 @@ function changeTime(obj, label) {
 </center>
 
 {% block nav %}
-<ul id="nav">
-    <li id="nav-home"><a href="{% url "home" %}">Home</a></li>
-    <li id="nav-search"><a href="{% url "search" %}">Search</a></li>
-    {% if user_is_internal %}
-        <li id="nav-create"><a href="{% url "create" %}">Create</a></li>
-        <li id="nav-reports"><a href="{% url "reports" %}">Reports</a></li>
-        <li id="nav-feeds"><a href="{% url "feeds" %}">RSS</a></li> 
-    {% endif %}
-    <li id="nav-latest"><a href="{% url "latest" %}">Latest</a></li>
-    {# if user_is_internal #}
-    <li id="nav-userprofile"><a href="{% url "userprofile-home" %}">Options</a></li>
-    {# endif #}
-    <li id="nav-docs"><a href="{% url "home" %}documentation/">Documentation</a></li>
-    {% if user %}
-        {% if user.first_name %}
-            <li id="nav-user">Authenticated as: {{ user.first_name }} {{user.last_name }}</li>
-        {% else %}
-            <li id="nav-user">Authenticated as: {{ user.username }}</li>
-        {% endif %}
-    {% else %}
-        <li id="nav-login"><a href="{{ login_url }}">Login</a></li>
-    {% endif %}
-</ul>
-<center>
-    {% if config_name %}
-      <h1 style="color: red;">
-          {{config_name}}
-      </h1>
-    {% endif %}
-</center>
-
-{% if 'lvem_view' in request.path %}
-<div id="lvem_view_message">
-<b>IMPORTANT:</b> You are viewing this page as a member of the LV-EM Observers group.
-At the end of your session, please remove the 'lvem_view/' string from the URL to 
-return to the regular GraceDB site. This will ensure that your group memberships 
-are correct the next time you log in. 
-</div>
-{% endif %}
-
+    {% include "navbar_frag.html" %}
 {% endblock %}
 
         <p>&nbsp;</p> <!-- bad way to create vertical space -->
diff --git a/templates/navbar_frag.html b/templates/navbar_frag.html
new file mode 100644
index 0000000000000000000000000000000000000000..e9edf5480e3cdad86f3d0dccd558bb8e91502b08
--- /dev/null
+++ b/templates/navbar_frag.html
@@ -0,0 +1,44 @@
+<ul id="nav">
+    <li id="nav-home"><a href="{% url "home" %}">Home</a></li>
+    <li id="nav-search"><a href="{% url "search" %}">Search</a></li>
+    {% if user_is_internal %}
+        <li id="nav-create"><a href="{% url "create" %}">Create</a></li>
+        <li id="nav-reports"><a href="{% url "reports" %}">Reports</a></li>
+        <li id="nav-feeds"><a href="{% url "feeds" %}">RSS</a></li>
+    {% endif %}
+    <li id="nav-latest"><a href="{% url "latest" %}">Latest</a></li>
+    {# if user_is_internal #}
+    <li id="nav-userprofile"><a href="{% url "userprofile-home" %}">Options</a></li>
+    {# endif #}
+    <li id="nav-docs"><a href="{% url "home" %}documentation/">Documentation</a></li>
+    {% if user %}
+        {% if user.is_staff %}
+            <li id="nav-admin-docs"><a href="{% url "home" %}admin_docs/">Admin docs</a></li>
+        {% endif %}
+    {% endif %}
+    {% if user %}
+        {% if user.first_name %}
+            <li id="nav-user">Authenticated as: {{ user.first_name }} {{ user.last_name }}</li>
+        {% else %}
+            <li id="nav-user">Authenticated as: {{ user.username }}</li>
+        {% endif %}
+    {% else %}
+        <li id="nav-login"><a href="{{ login_url }}">Login</a></li>
+    {% endif %}
+</ul>
+<center>
+    {% if config_name %}
+      <h1 style="color: red;">
+          {{ config_name }}
+      </h1>
+    {% endif %}
+</center>
+
+{% if 'lvem_view' in request.path %}
+<div id="lvem_view_message">
+<b>IMPORTANT:</b> You are viewing this page as a member of the LV-EM Observers group.
+At the end of your session, please remove the 'lvem_view/' string from the URL to 
+return to the regular GraceDB site. This will ensure that your group memberships 
+are correct the next time you log in. 
+</div>
+{% endif %}
diff --git a/templates/navbar_only.html b/templates/navbar_only.html
index c9457388a72537af78c76e3a5a15f7010ebf44ff..c9f7a955839cb83d5ae427f90bcd2432fd3eaba6 100644
--- a/templates/navbar_only.html
+++ b/templates/navbar_only.html
@@ -13,36 +13,7 @@
 </center>
 
 {% block nav %}
-<ul id="nav">
-    <li id="nav-home"><a href="{% url "home" %}">Home</a></li>
-    <li id="nav-search"><a href="{% url "search" %}">Search</a></li>
-    {% if user_is_internal %}
-        <li id="nav-create"><a href="{% url "create" %}">Create</a></li>
-        <li id="nav-reports"><a href="{% url "reports" %}">Reports</a></li>
-        <li id="nav-feeds"><a href="{% url "feeds" %}">RSS</a></li>
-    {% endif %}
-    <li id="nav-latest"><a href="{% url "latest" %}">Latest</a></li>
-    {# if user_is_internal #}
-    <li id="nav-userprofile"><a href="{% url "userprofile-home" %}">Options</a></li>
-    {# endif #}
-    <li id="nav-docs"><a href="{% url "home" %}documentation/">Documentation</a></li>
-    {% if user %}
-        {% if user.first_name %}
-            <li id="nav-user">Authenticated as: {{ user.first_name }} {{user.last_name }}</li>
-        {% else %}
-            <li id="nav-user">Authenticated as: {{ user.username }}</li>
-        {% endif %}
-    {% else %}
-        <li id="nav-login"><a href="{{ login_url }}">Login</a></li>
-    {% endif %}
-</ul>
-<center>
-    {% if config_name %}
-      <h1 class="docnav" style="color: red;">
-          {{config_name}}
-      </h1>
-    {% endif %}
-</center>
+    {% include "navbar_frag.html" %}
 {% endblock %}
 
         <p>&nbsp;</p> <!-- bad way to create vertical space -->