......@@ -363,7 +363,7 @@ test:offline:
allow_failure: true
pages:
image: aepace/gstlal-dev:el7-mkl-1.14
image: containers.ligo.org/alexander-pace/gstlal-dev/gstlal-dev:el7-mkl-1.14
stage: nightly-pages
script:
- echo "Building Documentation"
......
......@@ -5,13 +5,13 @@ GStreamer elements for gravitational-wave data analysis
[![pipeline status](https://git.ligo.org/lscsoft/gstlal/badges/master/pipeline.svg)](https://git.ligo.org/lscsoft/gstlal/commits/master)
| | version | docs | source |
| :------------:| :------: | :------: | :------: |
| `gstlal` | 1.5.0 |[docs](https://ldas-jobs.ligo.caltech.edu/~patrick.godwin/gstlal_docs/gstlal/gstlal.html) | [gstlal-1.5.0.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-1.5.0.tar.gz) |
| `gstlal-ugly` | 1.6.4 |[docs](https://ldas-jobs.ligo.caltech.edu/~patrick.godwin/gstlal_docs/gstlal-ugly/gstlal-ugly.html) | [gstlal-ugly-1.6.4.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-ugly-1.6.4.tar.gz) |
| `gstlal-inspiral` | 1.6.5 |[docs](https://ldas-jobs.ligo.caltech.edu/~patrick.godwin/gstlal_docs/gstlal-inspiral/gstlal-inspiral.html) | [gstlal-inspiral-1.6.5.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-inspiral-1.6.5.tar.gz) |
| `gstlal-calibration` | 1.2.10 |[docs](https://ldas-jobs.ligo.caltech.edu/~patrick.godwin/gstlal_docs/gstlal-calibration/gstlal-calibration.html) | [gstlal-calibration-1.2.10.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-calibration-1.2.10.tar.gz) |
| `gstlal-burst` | 0.1.1 |[docs](https://ldas-jobs.ligo.caltech.edu/~patrick.godwin/gstlal_docs/gstlal-burst/gstlal-burst.html) | [gstlal-burst-0.1.1.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-burst-0.1.1.tar.gz) |
| | version | source |
| :------------:| :------: | :------: |
| `gstlal` | 1.5.0 | [gstlal-1.5.0.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-1.5.0.tar.gz) |
| `gstlal-ugly` | 1.6.4 | [gstlal-ugly-1.6.4.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-ugly-1.6.4.tar.gz) |
| `gstlal-inspiral` | 1.6.5 | [gstlal-inspiral-1.6.5.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-inspiral-1.6.5.tar.gz) |
| `gstlal-calibration` | 1.2.10 | [gstlal-calibration-1.2.10.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-calibration-1.2.10.tar.gz) |
| `gstlal-burst` | 0.1.1 | [gstlal-burst-0.1.1.tar.gz](http://software.ligo.org/lscsoft/source/gstlal-burst-0.1.1.tar.gz) |
Full documentation is provided [here](https://lscsoft.docs.ligo.org/gstlal/).
......
......@@ -3,33 +3,43 @@ gstlal-modules :
#mv ../gstlal/python/__init__.py ../gstlal/python/__init__.py.bk
sphinx-apidoc -e -o source/gstlal/python-modules ../gstlal/python ../gstlal/python/misc.py ../gstlal/python/bottle.py ../gstlal/python/coherent_null.py ../gstlal/python/matplotlibhelper.py
#mv ../gstlal/python/__init__.py.bk ../gstlal/python/__init__.py
sed -i "1s/python/\`\`gstlal\`\`/" source/gstlal/python-modules/modules.rst
gstlal-inspiral-modules :
sphinx-apidoc -e -o source/gstlal-inspiral/python-modules ../gstlal-inspiral/python ../gstlal-inspiral/python/cbc_template_iir.py ../gstlal-inspiral/python/spawaveform.py ../gstlal-inspiral/python/emcee.py ../gstlal-inspiral/python/spiirparts.py ../gstlal-inspiral/python/snglinspiraltable.py
sed -i "1s/python/\`\`gstlal-inspiral\`\`/" source/gstlal-inspiral/python-modules/modules.rst
gstlal-calibration-modules :
sphinx-apidoc -e -o source/gstlal-calibration/python-modules ../gstlal-calibration/python
sed -i "1s/python/\`\`gstlal-calibration\`\`/" source/gstlal-calibration/python-modules/modules.rst
gstlal-burst-modules :
sphinx-apidoc -e -o source/gstlal-burst/python-modules ../gstlal-burst/python ../gstlal-burst/python/excesspower ../gstlal-burst/python/fxtools/sngltriggertable.py
sed -i "1s/python/\`\`gstlal-burst\`\`/" source/gstlal-burst/python-modules/modules.rst
gstlal-ugly-modules :
sphinx-apidoc -e -o source/gstlal-ugly/python-modules ../gstlal-ugly/python ../gstlal-ugly/python/gviz_api.py ../gstlal-ugly/python/coherent_inspiral_metric.py ../gstlal-ugly/python/coherent_inspiral_metric_plots.py ../gstlal-ugly/python/coherent_inspiral_metric_detector_details.py ../gstlal-ugly/python/aggregator.py
sed -i "1s/python/\`\`gstlal-ugly\`\`/" source/gstlal-ugly/python-modules/modules.rst
gstlal-bin :
./sphinx-bindoc source/gstlal/bin ../gstlal/bin gstlal_ligo_data_find_check
sed -i "1s/bin/\`\`gstlal\`\`/" source/gstlal/bin/bin.rst
gstlal-inspiral-bin :
./sphinx-bindoc source/gstlal-inspiral/bin ../gstlal-inspiral/bin gstlal_inspiral_flopulator gstlal_inspiral_lvalert_omegascan gstlal_ll_inspiral_calculate_range gstlal_ll_inspiral_daily_page_online gstlal_iir_inspiral gstlal_inspiral_iir_bank_pipe gstlal_ll_inspiral_save_state gstlal_inspiral_marginalize_likelihoods_online gstlal_inspiral_plot_extrinsic_params gstlal_ll_inspiral_get_urls gstlal_inspiral_plot_svd_bank gstlal_inspiral_lvalert_background_plotter
sed -i "1s/bin/\`\`gstlal-inspiral\`\`/" source/gstlal-inspiral/bin/bin.rst
gstlal-calibration-bin :
./sphinx-bindoc source/gstlal-calibration/bin ../gstlal-calibration/bin
sed -i "1s/bin/\`\`gstlal-calibration\`\`/" source/gstlal-calibration/bin/bin.rst
gstlal-burst-bin :
./sphinx-bindoc source/gstlal-burst/bin ../gstlal-burst/bin gstlal_excesspower gstlal_excesspower_trigvis
sed -i "1s/bin/\`\`gstlal-burst\`\`/" source/gstlal-burst/bin/bin.rst
gstlal-ugly-bin :
./sphinx-bindoc source/gstlal-ugly/bin ../gstlal-ugly/bin gstlal_h1h2_coh_frames gstlal_geo_player gstlal_ll_inspiral_daily_summary_page gstlal_cache_to_segments gstlal_inspiral_plot_efficiency gstlal_burst_injections_by_rate_plot gstlal_display_from_logs gstlal_inspiral_bankviz gstlal_ll_inspiral_regen_reg_from_condor_q gstlal_cbc_sensitivity_vs_flow_at_fixed_computational_cost gstlal_inspiral_check_livetimes gstlal_rds_gen gstlal_injections_by_local_rate_plot gstlal_ninja_smooth_reference_psd gstlal_shift_frames
sed -i "1s/bin/\`\`gstlal-ugly\`\`/" source/gstlal-ugly/bin/bin.rst
html : gstlal-modules gstlal-inspiral-modules gstlal-calibration-modules gstlal-ugly-modules gstlal-burst-modules gstlal-bin gstlal-inspiral-bin gstlal-calibration-bin gstlal-ugly-bin gstlal-burst-bin
sphinx-build -b html source _build
......
GstLAL API
============
.. toctree::
:maxdepth: 2
:glob:
gstlal/python-modules/*modules
gstlal-inspiral/python-modules/*modules
gstlal-calibration/python-modules/*modules
gstlal-burst/python-modules/*modules
gstlal-ugly/python-modules/*modules
.. _cbc-analysis:
CBC Analysis
================
WRITEME
......@@ -44,7 +44,7 @@ extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.pngmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
......
Executables
===============
.. toctree::
:maxdepth: 2
gstlal/bin/bin
gstlal-inspiral/bin/bin
gstlal-calibration/bin/bin
gstlal-burst/bin/bin
gstlal-ugly/bin/bin
.. _fake-data:
Fake Data Generation
=========================
WRITEME
####################################################################################################
Overview
####################################################################################################
.. _burst-overview-feature_extraction:
.. _feature_extraction:
Feature Extraction
====================================================================================================
......
GstLAL burst code
=================
.. toctree::
:maxdepth: 2
bin/bin
python-modules/modules
####################################################################################################
GstLAL burst
####################################################################################################
`GstLAL burst` contains several projects targeting a variety of different searches. These include:
* **Feature extraction:** Identify noise transient bursts (glitches) in auxiliary channel data.
* **Cosmic string search**
* **Excess power**
Contents
-------------------------
.. toctree::
:maxdepth: 2
overview
tutorials/tutorials
code
####################################################################################################
Tutorials
####################################################################################################
.. toctree::
:maxdepth: 2
running_online_jobs
running_offline_jobs
GstLAL calibration code
=======================
.. toctree::
:maxdepth: 2
bin/bin
python-modules/modules
GstLAL calibration
==========================
.. toctree::
:maxdepth: 2
code
GstLAL inspiral code
====================
.. toctree::
:maxdepth: 2
bin/bin
python-modules/modules
GstLAL inspiral
=======================
.. toctree::
:maxdepth: 2
tutorials/tutorials
code
GstLAL ugly code
================
.. toctree::
:maxdepth: 2
bin/bin
python-modules/modules
GstLAL ugly
===================
.. toctree::
:maxdepth: 2
code
GstLAL Code
===========
.. toctree::
:maxdepth: 2
bin/bin
python-modules/modules
GstLAL
==============
.. toctree::
:maxdepth: 2
tutorials/tutorials
code
Tutorials
=========
.. toctree::
:maxdepth: 1
gstlal_fake_data_overview
......@@ -10,26 +10,41 @@ Overview
The GstLAL software package is used for the following activities:
- **GstLAL:** The package `GstLAL <http://software.ligo.org/lscsoft/source/gstlal-1.4.1.tar.gz>`_ provides core Gstreamer plugins for signal processing workflows with LIGO data and core python bindings for constructing such workflows.
- ``gstlal`` provides core Gstreamer plugins for signal processing workflows with LIGO data and core python bindings for constructing such workflows.
- **GstLAL Calibration:** The package `GstLAL Calibration <http://software.ligo.org/lscsoft/source/gstlal-calibration-1.2.4.tar.gz>`_ provides real-time calibration of LIGO control system data into strain data.
- ``gstlal-calibration`` provides real-time calibration of LIGO control system data into strain data.
- **GstLAL Inspiral:** The package `GstLAL Inspiral <http://software.ligo.org/lscsoft/source/gstlal-inspiral-1.5.1.tar.gz>`_ provides additional signal processing plugins that are specific for LIGO / Virgo searches for compact binaries as well as a substantial amount of python code for post-processing raw signal processing results into gravitational wave candidate lists. Several publications about the methodology and workflow exist, see :ref:`publications`
- ``gstlal-inspiral`` provides additional signal processing plugins that are specific for LIGO / Virgo searches for compact binaries as well as a substantial amount of python code for post-processing raw signal processing results into gravitational wave candidate lists. Several publications about the methodology and workflow exist, see :ref:`publications`
- **GstLAL Ugly:** The package `GstLAL Ugly <http://software.ligo.org/lscsoft/source/gstlal-inspiral-1.5.1.tar.gz>`_ is an incubator project for gradual inclusion in the other packages.
- ``gstlal-burst`` provides additional signal processing plugins for use in astrophysical and noise transient burst searches.
- ``gstlal-ugly`` is an incubator project for gradual inclusion in the other packages.
.. _welcome-contents:
Contents
-------------------------
.. toctree::
:caption: Getting Started
:maxdepth: 2
installation
quickstart
tutorials/tutorials
.. toctree::
:caption: User Guide
:maxdepth: 2
cbc_analysis
feature_extraction
fake_data
psd_estimation
publications
.. toctree::
:maxdepth: 2
:caption: API Reference
:maxdepth: 2
getting-started
projects
publications
executables
api
Build/Test Results
-------------------------
......
Getting started
Installation
===============
You can get a development copy of the gstlal software suite from git, e.g. ``git clone https://git.ligo.org/lscsoft/gstlal.git``. At minimum, this also requires a development copy of lalsuite (``https://git.ligo.org/lscsoft/lalsuite``).
......
Projects
========
.. toctree::
:maxdepth: 1
gstlal/gstlal
gstlal-inspiral/gstlal-inspiral
gstlal-calibration/gstlal-calibration
gstlal-burst/gstlal-burst
gstlal-ugly/gstlal-ugly
.. _psd-generation:
PSD Generation
================
WRITEME
.. _publications:
Publications
============
......
.. _quickstart:
Quickstart
============
WRITEME
......@@ -23,7 +23,7 @@ The basic steps to generate and validate LIGO colored noise are:
An example PSD plot:
.. image:: ../images/H1L1fakedataexamplepsd.png
.. image:: ../gstlal/images/H1L1fakedataexamplepsd.png
:width: 400px
Custom colored noise, i.e., simulating your own detector
......@@ -40,7 +40,7 @@ then you will need to first use gstlal_psd_xml_from_asd_txt to convert it
in the documentation include this)
1. Repeat the same validation steps as above to obtain, e.g.:
.. image:: ../images/V1fakedataexamplepsd.png
.. image:: ../gstlal/images/V1fakedataexamplepsd.png
:width: 400px
......
Documentation for running an offline compact binary coalescence analysis
Running an offline compact binary coalescence analysis
========================================================================
Prerequisites
......
####################################################################################################
Running Offline Jobs
Running offline feature extraction jobs
####################################################################################################
An offline DAG is provided in /gstlal-burst/share/feature_extractor/Makefile.gstlal_feature_extractor_offline
......
Documentation for starting an online compact binary coalescence analysis
Running an online compact binary coalescence analysis
========================================================================
Prerequisites
......
####################################################################################################
Running Online Jobs
Running online feature extraction jobs
####################################################################################################
An online DAG is provided in /gstlal-burst/share/feature_extractor/Makefile.gstlal_feature_extractor_online
......
......@@ -4,5 +4,8 @@ Tutorials
.. toctree::
:maxdepth: 1
gstlal_fake_data_overview
online_analysis
offline_analysis
online_fx_jobs
offline_fx_jobs
......@@ -23,7 +23,7 @@ outdir = sys.argv[1]
tocf = open(os.path.join(outdir, "bin.rst"), "w")
tocf.write("""bin
===
=====================
.. toctree::
:maxdepth: 1
......
......@@ -130,9 +130,12 @@ class PipelineHandler(simplehandler.Handler):
# extract segment. move the segment's upper
# boundary to include all triggers.
buf_timestamp = LIGOTimeGPS(0, buf.pts)
buf_seg = {instrument: segments.segmentlist([segments.segment(buf_timestamp, buf_timestamp + LIGOTimeGPS(0, buf.duration))])}
if events:
buf_seg[instrument] |= segments.segmentlist([segments.segment(buf_timestamp, max(event.peak for event in events if event.ifo == instrument))])
if buf.mini_object.flags & Gst.BufferFlags.GAP:
buf_seg = None
# sanity check that gap buffers are empty
assert not events
else:
buf_seg = {instrument: segments.segmentlist([segments.segment(buf_timestamp, max(buf_timestamp + LIGOTimeGPS(0, buf.duration), max(event.peak for event in events if event.ifo == instrument) if events else 0.0))])}
# obtain union of this segment and the previously added segments
self.analyzed_seglistdict |= buf_seg
# put info of each event in the sngl burst table
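For readers tracing the segment bookkeeping above: a ``segmentlistdict`` can be unioned in place with a plain dict of ``segmentlist`` objects, and overlapping spans coalesce per instrument. A minimal sketch (editor's illustration; values invented):

    from ligo.segments import segment, segmentlist, segmentlistdict

    analyzed = segmentlistdict({"H1": segmentlist([segment(0, 4)])})
    analyzed |= {"H1": segmentlist([segment(3, 7)])}   # same |= as above
    assert analyzed["H1"] == segmentlist([segment(0, 7)])  # coalesced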
......@@ -306,6 +309,8 @@ if options.reference_psd is not None:
else:
psd = None
# delete the reference_psd to save memory
del options.reference_psd
@lsctables.use_in
class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
......@@ -368,6 +373,8 @@ search_summary = lsctables.SearchSummary()
search_summary.process_id = process.process_id
if options.user_tag:
search_summary.comment = options.user_tag
else:
search_summary.comment = "None"
search_summary.ifos = ",".join(all_ifos)
search_summary.out_start = search_summary.in_start = LIGOTimeGPS(options.gps_start_time)
search_summary.out_end = search_summary.in_end = LIGOTimeGPS(options.gps_end_time)
......@@ -431,11 +438,10 @@ for ifo in all_ifos:
pipeparts.src_deferred_link(head, channel_dict[ifo], elem.get_static_pad("sink"))
head = elem
# put gate for the segments and vetoes
# currently with the leaky option on to avoid step-function-like discontinuities in the data affecting the PSD.
if options.segments_file is not None:
head = datasource.mksegmentsrcgate(pipeline, head, seglists[ifo], invert_output = False, leaky = True)
head = datasource.mksegmentsrcgate(pipeline, head, seglists[ifo], invert_output = False)
if options.vetoes_file is not None:
head = datasource.mksegmentsrcgate(pipeline, head, vetolists[ifo], invert_output = True, leaky = True)
head = datasource.mksegmentsrcgate(pipeline, head, vetolists[ifo], invert_output = True)
# limit the maximum buffer duration. keeps RAM use under control
# in the event that we are loading gigantic frame files
head = pipeparts.mkreblock(pipeline, head, block_duration = 8 * 1000000000)
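# (editor's note) GStreamer durations are in nanoseconds, so the
# block_duration of 8 * 1000000000 caps each buffer at 8 seconds of data.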
......
......@@ -36,6 +36,9 @@ import shutil
import h5py
import numpy
from ligo.segments import infinity, segment, segmentlist
from gstlal import aggregator
from gstlal.fxtools import utils
# =============================================================================
......@@ -52,11 +55,14 @@ def parse_command_line():
group = optparse.OptionGroup(parser, "Combiner Options", "General settings for configuring the file combiner.")
group.add_option("-v","--verbose", default=False, action="store_true", help = "Print to stdout in addition to writing to automatically generated log.")
group.add_option("--start-time", type = "int", help = "Set the start time to combine features.")
group.add_option("--end-time", type = "int", help = "Set the end time to combine features.")
group.add_option("--log-level", type = "int", default = 10, help = "Sets the verbosity of logging. Default = 10.")
group.add_option("--rootdir", metavar = "path", default = ".", help = "Sets the root directory where features, logs, and metadata are stored.")
group.add_option("--basename", metavar = "string", default = "GSTLAL_IDQ_FEATURES", help = "Sets the basename for files written to disk. Default = GSTLAL_IDQ_FEATURES")
group.add_option("--instrument", metavar = "string", default = "H1", help = "Sets the instrument for files written to disk. Default = H1")
group.add_option("--tag", metavar = "string", default = "test", help = "Sets the name of the tag used. Default = 'test'")
group.add_option("--outdir", metavar = "path", help = "If set, chooses an alternate directory to save the features to. Default = --rootdir")
parser.add_option_group(group)
opts, args = parser.parse_args()
......@@ -81,6 +87,11 @@ if __name__ == "__main__":
verbose=options.verbose
)
### define gps bounds to grab features
start_time = options.start_time if options.start_time else -infinity()
end_time = options.end_time if options.end_time else infinity()
file_segs = segmentlist([segment(start_time, end_time)])
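### (editor's note) ligo.segments.infinity() compares greater than any
### finite number (and -infinity() less), so leaving --start-time/--end-time
### unset makes file_segs span everything and the cache filter below a no-op.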
### get base temp directory
if '_CONDOR_SCRATCH_DIR' in os.environ:
tmp_dir = os.environ['_CONDOR_SCRATCH_DIR']
......@@ -93,6 +104,11 @@ if __name__ == "__main__":
ifo=options.instrument[0],
)
cache = sorted(utils.path2cache(options.rootdir, pattern), key=lambda x: x.segment)
### filter cache with segments
cache = [entry for entry in cache if file_segs.intersects_segment(entry.segment)]
### group by segment
grouped_cache = [(seg, list(group)) for seg, group in itertools.groupby(cache, key=lambda x: x.segment)]
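### (editor's note) itertools.groupby only merges *consecutive* items with
### equal keys, so this grouping relies on the sorted(...) call above having
### ordered the cache by segment.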
### combine features in each stride
......@@ -122,7 +138,16 @@ if __name__ == "__main__":
for dset in features[channel].keys():
utils.create_new_dataset(tmp_dir, filename, features[channel][dset], name=dset, group=channel, tmp=True, metadata=metadata)
final_path = os.path.join(dirname, filename)+".h5"
### determine final location for features
if options.outdir:
start_time = int(filename.split('-')[2])
basename = '-'.join([options.instrument[0], options.basename])
base_path = utils.to_trigger_path(options.outdir, basename, start_time)
aggregator.makedir(base_path)
final_path = os.path.join(base_path, filename)+".h5"
else:
final_path = os.path.join(dirname, filename)+".h5"
tmp_path = os.path.join(tmp_dir, filename)+".h5.tmp"
logger.info('saving features to: {}'.format(final_path))
shutil.move(tmp_path, final_path)
......@@ -42,7 +42,7 @@ from gstlal.fxtools import multichannel_datasource
from gstlal.fxtools import multirate_datasource
from gstlal.fxtools import utils
PSD_DROP_TIME = 512
PSD_DROP_FACTOR = 16
# =============================
#
......@@ -50,14 +50,21 @@ PSD_DROP_TIME = 512
#
# =============================
def analysis_segments(ifo, allsegs, boundary_seg, segment_length, max_template_length = 30):
def seglist_range(start, stop, stride):
b = start
while b <= stop:
seg = segments.segment(int(b), min(utils.floor_div(int(b) + stride, stride), stop))
b = utils.floor_div(int(b) + stride, stride)
yield seg
def analysis_segments(ifo, allsegs, boundary_seg, segment_length, psd_drop_time, max_template_length = 30):
"""
get a dictionary of all the analysis segments
"""
segsdict = segments.segmentlistdict()
# start pad to allow whitener to settle + the maximum template_length
start_pad = PSD_DROP_TIME + max_template_length
start_pad = psd_drop_time + max_template_length
segsdict[ifo] = segments.segmentlist([boundary_seg])
segsdict[ifo] = segsdict[ifo].protract(start_pad)
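The ``protract`` call above pads the analysis boundary on both sides by ``start_pad``, so the whitener settling time and template length are analyzed but later trimmed from the feature output. A small illustration (editor's sketch; numbers invented):

    from ligo.segments import segment, segmentlist

    boundary = segmentlist([segment(1000000000, 1000010000)])
    padded = boundary.protract(512 + 30)   # psd_drop_time + max_template_length
    # padded == [segment(999999458, 1000010542)]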
......@@ -103,7 +110,7 @@ def feature_extractor_node_gen(feature_extractor_job, dag, parent_nodes, segsdic
# define analysis times
gps_start_time = int(seg[0])
feature_start_time = gps_start_time + PSD_DROP_TIME + max_template_length
feature_start_time = gps_start_time + options.psd_drop_time + max_template_length
feature_end_time = min(int(seg[1]), options.gps_end_time)
feature_extractor_nodes[(ii, seg)] = \
......@@ -169,9 +176,12 @@ def parse_command_line():
# FIXME: once we figure out what the maximum concurrency is for parallel reads, should set that as a sanity check
# calculate psd drop time based on fft length
options.psd_drop_time = options.psd_fft_length * PSD_DROP_FACTOR
# sanity check to enforce a minimum segment length
# Minimum segment length chosen so that the overlap is a ~33% hit in run time
min_segment_length = int(4 * PSD_DROP_TIME)
min_segment_length = int(4 * options.psd_drop_time)
assert options.segment_length >= min_segment_length
return options, filenames
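As a worked example of the new calculation (editor's sketch; the 32 s fft length is an assumption, chosen because it reproduces the previously hardcoded constant):

    psd_fft_length = 32                      # seconds (assumed default)
    psd_drop_time = psd_fft_length * 16      # PSD_DROP_FACTOR -> 512 s, the old PSD_DROP_TIME
    min_segment_length = 4 * psd_drop_time   # -> 2048 s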
......@@ -209,13 +219,14 @@ aggregator.makedir("logs")
dag = dagparts.DAG("feature_extractor_pipe")
condor_options = {"request_memory":options.request_memory, "request_cpus":options.request_cpu, "want_graceful_removal":"True", "kill_sig":"15"}
condor_options = {"request_memory": options.request_memory, "request_cpus": options.request_cpu, "want_graceful_removal": "True", "kill_sig": "15"}
condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, condor_options)
feature_extractor_job = dagparts.DAGJob("gstlal_feature_extractor", condor_commands = condor_commands)
segsdict = analysis_segments(ifo, data_source_info.frame_segments, data_source_info.seg, options.segment_length, max_template_length=max_template_length)
segsdict = analysis_segments(ifo, data_source_info.frame_segments, data_source_info.seg, options.segment_length, options.psd_drop_time, max_template_length=max_template_length)
combiner_condor_options = {"request_memory":"4GB", "request_cpus":2, "want_graceful_removal":"True", "kill_sig":"15"}
feature_combiner_job = dagparts.DAGJob("gstlal_feature_combiner", condor_commands = combiner_condor_options)
combiner_condor_options = {"request_memory": "4GB", "request_cpus": 1, "want_graceful_removal": "True", "kill_sig": "15"}
combiner_condor_commands = dagparts.condor_command_dict_from_opts(options.condor_command, combiner_condor_options)
feature_combiner_job = dagparts.DAGJob("gstlal_feature_combiner", condor_commands = combiner_condor_commands)
#
# set up jobs
......@@ -230,7 +241,12 @@ feature_combiner_options = {
"instrument": ifo,
"tag": "offline",
}
feature_combiner_nodes = dagparts.DAGNode(feature_combiner_job, dag, parent_nodes = feature_extractor_nodes.values(), opts = feature_combiner_options)
for seg in seglist_range(data_source_info.seg[0], data_source_info.seg[1], 50000):
parent_nodes = [node for (i, job_seg), node in feature_extractor_nodes.items() if seg.intersects(job_seg)]
these_options = dict(feature_combiner_options)
these_options.update({"start-time": seg[0], "end-time": seg[1]})
feature_combiner_nodes = dagparts.DAGNode(feature_combiner_job, dag, parent_nodes = parent_nodes, opts = these_options)
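The loop above replaces the single combiner node: each combiner now covers one stride and depends only on the extractor jobs whose segments it intersects. Assuming ``utils.floor_div(x, n)`` rounds ``x`` down to a multiple of ``n``, ``seglist_range`` yields stride-aligned pieces (editor's sketch):

    list(seglist_range(0, 120000, 50000))
    # -> [segment(0, 50000), segment(50000, 100000), segment(100000, 120000)]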
#
# write out dag and sub files
......
......@@ -209,7 +209,7 @@ static GstFlowReturn trigger_generator(GSTLALStringTriggergen *element, GstBuffe
guint sample;
gint channel;
length = get_available_samples(element);
length = get_available_samples(element);
if(length < autocorrelation_length(element->autocorrelation_matrix)) {
/* FIXME: PTS and duration are not necessarily correct.
* they're correct for now because we know how this element
......@@ -489,6 +489,7 @@ static GstFlowReturn prepare_output_buffer(GstBaseTransform *trans, GstBuffer *i
static GstFlowReturn transform(GstBaseTransform *trans, GstBuffer *inbuf, GstBuffer *outbuf)
{
GSTLALStringTriggergen *element = GSTLAL_STRING_TRIGGERGEN(trans);
guint64 length;
GstFlowReturn result;
g_assert(GST_BUFFER_PTS_IS_VALID(inbuf));
......@@ -500,7 +501,7 @@ static GstFlowReturn transform(GstBaseTransform *trans, GstBuffer *inbuf, GstBuf
gst_audioadapter_clear(element->adapter);
element->t0 = GST_BUFFER_PTS(inbuf);
element->offset0 = GST_BUFFER_OFFSET(inbuf);
element->next_out_offset = 0;
element->next_out_offset = GST_BUFFER_OFFSET(inbuf);
} else if(!gst_audioadapter_is_empty(element->adapter))
g_assert_cmpuint(GST_BUFFER_PTS(inbuf), ==, gst_audioadapter_expected_timestamp(element->adapter));
element->next_in_offset = GST_BUFFER_OFFSET_END(inbuf);
......@@ -509,17 +510,21 @@ static GstFlowReturn transform(GstBaseTransform *trans, GstBuffer *inbuf, GstBuf
* gap logic
*/
gst_buffer_ref(inbuf);
gst_audioadapter_push(element->adapter, inbuf);
if (!GST_BUFFER_FLAG_IS_SET(inbuf, GST_BUFFER_FLAG_GAP)) {
/* not gaps */
gst_buffer_ref(inbuf);
gst_audioadapter_push(element->adapter, inbuf);
result = trigger_generator(element, outbuf);
} else {
/* gaps */
GST_BUFFER_PTS(outbuf) = element->t0;
GST_BUFFER_DURATION(outbuf) = 0;
length = get_available_samples(element);
element->next_out_offset += length;
gst_audioadapter_flush_samples(element->adapter, length);
GST_BUFFER_PTS(outbuf) = element->t0 + gst_util_uint64_scale_int_round(element->next_out_offset - element->offset0, GST_SECOND, GST_AUDIO_INFO_RATE(&element->audio_info));
GST_BUFFER_DURATION(outbuf) = gst_util_uint64_scale_int_round(length, GST_SECOND, GST_AUDIO_INFO_RATE(&element->audio_info));
/* we get no triggers, so outbuf offset is unchanged */
GST_BUFFER_OFFSET_END(outbuf) = GST_BUFFER_OFFSET(outbuf);
GST_BUFFER_FLAG_SET(outbuf, GST_BUFFER_FLAG_GAP);
result = GST_FLOW_OK;
}
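In the gap branch above, the flushed sample count advances the output offset, and the output timestamp is then derived from the offset difference rather than by accumulating buffer durations, which avoids rounding drift. A rough Python rendering of that bookkeeping (editor's sketch; names mirror the element's fields):

    def gap_output_times(t0, offset0, next_out_offset, flushed_samples, rate):
        GST_SECOND = 1000000000   # GStreamer clocks tick in nanoseconds
        next_out_offset += flushed_samples
        pts = t0 + round((next_out_offset - offset0) * GST_SECOND / rate)
        duration = round(flushed_samples * GST_SECOND / rate)
        return next_out_offset, pts, duration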
......
......@@ -52,8 +52,6 @@ typedef struct {
PyObject_HEAD
SnglTriggerTable row;
COMPLEX8TimeSeries *snr;
/* FIXME: this should be incorporated into the LAL structure */
EventIDColumn event_id;
} gstlal_GSTLALSnglTrigger;
......@@ -185,11 +183,6 @@ static PyObject *__new__(PyTypeObject *type, PyObject *args, PyObject *kwds)
if(!new)
return NULL;
/* link the event_id pointer in the sngl_trigger row structure
* to the event_id structure */
//new->row.event_id = &new->event_id;
//new->event_id.id = 0;
/* done */
return (PyObject *) new;
}
......@@ -234,8 +227,6 @@ static PyObject *from_buffer(PyObject *cls, PyObject *args)
return NULL;
}
((gstlal_GSTLALSnglTrigger*)item)->row = gstlal_sngltrigger->parent;
/* repoint event_id to event_id structure */
//((gstlal_GSTLALSnglTrigger*)item)->row.event_id = &((gstlal_GSTLALSnglTrigger*)item)->event_id;
/* duplicate the SNR time series */
if(gstlal_sngltrigger->length)
{
......
......@@ -103,6 +103,14 @@ def create_new_dataset(path, base, data, name = 'data', group = None, tmp = Fals
return fname
def feature_dtype(columns):
"""
Given a set of columns, returns the numpy dtypes associated with those
columns. All time-based columns are double-precision; others are stored
in single-precision.
"""
return [(column, numpy.float64) if 'time' in column else (column, numpy.float32) for column in columns]
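# (editor's illustration) e.g. feature_dtype(['time', 'snr', 'frequency'])
# -> [('time', numpy.float64), ('snr', numpy.float32), ('frequency', numpy.float32)];
# GPS timestamps keep full double precision while feature values use half the space.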
#----------------------------------
### gps time utilities
......@@ -273,7 +281,7 @@ class HDF5TimeseriesFeatureData(FeatureData):
self.sample_rate = kwargs['sample_rate']
self.waveform = kwargs['waveform']
self.metadata = dict(**kwargs)
self.dtype = [(column, 'float') for column in self.columns]
self.dtype = feature_dtype(self.columns)
self.feature_data = {key: numpy.empty((self.cadence * self.sample_rate,), dtype = self.dtype) for key in keys}
self.last_save_time = 0
self.clear()
......@@ -322,7 +330,7 @@ class HDF5ETGFeatureData(FeatureData):
self.cadence = kwargs['cadence']
self.waveform = kwargs['waveform']
self.metadata = dict(**kwargs)
self.dtype = [(column, 'float') for column in self.columns]
self.dtype = feature_dtype(self.columns)
self.feature_data = {key: [] for key in keys}
self.clear()
......
ACLOCAL_AMFLAGS = -I gnuscripts
EXTRA_DIST = gstlal-calibration.spec
SUBDIRS = debian gst python bin tests
SUBDIRS = debian gst python bin tests share
# check that the most recent changelog entry's version matches the package
# version
......
dist_bin_SCRIPTS = \
gstlal_calibration_aggregator \
gstlal_compute_strain
#!/usr/bin/env python
#
# Copyright (C) 2019 Patrick Godwin
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from collections import defaultdict
import ConfigParser
import argparse
import json
import logging
import sys, os
import time
import timeit
import numpy
from kafka import KafkaConsumer
from ligo.scald import io
#
# =============================================================================
#
# Command Line
#
# =============================================================================
#
# Read command line options
def parse_command_line():
parser = argparse.ArgumentParser(description="Online calibration aggregator")
# directory to put everything in
parser.add_argument("--config-file", help="Specify configuration file.")
args = parser.parse_args()
return args
# Parse config sections
def ConfigSectionMap(section):
	dict1 = {}
	options = Config.options(section)
	for option in options:
		try:
			dict1[option] = Config.get(section, option)
		except ConfigParser.Error:
			print("exception on %s!" % option)
			dict1[option] = None
	return dict1
#
# =============================================================================
#
# Main
#
# =============================================================================
#
if __name__ == '__main__':
options = parse_command_line()
Config = ConfigParser.ConfigParser()
Config.read(options.config_file)
MonitoringConfigs = ConfigSectionMap("MonitoringConfigurations")
kafka_server = MonitoringConfigs["kafkaserver"]
influx_hostname = MonitoringConfigs["influxhostname"]
influx_port = MonitoringConfigs["influxport"]
influx_database_name = MonitoringConfigs["influxdatabasename"]
enable_auth = Config.getboolean("MonitoringConfigurations", "enableauth")
enable_https = Config.getboolean("MonitoringConfigurations", "enablehttps")
across_jobs = Config.getboolean("MonitoringConfigurations", "acrossjobs")
data_type = MonitoringConfigs["datatype"]
dump_period = float(MonitoringConfigs["dumpperiod"])
topics = ['H1_latency', 'H1_statevector_bit_check']
channel = 'H1_HOFT_TEST'
statevector_tags = ['TDCFs_valid', 'monitor_on']
logging.basicConfig(level = logging.INFO, format = "%(asctime)s %(levelname)s:%(processName)s(%(process)d):%(funcName)s: %(message)s")
consumer = KafkaConsumer(
*topics,
bootstrap_servers=[kafka_server],
value_deserializer=lambda m: json.loads(m.decode('utf-8')),
group_id='%s_aggregator' % topics[0],
auto_offset_reset='latest',
max_poll_interval_ms = 60000,
session_timeout_ms=30000,
heartbeat_interval_ms=10000,
reconnect_backoff_ms=5000,
reconnect_backoff_max_ms=30000
)
# set up aggregator sink
agg_sink = io.influx.Aggregator(
hostname=influx_hostname,
port=influx_port,
db=influx_database_name,
auth=enable_auth,
https=enable_https,
reduce_across_tags=across_jobs
)
# register measurement schemas for aggregators
for topic in topics:
if 'latency' in topic:
agg_sink.register_schema(topic, columns='data', column_key='data', tags='stage', tag_key='stage')
elif 'statevector' in topic:
agg_sink.register_schema(topic, columns='data', column_key='data', tags='check', tag_key='check')
# start an infinite loop to keep updating and aggregating data
while True:
logging.info("sleeping for %.1f s" % dump_period)
time.sleep(dump_period)
logging.info("retrieving data from kafka")
start = timeit.default_timer()
data = {topic: defaultdict(lambda: {'time': [], 'fields': {'data': []}}) for topic in topics}
### poll consumer for messages
msg_pack = consumer.poll(timeout_ms = 1000, max_records = 1000)
for tp, messages in msg_pack.items():
for message in messages:
try:
topic = message.topic
if 'latency' in topic:
ifo = topic.split('_')[0]
tag = [name for name in message.value.keys() if channel in name][0]
# strip the channel prefix (str.strip() removes matching characters, not a prefix)
formatted_tag = tag.replace(channel + '_', '', 1)
data[topic][formatted_tag]['time'].append(message.value['time'])
data[topic][formatted_tag]['fields']['data'].append(message.value[tag])
elif 'statevector' in topic:
tags = [name for name in message.value.keys() if name in statevector_tags]
for tag in tags:
data[topic][tag]['time'].append(message.value['time'])
data[topic][tag]['fields']['data'].append(message.value[tag])
except KeyError: ### no metrics
pass
### convert series to numpy arrays
for topic in topics:
for tag in data[topic].keys():
data[topic][tag]['time'] = numpy.array(data[topic][tag]['time'])
data[topic][tag]['fields']['data'] = numpy.array(data[topic][tag]['fields']['data'])
elapsed = timeit.default_timer() - start
logging.info("time to retrieve data: %.1f s" % elapsed)
# store and reduce data for each job
start = timeit.default_timer()
for topic in topics:
logging.info("storing and reducing timeseries for measurement: %s" % topic)
agg_sink.store_columns(topic, data[topic], aggregate=data_type)
elapsed = timeit.default_timer() - start
logging.info("time to store/reduce timeseries: %.1f s" % elapsed)
# close connection to consumer if using kafka
if consumer:
consumer.close()
#
# always end on an error so that condor won't think we're done and will
# restart us
#
sys.exit(1)
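For reference, a minimal configuration file matching the reads above might look like this (editor's sketch; the section and key names are taken from the code, values are purely illustrative):

    [MonitoringConfigurations]
    kafkaserver = localhost:9092
    influxhostname = localhost
    influxport = 8086
    influxdatabasename = calibration_monitoring
    enableauth = false
    enablehttps = false
    acrossjobs = false
    datatype = max
    dumpperiod = 10.0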
......@@ -23,6 +23,7 @@ AC_CONFIG_FILES([ \
gst/cmath/Makefile \
gst/splitcounter/Makefile \
python/Makefile \
share/Makefile \
tests/Makefile \
])
......@@ -239,7 +240,7 @@ AX_PYTHON_LIGO_SEGMENTS([$MIN_LIGO_SEGMENTS_VERSION])
AC_SUBST([MIN_GSTLAL_VERSION], [1.0.0])
AC_SUBST([MIN_GSTLALUGLY_VERSION], [1.0.0])
AC_SUBST([MIN_GSTLALUGLY_VERSION], [1.5.6])
AC_SUBST([MIN_GSTPLUGINSMATH_VERSION], [1.0.1])
PKG_CHECK_MODULES([GSTLAL], [gstlal >= ${MIN_GSTLAL_VERSION}])
AC_SUBST([GSTLAL_VERSION], [`$PKG_CONFIG --modversion gstlal`])
......
......@@ -22,7 +22,6 @@ Depends: ${shlibs:Depends}, ${misc:Depends}, ${python:Depends},
gir1.2-glib-2.0,
gir1.2-gstreamer-1.0 (>= @MIN_GSTREAMER_VERSION@),
gir1.2-gst-plugins-base-1.0 (>= @MIN_GSTREAMER_VERSION@),
gst-plugins-math (>= 1.0.1),
gstlal (>= @MIN_GSTLAL_VERSION@),
gstlal-ugly (>= @MIN_GSTLALUGLY_VERSION@),
gstreamer1.0-plugins-base (>= @MIN_GSTREAMER_VERSION@),
......
......@@ -60,12 +60,10 @@ struct _GSTLALInsertGap {
GstElement element;
/* pads */
Gs