Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (58)
Showing with 1792 additions and 1016 deletions
*.gwf filter=lfs diff=lfs merge=lfs -text
......@@ -8,12 +8,14 @@ ARG CONDA_ENV
# Build stage
FROM $CI_REGISTRY_IMAGE/dependencies/conda-$CONDA_ENV:$CI_COMMIT_REF_NAME AS build
ARG CI_REGISTRY_IMAGE
ARG CI_COMMIT_REF_NAME
ARG CONDA_ENV
# Labeling/packaging stuff:
LABEL name="GstLAL Runtime Package, conda" \
maintainer="Patrick Godwin <patrick.godwin@ligo.org>" \
date="2021-03-18"
date="2021-10-14"
# Copy source to container
COPY gstlal /gstlal
......@@ -22,16 +24,11 @@ COPY gstlal-calibration /gstlal-calibration
COPY gstlal-inspiral /gstlal-inspiral
COPY gstlal-burst /gstlal-burst
# Export environment variables needed before compile:
ENV MKLROOT $CONDA_PREFIX
ENV MKL_INTERFACE_LAYER LP64
ENV MKL_THREADING_LAYER SEQUENTIAL
# Make RUN commands use bash:
SHELL ["/bin/bash", "-c"]
# Install gstlal
RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN . /opt/conda/etc/profile.d/conda.sh && \
conda activate gstlal-$CONDA_ENV && \
export PREFIX="$CONDA_PREFIX" && \
export CONDA_BUILD="1" && \
......@@ -44,7 +41,7 @@ RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN rm -rf gstlal
# Install gstlal-ugly
RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN . /opt/conda/etc/profile.d/conda.sh && \
conda activate gstlal-$CONDA_ENV && \
export PREFIX="$CONDA_PREFIX" && \
export CONDA_BUILD="1" && \
......@@ -57,7 +54,7 @@ RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN rm -rf gstlal-ugly
# Install gstlal-burst
RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN . /opt/conda/etc/profile.d/conda.sh && \
conda activate gstlal-$CONDA_ENV && \
export PREFIX="$CONDA_PREFIX" && \
export CONDA_BUILD="1" && \
......@@ -71,7 +68,7 @@ RUN rm -rf gstlal-burst
# Install gstlal-calibration
RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN . /opt/conda/etc/profile.d/conda.sh && \
conda activate gstlal-$CONDA_ENV && \
export PREFIX="$CONDA_PREFIX" && \
export CONDA_BUILD="1" && \
......@@ -85,7 +82,7 @@ RUN rm -rf gstlal-calibration
# Install gstlal-inspiral
RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN . /opt/conda/etc/profile.d/conda.sh && \
conda activate gstlal-$CONDA_ENV && \
export PREFIX="$CONDA_PREFIX" && \
export CONDA_BUILD="1" && \
......@@ -98,19 +95,30 @@ RUN . $CONDA_PREFIX/etc/profile.d/conda.sh && \
RUN rm -rf gstlal-inspiral
# Run stage
ARG CI_REGISTRY_IMAGE
ARG CI_COMMIT_REF_NAME
FROM $CI_REGISTRY_IMAGE/dependencies/conda-$CONDA_ENV:$CI_COMMIT_REF_NAME AS run
ARG CONDA_ENV
FROM $CI_REGISTRY_IMAGE/dependencies/conda-$CONDA_ENV:$CI_COMMIT_REF_NAME AS run
# Replace created environment with the base environment
# Also grab conda global config settings
COPY --from=build /opt/conda /opt/conda
COPY --from=build /opt/conda/.condarc /opt/conda
# Copy build artifacts
COPY --from=build $CONDA_PREFIX $CONDA_PREFIX
# Set up entrypoint
COPY .gitlab-ci.conda_entrypoint.sh /bin/entrypoint.sh
RUN chmod +x /bin/entrypoint.sh
# Export environment variables:
ENV PKG_CONFIG_PATH $CONDA_PREFIX/envs/gstlal-$CONDA_ENV/lib/pkgconfig
ENV GST_PLUGIN_PATH $CONDA_PREFIX/envs/gstlal-$CONDA_ENV/lib/gstreamer-1.0
# Export environment variables
ENV PKG_CONFIG_PATH /opt/conda/envs/gstlal-$CONDA_ENV/lib/pkgconfig
ENV GST_PLUGIN_PATH /opt/conda/envs/gstlal-$CONDA_ENV/lib/gstreamer-1.0
ENV GSTLAL_FIR_WHITEN 0
ENV TMPDIR /tmp
ENTRYPOINT bash
# Give entrypoint knowledge about environment to source
ENV CONDA_ENV $CONDA_ENV
# Setup for interactive shell sessions (without polluting $HOME)
RUN echo ". /opt/conda/etc/profile.d/conda.sh && conda activate gstlal-${CONDA_ENV}" >> /root/.bashrc
ENTRYPOINT ["/bin/entrypoint.sh"]
CMD ["/bin/bash"]
# NOTE: this Dockerfile is adapted from https://github.com/conda-forge/miniforge-images/blob/master/ubuntu/Dockerfile
# removing tini as an entrypoint as it doesn't play nice with Singularity
ARG CONDA_ENV
FROM debian:bullseye-slim
ARG CONDA_ENV
ARG MINIFORGE_NAME=Mambaforge
ARG MINIFORGE_VERSION=4.10.3-7
ENV CONDA_DIR=/opt/conda
ENV PATH=${CONDA_DIR}/bin:${PATH}
# 1. Install just enough for conda to work
# 2. Keep $HOME clean (no .wget-hsts file), since HSTS isn't useful in this context
# 3. Install miniforge from GitHub releases
# 4. Apply some cleanup tips from https://jcrist.github.io/conda-docker-tips.html
# In particular, we remove .pyc and .a files. The default install has no js, so we can skip that
# 5. Activate base by default when running as any *non-root* user as well
# Good security practice requires running most workloads as non-root
# This makes sure any non-root users created also have base activated
# for their interactive shells.
# 6. Activate base by default when running as root as well
# The root user is already created, so won't pick up changes to /etc/skel
RUN apt-get update > /dev/null && \
apt-get install --no-install-recommends --yes \
wget bzip2 ca-certificates \
git > /dev/null && \
apt-get clean && \
rm -rf /var/lib/apt/lists/* && \
wget --no-hsts --quiet https://github.com/conda-forge/miniforge/releases/download/${MINIFORGE_VERSION}/${MINIFORGE_NAME}-${MINIFORGE_VERSION}-Linux-$(uname -m).sh -O /tmp/miniforge.sh && \
/bin/bash /tmp/miniforge.sh -b -p ${CONDA_DIR} && \
rm /tmp/miniforge.sh && \
conda clean -tipsy && \
find ${CONDA_DIR} -follow -type f -name '*.a' -delete && \
find ${CONDA_DIR} -follow -type f -name '*.pyc' -delete && \
conda clean -afy
COPY gstlal/share/conda/envs/lock/gstlal-$CONDA_ENV-linux-64.lock .
SHELL ["/bin/bash", "-c"]
RUN conda config --set always_yes yes
RUN conda config --add channels conda-forge
RUN conda update -n base conda && \
conda clean -af
RUN conda create -n gstlal-$CONDA_ENV --file gstlal-$CONDA_ENV-linux-64.lock --force && \
conda clean -af
RUN rm -f gstlal-$CONDA_ENV-linux-64.lock
ENV PKG_CONFIG_PATH /opt/conda/envs/gstlal-$CONDA_ENV/lib/pkgconfig
ENV GST_PLUGIN_PATH /opt/conda/envs/gstlal-$CONDA_ENV/lib/gstreamer-1.0
ENTRYPOINT ["bin/bash"]
#!/usr/bin/env bash
. /opt/conda/etc/profile.d/conda.sh
conda activate gstlal-${CONDA_ENV}
exec "$@"
......@@ -42,7 +42,7 @@ stages:
- docker
- docker-latest
- test-gstlal
- test-gstlal-only-ugly
- test-gstlal-full-build
- test-gstlal-ugly
- test-burst
- test-calibration
......@@ -60,29 +60,13 @@ stages:
before_script: [ ]
script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
- |
cat <<EOF > Dockerfile
FROM igwn/base:conda
COPY gstlal/share/conda/envs/lock/gstlal-$CONDA_ENV-linux-64.lock .
SHELL ["/bin/bash", "-c"]
RUN conda config --set always_yes yes
RUN conda config --add channels conda-forge
RUN conda update -n base conda && \
conda clean -af
RUN conda create -n gstlal-$CONDA_ENV --file gstlal-$CONDA_ENV-linux-64.lock --force && \
conda clean -af
RUN rm -f gstlal-$CONDA_ENV-linux-64.lock
ENV PKG_CONFIG_PATH $CONDA_PREFIX/lib/pkgconfig
ENV GST_PLUGIN_PATH $CONDA_PREFIX/lib/gstreamer-1.0
ENTRYPOINT bash
EOF
- docker build -t $IMAGE_TAG .
- >
docker build --pull
--build-arg CONDA_ENV=$CONDA_ENV
-t $IMAGE_TAG
--file .gitlab-ci.Dockerfile.conda-deps
.
- docker push $IMAGE_TAG
# NOTE: waiting on https://gitlab.com/gitlab-org/gitlab/-/issues/30680, otherwise
# job dependencies don't work correctly for downstream stages
#only:
# changes:
# - gstlal-inspiral/share/conda/dev-environment.yml
needs: [ ]
only:
- schedules
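Because the dependency image is now built from the checked-in .gitlab-ci.Dockerfile.conda-deps rather than a here-document, the same build can be reproduced outside the CI runner. A sketch, run from the repository root with a hypothetical local tag:

# rebuild the conda dependency image locally (tag is a placeholder; CONDA_ENV=dev assumed)
docker build --pull \
    --build-arg CONDA_ENV=dev \
    -t gstlal/conda-dev:local \
    --file .gitlab-ci.Dockerfile.conda-deps \
    .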
......@@ -348,19 +332,22 @@ test:gstlal:el7:
# Run doctests
- cd gstlal
- python3 -m pytest -c pytest.ini -m "not requires_gstlal_ugly"
- python3 -m pytest -c pytest.ini -m "not requires_full_build"
only:
- schedules
- pushes
allow_failure: true
test:gstlal-only-ugly:el7:
test:gstlal-full-build:el7:
interruptible: true
image: containers.ligo.org/alexander.pace/gstlal-dev/gstlal-dev:el7
stage: test-gstlal-only-ugly
stage: test-gstlal-full-build
needs:
- level0:rpm:gstlal
- level1:rpm:gstlal-ugly
- level2:rpm:gstlal-burst
- level2:rpm:gstlal-calibration
- level2:rpm:gstlal-inspiral
script:
# Install RPMs and set up the test environment:
- if [ -d rpmbuild ]; then yum -y install rpmbuild/RPMS/x86_64/*.rpm; fi
......@@ -369,7 +356,7 @@ test:gstlal-only-ugly:el7:
# Run doctests
- cd gstlal
- python3 -m pytest -c pytest.ini -m "requires_gstlal_ugly"
- python3 -m pytest -c pytest.ini -m "requires_full_build"
only:
- schedules
- pushes
......@@ -520,43 +507,35 @@ test:gstlal:conda:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- echo $CONDA_PREFIX
- ls $CONDA_PREFIX/lib/gstreamer-1.0
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
# Run doctests
- cd gstlal
- python3 -m pytest -c pytest.ini -m "not requires_gstlal_ugly"
- python3 -m pytest -c pytest.ini -m "not requires_full_build" --junitxml=report.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report.xml
test:gstlal-only-ugly:conda:
test:gstlal-full-build:conda:
interruptible: true
image: $CI_REGISTRY_IMAGE/conda-dev:$CI_COMMIT_REF_NAME
stage: test-gstlal-only-ugly
stage: test-gstlal-full-build
needs:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- echo $CONDA_PREFIX
- ls $CONDA_PREFIX/lib/gstreamer-1.0
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
# Run doctests
- cd gstlal
- python3 -m pytest -c pytest.ini -m "requires_gstlal_ugly"
- python3 -m pytest -c pytest.ini -m "requires_full_build" --junitxml=report-full-build.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report-full-build.xml
test:gstlal-inspiral:conda:
interruptible: true
......@@ -566,18 +545,16 @@ test:gstlal-inspiral:conda:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
# Run doctests
- cd gstlal-inspiral
- python3 -m pytest -c pytest.ini
- python3 -m pytest -c pytest.ini --junitxml=report-inspiral.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report-inspiral.xml
test:gstlal-ugly:conda:
interruptible: true
......@@ -587,18 +564,16 @@ test:gstlal-ugly:conda:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
# Run doctests
- cd gstlal-ugly
- python3 -m pytest -c pytest.ini
- python3 -m pytest -c pytest.ini --junitxml=report-ugly.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report-ugly.xml
test:gstlal-burst:conda:
interruptible: true
......@@ -608,16 +583,16 @@ test:gstlal-burst:conda:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
- cd gstlal-burst
- python3 -m pytest -c pytest.ini
- python3 -m pytest -c pytest.ini --junitxml=report-burst.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report-burst.xml
test:gstlal-calibration:conda:
interruptible: true
......@@ -627,16 +602,16 @@ test:gstlal-calibration:conda:
- docker:conda:dev
before_script: [ ]
script:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- export GSTLAL_FIR_WHITEN=0
- gst-inspect-1.0
- cd gstlal-calibration
- python3 -m pytest -c pytest.ini
- python3 -m pytest -c pytest.ini --junitxml=report-calibration.xml
only:
- schedules
- pushes
allow_failure: true
artifacts:
when: always
reports:
junit: gstlal/report-calibration.xml
test:offline:conda:
interruptible: true
......@@ -651,10 +626,6 @@ test:offline:conda:
script:
# Set up directory structure and copy over built-dependencies from container:
- mkdir public
# Install RPMs and set up the test environment:
- source $CONDA_PREFIX/etc/profile.d/conda.sh
- conda activate gstlal-dev
- gst-inspect-1.0
# Export variables for the offline tutorial
- export LAL_PATH=$CONDA_PREFIX
......@@ -666,7 +637,7 @@ test:offline:conda:
- cd gstlal-inspiral/tests
# Run the makefile:
# Run tests
- make -f Makefile.offline_tutorial_test ENABLE_PLOTTING=0
only:
......@@ -683,14 +654,11 @@ docs:
before_script: [ ]
script:
- |
export DEBIAN_FRONTEND=noninteractive
export TZ=America/New_York
apt-get --allow-releaseinfo-change update -y
apt-get install -y dvipng texlive-latex-base texlive-latex-extra
conda init bash
source ~/.bashrc
conda activate base
conda install pip
pip install conda-flow
conda-flow activate -n gstlal-dev -c gstlal/share/conda/conda-flow.yml
source /opt/conda/etc/profile.d/conda.sh
conda activate gstlal-dev
mkdir -p docs/
cd doc; make html IS_CI=1
......
This diff is collapsed.
#!/usr/bin/python
#!/usr/bin/env python
# Copyright 2018 Chad Hanna
#
import sys
......@@ -6,14 +6,15 @@ import os
import subprocess
def process_source(prog, outfile):
for line in open(prog):
if not line.startswith("###"):
continue
outfile.write(line.replace("### ", "").replace("###",""))
with open(prog, 'r') as fid:
for line in fid.readlines():
if not line.startswith("###"):
continue
outfile.write(line.replace("### ", "").replace("###",""))
if len(sys.argv) == 1:
print "USAGE: sphinx-bindoc <output directory> <input directory> [patterns to exclude]"
print("USAGE: sphinx-bindoc <output directory> <input directory> [patterns to exclude]")
sys.exit()
assert(len(sys.argv) >= 3)
......@@ -45,28 +46,27 @@ for prog in sorted(os.listdir(indir)):
tocf.write("\n %s" % os.path.split(fname)[-1].replace(".rst",""))
if os.path.exists(fname):
print >> sys.stderr, "File %s already exists, skipping." % fname
print("File %s already exists, skipping." % fname)
continue
else:
print >> sys.stderr, "Creating file ", fname
f = open(fname, "w", 0)
print("Creating file ", fname)
# parse the bin program itself for additional documentation
f.write("%s\n%s\n\n" % (prog, "".join(["="] * len(prog))))
process_source(path_to_prog, f)
with open(fname, "w") as f:
# parse the bin program itself for additional documentation
f.write("%s\n%s\n\n" % (prog, "".join(["="] * len(prog))))
process_source(path_to_prog, f)
# write the output of --help
f.write("%s\n%s\n\n" % ("Command line options", "".join(["-"] * len("Command line options"))))
f.write("\n\n.. code-block:: none\n\n")
try:
proc = subprocess.Popen([path_to_prog, "--help"], stdout = subprocess.PIPE)
helpmessage = proc.communicate()[0]
helpmessage = "\n".join([" %s" % l for l in helpmessage.split("\n")])
f.write(helpmessage)
except OSError:
pass
# close the file
f.close()
# write the output of --help
f.write("%s\n%s\n\n" % ("Command line options", "".join(["-"] * len("Command line options"))))
f.write("\n\n.. code-block:: none\n\n")
try:
proc = subprocess.Popen([path_to_prog, "--help"], stdout = subprocess.PIPE)
helpmessage = proc.stdout.read()
if isinstance(helpmessage, bytes):
helpmessage = helpmessage.decode('utf-8')
helpmessage = "\n".join([" %s" % l for l in helpmessage.split('\n')])
f.write(helpmessage)
except OSError:
pass
tocf.close()
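For context, a hypothetical invocation of the generator whose usage string appears above; the directory names and exclude pattern are placeholders, not taken from this diff:

# write one .rst stub per executable found in gstlal/bin into doc/bin, excluding names matching "test"
python3 sphinx-bindoc doc/bin gstlal/bin test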
......@@ -202,6 +202,8 @@ struct _GSTLALTransferFunction {
gboolean use_fir_fft;
double df;
double f0;
guint a;
guint b;
};
......
......@@ -1139,6 +1139,8 @@ GSTLAL_IRFFT(long, double, l, );
LONG complex DTYPE *gstlal_rfft_comb_ ## LONG ## DTYPE(LONG DTYPE *td_data, guint N, guint a, guint b, LONG DTYPE f0, gboolean free_input, guint *N_out) { \
\
guint factor, N_blocks, *prime_factors, num_factors, i, j, M, M_min, M_num_factors, *M_prime_factors, M_out, M_in, rfft_N_out; \
if(N_out == NULL) \
N_out = g_malloc(sizeof(guint)); \
gboolean M_prime_factors_need_freed, exp_array_need_freed; \
long complex double *rot_array, *rot_td_data, *M_exp_array, *M_exp_array2, *exp_array, *long_fd_data, *mini_fd_data, *fd_data; \
LONG DTYPE rot_pow; \
......@@ -1192,9 +1194,9 @@ LONG complex DTYPE *gstlal_rfft_comb_ ## LONG ## DTYPE(LONG DTYPE *td_data, guin
/* Find prime factors */ \
prime_factors = find_prime_factors(b, &num_factors); \
\
rfft_N_out = b / 2 + 1; \
*N_out = (rfft_N_out + a - 1) / a; \
if(f0 > 0) { \
rfft_N_out = b / 2; \
*N_out = (guint) (rfft_N_out + a - 1 - f0 * a) / a; \
/* Apply phase rotation to shift the frequencies being measured. */ \
rot_array = find_exp_array(N, FALSE); \
rot_td_data = g_malloc(N * sizeof(long complex double)); \
......@@ -1267,9 +1269,6 @@ LONG complex DTYPE *gstlal_rfft_comb_ ## LONG ## DTYPE(LONG DTYPE *td_data, guin
} \
\
} else { \
rfft_N_out = b / 2 + 1; \
*N_out = (rfft_N_out + a - 1) / a; \
\
/* Check if we will need to use gstlal_prime_rfft_() for this */ \
if(prime_factors[num_factors - 2] >= 607) { \
/* Find the first member greater than or equal to 607 */ \
......
......@@ -8,12 +8,13 @@ IFO = H
# determines where to look for filters files (e.g., O1, O2, O3, ER10, ER13, ER14, PreER10, PreER13, PreER14)
OBSRUN = O3
START = $(shell echo 1269090018 | bc)
START = $(shell echo 1269114368 | bc)
# 1237831461 start of O3A
# 1256655618 start of O3B
# 1238288418
# 1269090018
END = $(shell echo 1269091018 | bc)
# 1269114645-21449
END = $(shell echo 1269136384 | bc)
# 1253977218 end of O3A
# 1269367218 end of O3B
# 1238338818
......@@ -48,7 +49,9 @@ DCSTESTCONFIGS = Filters/O3/GDSFilters/H1DCS_C01_1252083618_AllCorrectionsTest.i
GDSSHMCONFIGS = Filters/O3/GDSFilters/H1GDS_1258216456_testing.ini
GDSOLDCONFIGS = Filters/ER14/GDSFilters/L1GDS_1235491416_old.ini
all: $(IFO)1_hoft_GDS_SHM_frames
all: TDCFs_pcal2darm_plots
# TDCFs_pcal2darm_plots
# pcal_DCS_transfer_functions
#approxKappasErrorPlot
#$(IFO)1_hoft_DCS_STATSUB_frames.cache $(IFO)1_hoft_DCS_NONSTATSUB_frames.cache
......@@ -206,7 +209,7 @@ approxKappasErrorPlot: $(IFO)1_C01_frames.cache
#1239472998,1251057623,1256655618,1278428752
TDCFs_pcal2darm_plots: $(IFO)1_easy_raw_frames.cache $(IFO)1_SCALAR_CORR_frames.cache $(IFO)1_FCC_CORR_frames.cache $(IFO)1_FCCFS_CORR_frames.cache $(IFO)1_ALL_CORR_frames.cache
python3 pcal2darm_timeseries.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --raw-frame-cache $(IFO)1_easy_raw_frames.cache --gstlal-frame-cache-list '$(IFO)1_SCALAR_CORR_frames.cache,$(IFO)1_FCC_CORR_frames.cache,$(IFO)1_FCCFS_CORR_frames.cache,$(IFO)1_ALL_CORR_frames.cache' --config-file '$(ALL_CORR_CONFIGS)' --pcal-channel-name CAL-PCALY_RX_PD_OUT_DQ --gstlal-channel-list 'DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN' --labels '{\rm Scalars},+f_{\rm cc},+f_{\rm s}+Q,+\tau_i' --latex-labels --pcal-time-advance 0.00006103515625 --show-stats
python3 pcal2darm_timeseries.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --raw-frame-cache $(IFO)1_easy_raw_frames.cache --gstlal-frame-cache-list '$(IFO)1_SCALAR_CORR_frames.cache,$(IFO)1_FCC_CORR_frames.cache,$(IFO)1_FCCFS_CORR_frames.cache,$(IFO)1_ALL_CORR_frames.cache' --config-file-list '$(SCALAR_CORR_CONFIGS),$(FCC_CORR_CONFIGS),$(FCCFS_CORR_CONFIGS),$(ALL_CORR_CONFIGS)' --pcal-channel-name CAL-PCALY_RX_PD_OUT_DQ --gstlal-channel-list 'DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN' --labels '{\rm Multipliers},+f_{\rm cc},+f_{\rm s}+Q,+\tau_i' --latex-labels --pcal-time-advance 0.00006103515625 --show-stats
detect_actuation_timing_change: $(IFO)1_C01_frames.cache
python3 detect_kappa_change.py --gps-start-time $(START) --gps-end-time $(END) --ifo $(IFO)1 --frame-cache $(IFO)1_C01_frames.cache --statevector-channel DCS-CALIB_STATE_VECTOR_C01 filename $(IFO)_actuation_timing_changes_$(START)-$(END).txt
......@@ -219,7 +222,7 @@ pcal_GDS_transfer_functions: $(IFO)1_easy_raw_frames.cache $(IFO)1_hoft_GDS_fram
python3 plot_transfer_function.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --denominator-frame-cache $(IFO)1_easy_raw_frames.cache --denominator-channel-name CAL-PCALY_RX_PD_OUT_DQ --denominator-correction y_arm_pcal_corr --numerator-correction arm_length --frequency-max 400 --numerator-frame-cache-list $(IFO)1_hoft_GDS_frames.cache --numerator-channel-list GDS-CALIB_STRAIN --config-file $(GDSCONFIGS) --use-median --labels GDS_NO_TDCFs
pcal_DCS_transfer_functions: $(IFO)1_SCALAR_CORR_frames.cache $(IFO)1_FCC_CORR_frames.cache $(IFO)1_FCCFS_CORR_frames.cache $(IFO)1_ALL_CORR_frames.cache
python3 plot_transfer_function.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --denominator-frame-cache $(IFO)1_easy_raw_frames.cache --denominator-channel-name CAL-PCALY_RX_PD_OUT_DQ --denominator-correction y_arm_pcal_corr --numerator-correction arm_length --frequency-max 400 --numerator-frame-cache-list $(IFO)1_SCALAR_CORR_frames.cache,$(IFO)1_FCC_CORR_frames.cache,$(IFO)1_FCCFS_CORR_frames.cache,$(IFO)1_ALL_CORR_frames.cache --numerator-channel-list DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN --config-file $(ALL_CORR_CONFIGS) --use-median --labels '{\rm Scalars},+f_{\rm cc},+f_{\rm s}+Q,+\tau_i' --latex-labels
python3 plot_transfer_function.py --gps-start-time 1269021699 --gps-end-time 1269021877 --ifo $(IFO)1 --denominator-frame-cache $(IFO)1_easy_raw_frames.cache --denominator-channel-name CAL-PCALY_RX_PD_OUT_DQ --denominator-correction y_arm_pcal_corr --numerator-correction arm_length --frequency-max 400 --numerator-frame-cache-list $(IFO)1_SCALAR_CORR_frames.cache,$(IFO)1_FCC_CORR_frames.cache,$(IFO)1_FCCFS_CORR_frames.cache,$(IFO)1_ALL_CORR_frames.cache --numerator-channel-list DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN,DCS-CALIB_STRAIN --config-file $(ALL_CORR_CONFIGS) --use-median --labels '{\rm Multipliers},+f_{\rm cc},+f_{\rm s}+Q,+\tau_i' --latex-labels
pcal_DCS_EXACTKAPPAS_transfer_functions: $(IFO)1_easy_raw_frames.cache $(IFO)1_hoft_DCS_APPROXKAPPAS_frames.cache $(IFO)1_hoft_DCS_EXACTKAPPAS_frames.cache $(IFO)1_C01_frames.cache
python3 plot_transfer_function.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --denominator-frame-cache $(IFO)1_easy_raw_frames.cache --denominator-channel-name CAL-PCALY_RX_PD_OUT_DQ --denominator-correction y_arm_pcal_corr --numerator-correction arm_length --frequency-max 400 --numerator-frame-cache-list $(IFO)1_C01_frames.cache,$(IFO)1_hoft_DCS_APPROXKAPPAS_frames.cache,$(IFO)1_hoft_DCS_EXACTKAPPAS_frames.cache --numerator-channel-list DCS-CALIB_STRAIN_C01,DCS-CALIB_STRAINAPPROXKAPPAS,DCS-CALIB_STRAINEXACTKAPPAS --config-file $(DCSEXACTKAPPASCONFIGS) --labels 'C01,Approx,Exact' --use-median
......
......@@ -331,7 +331,7 @@ else:
plot_labels.append("{\\rm %s}" % label.replace(':', '{:}').replace('-', '\mbox{-}').replace('_', '\_').replace(' ', '\ '))
# Read data from files and plot it
colors = ['red', 'limegreen', 'mediumblue', 'gold', 'b', 'm'] # Hopefully the user will not want to plot more than six datasets on one plot.
colors = ['tomato', 'green', 'mediumblue', 'gold', 'b', 'm'] # Hopefully the user will not want to plot more than six datasets on one plot.
channels = calcs_channels
channels.extend(gstlal_channels)
for i in range(0, len(frequencies)):
......@@ -374,14 +374,14 @@ for i in range(0, len(frequencies)):
plt.ylabel(r'${\rm Magnitude}$')
magnitude_range = options.magnitude_ranges.split(';')[i]
ticks_and_grid(plt.gca(), ymin = float(magnitude_range.split(',')[0]), ymax = float(magnitude_range.split(',')[1]))
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'upper right')
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'lower right')
leg.get_frame().set_alpha(0.8)
plt.subplot(2, len(frequencies), len(frequencies) + i + 1)
if options.show_stats:
plt.plot(times[0], phases[0], colors[0], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s \ [\mu_{1/2} = %0.3f^{\circ}, \sigma = %0.3f^{\circ}]$' % (plot_labels[0], numpy.median(phases[0]), stdmed(phases[0])))
else:
plt.plot(times[0], phases[0], colors[0], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s$' % (plot_labels[0]))
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'upper right')
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'lower right')
leg.get_frame().set_alpha(0.8)
if i == 0:
plt.ylabel(r'${\rm Phase \ [deg]}$')
......@@ -404,14 +404,14 @@ for i in range(0, len(frequencies)):
plt.plot(times[j], magnitudes[j], colors[j % 6], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s \ [\mu_{1/2} = %0.4f, \sigma = %0.4f]$' % (plot_labels[j], numpy.median(magnitudes[j]), stdmed(magnitudes[j])))
else:
plt.plot(times[j], magnitudes[j], colors[j % 6], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s$' % (plot_labels[j]))
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'upper right')
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'lower right')
leg.get_frame().set_alpha(0.8)
plt.subplot(2, len(frequencies), len(frequencies) + i + 1)
if options.show_stats:
plt.plot(times[j], phases[j], colors[j % 6], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s \ [\mu_{1/2} = %0.3f^{\circ}, \sigma = %0.3f^{\circ}]$' % (plot_labels[j], numpy.median(phases[j]), stdmed(phases[j])))
else:
plt.plot(times[j], phases[j], colors[j % 6], linestyle = 'None', marker = '.', markersize = markersize, label = r'$%s$' % (plot_labels[j]))
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'upper right')
leg = plt.legend(fancybox = True, markerscale = 16.0 / markersize, numpoints = 1, loc = 'lower right')
leg.get_frame().set_alpha(0.8)
plt.savefig("%s_deltal_over_pcal%s_%d-%d.png" % (ifo, options.file_name_suffix, int(t_start), int(dur_in_seconds)))
plt.savefig("%s_deltal_over_pcal%s_%d-%d.pdf" % (ifo, options.file_name_suffix, int(t_start), int(dur_in_seconds)))
......
......@@ -340,7 +340,7 @@ for i in range(len(real_poles)):
colors = []
# Start with defaults to use if we don't recognize what we are plotting.
default_colors = ['tomato', 'red', 'limegreen', 'gold', 'orange', 'aqua', 'magenta', 'blue']
default_colors = ['tomato', 'green', 'blue', 'gold', 'orange', 'aqua', 'magenta', 'blue']
# Use specific colors for known version of calibration
C02_labels = ['C02', 'c02']
......
......@@ -136,6 +136,7 @@ except ImportError:
# fpconst is not part of the standard library and might not be
# available
PosInf = float("+inf")
from collections import defaultdict
from functools import reduce
import gzip
import itertools
......@@ -155,8 +156,6 @@ import warnings
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
GObject.threads_init()
Gst.init(None)
import lal
from lal import LIGOTimeGPS
......@@ -179,10 +178,13 @@ from gstlal import inspiral
from gstlal import lloidhandler
from gstlal import lloidparts
from gstlal import pipeparts
from gstlal import pipeio
from gstlal import servicediscovery
from gstlal import simulation
from gstlal import svd_bank
from gstlal.psd import read_psd
from gstlal.stream import MessageType, Stream
from gstlal.stats.inspiral_lr import LnLRDensity
GSTLAL_PROCESS_START_TIME = UTCToGPS(time.gmtime())
......@@ -775,27 +777,20 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
options.upload_time_before_merger = {row.template_id: abs(row.end.gpsSeconds) + 1e-9*abs(row.end.gpsNanoSeconds) for bank in list(banks.items())[0][-1] for row in bank.sngl_inspiral_table}
#
# calculate expected injection SNR if processing injections
# and a PSD is available. if so, this will filter the set
# of injections, calculate the expected the SNR and write
# the new injection file to disk before the pipeline starts up
# trim down injection list to the job's analysis segments if processing
# injections. this is done for two reasons:
# 1. increase efficiency for the injection element
# 2. decrease size of sim_inspiral table in output trigger files
#
if (options.injections and options.reference_psd) and not options.data_source in ("lvshm", "framexmit"):
if options.verbose:
print("calculating expected SNR for injections...", file=sys.stderr)
if options.injections:
# read in injections
xmldoc = ligolw_utils.load_filename(options.injections, verbose = options.verbose, contenthandler = LIGOLWContentHandler)
sim_inspiral_table = lsctables.SimInspiralTable.get_table(xmldoc)
# calculate expected SNR
# NOTE: f_high is determined from the bank's highest sampling
# rate with a fudge factor to safely deal with roll-off
f_low = min(set(bank.f_low for ifo in banks.keys() for bank in banks[ifo]))
max_rate = max(set(rate for ifo in banks.keys() for bank in banks[ifo] for rate in bank.get_rates()))
f_high = 0.8 * (max_rate / 2.)
inspiral.calc_sim_inspiral_table_snrs(sim_inspiral_table, psd, detectors.seg, f_low = f_low, f_high = f_high, sample_rate = max_rate)
# trim injection list to analysis segments
injections = [inj for inj in sim_inspiral_table if inj.time_geocent in detectors.seg]
sim_inspiral_table[:] = injections
# write the injection file back out to a temporary directory
f, fname = tempfile.mkstemp(".xml.gz")
......@@ -818,27 +813,102 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
if options.verbose:
print("assembling pipeline ...", file=sys.stderr)
pipeline = Gst.Pipeline(name="gstlal_inspiral")
mainloop = GObject.MainLoop()
triggersrc = lloidparts.mkLLOIDmulti(
pipeline,
detectors = detectors,
banks = banks,
psd = psd,
psd_fft_length = options.psd_fft_length,
ht_gate_threshold = ht_gate_threshold,
veto_segments = veto_segments,
verbose = options.verbose,
nxydump_segment = options.nxydump_segment,
chisq_type = options.chisq_type,
track_psd = options.track_psd,
control_peak_time = options.control_peak_time,
fir_stride = options.fir_stride,
reconstruction_segment_list = reconstruction_segment_list,
track_latency = options.data_source in ("lvshm", "framexmit"),
stream = Stream.from_datasource(
detectors,
detectors.channel_dict.keys(),
name="gstlal_inspiral",
state_vector=True,
dq_vector=True,
verbose=options.verbose,
)
#
# construct dictionaries of whitened, conditioned, down-sampled
# h(t) streams. NOTE: we assume all banks for each instrument
# were generated with the same processed PSD for that instrument
# and just extract the first without checking that this assumption
# is correct
#
for instrument, head in stream.items():
rates = set(rate for bank in banks[instrument] for rate in bank.get_rates())
if stream.source.is_live:
head = head.latency(name=f"{instrument}_datasource_latency", silent=True)
head = head.condition(
max(rates),
instrument,
width = 32,
statevector = stream.source.state_vector[instrument],
dqvector = stream.source.dq_vector[instrument],
psd = psd[instrument],
psd_fft_length = options.psd_fft_length,
ht_gate_threshold = ht_gate_threshold,
veto_segments = veto_segments[instrument] if veto_segments is not None else None,
fir_whiten_reference_psd = banks[instrument][0].processed_psd,
nxydump_segment = options.nxydump_segment,
track_psd = options.track_psd,
track_latency = stream.source.is_live,
)
stream[instrument] = head.multiband(rates, instrument=instrument)
#
# construct SNR slices
#
triggers = stream.clear()
for instrument, banklist in banks.items():
for i, bank in enumerate(banklist):
suffix = "%s%s" % (instrument, (bank.logname and "_%s" % bank.logname or ""))
head = stream[instrument].lloid_hoft_to_snr_slices(
bank,
control_peak_time = options.control_peak_time,
fir_stride = options.fir_stride,
logname = suffix,
reconstruction_segment_list = reconstruction_segment_list,
verbose = options.verbose
)
if stream.source.is_live and i == 0:
head = head.latency(name=f"{instrument}_snrSlice_latency", silent=True)
triggers[bank.bank_id][instrument] = head.checktimestamps(f"timestamps_{suffix}_snr")
#
# construct trigger generators
#
itacac_props = defaultdict(dict)
for instrument, banklist in banks.items():
for bank in banklist:
# 4 is about the lowest we can do stably for
# coincidence online...
# FIXME get this to work some day
#nsamps_window = max(bank.get_rates()) / 4
nsamps_window = 1 * max(bank.get_rates())
itacac_props[bank.bank_id][instrument] = {
"n": nsamps_window,
"snr-thresh": LnLRDensity.snr_min,
"bank_filename": bank.template_bank_filename,
"sigmasq": bank.sigmasq,
"autocorrelation_matrix": pipeio.repack_complex_array_to_real(bank.autocorrelation_bank),
"autocorrelation_mask": bank.autocorrelation_mask,
}
# FIXME: find a way to use less memory without this hack
del bank.autocorrelation_bank
for i, (bank_id, head) in enumerate(triggers.items()):
head = head.itacac(**itacac_props[bank_id])
if stream.source.is_live and i == 0:
head = head.latency(name="all_itacac_latency", silent=True)
if options.verbose:
head = head.queue(max_size_buffers=10, max_size_bytes=0, max_size_time=0)
head = head.progressreport(f"progress_xml_bank_{bank_id}")
triggers[bank_id] = head
if options.verbose:
print("done", file=sys.stderr)
......@@ -892,9 +962,8 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
if options.verbose:
print("initializing pipeline handler ...", file=sys.stderr)
handler = lloidhandler.Handler(
mainloop = mainloop,
pipeline = pipeline,
tracker = lloidhandler.LLOIDTracker(
stream,
coincs_document = inspiral.CoincsDocument(
url = output_url or "%s-%s_LLOID-%d-%d.xml.gz" % (lsctables.instrumentsproperty.set(detectors.channel_dict.keys()).replace(",", ""), options.job_tag, int(detectors.seg[0]), int(abs(detectors.seg))),
process_params = process_params,
......@@ -939,31 +1008,20 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
cap_singles = options.cap_singles,
FAR_trialsfactor = options.far_trials_factor,
activation_counts = options.activation_counts,
track_latency = options.data_source in ("lvshm", "framexmit"),
track_latency = stream.source.is_live,
template_id_time_map = options.upload_time_before_merger,
background_collector_type = background_collector_type,
verbose = options.verbose
)
if options.verbose:
print("... pipeline handler initialized", file=sys.stderr)
if options.verbose:
print("attaching appsinks to pipeline ...", file=sys.stderr)
appsync = pipeparts.AppSync(appsink_new_buffer = handler.appsink_new_buffer)
appsinks = set(appsync.add_sink(pipeline, src, caps = Gst.Caps.from_string("application/x-lal-snglinspiral"), name = "bank_%s_sink" % bank_id) for bank_id, src in triggersrc.items())
stream.add_callback(MessageType.ELEMENT, "spectrum", tracker.on_spectrum_update)
stream.add_callback(MessageType.APPLICATION, "CHECKPOINT", tracker.on_checkpoint)
stream.add_callback(MessageType.EOS, tracker.on_eos)
if options.verbose:
print("attached %d, done" % len(appsinks), file=sys.stderr)
#
# if we request a dot graph of the pipeline, set it up
#
print("... pipeline handler initialized", file=sys.stderr)
if options.write_pipeline is not None:
pipeparts.connect_appsink_dump_dot(pipeline, appsinks, options.write_pipeline, options.verbose)
pipeparts.write_dump_dot(pipeline, "%s.%s" % (options.write_pipeline, "NULL"), verbose = options.verbose)
triggers.bufsink(tracker.on_buffer, caps=Gst.Caps.from_string("application/x-lal-snglinspiral"))
#
......@@ -971,36 +1029,9 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
#
if options.data_source in ("lvshm", "framexmit"):
#
# setup sigint handler to shutdown pipeline. this is how
# the program stops gracefully, it is the only way to stop
# it. Otherwise it runs forever man.
#
# this is only done in the online case because we want
# offline jobs to just die and not write partial databases
#
signal.signal(signal.SIGINT, OneTimeSignalHandler(pipeline))
signal.signal(signal.SIGTERM, OneTimeSignalHandler(pipeline))
if options.verbose:
print("setting pipeline state to ready ...", file=sys.stderr)
if pipeline.set_state(Gst.State.READY) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline did not enter ready state")
datasource.pipeline_seek_for_gps(pipeline, *detectors.seg)
if options.verbose:
print("setting pipeline state to playing ...", file=sys.stderr)
if pipeline.set_state(Gst.State.PLAYING) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline did not enter playing state")
if options.write_pipeline is not None:
pipeparts.write_dump_dot(pipeline, "%s.%s" % (options.write_pipeline, "PLAYING"), verbose = options.verbose)
if options.verbose:
print("running pipeline ...", file=sys.stderr)
mainloop.run()
stream.start()
#
......@@ -1008,7 +1039,7 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
#
handler.write_output_url(url = output_url or handler.T050017_filename("%s_LLOID" % options.job_tag, "xml.gz"), description = "%s_LLOID" % options.job_tag, verbose = options.verbose)
tracker.write_output_url(url = output_url or handler.T050017_filename("%s_LLOID" % options.job_tag, "xml.gz"), description = "%s_LLOID" % options.job_tag, verbose = options.verbose)
#
......@@ -1039,13 +1070,12 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
#
del triggersrc
del handler.pipeline
del handler
del triggers
del tracker
del bank
del banks
if pipeline.set_state(Gst.State.NULL) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline could not be set to NULL")
stream.set_state(Gst.State.NULL)
#
......
#!/usr/bin/env python3
#
# Copyright (C) 2019-2020 ChiWai Chan
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Typical Usages:
......@@ -93,17 +109,20 @@ $ gstlal_inspiral_calc_snr \
--verbose
"""
from collections import defaultdict
from functools import reduce
from optparse import OptionParser, OptionGroup, IndentedHelpFormatter
import math
import os
import sys
import time
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst
from gstlal import datasource
from gstlal import far
from gstlal import inspiral
from gstlal import lloidparts
from gstlal import multirate_datasource
from gstlal import pipeio
from gstlal import pipeparts
from gstlal import simulation
......@@ -111,13 +130,7 @@ from gstlal import svd_bank
from gstlal import svd_bank_snr
from gstlal.psd import read_psd
from gstlal.stats.inspiral_lr import LnLRDensity
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstAudio', '1.0')
from gi.repository import GObject, Gst, GstAudio
GObject.threads_init()
Gst.init(None)
from gstlal.stream import MessageType, Stream
import lal
from lal import LIGOTimeGPS
......@@ -407,9 +420,6 @@ else:
if options.verbose:
sys.stderr.write("Building pipeline...\n")
pipeline = Gst.Pipeline(name = "gstlal_inspiral_SNR")
mainloop = GObject.MainLoop()
#
# Construct Default Pipeline Handler
#
......@@ -424,83 +434,96 @@ for instrument in gw_data_source_info.channel_dict:
snr_document = svd_bank_snr.SignalNoiseRatioDocument(bank_snrs_dict, verbose = options.verbose)
if options.coinc_output == None:
handler = svd_bank_snr.SimpleSNRHandler(pipeline, mainloop, snr_document, verbose = options.verbose)
else:
handler = svd_bank_snr.Handler(snr_document, verbose = options.verbose)
snr_appsync = pipeparts.AppSync(appsink_new_buffer = handler.appsink_new_snr_buffer)
#
# Construct Pipeline
#
itacac_dict = {}
for instrument in gw_data_source_info.channel_dict:
src, statevector, dqvector = datasource.mkbasicsrc(pipeline, gw_data_source_info, instrument, options.verbose)
hoft = multirate_datasource.mkwhitened_multirate_src(
pipeline,
src,
set(rate for bank_SNRs in bank_snrs_dict[instrument] for rate in bank_SNRs.bank.get_rates()),
stream = Stream.from_datasource(
gw_data_source_info,
gw_data_source_info.channel_dict.keys(),
name="gstlal_inspiral_SNR",
state_vector=True,
dq_vector=True,
verbose=options.verbose,
)
for instrument, head in stream.items():
rates = set(rate for bank_SNRs in bank_snrs_dict[instrument] for rate in bank_SNRs.bank.get_rates())
head = head.condition(
max(rates),
instrument,
dqvector = dqvector,
fir_whiten_reference_psd = bank.processed_psd,
ht_gate_threshold = options.ht_gate_threshold,
statevector = stream.source.state_vector[instrument],
dqvector = stream.source.dq_vector[instrument],
psd = psds_dict[instrument],
psd_fft_length = options.psd_fft_length,
statevector = statevector,
track_psd = options.track_psd,
fir_whiten_reference_psd = bank.processed_psd,
ht_gate_threshold = options.ht_gate_threshold,
veto_segments = veto_segments,
width = options.output_width
)
for index, bank_SNR in enumerate(bank_snrs_dict[instrument]):
bank = bank_SNR.bank
stream[instrument] = head.multiband(rates, instrument=instrument)
snrs = stream.clear()
if options.coinc_output is not None:
triggers = stream.clear()
for instrument, bank_snrs in bank_snrs_dict.items():
for index, bank_snr in enumerate(bank_snrs):
bank = bank_snr.bank
if options.mode == 0:
snr = lloidparts.mkLLOIDhoftToSnrSlices(
pipeline,
hoft,
snr = stream[instrument].lloid_hoft_to_snr_slices(
bank,
(None, None),
1 * Gst.SECOND,
control_peak_time = options.control_peak_time,
fir_stride = options.fir_stride,
logname = instrument,
nxydump_segment = None,
reconstruction_segment_list = reconstruction_segment_list,
snrslices = None,
verbose = options.verbose
)
else:
fir_matrix = []
for template in bank.templates:
fir_matrix += [template.real, template.imag]
snr = pipeparts.mktogglecomplex(
pipeline,
pipeparts.mkfirbank(
pipeline,
hoft[bank.sample_rate],
latency = 0,
fir_matrix = fir_matrix,
block_stride = 16 * bank.sample_rate,
time_domain = False
)
)
# Construct SNR handler by default and terminate the pipeline at here
if options.coinc_output == None:
snr_appsync.add_sink(pipeline, snr, name = "%s_%d" % (instrument, index))
snr = stream[instrument][bank.sample_rate].firbank(
latency = 0,
fir_matrix = fir_matrix,
block_stride = 16 * bank.sample_rate,
time_domain = False
)
snr.togglecomplex()
# Construct SNR handler by default and terminate the pipeline here
if options.coinc_output is not None:
snr = snr.tee()
snrs[f"{instrument}_{index:d}"] = snr.queue()
# Construct optional trigger generator
else:
snr = pipeparts.mktee(pipeline, snr)
snr_appsync.add_sink(pipeline, pipeparts.mkqueue(pipeline, snr), name = "%s_%d" % (instrument, index))
if options.coinc_output is not None:
triggers[bank.bank_id][instrument] = snr.queue()
if options.coinc_output is not None:
itacac_props = defaultdict(dict)
for instrument, bank_snrs in bank_snrs_dict.items():
for index, bank_snr in enumerate(bank_snrs):
bank = bank_snr.bank
nsamps_window = 1 * max(bank.get_rates())
if bank.bank_id not in itacac_dict:
itacac_dict[bank.bank_id] = pipeparts.mkgeneric(pipeline, None, "lal_itacac")
head = itacac_dict[bank.bank_id]
pad = head.get_request_pad("sink%d" % len(head.sinkpads))
for prop, val in [("n", nsamps_window), ("snr-thresh", LnLRDensity.snr_min), ("bank_filename", bank.template_bank_filename), ("sigmasq", bank.sigmasq), ("autocorrelation_matrix", pipeio.repack_complex_array_to_real(bank.autocorrelation_bank)), ("autocorrelation_mask", bank.autocorrelation_mask)]:
pad.set_property(prop, val)
pipeparts.mkqueue(pipeline, snr).srcpads[0].link(pad)
itacac_props[bank.bank_id][instrument] = {
"n": nsamps_window,
"snr-thresh": LnLRDensity.snr_min,
"bank_filename": bank.template_bank_filename,
"sigmasq": bank.sigmasq,
"autocorrelation_matrix": pipeio.repack_complex_array_to_real(bank.autocorrelation_bank),
"autocorrelation_mask": bank.autocorrelation_mask,
}
for bank_id, head in triggers.items():
triggers[bank_id] = head.itacac(**itacac_props[bank_id])
if options.coinc_output == None:
tracker = svd_bank_snr.SimpleSNRTracker(snr_document, verbose = options.verbose)
else:
tracker = svd_bank_snr.Tracker(snr_document, verbose = options.verbose)
snrs.bufsink(tracker.on_snr_buffer)
#
# Construct optional LLOID handler instead if --coinc-output is provided
......@@ -562,9 +585,8 @@ if options.coinc_output != None:
verbose = options.verbose
)
handler.init(
mainloop,
pipeline,
tracker.init(
stream,
coincs_document,
rankingstat,
list(banks_dict.values())[0][options.bank_number].horizon_distance_func,
......@@ -589,37 +611,24 @@ if options.coinc_output != None:
verbose = options.verbose
)
assert len(itacac_dict.keys()) >= 1
trigger_appsync = pipeparts.AppSync(appsink_new_buffer = handler.appsink_new_buffer)
trigger_appsinks = set(trigger_appsync.add_sink(pipeline, src, caps = Gst.Caps.from_string("application/x-lal-snglinspiral"), name = "bank_%s_sink" % bank_id) for bank_id, src in itacac_dict.items())
assert len(list(triggers.keys())) >= 1
triggers.bufsink(tracker.on_buffer, caps=Gst.Caps.from_string("application/x-lal-snglinspiral"))
stream.add_callback(MessageType.EOS, tracker.on_eos)
#
# Run pipeline
#
if options.verbose:
sys.stderr.write("Setting pipeline state to READY...\n")
if pipeline.set_state(Gst.State.READY) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline cannot enter ready state.")
datasource.pipeline_seek_for_gps(pipeline, *gw_data_source_info.seg)
if options.verbose:
sys.stderr.write("Seting pipeline state to PLAYING...\n")
if pipeline.set_state(Gst.State.PLAYING) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline cannot enter playing state.")
if options.verbose:
sys.stderr.write("Calculating SNR...\n")
mainloop.run()
stream.start()
if options.verbose:
sys.stderr.write("Calculation done.\n")
if pipeline.set_state(Gst.State.NULL) != Gst.StateChangeReturn.SUCCESS:
raise RuntimeError("pipeline could not be set to NULL.")
# Write outputs
if options.coinc_output != None:
handler.write_output_url(url = options.coinc_output)
handler.write_snrs(options.outdir, row_number = options.row_number, counts = options.row_counts, COMPLEX = options.complex)
tracker.write_output_url(url = options.coinc_output)
tracker.write_snrs(options.outdir, row_number = options.row_number, counts = options.row_counts, COMPLEX = options.complex)
......@@ -49,6 +49,7 @@ import multiprocessing
def parse_command_line():
parser = OptionParser(description = __doc__)
parser.add_option("--reference-psd", metavar = "filename", action = "append", default = [], help = "Set the reference psd file. Can be supplied multiple times.")
parser.add_option("--reference-psd-cache", metavar = "filename", help = "Set the reference psd cache file.")
parser.add_option("--injection-file", metavar = "filename", help = "Set the injection xml file.")
parser.add_option("--flow", metavar = "value", type = "float", help = "Set the low frequency for waveform generation and SNR integral.")
......@@ -57,8 +58,10 @@ def parse_command_line():
options, filenames = parser.parse_args()
if options.reference_psd_cache is None:
raise ValueError("Must specify --reference-psd-cache")
if options.reference_psd and options.reference_psd_cache:
raise ValueError("Cannot supply both --reference-psd and --reference-psd-cache")
elif not options.reference_psd and options.reference_psd_cache is None:
raise ValueError("one of --reference-psd or --reference-psd-cache is required")
if options.injection_file is None:
raise ValueError("Must specify --injection-file")
......@@ -137,7 +140,7 @@ def calc_expected_snr(inj):
if instrument not in chosenPSD:
continue
h = lalsimulation.SimDetectorStrainREAL8TimeSeries(h_plus, h_cross, inj.longitude, inj.latitude, inj.polarization, lalsimulation.DetectorPrefixToLALDetector(instrument))
snr[instrument] = lalsimulation.MeasureSNR(h, chosenPSD[instrument], options.flow, options.fmax)
snr[instrument] = lalsimulation.MeasureSNR(h, chosenPSD[instrument], f_min, options.fmax)
return snr
......@@ -153,6 +156,11 @@ class LIGOLWContentHandler(ligolw.LIGOLWContentHandler):
options, filenames = parse_command_line()
if options.reference_psd:
reference_psd_cache = [CacheEntry.from_T050017(path) for path in options.reference_psd]
elif options.reference_psd_cache:
reference_psd_cache = list(map(CacheEntry, open(options.reference_psd_cache)))
# Now we need to read in from the sim_inspiral table
xmldoc = ligolw_utils.load_filename(options.injection_file, verbose = True, contenthandler = LIGOLWContentHandler)
sim_inspiral_table = lsctables.SimInspiralTable.get_table(xmldoc)
......@@ -164,7 +172,7 @@ else:
# Load all PSDs so that they don't need to be loaded for every injection.
allPSDs = dict(
(cacheentry.segment, read_psd_xmldoc(ligolw_utils.load_url(cacheentry.url, verbose = True, contenthandler = PSDContentHandler)))
for cacheentry in map(CacheEntry, open(options.reference_psd_cache)) if inj_segment is not None and cacheentry.segment.intersects(inj_segment)
for cacheentry in reference_psd_cache if inj_segment is not None and cacheentry.segment.intersects(inj_segment)
)
pool = multiprocessing.Pool(options.npool)
......
......@@ -28,6 +28,8 @@ from ligo.lw.utils import ligolw_add
from ligo.lw.utils import coincs as ligolw_coincs
from ligo.lw.utils import process as ligolw_process
from gstlal import spawaveform
from scipy.spatial import Voronoi, ConvexHull, Delaunay
from scipy.spatial.qhull import QhullError
class ApproxID(object):
......@@ -53,6 +55,7 @@ class ApproxID(object):
if self.in_hull(inj_point, region):
best_tmplt = self.vor.points[i]
tmplt_id = list(self.bank.keys())[np.where(np.sum(np.array(list(self.bank.values()))==best_tmplt,axis=1)==2)[0][0]]
#FIXME: this line can fail if self.in_hull(inj_point, region) is always False. We should find a more robust way to handle this
else:
tmplt_id = list(self.bank.keys())[np.argmin(abs(np.array(list(self.bank.values()))[:,0]-inj_point[0]))]
return tmplt_id
......@@ -75,7 +78,7 @@ class ApproxID(object):
if not isinstance(hull, Delaunay):
try:
hull = Delaunay(hull)
except scipy.spatial.qhull.QhullError:
except QhullError:
hull = Delaunay(hull, qhull_options=('Qs'))
return hull.find_simplex(p) >= 0
......
......@@ -20,73 +20,138 @@
import argparse
import os
import shutil
import sys
from gstlal.config.inspiral import Config
from gstlal.dags.inspiral import DAG
from gstlal.datafind import DataCache, DataType
from gstlal.workflows import write_makefile
# set up command line options
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", help="Sets the path to read configuration from.")
parser.add_argument("-w", "--workflow", default="full", help="Sets the type of workflow to run.")
subparser = parser.add_subparsers(title="commands", metavar="<command>", dest="command")
subparser.required = True
p = subparser.add_parser("init", help="generate a Makefile based on configuration")
p.add_argument("-c", "--config", help="Sets the path to read configuration from.")
p.add_argument("-w", "--workflow", default="full", help="Sets the type of workflow to run.")
p.add_argument("-f", "--force", action="store_true", help="If set, overwrites the existing Makefile")
p = subparser.add_parser("create", help="create a workflow DAG")
p.add_argument("-c", "--config", help="Sets the path to read configuration from.")
p.add_argument("-w", "--workflow", default="full", help="Sets the type of workflow to run.")
# load config
args = parser.parse_args()
config = Config.load(args.config)
# create dag
dag = DAG(config)
dag.create_log_dir()
# create makefile
if args.command == "init":
if os.path.exists(os.path.join(os.getcwd(), "Makefile")) and not args.force:
print("Makefile already exists. To overwrite, run with --force", file=sys.stderr)
else:
write_makefile(config, f"Makefile.offline_inspiral_{args.workflow}_template", workflow=args.workflow)
# generate dag
elif args.command == "create":
config.setup()
dag = DAG(config)
dag.create_log_dir()
# common paths
filter_dir = os.path.join(config.data.analysis_dir, "filter")
rank_dir = os.path.join(config.data.rerank_dir, "rank")
# generate workflow
if args.workflow in set(("full", "filter", "rerank")):
if args.workflow in set(("full", "filter")):
# input data products
split_bank = DataCache.find(DataType.SPLIT_BANK, svd_bins="*", subtype="*")
# generate filter dag layers
ref_psd = dag.reference_psd()
median_psd = dag.median_psd(ref_psd)
svd_bank = dag.svd_bank(median_psd, split_bank)
if config.filter.injections:
injections = dag.split_injections()
injections = dag.calc_expected_snr(median_psd, injections)
lnlr_injections = dag.match_injections(injections)
triggers, dist_stats = dag.filter(ref_psd, svd_bank)
if config.filter.injections:
inj_triggers = dag.filter_injections(ref_psd, svd_bank)
triggers += inj_triggers
triggers, dist_stats = dag.aggregate(triggers, dist_stats)
else:
# load filter data products
ref_psd = DataCache.find(DataType.REFERENCE_PSD, root=config.data.analysis_dir)
median_psd = DataCache.find(DataType.MEDIAN_PSD, root=config.data.analysis_dir)
injections = DataCache.find(DataType.SPLIT_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
lnlr_injections = DataCache.find(DataType.MATCHED_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
svd_bank = DataCache.find(DataType.SVD_BANK, root=filter_dir, svd_bins="*")
dist_stats = DataCache.find(DataType.DIST_STATS, root=filter_dir, svd_bins="*")
triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*", subtype="*")
inj_triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*")
triggers += inj_triggers
# common paths
filter_dir = os.path.join(config.data.analysis_dir, "filter")
rank_dir = os.path.join(config.data.rerank_dir, "rank")
if args.workflow in set(("full", "rerank")):
# generate rerank dag layers
prior = dag.create_prior(svd_bank, median_psd, dist_stats)
dist_stats = dag.marginalize(prior, dist_stats)
# generate workflow
if args.workflow in set(("full", "filter", "rerank")):
if args.workflow in set(("full", "filter")):
# input data products
split_bank = DataCache.find(DataType.SPLIT_BANK, svd_bins="*", subtype="*")
pdfs = dag.calc_pdf(dist_stats)
pdfs = dag.marginalize_pdf(pdfs)
# generate filter dag layers
ref_psd = dag.reference_psd()
median_psd = dag.median_psd(ref_psd)
svd_bank = dag.svd_bank(median_psd, split_bank)
triggers = dag.calc_likelihood(triggers, dist_stats)
triggers = dag.cluster(triggers, injections)
triggers, dist_stats = dag.filter(ref_psd, svd_bank)
if config.filter.injections:
inj_triggers = dag.filter_injections(ref_psd, svd_bank)
triggers += inj_triggers
if config.filter.injections:
triggers, inj_triggers = dag.find_injections(triggers)
lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, lnlr_injections)
triggers, dist_stats = dag.aggregate(triggers, dist_stats)
triggers, pdfs = dag.compute_far(triggers, pdfs)
else:
# load filter data products
dag.plot_horizon_distance(ref_psd)
dag.plot_summary(triggers, pdfs)
dag.plot_background(triggers, pdfs)
dag.plot_sensitivity(triggers)
if config.filter.injections:
dag.plot_analytic_vt(triggers, pdfs, lnlr_cdfs)
elif args.workflow == "injection":
# input data products
ref_psd = DataCache.find(DataType.REFERENCE_PSD, root=config.data.analysis_dir)
median_psd = DataCache.find(DataType.MEDIAN_PSD, root=config.data.analysis_dir)
injections = DataCache.find(DataType.MATCHED_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
svd_bank = DataCache.find(DataType.SVD_BANK, root=filter_dir, svd_bins="*")
dist_stats = DataCache.find(DataType.DIST_STATS, root=filter_dir, svd_bins="*")
triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*", subtype="*")
inj_triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*")
triggers += inj_triggers
if args.workflow in set(("full", "rerank")):
# generate rerank dag layers
prior = dag.create_prior(svd_bank, median_psd, dist_stats)
dist_stats = dag.marginalize(prior, dist_stats)
pdfs = dag.calc_pdf(dist_stats)
pdfs = dag.marginalize_pdf(pdfs)
dist_stats = DataCache.find(DataType.MARG_DIST_STATS, root=rank_dir, svd_bins="*")
pdfs = DataCache.find(DataType.DIST_STAT_PDFS, root=rank_dir)
orig_zl_triggers = DataCache.find(DataType.TRIGGERS, root=rank_dir, extension="sqlite")
# make a copy of zerolag triggers
zerolag_triggers = orig_zl_triggers.copy(root="rank")
for src_trg, dest_trg in zip(orig_zl_triggers.files, zerolag_triggers.files):
shutil.copy2(src_trg, dest_trg)
# generate injection-only dag layers
injections = dag.split_injections()
injections = dag.calc_expected_snr(median_psd, injections)
lnlr_injections = dag.match_injections(injections)
triggers = dag.filter_injections(ref_psd, svd_bank)
triggers = dag.aggregate(triggers)
triggers = dag.calc_likelihood(triggers, dist_stats)
triggers = dag.cluster(triggers)
triggers = dag.cluster(triggers, injections)
if config.filter.injections:
triggers, injections = dag.find_injections(triggers)
injections = dag.match_injections(injections)
lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, injections)
triggers, injections = dag.find_injections(triggers)
lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, lnlr_injections)
triggers += zerolag_triggers
triggers, pdfs = dag.compute_far(triggers, pdfs)
......@@ -94,48 +159,12 @@ if args.workflow in set(("full", "filter", "rerank")):
dag.plot_summary(triggers, pdfs)
dag.plot_background(triggers, pdfs)
dag.plot_sensitivity(triggers)
dag.plot_analytic_vt(triggers, pdfs, lnlr_cdfs)
else:
raise ValueError(f"{args.workflow} is not a valid workflow option")
if config.filter.injections:
dag.plot_analytic_vt(triggers, pdfs, lnlr_cdfs)
elif args.workflow == "injection":
# input data products
ref_psd = DataCache.find(DataType.REFERENCE_PSD, root=config.data.analysis_dir)
svd_bank = DataCache.find(DataType.SVD_BANK, root=filter_dir, svd_bins="*")
dist_stats = DataCache.find(DataType.MARG_DIST_STATS, root=rank_dir, svd_bins="*")
pdfs = DataCache.find(DataType.DIST_STAT_PDFS, root=rank_dir)
orig_zl_triggers = DataCache.find(DataType.TRIGGER_DATABASE, root=rank_dir)
# make a copy of zerolag triggers
zerolag_triggers = orig_zl_triggers.copy(root="rank")
for src_trg, dest_trg in zip(orig_zl_triggers.files, zerolag_triggers.files):
shutil.copy2(src_trg, dest_trg)
# generate injection-only dag layers
injections = dag.match_injections()
triggers = dag.filter_injections(ref_psd, svd_bank)
triggers = dag.aggregate(triggers)
triggers = dag.calc_likelihood(triggers, dist_stats)
triggers = dag.cluster(triggers)
triggers, injections = dag.find_injections(triggers)
injections = dag.match_injections(injections)
lnlr_cdfs = dags.measure_lnlr_cdf(dist_stats, injections)
triggers += zerolag_triggers
triggers, pdfs = dag.compute_far(triggers, pdfs)
dag.plot_horizon_distance(ref_psd)
dag.plot_summary(triggers, pdfs)
dag.plot_background(triggers, pdfs)
dag.plot_sensitivity(triggers)
dag.plot_analytic_vt(triggers, pdfs, lnlr_cdfs)
else:
raise ValueError(f"{args.workflow} is not a valid workflow option")
# write dag/script to disk
dag_name = f"{args.workflow}_inspiral_dag"
dag.write_dag(f"{dag_name}.dag")
dag.write_script(f"{dag_name}.sh")
# write dag/script to disk
dag_name = f"{args.workflow}_inspiral_dag"
dag.write_dag(f"{dag_name}.dag")
dag.write_script(f"{dag_name}.sh")
......@@ -31,6 +31,7 @@ parser.add_argument("-w", "--workflow", default="inspiral", help="Sets the type
# load config
args = parser.parse_args()
config = Config.load(args.config)
config.setup()
# create dag
dag = DAG(config)
......