Compare revisions

Commits on Source (817)
Showing with 4120 additions and 965 deletions
FROM continuumio/miniconda3 AS build
ARG BUILD_DATE
ARG CI_COMMIT_SHA
# http://label-schema.org/rc1/
LABEL org.label-schema.schema-version="1.0"
LABEL org.label-schema.name="bayeswave-conda-env"
LABEL org.label-schema.description="BayesWave: distinguish gravitational wave signals from noise and instrumental glitches"
LABEL org.label-schema.url="https://docs.ligo.org/lscsoft/bayeswave/"
LABEL org.label-schema.vcs-url="https://git.ligo.org/lscsoft/bayeswave"
LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.vcs-ref="${CI_COMMIT_SHA}"
## Build bayeswave conda environment
RUN conda update -yq -n base conda
COPY environment.yaml .
RUN conda env create -f environment.yaml
SHELL ["conda", "run", "-n", "bayeswave", "/bin/bash", "-c"]
## Use conda-pack to create a standalone environment
## in /venv:
RUN conda install -c conda-forge conda-pack
RUN conda-pack -n bayeswave -o /tmp/env.tar && \
mkdir /venv && cd /venv && tar xf /tmp/env.tar && \
rm /tmp/env.tar
## We've put venv in the same path it'll have in the final image,
## so now fix up paths:
RUN /venv/bin/conda-unpack
## Next stage
FROM rockylinux/rockylinux:latest
## Essentials
RUN yum install -y git which && \
yum clean all && \
rm -rf /var/cache/yum
## Copy /venv from the previous stage:
COPY --from=build /venv /venv
ENV PATH="/venv/bin:$PATH"
SHELL ["/bin/bash", "-c"]
FROM containers.ligo.org/docker/base:el7
LABEL name="BayesWave Build Dependencies - EL7" \
maintainer="James Alexander Clark <james.clark@ligo.org>" \
support="Base image for buillding BayesWave " \
date="20190726"
# Yum dependencies
RUN yum upgrade -y && \
yum install -y cmake3 \
gcc \
gcc-c++ \
git \
help2man \
lalapps \
lal-devel \
lalframe-devel \
lalinference-devel \
lalsimulation-devel \
python-devel \
python-ligo-lw \
python-pip \
rpm-build && \
yum clean all && \
rm -rf /var/cache/yum
RUN pip install --no-cache-dir htchirp
......@@ -7,6 +7,7 @@
__pycache__/
*.egg-info/
*.eggs/
src/.ipynb_checkpoints/*
# built objects
*.o
......@@ -19,6 +20,9 @@ BayesWaveToLALPSD
src/version.h
src/bayeswave.pc
# dist dir
dist/
# autotools stuff
.libs/
.deps/
......
# CI/CD workflow for bayeswave
# 1. Build a container with conda env dependencies. Scheduled builds produce
# the "latest" image; tags produce a tagged version of the dependencies at the
# time of that tag.
# 2. Build the bayeswave package from source in the conda env image and docs pages
# 3. Test major executables
# 4. Build & push the bayeswave runtime container from BayesWave source on top of the
# conda env dependencies.
# 5. Push documentation for tagged commits only
#
# TODO: build the env image only for master
# TODO: use the upstream for the env image
variables:
BRANCH: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
COMMIT: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
NIGHTLY: $CI_REGISTRY_IMAGE:nightly
TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
DEPENDS_IMAGE: $CI_REGISTRY_IMAGE/dependencies
UPSTREAM_REGISTRY: containers.ligo.org/lscsoft/bayeswave
CONDA_ENV_IMAGE: conda-env
BUILD_DIR: test-install
BUILD_TARGET: $CI_PROJECT_DIR/$BUILD_DIR
TEST_OUTPUT: test-output
stages:
- docker
- build
- test
- docker
- deploy
build-env:
.docker_template: &docker_deploy
image: docker
before_script:
- echo "Logging in"
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
script:
- if [ -z $CI_COMMIT_TAG ]; then IMAGE_TAG="latest"; else IMAGE_TAG="$CI_COMMIT_TAG"; fi
- if [ -z $IMAGE_NAME ]; then IMAGE="$CI_REGISTRY_IMAGE:$IMAGE_TAG"; else IMAGE="$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" ; fi
- echo "Building image - $IMAGE"
- docker build --no-cache
--build-arg CI_COMMIT_SHA=${CI_COMMIT_SHA}
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
--build-arg BUILD_IMAGE=$UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:$IMAGE_TAG
-t $IMAGE --file $DOCKERFILE .
- docker push $IMAGE
# Build the conda environment dependencies
conda-env:
stage: docker
<<: *docker_deploy
variables:
IMAGE_NAME: $CONDA_ENV_IMAGE
DOCKERFILE: .conda-env.Dockerfile
only:
refs:
- tags
- master@lscsoft/bayeswave
# FIXME: reconcile with above - SHORT TERM HACK (seriously - this could break paperwork)
# Build the conda environment dependencies
conda-env-manual:
stage: docker
<<: *docker_deploy
variables:
IMAGE_NAME: $CONDA_ENV_IMAGE
DOCKERFILE: .conda-env.Dockerfile
when: manual
# -------------------------------------------------------
# BUILD
# Build bayeswave from source
bayeswave:
stage: build
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- mkdir -p $BUILD_TARGET
- sed "s|INSTALL_DIR|$BUILD_DIR|g" $CI_PROJECT_DIR/etc/bayeswave-user-env.sh > $BUILD_TARGET/bayeswave-user-env.sh
- mkdir -p build
- cmake . -DCMAKE_BUILD_TYPE=Debug -DCMAKE_EXPORT_COMPILE_COMMANDS=true -DCMAKE_INSTALL_PREFIX=$BUILD_DIR
- cmake --build . -- VERBOSE=1
- cmake --build . --target install
artifacts:
expire_in: 6h
expire_in: 1h
paths:
- $BUILD_DIR
build-bayeswave:
BayesWaveUtils:
stage: build
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
script:
- pushd $CI_PROJECT_DIR
- mkdir -p build
- cmake3 . -DCMAKE_BUILD_TYPE=Debug -DCMAKE_EXPORT_COMPILE_COMMANDS=true -DCMAKE_INSTALL_PREFIX=$BUILD_DIR
- cmake3 --build . -- VERBOSE=1
- cmake3 --build . --target install
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- pushd BayesWaveUtils
- python setup.py install --prefix $BUILD_TARGET
- popd
artifacts:
expire_in: 6h
expire_in: 1h
paths:
- $BUILD_DIR
build-bayeswave-rpm:
# Build environment script
env-script:
stage: build
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
script:
- pushd $CI_PROJECT_DIR
- mkdir -p $CI_PROJECT_DIR/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,RPMS}
- echo '%_topdir %(echo $CI_PROJECT_DIR)/rpmbuild' > ~/.rpmmacros
- cmake3 . -DCMAKE_BUILD_TYPE=Release -DCMAKE_EXPORT_COMPILE_COMMANDS=true
- cmake3 --build . --target package_source
- mv bayeswave-*.tar.xz $CI_PROJECT_DIR/rpmbuild/SOURCES
- rpmbuild -ba bayeswave.spec
- popd
- mkdir -p $BUILD_TARGET
- sed "s|INSTALL_DIR|$BUILD_DIR|g" $CI_PROJECT_DIR/etc/bayeswave-user-env.sh > $BUILD_TARGET/bayeswave-user-env.sh
artifacts:
expire_in: 6h
expire_in: 1h
paths:
- $CI_PROJECT_DIR/rpmbuild/RPMS/x86_64/bayeswave-*.el7.x86_64.rpm
build-BayesWaveUtils:
- $BUILD_DIR
docs:
stage: build
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
script:
- pushd BayesWaveUtils
- python setup.py install --prefix $BUILD_TARGET
- popd
image: python:3.7-slim-stretch
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
before_script:
# install pandoc >= 2.0.0
- apt-get -yqq update
- apt-get -yqq install curl
- curl --location --output pandoc.deb https://github.com/jgm/pandoc/releases/download/2.7.2/pandoc-2.7.2-1-amd64.deb
- dpkg --install pandoc.deb || apt-get -y -f install; dpkg --install pandoc.deb;
# install python dependencies
- python3 -m pip install -r doc/requirements.txt
script:
- bash -ex doc/build.sh
artifacts:
expire_in: 6h
paths:
- $BUILD_DIR
- doc/_build
cache:
paths:
- .cache/pip
only:
- tags
# -------------------------------------------------------
# TEST
# FIXME: including the full commandline here is illustrative. We have a test
# script in the repository that does the same thing.
test:BayesWave:
# Test bayeswave functionality
BayesWave:
stage: test
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- source $BUILD_DIR/bayeswave-user-env.sh
- cat $BUILD_DIR/bayeswave-user-env.sh
......@@ -84,120 +147,86 @@ test:BayesWave:
--PSDlength 1024 --NCmin 2 --NCmax 2 --dataseed 1234 \
--Niter 500 --outputDir $TEST_OUTPUT"
dependencies:
- build-env
- build-bayeswave
- env-script
- bayeswave
test:BayesWavePost:
# Test bayeswavePost functionality
BayesWavePost:
stage: test
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- source $BUILD_DIR/bayeswave-user-env.sh
- cat $BUILD_DIR/bayeswave-user-env.sh
- ls -R $BUILD_DIR
- BayesWavePost --help
dependencies:
- build-env
- build-bayeswave
- env-script
- bayeswave
test:bayeswave_pipe:
bayeswave_pipe:
stage: test
image: containers.ligo.org/lscsoft/bayeswave/dependencies:el7
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- ls $BUILD_DIR
- source $BUILD_DIR/bayeswave-user-env.sh
- bayeswave_pipe --help
dependencies:
- build-env
- build-BayesWaveUtils
- env-script
- BayesWaveUtils
docker:nightly:
# Update the dependency image and bayeswave every night
stage: docker
image: docker:latest
only:
- schedules
before_script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
script:
# Build and push dependency image
- docker build --pull -t $DEPENDS_IMAGE:el7 --file .dependencies-el7.Dockerfile .
- docker push $DEPENDS_IMAGE:el7
- mkdir rpms && mv $CI_PROJECT_DIR/rpmbuild/RPMS/x86_64/bayeswave-*.el7.x86_64.rpm rpms
# Build, test and push bayeswave image
- docker build --pull -t $CI_REGISTRY_IMAGE:nightly --file Dockerfile .
- docker run $CI_REGISTRY_IMAGE:nightly /test-bayeswave.sh
- docker push $CI_REGISTRY_IMAGE:nightly
dependencies:
- build-bayeswave-rpm
# FIXME: c'mon now. Someone should add an argparser to these codes so there is SOME kind of usage info
docker:latest:
stage: docker
image: docker:latest
only:
- master@lscsoft/bayeswave
- master@james-clark/bayeswave
before_script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
script:
- mkdir rpms && mv $CI_PROJECT_DIR/rpmbuild/RPMS/x86_64/bayeswave-*.el7.x86_64.rpm rpms
- docker build --pull -t $CI_REGISTRY_IMAGE:latest --file Dockerfile .
# Build, test and push bayeswave image
- docker run $CI_REGISTRY_IMAGE:latest /test-bayeswave.sh
- docker push $CI_REGISTRY_IMAGE:latest
dependencies:
- build-bayeswave-rpm
except:
- schedules
docker:tags:
# FIXME: Until we version the dependency image, the tags containers will
# always use the nightly build of the dependency image, which always has the
# latest release of the dependencies.
stage: docker
# megaplot:
# stage: test
# image: $CONDA_ENV_IMAGE
# script:
# - ls $BUILD_DIR
# - source $BUILD_DIR/bayeswave-user-env.sh
# - megaplot.py --help
# dependencies:
# - env-script
# - BayesWaveUtils
#
# megasky:
# stage: test
# image: $CONDA_ENV_IMAGE
# script:
# - ls $BUILD_DIR
# - source $BUILD_DIR/bayeswave-user-env.sh
# - megasky.py --help
# dependencies:
# - env-script
# - BayesWaveUtils
# -------------------------------------------------------
# DEPLOY
# Latest image - install from master
bayeswave-runtime:latest:
stage: deploy
image: docker:latest
<<: *docker_deploy
variables:
DOCKERFILE: Dockerfile
only:
- tags@lscsoft/bayeswave
- tags@james-clark/bayeswave
before_script:
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
script:
- mkdir rpms && mv $CI_PROJECT_DIR/rpmbuild/RPMS/x86_64/bayeswave-*.el7.x86_64.rpm rpms
- docker build --pull -t $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG --file Dockerfile .
# Build, test and push bayeswave image
- docker run $CI_REGISTRY_IMAGE:latest /test-bayeswave.sh
- docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
dependencies:
- build-bayeswave-rpm
except:
- schedules
- master@lscsoft/bayeswave
docs:
stage: build
image: python:3.7-slim-stretch
bayeswave-runtime:tag:
stage: deploy
image: docker:latest
<<: *docker_deploy
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
before_script:
# install pandoc >= 2.0.0
- apt-get -yqq update
- apt-get -yqq install curl
- curl --location --output pandoc.deb https://github.com/jgm/pandoc/releases/download/2.7.2/pandoc-2.7.2-1-amd64.deb
- dpkg --install pandoc.deb || apt-get -y -f install; dpkg --install pandoc.deb;
# install python dependencies
- python3 -m pip install -r doc/requirements.txt
script:
- bash -ex doc/build.sh
artifacts:
expire_in: 6h
paths:
- doc/_build
cache:
paths:
- .cache/pip
DOCKERFILE: Dockerfile
only:
- tags
pages:
stage: deploy
dependencies:
- docs
only:
- tags@lscsoft/bayeswave
- tags
#- tags@lscsoft/bayeswave
script:
- mv doc/_build/html public
artifacts:
......
This diff is collapsed.
......@@ -17,4 +17,4 @@
"""
"""
__all__=['bayeswave_utils']
__all__=['bayeswave_utils', "BW_Flags", "wavelets"]
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
ifoColors = ['darkgoldenrod','darkkhaki','darkseagreen','olive','cadetblue','green','slategray','darkcyan']
def plot_repeats(runFlag, figpath = None, chain = 0):
# Takes a chain file name, counts repeated wavelets, and
# plots a histogram of that information
# Should only be run when the glitch model is on (and no chirplets)
fig, ax = plt.subplots()
for i, ifo in enumerate(runFlag.ifoNames):
print("chain name = ", runFlag.get_chain_name())
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/glitch_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # I think it's ok to hardcode glitch into the filename because this is only done for the glitch model
except:
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/full_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # for full model
except:
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/cbc_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # for CBC glitch runs
except:
print("Couldn't get glitch files for repeat plot.\n")
return(1)
repeats, percent_unique = get_repeats(wf)
# Get weighted average
sum_z2 = 0
for r in repeats:
sum_z2 += r ** 2
weight_avg = sum_z2 / sum(repeats)
if max(repeats) < 1:
bins = 1
else:
bins = np.arange(-0.5, max(repeats) + 0.5, 1)
ax.hist(repeats, bins = bins, density = False, label = "%s %.4g percent unique wavelets" % (ifo,percent_unique), alpha = 0.5, color = ifoColors[i])
ax.set_xlabel("Number of repeats")
ax.set_ylabel("Number of wavelets")
ax.set_title("Rate of glitch wavelet repitition")
ax.legend()
if figpath is not None:
plt.savefig(figpath)
return
def get_repeats(glitch_dict):
"""
Takes in output of the get_wavelet_params function
Outputs list containing the number of repeated values for each glitch waveform
# TODO, will not work for chirplets
"""
sum_D = sum(glitch_dict['D'])
wavelet_params = fill_wavelet_matrix(glitch_dict)
percent_unique = 100 * get_unique_wavelet_list(wavelet_params) / sum_D
print(f"Percent unique {percent_unique}%")
# Holds onto info about whether that value has been repeated
been_repeated = np.zeros(sum_D)
repeats = []
for i in range(sum_D):
if i%10000 == 0:
print(f"Repeats is {len(repeats)} long and is {100 * sum(repeats) / sum_D} % done")
print(f"{i} of {sum_D}")
if been_repeated[i] != 0:
continue
w = np.where(np.all(np.isclose(wavelet_params.T - wavelet_params[:, i], 0), axis=1))
repeats.append(len(w[0]))
been_repeated[w] = np.ones(len(w[0]))
return repeats, percent_unique
def fill_wavelet_matrix(glitch_dict):
sum_D = sum(glitch_dict['D'])
wavelet_params = np.zeros((5, sum_D))
i = 0
for key in glitch_dict.keys():
if key == 'D':
continue
wavelet_params[i] = glitch_dict[key]
i += 1
return wavelet_params
def get_unique_wavelet_list(wavelet_params):
unique_rows = np.unique(wavelet_params, axis=1)
print(f"get_unique_wavelet_list {np.shape(unique_rows)}")
return np.shape(unique_rows)[1]
def get_wavelet_params(filename, model, chirpflag=False, O1version=False, **keyword_parameters):
"""
Read in chain file and get all wavelet params
arguments
---------
filename (str): the chain file
model (str): signal or glitch
optional, chirpflag: True if using chirplets
optional, O1version: True if using O1 era chains
optional, restrict (int): line number if you only want one draw from the chain
optional, burnin: skip the first N sets of wavelets (or burnin = 'half' skips the first half)
outputs
-------
dictionary of the wavelet params
"""
NW = 5 # number of intrinsic parameters (changes for chirplets)
NE = 6 # number of extrinsic parameters
start = 1
labels = ['t','f','Q','logA','phi_int'] # parameters of individual wavelets
extlabels = ['alpha','sindelta','psi','elip', 'phi_ext','scale'] # Common extrinsic parameters
if chirpflag:
NW = 6
labels.append('beta')
data = {}
for l in labels:
data[l] = []
if model == 'signal': # get extrinsic parameters
for l in extlabels:
data[l] = []
data['D'] = []
infile = open(filename)
lines = infile.readlines()
if ('restrict' in keyword_parameters):
restrict = int(keyword_parameters['restrict'])
rn = [restrict]
elif ('burnin' in keyword_parameters):
if keyword_parameters['burnin'] == 'half':
burnin = int(len(lines) // 2)
else:
burnin = keyword_parameters['burnin']
rn = np.arange(burnin, len(lines))
else:
rn = np.arange(0,len(lines))
for j in rn:
line = lines[j]
spl = line.split()
waveletnumber = int(spl[0]) # how many wavelets
data['D'].append(waveletnumber)
if model == 'signal':
if waveletnumber > 0: # only do this if there are signal wavelets active
start = NE+1 # extra parameters
if O1version:
start += 1
for l in range(0,NE):
data[extlabels[l]].append(float(spl[l+1]))
for i in range(0,waveletnumber):
for l in range(0,NW):
if labels[l] == 'logA':
data[labels[l]].append(np.log10(float(spl[start+i*NW+l])))
else:
data[labels[l]].append(float(spl[start+i*NW+l]))
return data
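As a quick orientation for the helpers above, here is a minimal usage sketch (not part of the diff); the run directory and chain file name are hypothetical, and it assumes a finished glitch-model run:

chainfile = "trigtime_1126259462.0/chains/glitch_params_H1.dat.0"  # hypothetical path
params = get_wavelet_params(chainfile, model='glitch', burnin='half')  # dict of wavelet parameters
repeats, percent_unique = get_repeats(params)  # counts of identical wavelets across the chain
print(f"{percent_unique:.1f}% of sampled wavelets are unique")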
......@@ -13,6 +13,32 @@ $(document).ready(function(){
$("#main").load("./html/glitch.html");
});
});
$(document).ready(function(){
$("#cbc").click(function(){
$("#main").load("./html/cbc.html");
});
});
$(document).ready(function(){
$("#cbcglitch").click(function(){
$("#main").load("./html/cbcglitch.html");
});
});
$(document).ready(function(){
$("#cbcsignal").click(function(){
$("#main").load("./html/cbcsignal.html");
});
});
$(document).ready(function(){
$("#cbcmoments").click(function(){
$("#main").load("./html/cbcmoments.html");
});
});
$(document).ready(function(){
$("#cbcparams").click(function(){
$("#main").load("./html/cbcparams.html");
});
});
$(document).ready(function(){
$("#full").click(function(){
$("#main").load("./html/full.html");
......@@ -93,11 +119,22 @@ $(document).ready(function(){
$("#main").load("./html/diagnostics.html");
});
});
$(document).ready(function(){
$("#verbose").click(function(){
$("#main").load("./html/verbose.html");
});
});
$(document).ready(function(){
$("#skymap").click(function(){
$("#main").load("./html/skymap.html");
});
});
$(document).ready(function(){
$("#stokes").click(function(){
$("#main").load("./html/stokes.html");
});
});
$(document).ready(function(){
$("#injections").click(function(){
$("#main").load("./html/injections.html");
......@@ -108,6 +145,21 @@ $(document).ready(function(){
$("#main").load("./html/snr.html");
});
});
$(document).ready(function(){
$("#clean").click(function(){
$("#main").load("./html/clean.html");
});
});
$(document).ready(function(){
$("#cleanmoments").click(function(){
$("#main").load("./html/cleanmoments.html");
});
});
$(document).ready(function(){
$("#cleaning").click(function(){
$("#main").load("./html/cleaning.html");
});
});
function toggle(showHideDiv, switchTextDiv) {
var ele = document.getElementById(showHideDiv);
var text = document.getElementById(switchTextDiv);
......
matplotlib
numpy
astropy
gwpy
scipy
glue
\ No newline at end of file
This diff is collapsed.
#!/usr/bin/env python
import shutil, sys, os, subprocess
from bayeswave_plot import BW_Flags as bwf
def keep_which_directory(temp, local):
# Reads from each directory which model is currently being run, and decides from there which one to keep.
# If one is in the cleaning stage while the other is not, it will keep the one not being cleaned;
# if the two are in the same state, it will keep the version with more iterations.
# returns winner, loser
# Read in trigger directory name
trigdir = str(sys.argv[1])
runFlag_local = bwf.Flags(local)
runFlag_temp = bwf.Flags(temp)
runFlag_dict = {'temp':runFlag_temp,
'local':runFlag_local}
dirs = { 'temp' : temp,
'local' : local}
#dirs = [temp, local]
# If temp bayeswave.run doesn't exist, then just keep the local version
# if local bayeswave.run doesn't exist (and temp does), then keep the temp version!
for i, key in enumerate(runFlag_dict.keys()):
try:
bwf.readbwb(runFlag_dict[key])
except FileNotFoundError as e:
print(e)
# if we did not find the bayeswave.run file, then choose the other key as the winner
print("WARNING: " + runFlag_dict[key].trigdir + '/bayeswave.run' + " has yet to be created")
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# checkpointing isn't even on, so there is no reason to check checkpointing files, exiting this function
if not runFlag_dict['local'].checkpoint:
sys.exit("--checkpoint has not been turned on, returning")
models = {'temp':None, 'local':None}
lengths = {}
# now actually read in the model and iterations
for i, key in enumerate(runFlag_dict.keys()):
### Read in model
try:
# read in the model name
f = open(runFlag_dict[key].trigdir + '/checkpoint/state.dat')
except FileNotFoundError:
print("FileNotFound in {0}/checkpoint/state.dat".format(runFlag_dict[key].trigdir))
# if checkpointing file not found, then return the other directory
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# read in model from first word of first line in the state file
model = str(f.readlines()[0].split()[0])
models[key] = model
f.close()
### Read in iterations
try:
f = open(runFlag_dict[key].trigdir + '/checkpoint/temperature.dat')
except FileNotFoundError:
# if checkpointing file not found, then return the other directory
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# Keep track of file length
file_length = int(f.readlines()[0]) // runFlag_dict[key].Ncycle
lengths[key] = file_length
f.close()
# Now that we've confirmed both directories have content, decide which one is further along.
if models['temp'] == models['local']:
# the 2 models are the same, return the directory corresponding to the larger checkpoint
if lengths['temp'] > lengths['local']:
return dirs['temp'], dirs['local']
# if they are the same length, keep the local directory
return dirs['local'], dirs['temp']
# the 2 models are different, prioritize whichever one is not currently in the cleaning phase
if models['local'] == 'clean':
return dirs['temp'], dirs['local']
if models['temp'] == 'clean':
return dirs['local'], dirs['temp']
# when in doubt, just keep the local directory
# winner, loser
return dirs['local'], dirs['temp']
def switch_and_delete_dirs(winner, loser, dirname):
# winner becomes dirname, loser gets deleted
# To make sure we don't delete something important, we make sure that the loser directory
# is something that we actually do want to delete
# directories we are deleting should be of the form: WHATEVER/trigtime_WHATEVER or temp
deletenames = ['trigtime', 'temp']
delete_loser = False
for delete in deletenames:
if delete in loser:
delete_loser = True
if not delete_loser:
print("I AM NOT DELETING THE {loser} DIRECTORY!".format(loser = loser))
print("ERROR switch_and_delete_dirs: name does not contain temp or trigtime, returning")
return
else:
# loser gets deleted
shutil.rmtree(loser)
# winner gets moved
shutil.move(winner, dirname)
return
# Make sure that directory string does not have a trailing '/'
def clean_input(dir_string):
if dir_string[-1] == '/':
return dir_string[:-1]
else:
return(dir_string)
# Call to cp_files is
# cp_files.py local temp
workdir = os.getcwd()
print("cwd is {0}".format(workdir))
# read in the local directory, then we make a temp directory from that directory
# copy over the working directory to the remote machine
tempdir = clean_input(str(sys.argv[2]))
temp = './temp'
#shutil.copytree(tempdir, temp)
print("Copying over files from tempdir {0} to tempdir {1}".format(tempdir, temp))
p = subprocess.Popen('cp -r {0} {1}'.format(tempdir, temp), shell = True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
p.wait()
# localdir
outputdir = clean_input(str(sys.argv[1]))
print("localdir is {0}".format(outputdir))
try:
process = subprocess.run('head {0}/checkpoint/temperature.dat'.format(outputdir), shell = True)
except:
pass
winner, loser = keep_which_directory(temp, outputdir)
print("Keeping winner {0} \n Deleting loser {1}".format(winner, loser))
switch_and_delete_dirs(winner, loser, outputdir)
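For reference, a hedged sketch of how this recovery script is typically invoked (the directory paths below are hypothetical); the first argument is the local output directory and the second is the temporary scratch copy to reconcile against:

import subprocess

# Hypothetical paths; argv[1] = local output directory, argv[2] = scratch/temp copy.
# Assumes cp_files.py is installed on the PATH via the BayesWaveUtils setup.py scripts.
subprocess.run(["cp_files.py", "trigtime_1126259462.0_0.0_0_0", "/local/condor/execute/dir_12345/outputDir"])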
#!/usr/bin/env python
import subprocess, os, sys
from bayeswave_plot import BW_Flags as bwf
def delete_lines(filename, file_length):
# delete all lines except the first `file_length` lines
# get number of lines currently in the file
try:
N_lines = int(subprocess.check_output("wc -l <{filename}".format(filename = filename), shell=True))
except subprocess.CalledProcessError:
# returns if file does not exist
return
if N_lines == file_length:
return
elif N_lines < file_length:
print("ERROR!! File {filename} too short! Should be {file_length} but is {N_lines}".format(filename = filename, file_length = file_length, N_lines = N_lines))
else:
print("Deleting {0} lines from {filename} ".format(N_lines - file_length, filename = filename))
# deletes from file_length (exclusive) to end of file
subprocess.run("sed -i '{file_length},$ d' {filename}".format(filename = filename, file_length = file_length+1), shell = True)
# Read in trigger directory name
trigdir = str(sys.argv[1])
runFlags = bwf.Flags(trigdir)
try:
bwf.readbwb(runFlags)
except FileNotFoundError:
sys.exit("WARNING: " + runFlags.trigdir + '/bayeswave.run' + " has yet to be created, returning")
if not runFlags.checkpoint:
sys.exit("--checkpoint has not been turned on, returning")
# read in the checkpoint temperature file to see what iteration we should be at
f = open(runFlags.trigdir + '/checkpoint/temperature.dat')
file_length = int(f.readlines()[0]) // runFlags.Ncycle
f.close()
# get name of model
try:
f = open(trigdir + '/checkpoint/state.dat', 'r')
except:
sys.exit(runFlags.trigdir + '/checkpoint/state.dat' + " has yet to be created, returning")
model = f.readlines()[0].split()[0] # first entry is what model we are on
f.close()
print("Model is {model}".format(model = model))
# Get files relating to model
chaindir = runFlags.trigdir + '/chains'
onlyfiles = [f for f in os.listdir(chaindir) if os.path.isfile(os.path.join(chaindir, f))]
model_files = [f for f in onlyfiles if (model in f)]
# if model is cbc, cbc_skychain.dat should not be considered part of the cbc files
if model == 'cbc':
model_files.remove('cbc_skychain.dat')
# delete extra lines from files
for f in model_files:
delete_lines(runFlags.trigdir + '/chains/' + f, file_length)
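The delete_lines() call above truncates each chain file in place via sed; a rough pure-Python equivalent of that truncation step (a sketch only, not what the script uses) would be:

def truncate_to_length(filename, file_length):
    # Keep only the first `file_length` lines, mirroring the
    # sed -i '<file_length+1>,$ d' behaviour in delete_lines() above.
    with open(filename) as f:
        lines = f.readlines()
    if len(lines) > file_length:
        with open(filename, "w") as f:
            f.writelines(lines[:file_length])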
This diff is collapsed.
#!/usr/bin/env python
from __future__ import print_function
print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\
Warning! megasky.py is now deprecated. Sky maps are now made within megaplot.py\n\
Use ONLY IF you need to make a fits file and know exactly what you're doing.\n\
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
import lal
import matplotlib
matplotlib.use('Agg')
......@@ -208,12 +214,26 @@ def make_skyview(directory='.', mdc=None, NSIDE=128, inj=None, npost=5000, geo=F
# -- Input skymap data
print("Extracting RA/DEC samples")
cbc_run = False
try:
filename = './chains/' + 'signal_params_h0.dat.0'
print("trying to read %s"%(filename))
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
except:
filename = './chains/' + 'full_params_h0.dat.0'
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
try:
print("trying to read %s"%(filename))
filename = './chains/' + 'full_params_h0.dat.0'
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
except:
try:
print("trying to read %s"%(filename))
filename = './chains/' + 'cbc_params.dat.0'
data = np.loadtxt(filename, unpack=True,usecols=(0,11,12))
cbc_run = True
except:
print("Was not able to find anything to read, is this a glitchOnly run?")
pass
ralist = data[1]
sin_dec = data[2]
print("Total samples are {0}".format(ralist.size))
......@@ -378,13 +398,30 @@ if __name__ == "__main__":
if(opts.eventnum is None):
print("Provide event num if giving injfile", file=sys.stderr)
sys.exit()
print("Loading xml")
xmldoc = utils.load_filename(
opts.inj, contenthandler=LIGOLWContentHandler)
print("Loading xml")
print('xml filename', opts.inj, type(opts.inj))
try:
print('trying to open', opts.inj)
xmldoc = utils.load_filename(
opts.inj, contenthandler=LIGOLWContentHandler)
except:
try:
print('xml file not found, try looking a directory back')
print('trying to open', topdir + '/' + opts.inj)
xmldoc = utils.load_filename(
topdir + '/' + opts.inj, contenthandler=LIGOLWContentHandler)
except:
print("didn't work either")
try:
print('trying to open just the regular way, with open()', opts.inj)
xmldoc = open(opts.inj)
except:
print("Nothing is working here, watch me crash")
try:
print('Checking if using a sim_inspiral table...')
injs = table.get_table(
xmldoc, lsctables.SimInspiralTable.tableName)
xmldoc, lsctables.SimInspiralTable.tableName)
inj = injs[opts.eventnum]
injpos = {
'ra': inj.longitude,
......
......@@ -27,17 +27,37 @@ def package_files(directory):
paths.append(os.path.join("..", path, filename))
return paths
def get_requirements(kind=None):
'''
This helper function is borrowed from Bilby
(https://git.ligo.org/lscsoft/bilby/-/blob/master/setup.py)
'''
if kind is None:
fname = "requirements.txt"
else:
fname = f"{kind}_requirements.txt"
with open(fname, "r") as ff:
requirements = ff.readlines()
print(requirements)
return requirements
bayeswave_plot_data = package_files("bayeswave_plot_data")
bayeswave_pipe_examples = package_files("bayeswave_pipe_examples")
setup(
name='BayesWaveUtils',
version='0.1dev',
author='James Clark, Sophie Hourihane, Meg Millhouse, Sudarshan Ghonge',
author_email="bayeswave@sympa.ligo.org",
description='Python tools for BayesWave.',
packages=['bayeswave_plot','bayeswave_pipe'],
scripts=['scripts/megaplot.py', 'scripts/megasky.py', 'scripts/bayeswave_pipe'],
scripts=['scripts/megaplot.py', 'scripts/megasky.py', 'scripts/bayeswave_pipe', 'scripts/delete_corruption.py', 'scripts/cp_files.py'],
package_data = {'bayeswave_plot': bayeswave_plot_data,
'bayeswave_pipe': bayeswave_pipe_examples},
license='GPL',
long_description=open('README.md').read(),
url='https://git.ligo.org/lscsoft/bayeswave/',
)
......@@ -68,7 +68,7 @@ def get_wavelet_params(filename, model, chirpflag=False, O1version=False, **keyw
return data
def wt(wave_params,psdfile):
def wt(wave_params,psdfile, ID):
"""
Makes a waveform from a set of wavelets
......@@ -79,6 +79,8 @@ def wt(wave_params,psdfile):
psdfile (str): data file of the PSD
ID (int): the number of the sample for the suggested reconstruction
outputs
-------
array of the waveform
......@@ -98,19 +100,18 @@ def wt(wave_params,psdfile):
fmin = int(psd[0,0]*Tobs)
wavenumber = wave_params['D'][0]
wavenumber = wave_params['D'][ID]
# the starting index in order to export the right set of parameters
start = sum(wave_params['D'][:ID])
for j in range(0,wavenumber):
t0 = wave_params['t'][j]
f0 = wave_params['f'][j]
Q = wave_params['Q'][j]
A = wave_params['A'][j]
phi0 = wave_params['phi'][j]
t0 = wave_params['t'][j+start]
f0 = wave_params['f'][j+start]
Q = wave_params['Q'][j+start]
A = wave_params['A'][j+start]
phi0 = wave_params['phi'][j+start]
i = int(f0*Tobs)
fac = 1.0/math.sqrt(psd[i-fmin,1])
tau = Q/(2*np.pi*f0)
......@@ -125,7 +126,6 @@ def wt(wave_params,psdfile):
for i in range(imin,imax):
t = float(i)/Nsamp*Tobs
sf = A*np.exp(-((t-t0)**2)/(tau**2))
sf *= fac
hs[i] += sf*np.cos(2*np.pi*f0*(t-t0)+phi0)
return hs
......
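Each term accumulated into hs above is a sine-Gaussian (Morlet-Gabor) wavelet. As a standalone illustration (not part of the diff), here is a sketch of the single-wavelet evaluation; the parameter values are illustrative only, and the PSD whitening factor (fac) applied in wt() is omitted:

import numpy as np

def sine_gaussian(t, t0, f0, Q, A, phi0):
    # Gaussian envelope of width tau = Q / (2*pi*f0) modulating a cosine at f0,
    # matching the per-wavelet term summed in wt() (without the 1/sqrt(PSD) factor).
    tau = Q / (2 * np.pi * f0)
    return A * np.exp(-((t - t0) ** 2) / tau ** 2) * np.cos(2 * np.pi * f0 * (t - t0) + phi0)

# Illustrative values only (not drawn from a real chain file):
t = np.linspace(0.0, 4.0, 4 * 2048)
h = sine_gaussian(t, t0=2.0, f0=200.0, Q=8.0, A=1e-22, phi0=0.0)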
#
# CMake packaging for BayesWave
# Copyright 2019 James Alexander Clark <james.clark@ligo.org>
# Copyright 2019 James Alexander Clark <james.clark@ligo.org>, Meg Millhouse <meg.millhouse@ligo.org>
# Based on CMake packaging for libframe & Frv by Duncan MacLeod <duncan.macleod@ligo.org>
#
......@@ -10,7 +10,7 @@ cmake_minimum_required(VERSION 3.12.0 FATAL_ERROR)
project(
bayeswave
LANGUAGES C
VERSION 1.0.6
VERSION 1.1.1
DESCRIPTION "LIGO/VIRGO burst analysis algorithm"
HOMEPAGE_URL "https://git.ligo.org/lscsoft/bayeswave"
)
......@@ -19,6 +19,33 @@ include(GNUInstallDirs)
include(CheckFunctionExists)
find_package(PkgConfig)
find_package(Git)
find_package(OpenMP)
#-----------------------------------------------------------------------
# Set rpath so you don't have to set $LD_LIBRARY_PATH before running
# instructions from:
# https://gitlab.kitware.com/cmake/community/-/wikis/doc/cmake/RPATH-handling
IF(DEFINED ENV{CONDA_PREFIX})
MESSAGE(STATUS "CONDA_PREFIX: --[$ENV{CONDA_PREFIX}]--")
ELSE()
MESSAGE(STATUS "Warning: could not find conda environment")
ENDIF()
# use, i.e. don't skip the full RPATH for the build tree
set(CMAKE_SKIP_BUILD_RPATH FALSE)
# when building, don't use the install RPATH already
# (but later on when installing)
set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
# In order to be able to install into a user specified directory, need to make sure the RPATH has the install directory first, and then the conda prefix.
# So make sure to remove environment rpath, and then explicitly add conda library back in.
# There may be a more elegant way to do this
set(CMAKE_INSTALL_REMOVE_ENVIRONMENT_RPATH TRUE)
set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}:$ENV{CONDA_PREFIX}/lib") #
# -- build components -------
......@@ -55,6 +82,7 @@ set(CPACK_SOURCE_IGNORE_FILES
"\\\\.svn/"
"\\\\.git"
"build/"
"dist/"
"CMakeFiles/"
"CMakeCache.txt"
"_CPack_Packages/"
......
FROM containers.ligo.org/lscsoft/bayeswave/dependencies:el7
ARG BUILD_IMAGE
FROM $BUILD_IMAGE
ARG BUILD_DATE
ARG CI_COMMIT_SHA
LABEL name="BayesWave - EL7" \
maintainer="James Alexander Clark <james.clark@ligo.org>" \
support="Reference Platform" \
date="20190726"
# http://label-schema.org/rc1/
LABEL org.label-schema.schema-version="1.0"
LABEL org.label-schema.name="bayeswave-runtime"
LABEL org.label-schema.description="BayesWave: distinguish gravitational wave signals from noise and instrumental glitches"
LABEL org.label-schema.url="https://docs.ligo.org/lscsoft/bayeswave/"
LABEL org.label-schema.vcs-url="https://git.ligo.org/lscsoft/bayeswave"
LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.vcs-ref="${CI_COMMIT_SHA}"
# Directories we may want to bind
RUN mkdir -p /cvmfs /hdfs /ceph /hadoop /etc/condor /test
# Copy all the source into /tmp which we will empty later
COPY . /tmp
# Build and install from RPM built in CI
COPY rpms /rpms
RUN yum upgrade -y && \
yum -y localinstall /rpms/*.rpm && \
rm -rf /rpms && yum clean all
RUN cd /tmp && \
mkdir -p build && \
pushd build && \
cmake .. \
-DCMAKE_INSTALL_PREFIX= \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_EXPORT_COMPILE_COMMANDS=true && \
cmake --build . -- VERBOSE=1 && \
cmake --build . --target install && \
popd && \
pip install --no-cache /tmp/BayesWaveUtils && \
rm -rf /tmp/*
# Python Utils
COPY test/test-bayeswave.sh /test-bayeswave.sh
COPY BayesWaveUtils /tmp/BayesWaveUtils
RUN cd /tmp/BayesWaveUtils && \
python /tmp/BayesWaveUtils/setup.py install
RUN rm -rf /tmp/*
## Directories we may want to bind
RUN mkdir -p /cvmfs /hdfs /hadoop /etc/condor /test
# Remove build dependencies
RUN yum remove -y cmake3 \
gcc \
gcc-c++ \
help2man \
lal-devel \
lalframe-devel \
lalinference-devel \
lalsimulation-devel \
python-pip \
rpm-build && \
yum clean all && \
rm -rf /var/cache/yum
WORKDIR /
ENTRYPOINT ["/bin/bash"]
COPY docker-entrypoint.sh /docker-entrypoint.sh
ENTRYPOINT ["/docker-entrypoint.sh"]
......@@ -9,7 +9,9 @@ signals and instrumental glitches, allowing robust, morphology-independent wavef
See [arXiv:1410.3835](https://arxiv.org/abs/1410.3835) for an introduction to the BayesWave
algorithm.
*LSC/Virgo members*: See the [BayesWave wiki](https://wiki.ligo.org/Bursts/BayesWave) for further info
Work in progress documentation can be found [here](https://ldas-jobs.ligo.caltech.edu/~meg.millhouse/Docs/).
*LSC/Virgo members*: See the [BayesWave wiki](https://wiki.ligo.org/Bursts/BayesWave) for further info.
## Citations
......@@ -53,6 +55,20 @@ archivePrefix = {arXiv},
url = {https://link.aps.org/doi/10.1103/PhysRevD.91.084034}
}
@article{Cornish:2020dwh,
author = "Cornish, Neil J. and Littenberg, Tyson B. and B\'ecsy, Bence and Chatziioannou, Katerina and Clark, James A. and Ghonge, Sudarshan and Millhouse, Margaret",
title = "{BayesWave analysis pipeline in the era of gravitational wave observations}",
eprint = "2011.09494",
archivePrefix = "arXiv",
primaryClass = "gr-qc",
doi = "10.1103/PhysRevD.103.044006",
journal = "Phys. Rev. D",
volume = "103",
number = "4",
pages = "044006",
year = "2021"
}
```
### Citing the BayesWave Software
......