Compare revisions

Changes are shown as if the source revision were being merged into the target revision.

Commits on Source (782)
Showing changes with 3803 additions and 852 deletions
FROM conda/miniconda3-centos7 AS build
FROM continuumio/miniconda3 AS build
ARG BUILD_DATE
ARG CI_COMMIT_SHA
......@@ -12,24 +12,24 @@ LABEL org.label-schema.build-date="${BUILD_DATE}"
LABEL org.label-schema.vcs-ref="${CI_COMMIT_SHA}"
## Build bayeswave conda environment
RUN conda update -yq -nbase conda
RUN conda update -yq -n base conda
COPY environment.yaml .
RUN conda env create -f environment.yaml
SHELL ["conda", "run", "-n", "bayeswave", "/bin/bash", "-c"]
## Use conda-pack to create a standalone environment
## in /venv:
RUN conda install -c conda-forge conda-pack
RUN conda-pack -n bayeswave -o /tmp/env.tar && \
mkdir /venv && cd /venv && tar xf /tmp/env.tar && \
rm /tmp/env.tar
## We've put the venv in the same path it'll occupy in the final image,
## so now fix up paths:
RUN /venv/bin/conda-unpack
## Next stage
FROM centos:7
FROM rockylinux/rockylinux:latest
## Essentials
RUN yum install -y git which && \
......
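(A note on the pattern above: conda-pack archives the resolved bayeswave environment into a relocatable tarball; once it is extracted into /venv, conda-unpack rewrites the prefix paths hard-coded into the environment's scripts, so the runtime stage can copy /venv from the build stage and run without conda installed.)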
......@@ -7,6 +7,7 @@
__pycache__/
*.egg-info/
*.eggs/
src/.ipynb_checkpoints/*
# built objects
*.o
......@@ -19,6 +20,9 @@ BayesWaveToLALPSD
src/version.h
src/bayeswave.pc
# dist dir
dist/
# autotools stuff
.libs/
.deps/
......
......@@ -7,11 +7,15 @@
# 4. Build & push the bayeswave runtime container from BayesWave source on top of the
# conda env dependencies.
# 5. Push documentation for tagged commits only
#
# TODO: build the env image only for master
# TODO: use the upstream for the env image
variables:
BRANCH: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
COMMIT: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_TAG
UPSTREAM_REGISTRY: containers.ligo.org/lscsoft/bayeswave
CONDA_ENV_IMAGE: conda-env
BUILD_DIR: test-install
BUILD_TARGET: $CI_PROJECT_DIR/$BUILD_DIR
......@@ -27,7 +31,7 @@ stages:
image: docker
before_script:
- echo "Logging in"
- docker login -u gitlab-ci-token -p $CI_BUILD_TOKEN $CI_REGISTRY
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
script:
- if [ -z $CI_COMMIT_TAG ]; then IMAGE_TAG="latest"; else IMAGE_TAG="$CI_COMMIT_TAG"; fi
- if [ -z $IMAGE_NAME ]; then IMAGE="$CI_REGISTRY_IMAGE:$IMAGE_TAG"; else IMAGE="$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG" ; fi
......@@ -35,20 +39,12 @@ stages:
- docker build --no-cache
--build-arg CI_COMMIT_SHA=${CI_COMMIT_SHA}
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ')
--build-arg BUILD_IMAGE=$CI_REGISTRY_IMAGE/conda-env:$IMAGE_TAG
--build-arg BUILD_IMAGE=$UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:$IMAGE_TAG
-t $IMAGE --file $DOCKERFILE .
- docker push $IMAGE
# Build the conda environment dependencies
conda-env:manual:
stage: docker
<<: *docker_deploy
variables:
IMAGE_NAME: $CONDA_ENV_IMAGE
DOCKERFILE: .conda-env.Dockerfile
when: manual
conda-env:schedule:
conda-env:
stage: docker
<<: *docker_deploy
variables:
......@@ -56,17 +52,18 @@ conda-env:schedule:
DOCKERFILE: .conda-env.Dockerfile
only:
refs:
- schedules
- tags
- master@lscsoft/bayeswave
conda-env:tag:
# FIXME: reconcile with above - SHORT TERM HACK (seriously - this could break paperwork)
# Build the conda environment dependencies
conda-env-manual:
stage: docker
<<: *docker_deploy
variables:
IMAGE_NAME: $CONDA_ENV_IMAGE
DOCKERFILE: .conda-env.Dockerfile
only:
refs:
- tags
when: manual
# -------------------------------------------------------
# BUILD
......@@ -74,7 +71,7 @@ conda-env:tag:
# Build bayeswave from source
bayeswave:
stage: build
image: $CI_REGISTRY_IMAGE/$CONDA_ENV_IMAGE:latest
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- mkdir -p build
- cmake . -DCMAKE_BUILD_TYPE=Debug -DCMAKE_EXPORT_COMPILE_COMMANDS=true -DCMAKE_INSTALL_PREFIX=$BUILD_DIR
......@@ -87,7 +84,7 @@ bayeswave:
BayesWaveUtils:
stage: build
image: $CI_REGISTRY_IMAGE/$CONDA_ENV_IMAGE:latest
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- pushd BayesWaveUtils
- python setup.py install --prefix $BUILD_TARGET
......@@ -138,7 +135,7 @@ docs:
# Test bayeswave functionality
BayesWave:
stage: test
image: $CI_REGISTRY_IMAGE/$CONDA_ENV_IMAGE:latest
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- source $BUILD_DIR/bayeswave-user-env.sh
- cat $BUILD_DIR/bayeswave-user-env.sh
......@@ -156,7 +153,7 @@ BayesWave:
# Test bayeswavePost functionality
BayesWavePost:
stage: test
image: $CI_REGISTRY_IMAGE/$CONDA_ENV_IMAGE:latest
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- source $BUILD_DIR/bayeswave-user-env.sh
- cat $BUILD_DIR/bayeswave-user-env.sh
......@@ -168,7 +165,7 @@ BayesWavePost:
bayeswave_pipe:
stage: test
image: $CI_REGISTRY_IMAGE/$CONDA_ENV_IMAGE:latest
image: $UPSTREAM_REGISTRY/$CONDA_ENV_IMAGE:latest
script:
- ls $BUILD_DIR
- source $BUILD_DIR/bayeswave-user-env.sh
......@@ -211,8 +208,8 @@ bayeswave-runtime:latest:
<<: *docker_deploy
variables:
DOCKERFILE: Dockerfile
except:
- tags
only:
- master@lscsoft/bayeswave
bayeswave-runtime:tag:
stage: deploy
......
This diff is collapsed.
......@@ -17,4 +17,4 @@
"""
"""
__all__=['bayeswave_utils']
__all__=['bayeswave_utils', "BW_Flags", "wavelets"]
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
ifoColors = ['darkgoldenrod','darkkhaki','darkseagreen','olive','cadetblue','green','slategray','darkcyan']
def plot_repeats(runFlag, figpath = None, chain = 0):
# Takes in a chain file name, counts repeated wavelets, and plots a histogram of that information.
# Should only be run when the glitch model is on (and no chirplets).
fig, ax = plt.subplots()
for i, ifo in enumerate(runFlag.ifoNames):
print("chain name = ", runFlag.get_chain_name())
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/glitch_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # I think it's ok to hardcode glitch into the filename because this is only done for the glitch model
except:
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/full_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # for full model
except:
try:
wf = get_wavelet_params(f"{runFlag.trigdir}/chains/cbc_params_{ifo}.dat.{chain}", model = 'glitch',
burnin = 'half') # for CBC glitch runs
except:
print("Couldn't get glitch files for repeat plot.\n")
return(1)
repeats, percent_unique = get_repeats(wf)
# Get weighted average
sum_z2 = 0
for r in repeats:
sum_z2 += r ** 2
weight_avg = sum_z2 / sum(repeats)
if max(repeats) < 1:
bins = 1
else:
bins = np.arange(-0.5, max(repeats) + 0.5, 1)
ax.hist(repeats, bins = bins, density = False, label = "%s %.4g percent unique wavelets" % (ifo,percent_unique), alpha = 0.5, color = ifoColors[i])
ax.set_xlabel("Number of repeats")
ax.set_ylabel("Number of wavelets")
ax.set_title("Rate of glitch wavelet repetition")
ax.legend()
if figpath is not None:
plt.savefig(figpath)
return
def get_repeats(glitch_dict):
"""
Takes in output of the get_wavelet_params function
Outputs list containing the number of repeated values for each glitch waveform
# TODO, will not work for chirplets
"""
sum_D = sum(glitch_dict['D'])
wavelet_params = fill_wavelet_matrix(glitch_dict)
percent_unique = 100 * get_unique_wavelet_list(wavelet_params) / sum_D
print(f"Percent unique {percent_unique}%")
# Holds onto info about whether that value has been repeated
been_repeated = np.zeros(sum_D)
repeats = []
for i in range(sum_D):
if i%10000 == 0:
print(f"Repeats is {len(repeats)} long and is {100 * sum(repeats) / sum_D} % done")
print(f"{i} of {sum_D}")
if been_repeated[i] != 0:
continue
w = np.where(np.all(np.isclose(wavelet_params.T - wavelet_params[:, i], 0), axis=1))
repeats.append(len(w[0]))
been_repeated[w] = np.ones(len(w[0]))
return repeats, percent_unique
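# Standalone illustration of the column-matching trick used above (not part of this module):
#   cols = np.array([[1., 2., 1.], [5., 6., 5.]])   # 2 parameters x 3 wavelets
#   np.where(np.all(np.isclose(cols.T - cols[:, 0], 0), axis=1))
#   # -> (array([0, 2]),): columns 0 and 2 hold the same wavelet, i.e. it appears twice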
def fill_wavelet_matrix(glitch_dict):
sum_D = sum(glitch_dict['D'])
wavelet_params = np.zeros((5, sum_D))
i = 0
for key in glitch_dict.keys():
if key == 'D':
continue
wavelet_params[i] = glitch_dict[key]
i += 1
return wavelet_params
def get_unique_wavelet_list(wavelet_params):
unique_rows = np.unique(wavelet_params, axis=1)
print(f"get_unique_wavelet_list {np.shape(unique_rows)}")
return np.shape(unique_rows)[1]
def get_wavelet_params(filename, model, chirpflag=False, O1version=False, **keyword_parameters):
"""
Read in chain file and get all wavelet params
arguments
---------
filename (str): the chain file
model (str): signal or glitch
optional, chirpflag: True if using chirplets
optional, O1version: True if using O1 era chains
optional, restrict (int): line number if you only want one draw from the chain
optional, burnin: skip the first N chain samples (or burnin = 'half' skips the first half)
outputs
-------
dictionary of the wavelet params
"""
NW = 5 # number of intrinsic parameters (changes for chirplets)
NE = 6 # number of extrinsic parameters
start = 1
labels = ['t','f','Q','logA','phi_int'] # parameters of individual wavelets
extlabels = ['alpha','sindelta','psi','elip', 'phi_ext','scale'] # Common extrinsic parameters
if chirpflag:
NW = 6
labels.append('beta')
data = {}
for l in labels:
data[l] = []
if model == 'signal': # get extrinsic parameters
for l in extlabels:
data[l] = []
data['D'] = []
infile = open(filename)
lines = infile.readlines()
if ('restrict' in keyword_parameters):
restrict = int(keyword_parameters['restrict'])
rn = [restrict]
elif ('burnin' in keyword_parameters):
if keyword_parameters['burnin'] == 'half':
burnin = int(len(lines) // 2)
else:
burnin = keyword_parameters['burnin']
rn = np.arange(burnin, len(lines))
else:
rn = np.arange(0,len(lines))
for j in rn:
line = lines[j]
spl = line.split()
waveletnumber = int(spl[0]) # how many wavelets
data['D'].append(waveletnumber)
if model == 'signal':
if waveletnumber > 0: # only do this if there are signal wavelets active
start = NE+1 # extra parameters
if O1version:
start += 1
for l in range(0,NE):
data[extlabels[l]].append(float(spl[l+1]))
for i in range(0,waveletnumber):
for l in range(0,NW):
if labels[l] == 'logA':
data[labels[l]].append(np.log10(float(spl[start+i*NW+l])))
else:
data[labels[l]].append(float(spl[start+i*NW+l]))
return data
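A minimal usage sketch of the helpers above (the detector name, chain index, and path are illustrative, not taken from the diff):
wf = get_wavelet_params("chains/glitch_params_H1.dat.0", model='glitch', burnin='half')
repeats, percent_unique = get_repeats(wf)
print(f"{percent_unique:.1f}% of {sum(wf['D'])} glitch wavelets are unique")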
......@@ -13,6 +13,32 @@ $(document).ready(function(){
$("#main").load("./html/glitch.html");
});
});
$(document).ready(function(){
$("#cbc").click(function(){
$("#main").load("./html/cbc.html");
});
});
$(document).ready(function(){
$("#cbcglitch").click(function(){
$("#main").load("./html/cbcglitch.html");
});
});
$(document).ready(function(){
$("#cbcsignal").click(function(){
$("#main").load("./html/cbcsignal.html");
});
});
$(document).ready(function(){
$("#cbcmoments").click(function(){
$("#main").load("./html/cbcmoments.html");
});
});
$(document).ready(function(){
$("#cbcparams").click(function(){
$("#main").load("./html/cbcparams.html");
});
});
$(document).ready(function(){
$("#full").click(function(){
$("#main").load("./html/full.html");
......@@ -93,11 +119,22 @@ $(document).ready(function(){
$("#main").load("./html/diagnostics.html");
});
});
$(document).ready(function(){
$("#verbose").click(function(){
$("#main").load("./html/verbose.html");
});
});
$(document).ready(function(){
$("#skymap").click(function(){
$("#main").load("./html/skymap.html");
});
});
$(document).ready(function(){
$("#stokes").click(function(){
$("#main").load("./html/stokes.html");
});
});
$(document).ready(function(){
$("#injections").click(function(){
$("#main").load("./html/injections.html");
......@@ -108,6 +145,21 @@ $(document).ready(function(){
$("#main").load("./html/snr.html");
});
});
$(document).ready(function(){
$("#clean").click(function(){
$("#main").load("./html/clean.html");
});
});
$(document).ready(function(){
$("#cleanmoments").click(function(){
$("#main").load("./html/cleanmoments.html");
});
});
$(document).ready(function(){
$("#cleaning").click(function(){
$("#main").load("./html/cleaning.html");
});
});
function toggle(showHideDiv, switchTextDiv) {
var ele = document.getElementById(showHideDiv);
var text = document.getElementById(switchTextDiv);
......
matplotlib
numpy
astropy
gwpy
scipy
glue
\ No newline at end of file
This diff is collapsed.
#!/usr/bin/env python
import shutil, sys, os, subprocess
from bayeswave_plot import BW_Flags as bwf
def keep_which_directory(temp, local):
# Read from each directory which model is currently being run, and decide from there which one to keep.
# If one is in the cleaning stage while the other is not, keep the one not being cleaned;
# if the two are in the same state, keep the version with more iterations.
# returns winner, loser
# Read in trigger directory name
trigdir = str(sys.argv[1])
runFlag_local = bwf.Flags(local)
runFlag_temp = bwf.Flags(temp)
runFlag_dict = {'temp':runFlag_temp,
'local':runFlag_local}
dirs = { 'temp' : temp,
'local' : local}
#dirs = [temp, local]
# If temp bayeswave.run doesn't exist, then just keep the local version
# if local bayeswave.run doesn't exist (and temp does), then keep the temp version!
for i, key in enumerate(runFlag_dict.keys()):
try:
bwf.readbwb(runFlag_dict[key])
except FileNotFoundError as e:
print(e)
# if we did not find the bayeswave.run file, then choose the other key as the winner
print("WARNING: " + runFlag_dict[key].trigdir + '/bayeswave.run' + " has yet to be created")
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# checkpointing isn't even on, so there is no reason to check checkpointing files, exiting this function
if not runFlag_dict['local'].checkpoint:
sys.exit("--checkpoint has not been turned on, returning")
models = {'temp':None, 'local':None}
lengths = {}
# now actually read in the model and iterations
for i, key in enumerate(runFlag_dict.keys()):
### Read in model
try:
# read in the model name
f = open(runFlag_dict[key].trigdir + '/checkpoint/state.dat')
except FileNotFoundError:
print("FileNotFound in {0}/checkpoint/state.dat".format(runFlag_dict[key].trigdir))
# if checkpointing file not found, then return the other directory
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# read in model from first word of first line in the state file
model = str(f.readlines()[0].split()[0])
models[key] = model
f.close()
### Read in iterations
try:
f = open(runFlag_dict[key].trigdir + '/checkpoint/temperature.dat')
except FileNotFoundError:
# if checkpointing file not found, then return the other directory
if key == 'local':
# winner, loser
return dirs['temp'], dirs['local']
return dirs['local'], dirs['temp']
# Keep track of file length
file_length = int(f.readlines()[0]) // runFlag_dict[key].Ncycle
lengths[key] = file_length
f.close()
# now we confirmed we have 2 directories that both have stuff in them, let's decide which one is more full.
if models['temp'] == models['local']:
# the 2 models are the same, return the directory corresponding to the larger checkpoint
if lengths['temp'] > lengths['local']:
return dirs['temp'], dirs['local']
# if they are the same length, keep the local directory
return dirs['local'], dirs['temp']
# the 2 models are different, prioritize whichever one is not currently in the cleaning phase
if models['local'] == 'clean':
return dirs['temp'], dirs['local']
if models['temp'] == 'clean':
return dirs['local'], dirs['temp']
# when in doubt, just keep the local directory
# winner, loser
return dirs['local'], dirs['temp']
def switch_and_delete_dirs(winner, loser, dirname):
# winner becomes dirname, loser gets deleted
# To make sure we don't delete something important, we make sure that the loser directory
# is something that we actually do want to delete
# directories we are deleting should be of the form: WHATEVER/trigtime_WHATEVER or temp
deletenames = ['trigtime', 'temp']
delete_loser = False
for delete in deletenames:
if delete in loser:
delete_loser = True
if not delete_loser:
print("I AM NOT DELETING THE {loser} DIRECTORY!".format(loser = loser))
print("ERROR switch_and_delete_dirs: name does not contain temp or trigtime, returning")
return
else:
# loser gets deleted
shutil.rmtree(loser)
# winner gets moved
shutil.move(winner, dirname)
return
# Make sure that directory string does not have a trailing '/'
def clean_input(dir_string):
if dir_string[-1] == '/':
return dir_string[:-1]
else:
return(dir_string)
# Call to cp_files is
# cp_files.py local temp
workdir = os.getcwd()
print("cwd is {0}".format(workdir))
# read in the local directory, then we make a temp directory from that directory
# copy over the working directory to the remote machine
tempdir = clean_input(str(sys.argv[2]))
temp = './temp'
#shutil.copytree(tempdir, temp)
print("Copying over files from tempdir {0} to tempdir {1}".format(tempdir, temp))
p = subprocess.Popen('cp -r {0} {1}'.format(tempdir, temp), shell = True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
p.wait()
# localdir
outputdir = clean_input(str(sys.argv[1]))
print("localdir is {0}".format(outputdir))
try:
process = subprocess.run('head {0}/checkpoint/temperature.dat'.format(outputdir), shell = True)
except:
pass
winner, loser = keep_which_directory(temp, outputdir)
print("Keeping winner {0} \n Deleting loser {1}".format(winner, loser))
switch_and_delete_dirs(winner, loser, outputdir)
#!/usr/bin/env python
import subprocess, os, sys
from bayeswave_plot import BW_Flags as bwf
def delete_lines(filename, file_length):
# delete all lines except the first `file_length` lines
# get number of lines currently in the file
try:
N_lines = int(subprocess.check_output("wc -l <{filename}".format(filename = filename), shell=True))
except subprocess.CalledProcessError:
# returns if file does not exist
return
if N_lines == file_length:
return
elif N_lines < file_length:
print("ERROR!! File {filename} too short! Should be {file_length} but is {N_lines}".format(filename = filename, file_length = file_length, N_lines = N_lines))
else:
print("Deleting {0} lines from {filename} ".format(N_lines - file_length, filename = filename))
# deletes from file_length (exclusive) to end of file
subprocess.run("sed -i '{file_length},$ d' {filename}".format(filename = filename, file_length = file_length+1), shell = True)
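# Worked example: with file_length = 100 the command above expands to
#   sed -i '101,$ d' <filename>
# i.e. delete from line 101 through the end of the file, keeping the first 100 lines.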
# Read in trigger directory name
trigdir = str(sys.argv[1])
runFlags = bwf.Flags(trigdir)
try:
bwf.readbwb(runFlags)
except FileNotFoundError:
sys.exit("WARNING: " + runFlags.trigdir + '/bayeswave.run' + " has yet to be created, returning")
if not runFlags.checkpoint:
sys.exit("--checkpoint has not been turned on, returning")
# read in the model flag to see what checkpoint iteration we should be at
f = open(runFlags.trigdir + '/checkpoint/temperature.dat')
file_length = int(f.readlines()[0]) // runFlags.Ncycle
f.close()
# get name of model
try:
f = open(trigdir + '/checkpoint/state.dat', 'r')
except:
sys.exit(runFlags.trigdir + '/checkpoint/state.dat' + " has yet to be created, returning")
model = f.readlines()[0].split()[0] # first entry is what model we are on
f.close()
print("Model is {model}".format(model = model))
# Get files relating to model
chaindir = runFlags.trigdir + '/chains'
onlyfiles = [f for f in os.listdir(chaindir) if os.path.isfile(os.path.join(chaindir, f))]
model_files = [f for f in onlyfiles if (model in f)]
# if model is cbc, cbc_skychain.dat should not be considered part of the cbc files
if model == 'cbc':
model_files.remove('cbc_skychain.dat')
# delete extra lines from files
for f in model_files:
delete_lines(runFlags.trigdir + '/chains/' + f, file_length)
This diff is collapsed.
#!/usr/bin/env python
from __future__ import print_function
print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\
Warning! megasky.py is now deprecated. Sky maps are now made within megaplot.py\n\
Use ONLY IF you need to make a fits file and know exactly what you're doing.\n\
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n")
import lal
import matplotlib
matplotlib.use('Agg')
......@@ -208,12 +214,26 @@ def make_skyview(directory='.', mdc=None, NSIDE=128, inj=None, npost=5000, geo=F
# -- Input skymap data
print("Extracting RA/DEC samples")
cbc_run = False
try:
filename = './chains/' + 'signal_params_h0.dat.0'
print("trying to read %s"%(filename))
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
except:
filename = './chains/' + 'full_params_h0.dat.0'
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
try:
filename = './chains/' + 'full_params_h0.dat.0'
print("trying to read %s"%(filename))
data = np.loadtxt(filename, unpack=True,usecols=(0,1,2))
except:
try:
filename = './chains/' + 'cbc_params.dat.0'
print("trying to read %s"%(filename))
data = np.loadtxt(filename, unpack=True,usecols=(0,11,12))
cbc_run = True
except:
print("Was not able to find anything to read, is this a glitchOnly run?")
pass
ralist = data[1]
sin_dec = data[2]
print("Total samples are {0}".format(ralist.size))
......@@ -378,13 +398,30 @@ if __name__ == "__main__":
if(opts.eventnum is None):
print("Provide event num if giving injfile", file=sys.stderr)
sys.exit()
print("Loading xml")
xmldoc = utils.load_filename(
opts.inj, contenthandler=LIGOLWContentHandler)
print("Loading xml")
print('xml filename', opts.inj, type(opts.inj))
try:
print('trying to open', opts.inj)
xmldoc = utils.load_filename(
opts.inj, contenthandler=LIGOLWContentHandler)
except:
try:
print('xml file not found, try looking a directory back')
print('trying to open', topdir + '/' + opts.inj)
xmldoc = utils.load_filename(
topdir + '/' + opts.inj, contenthandler=LIGOLWContentHandler)
except:
print("didn't work either")
try:
print('trying to open just the regular way, with open()', opts.inj)
xmldoc = open(opts.inj)
except:
print("Nothing is working here, watch me crash")
try:
print('Checking if using a sim_inspiral table...')
injs = table.get_table(
xmldoc, lsctables.SimInspiralTable.tableName)
xmldoc, lsctables.SimInspiralTable.tableName)
inj = injs[opts.eventnum]
injpos = {
'ra': inj.longitude,
......
......@@ -27,17 +27,37 @@ def package_files(directory):
paths.append(os.path.join("..", path, filename))
return paths
def get_requirements(kind=None):
'''
'' This helper function is borrowed from Bilby
'' (https://git.ligo.org/lscsoft/bilby/-/blob/master/setup.py)
'''
if kind is None:
fname = "requirements.txt"
else:
fname = f"{kind}_requirements.txt"
with open(fname, "r") as ff:
requirements = ff.readlines()
print(requirements)
return requirements
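# (Not shown in this hunk: in a typical setuptools project this helper would be consumed
#  as install_requires=get_requirements() inside the setup() call below.)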
bayeswave_plot_data = package_files("bayeswave_plot_data")
bayeswave_pipe_examples = package_files("bayeswave_pipe_examples")
setup(
name='BayesWaveUtils',
version='0.1dev',
author='James Clark, Sophie Hourihane, Meg Millhouse, Sudarshan Ghonge',
author_email="bayeswave@sympa.ligo.org",
description='Python tools for BayesWave.',
packages=['bayeswave_plot','bayeswave_pipe'],
scripts=['scripts/megaplot.py', 'scripts/megasky.py', 'scripts/bayeswave_pipe'],
scripts=['scripts/megaplot.py', 'scripts/megasky.py', 'scripts/bayeswave_pipe', 'scripts/delete_corruption.py', 'scripts/cp_files.py'],
package_data = {'bayeswave_plot': bayeswave_plot_data,
'bayeswave_pipe': bayeswave_pipe_examples},
license='GPL',
long_description=open('README.md').read(),
url='https://git.ligo.org/lscsoft/bayeswave/',
)
......@@ -68,7 +68,7 @@ def get_wavelet_params(filename, model, chirpflag=False, O1version=False, **keyw
return data
def wt(wave_params,psdfile):
def wt(wave_params,psdfile, ID):
"""
Makes a waveform from a set of wavelets
......@@ -79,6 +79,8 @@ def wt(wave_params,psdfile):
psdfile (str): data file of the PSD
ID (int): the index of the chain sample used for the reconstruction
outputs
-------
array of the waveform
......@@ -98,19 +100,18 @@ def wt(wave_params,psdfile):
fmin = int(psd[0,0]*Tobs)
wavenumber = wave_params['D'][0]
wavenumber = wave_params['D'][ID]
# the starting index in order to export the right set of parameters
start = sum(wave_params['D'][:ID])
for j in range(0,wavenumber):
t0 = wave_params['t'][j]
f0 = wave_params['f'][j]
Q = wave_params['Q'][j]
A = wave_params['A'][j]
phi0 = wave_params['phi'][j]
t0 = wave_params['t'][j+start]
f0 = wave_params['f'][j+start]
Q = wave_params['Q'][j+start]
A = wave_params['A'][j+start]
phi0 = wave_params['phi'][j+start]
i = int(f0*Tobs)
fac = 1.0/math.sqrt(psd[i-fmin,1])
tau = Q/(2*np.pi*f0)
......@@ -125,7 +126,6 @@ def wt(wave_params,psdfile):
for i in range(imin,imax):
t = float(i)/Nsamp*Tobs
sf = A*np.exp(-((t-t0)**2)/(tau**2))
sf *= fac
hs[i] += sf*np.cos(2*np.pi*f0*(t-t0)+phi0)
return hs
......
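For reference, the quantity accumulated in the loop above is the usual sine-Gaussian (Morlet-Gabor) wavelet, whitened by the PSD value at the central frequency:
h(t) = \frac{A}{\sqrt{S_n(f_0)}}\, \exp\!\left(-\frac{(t-t_0)^2}{\tau^2}\right) \cos\!\big(2\pi f_0 (t-t_0) + \phi_0\big), \qquad \tau = \frac{Q}{2\pi f_0}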
#
# CMake packaging for BayesWave
# Copyright 2019 James Alexander Clark <james.clark@ligo.org>
# Copyright 2019 James Alexander Clark <james.clark@ligo.org>, Meg Millhouse <meg.millhouse@ligo.org>
# Based on CMake packaging for libframe & Frv by Duncan MacLeod <duncan.macleod@ligo.org>
#
......@@ -10,7 +10,7 @@ cmake_minimum_required(VERSION 3.12.0 FATAL_ERROR)
project(
bayeswave
LANGUAGES C
VERSION 1.0.6
VERSION 1.1.1
DESCRIPTION "LIGO/VIRGO burst analysis algorithm"
HOMEPAGE_URL "https://git.ligo.org/lscsoft/bayeswave"
)
......@@ -19,6 +19,33 @@ include(GNUInstallDirs)
include(CheckFunctionExists)
find_package(PkgConfig)
find_package(Git)
find_package(OpenMP)
#-----------------------------------------------------------------------
# Set rpath so you don't have to set $LD_LIBRARY_PATH before running
# instructions from:
# https://gitlab.kitware.com/cmake/community/-/wikis/doc/cmake/RPATH-handling
IF(DEFINED ENV{CONDA_PREFIX})
MESSAGE(STATUS "CONDA_PREFIX: --[$ENV{CONDA_PREFIX}]--")
ELSE()
MESSAGE(STATUS "Warning: could not find conda environment")
ENDIF()
# use, i.e. don't skip the full RPATH for the build tree
set(CMAKE_SKIP_BUILD_RPATH FALSE)
# when building, don't use the install RPATH already
# (but later on when installing)
set(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
# To install into a user-specified directory, the RPATH must list the install directory first and then the conda prefix.
# So remove the environment RPATH, then explicitly add the conda library directory back in.
# There may be a more elegant way to do this.
set(CMAKE_INSTALL_REMOVE_ENVIRONMENT_RPATH TRUE)
set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}:$ENV{CONDA_PREFIX}/lib") #
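# Example (hypothetical prefix): configuring with -DCMAKE_INSTALL_PREFIX=$HOME/opt/bayeswave gives
# installed binaries an RPATH of $HOME/opt/bayeswave/lib:$CONDA_PREFIX/lib (CMAKE_INSTALL_LIBDIR is typically "lib").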
# -- build components -------
......@@ -55,6 +82,7 @@ set(CPACK_SOURCE_IGNORE_FILES
"\\\\.svn/"
"\\\\.git"
"build/"
"dist/"
"CMakeFiles/"
"CMakeCache.txt"
"_CPack_Packages/"
......
......@@ -9,7 +9,9 @@ signals and instrumental glitches, allowing robust, morphology-independent wavef
See [arXiv:1410.3835](https://arxiv.org/abs/1410.3835) for an introduction to the BayesWave
algorithm.
*LSC/Virgo members*: See the [BayesWave wiki](https://wiki.ligo.org/Bursts/BayesWave) for further info
Work in progress documentation can be found [here](https://ldas-jobs.ligo.caltech.edu/~meg.millhouse/Docs/).
*LSC/Virgo members*: See the [BayesWave wiki](https://wiki.ligo.org/Bursts/BayesWave) for further info.
## Citations
......
#!/bin/bash
set -e
INSTALL_PREFIX=$1
INSTALL_PREFIX=${1:-${CONDA_PREFIX}}
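# ${1:-${CONDA_PREFIX}} uses the first argument if one is given, otherwise falls back to the active conda prefix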
echo ${INSTALL_PREFIX}
# Get cmake
if hash cmake 2>/dev/null; then
......
#
# CMake packaging for BayesWave
# Copyright 2019 James Alexander Clark <james.clark@ligo.org>
# Based on CMake packaging for libframe & Frv by Duncan MacLeod <duncan.macleod@ligo.org>
#
# -- package info -----------
cmake_minimum_required(VERSION 3.12.0 FATAL_ERROR)
project(
bayeswave
LANGUAGES C
VERSION 1.0.6
DESCRIPTION "LIGO/VIRGO burst analysis algorithm"
HOMEPAGE_URL "https://git.ligo.org/lscsoft/bayeswave"
)
include(GNUInstallDirs)
include(CheckFunctionExists)
find_package(PkgConfig)
find_package(Git)
find_package(OpenMP)
# -- build components -------
# C library | enable/disable with -DENABLE_C={yes,no} (default yes)
add_subdirectory(src)
#add_subdirectory(doc)
# -- packaging components ---
set(SPEC_IN "${CMAKE_CURRENT_SOURCE_DIR}/bayeswave.spec.in")
set(SPEC "${CMAKE_CURRENT_SOURCE_DIR}/bayeswave.spec")
configure_file(${SPEC_IN} ${SPEC} @ONLY)
# -- build tarball ----------
#
# to build a source tarball:
#
# mkdir dist
# pushd dist
# cmake ..
# cmake --build . --target package_source
#
set(CPACK_PACKAGE_MAJOR ${${PROJECT_NAME}_MAJOR_VERSION})
set(CPACK_PACKAGE_MINOR ${${PROJECT_NAME}_MINOR_VERSION})
set(CPACK_PACKAGE_PATCH ${${PROJECT_NAME}_PATCH_VERSION})
set(CPACK_PACKAGE_VERSION "${CPACK_PACKAGE_MAJOR}.${CPACK_PACKAGE_MINOR}.${CPACK_PACKAGE_PATCH}")
set(CPACK_SOURCE_GENERATOR TXZ)
set(CPACK_SOURCE_PACKAGE_FILE_NAME ${PROJECT_NAME}-${${PROJECT_NAME}_VERSION})
set(CPACK_SOURCE_IGNORE_FILES
"/.*~$/"
".*~$"
"\\\\.svn/"
"\\\\.git"
"build/"
"CMakeFiles/"
"CMakeCache.txt"
"_CPack_Packages/"
"\\\\.cmake"
"Makefile"
"\\\\.deps/"
"autom4te.cache/"
"\\\\.tar\\\\.xz"
)
include(CPack)