Commit bd52bad6 authored by Leo Pound Singer

Remove deprecated BAYESTAR scripts

These have all moved to https://git.ligo.org/lscsoft/ligo.skymap.
parent 2da21b93
@@ -37,23 +37,6 @@ lalinference-user-env.shell
lalinference.spec
libtool
octave/
python/bayestar_aggregate_found_injections
python/bayestar_bin_samples
python/bayestar_contour_allsky
python/bayestar_lattice_tmpltbank
python/bayestar_littlehope
python/bayestar_localize_coincs
python/bayestar_localize_lvalert
python/bayestar_mess_up_coincs
python/bayestar_plot_allsky
python/bayestar_plot_found_injections
python/bayestar_plot_pileup
python/bayestar_plot_volume
python/bayestar_prune_neighborhood_tmpltbank
python/bayestar_realize_coincs
python/bayestar_sample_model_psd
python/bayestar_samples_ppplot
python/bayestar_sim_to_tmpltbank
python/cbcBayesBurstPPAnalysis
python/cbcBayesBurstPostProc
python/cbcBayesBurstPostProc.py.tmp
......
usr/bin/bayestar_*
usr/bin/cbcBayes*
usr/bin/imrtgr_*
usr/bin/lalinference_average_skymaps
......
@@ -179,7 +179,6 @@ rm -Rf ${RPM_BUILD_DIR}/%{name}-%{version}%{?nightly:-%{nightly}}
%files -n python2-%{name}
%defattr(-,root,root)
%license COPYING
%{_bindir}/bayestar_*
%{_bindir}/cbcBayes*
%{_bindir}/imrtgr_*
%{_bindir}/lalinference_average_skymaps
......
@@ -9,23 +9,6 @@ SUBDIRS = lalinference
if HAVE_PYTHON
pybin_scripts = \
bayestar_aggregate_found_injections \
bayestar_bin_samples \
bayestar_contour_allsky \
bayestar_lattice_tmpltbank \
bayestar_localize_coincs \
bayestar_localize_lvalert \
bayestar_mess_up_coincs \
bayestar_plot_allsky \
bayestar_plot_found_injections \
bayestar_plot_pileup \
bayestar_plot_volume \
bayestar_prune_neighborhood_tmpltbank \
bayestar_realize_coincs \
bayestar_sample_model_psd \
bayestar_samples_ppplot \
bayestar_sim_to_tmpltbank \
bayestar_littlehope \
rapidpe_integrate_extrinsic_likelihood \
rapidpe_create_event_dag \
rapidpe_compute_intrinsic_fisher \
......
#
# Copyright (C) 2013-2017 Leo Singer
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
"""
Match sky maps with injections in an inspinjfind-style sqlite database and
print summary values for each sky map:
* event ID
* false alarm rate
* searched area
* searched posterior probability
* angle between true sky location and maximum a posteriori estimate
* runtime in seconds
* (optional) areas and numbers of modes within specified probability contours
The filenames of the sky maps may be provided as positional command line
arguments, either literally or as globs (such as '*.fits.gz').
"""
from __future__ import division
from __future__ import print_function
from lalinference.bayestar.deprecation import warn
warn('ligo-skymap-stats')
# Command line interface.
import argparse
from lalinference.bayestar import command

if __name__ == '__main__':
    parser = command.ArgumentParser()
    parser.add_argument(
        '-o', '--output', metavar='OUT.dat',
        type=argparse.FileType('w'), default='-',
        help='Name of output file [default: stdout]')
    parser.add_argument(
        '-j', '--jobs', type=int, default=1, const=None, nargs='?',
        help='Number of threads [default: %(default)s]')
    parser.add_argument(
        '-p', '--contour', default=[], nargs='+', type=float,
        metavar='PERCENT',
        help='Report the area of the smallest contour and the number of '
             'modes containing this much probability.')
    parser.add_argument(
        '-a', '--area', default=[], nargs='+', type=float, metavar='DEG2',
        help='Report the largest probability contained within any region '
             'of this area in square degrees. Can be repeated multiple '
             'times.')
    parser.add_argument(
        '--modes', default=False, action='store_true',
        help='Compute number of disjoint modes [default: %(default)s]')
    parser.add_argument(
        'db', type=command.SQLiteType('r'), metavar='DB.sqlite',
        help='Input SQLite database from search pipeline')
    parser.add_argument(
        'fitsfilenames', metavar='GLOB.fits[.gz]', nargs='+', action='glob',
        help='Input FITS filenames and/or globs')
    opts = parser.parse_args()
# Imports.
import sqlite3
import numpy as np
from lalinference.io import fits
from lalinference.bayestar.postprocess import find_injection_moc
from lalinference.util import sqlite


def startup(dbfilename, opts_contour, opts_modes, opts_area):
    # Pool initializer: each worker process opens its own database
    # connection and keeps the command line options in module globals.
    global db, contours, modes, areas
    db = sqlite3.connect(dbfilename)
    contours = opts_contour
    modes = opts_modes
    areas = opts_area
def process(fitsfilename):
    sky_map = fits.read_sky_map(fitsfilename, moc=True)

    coinc_event_id = sky_map.meta['objid']
    try:
        runtime = sky_map.meta['runtime']
    except KeyError:
        runtime = float('nan')

    contour_pvalues = 0.01 * np.asarray(contours)

    row = db.execute(
        """
        SELECT DISTINCT sim.simulation_id AS simulation_id,
        sim.longitude AS ra, sim.latitude AS dec, sim.distance AS distance,
        ci.combined_far AS far, ci.snr AS snr
        FROM coinc_event_map AS cem1 INNER JOIN coinc_event_map AS cem2
        ON (cem1.coinc_event_id = cem2.coinc_event_id)
        INNER JOIN sim_inspiral AS sim ON (cem1.event_id = sim.simulation_id)
        INNER JOIN coinc_inspiral AS ci ON (cem2.event_id = ci.coinc_event_id)
        WHERE cem1.table_name = 'sim_inspiral'
        AND cem2.table_name = 'coinc_event' AND cem2.event_id = ?
        """, (coinc_event_id,)).fetchone()
    if row is None:
        raise ValueError(
            "No database record found for event '{0}' in '{1}'".format(
                coinc_event_id, sqlite.get_filename(db)))
    simulation_id, true_ra, true_dec, true_dist, far, snr = row

    searched_area, searched_prob, offset, searched_modes, contour_areas, \
        area_probs, contour_modes, searched_prob_dist, contour_dists, \
        searched_vol, searched_prob_vol, contour_vols = find_injection_moc(
            sky_map, true_ra, true_dec, true_dist, contours=contour_pvalues,
            areas=areas, modes=modes)

    if snr is None:
        snr = np.nan
    if far is None:
        far = np.nan
    distmean = sky_map.meta.get('distmean', np.nan)
    diststd = sky_map.meta.get('diststd', np.nan)
    log_bci = sky_map.meta.get('log_bci', np.nan)
    log_bsn = sky_map.meta.get('log_bsn', np.nan)

    ret = [coinc_event_id, simulation_id, far, snr, searched_area,
           searched_prob, searched_prob_dist, searched_vol,
           searched_prob_vol, offset, runtime, distmean, diststd,
           log_bci, log_bsn] \
        + contour_areas + area_probs + contour_dists + contour_vols
    if modes:
        ret += [searched_modes] + contour_modes
    return ret
if __name__ == '__main__':
    from glue.text_progress_bar import ProgressBar
    progress = ProgressBar()

    db = opts.db
    contours = opts.contour
    modes = opts.modes
    areas = opts.area

    progress.update(-1, 'spawning workers')
    if opts.jobs == 1:
        from six.moves import map
    else:
        try:
            from emcee.interruptible_pool import InterruptiblePool as Pool
        except ImportError:
            from multiprocessing import Pool
        map = Pool(
            opts.jobs, startup,
            (sqlite.get_filename(db), contours, modes, areas)
        ).imap

    colnames = (
        ['coinc_event_id', 'simulation_id', 'far', 'snr', 'searched_area',
         'searched_prob', 'searched_prob_dist', 'searched_vol',
         'searched_prob_vol', 'offset', 'runtime', 'distmean', 'diststd',
         'log_bci', 'log_bsn'] +
        ['area({0:g})'.format(_) for _ in contours] +
        ['prob({0:g})'.format(_) for _ in areas] +
        ['dist({0:g})'.format(_) for _ in contours] +
        ['vol({0:g})'.format(_) for _ in contours])
    if modes:
        colnames += ['searched_modes']
        colnames += ['modes({0:g})'.format(p) for p in contours]
    print(*colnames, sep='\t', file=opts.output)

    count_records = 0
    progress.max = len(opts.fitsfilenames)
    for record in map(process, opts.fitsfilenames):
        count_records += 1
        progress.update(count_records, record[0])
        print(*record, sep='\t', file=opts.output)
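The parallel map in the script above leans on multiprocessing.Pool's initializer hook: startup runs once in each worker to open a private sqlite3 connection, since database handles cannot be shared across processes. A minimal self-contained sketch of the same pattern (the table and file names here are made up for illustration):

import multiprocessing
import sqlite3


def init_worker(dbfilename):
    # Runs once in each worker process; stash the handle in a global.
    global db
    db = sqlite3.connect(dbfilename)


def lookup(key):
    # Each task reuses its worker's private connection.
    return db.execute(
        'SELECT value FROM kv WHERE key = ?', (key,)).fetchone()


if __name__ == '__main__':
    pool = multiprocessing.Pool(2, init_worker, ('example.sqlite',))
    for row in pool.imap(lookup, ['a', 'b', 'c']):
        print(row)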
#
# Copyright (C) 2011-2017 Leo Singer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Convert a list of posterior samples to a HEALPix FITS image using an adaptively
refined HEALPix tree, subdividing each node if it contains more than
--samples-per-bin posterior samples. By default, the resolution is set to the
deepest resolution of the tree.
If an input filename is specified, then the posterior samples are read from it.
If no input filename is provided, then input is read from stdin.
"""
from __future__ import division
# Command line interface.
from lalinference.bayestar import command

parser = command.ArgumentParser()
parser.add_argument(
    '--nside', '-n', type=int, default=-1,
    help='Write final sky map at this HEALPix lateral resolution '
         '[default: auto]')
parser.add_argument(
    '--max-nside', '-m', type=int, default=-1,
    help='Stop subdivision at this maximum HEALPix lateral resolution '
         '[default: max 64-bit]')
parser.add_argument(
    '--samples-per-bin', type=int, default=30,
    help='Samples per bin [default: %(default)s]')
parser.add_argument(
    '--two-step', action='store_true', default=False,
    help='Partition the samples into one sub-population for laying out bin '
         'boundaries and one sub-population for evaluating densities '
         'instead of using the whole population for both steps')
parser.add_argument(
    '--objid',
    help='Event ID to be stored in FITS header [default: %(default)s]')
parser.add_argument(
    'input', metavar='INPUT.dat', default='-', nargs='?',
    help='name of input posterior samples file [default: stdin]')
parser.add_argument(
    '-o', '--output', metavar='OUTPUT.fits[.gz]', required=True,
    help='name of output FITS file [required]')
opts = parser.parse_args()
# Late imports.
import healpy as hp
import numpy as np
from astropy.table import Table
from astropy.utils.misc import NumpyRNGContext
from lalinference.io import fits
from lalinference.io import hdf5
from lalinference import distance
from lalinference import moc
from lalinference.bayestar.sky_map import derasterize
from lalinference.healpix_tree import adaptive_healpix_histogram
def pixstats(samples, max_nside, nside, ipix):
    # In the nested scheme, coarse pixel `ipix` at resolution `nside`
    # covers the contiguous fine-pixel range [step * ipix, step * (ipix + 1))
    # at resolution `max_nside`. Since samples['ipix'] is sorted, the
    # samples inside the pixel form a slice found by binary search.
    step = (max_nside // nside) ** 2
    i0 = np.searchsorted(samples['ipix'], step * ipix)
    i1 = np.searchsorted(samples['ipix'], step * (ipix + 1))
    n = i1 - i0
    if n == 0:
        if 'dist' in samples.colnames:
            return 0.0, np.inf, 0.0
        else:
            return 0.0
    else:
        probdensity = n / len(samples) / hp.nside2pixarea(nside)
        if 'dist' in samples.colnames:
            dist = samples['dist'][i0:i1]
            return probdensity, np.mean(dist), np.std(dist)
        else:
            return probdensity
# Read samples.
samples = hdf5.read_samples(opts.input)

# If two-step binning is requested, then randomly partition the samples.
if opts.two_step:
    with NumpyRNGContext(0):  # Fix random seed to make results reproducible
        ranking_samples, samples = np.array_split(
            np.random.permutation(samples), 2)
    ranking_samples = Table(ranking_samples)
    hist_samples = Table(samples)
else:
    ranking_samples = hist_samples = samples
# Place the histogram bins.
theta = 0.5 * np.pi - hist_samples['dec']
phi = hist_samples['ra']
p = adaptive_healpix_histogram(
    theta, phi, opts.samples_per_bin,
    nside=opts.nside, max_nside=opts.max_nside, nest=True)

# Evaluate per-pixel density.
p = derasterize(Table([p], names=['PROB']))
order, ipix = moc.uniq2nest(p['UNIQ'])
nside = hp.order2nside(order.astype(int))
max_order = order.max().astype(int)
max_nside = hp.order2nside(max_order)
theta = 0.5 * np.pi - ranking_samples['dec']
phi = ranking_samples['ra']
ranking_samples['ipix'] = hp.ang2pix(max_nside, theta, phi, nest=True)
ranking_samples.sort('ipix')
result = np.transpose(
    [pixstats(ranking_samples, max_nside, n, i) for n, i in zip(nside, ipix)])
# Add distance info if necessary.
if 'dist' in ranking_samples.colnames:
    p['PROBDENSITY'], distmean, diststd = result
    p['DISTMU'], p['DISTSIGMA'], p['DISTNORM'] = \
        distance.moments_to_parameters(distmean, diststd)

    # Add marginal distance moments.
    p.meta['distmean'] = np.mean(samples['dist'])
    p.meta['diststd'] = np.std(samples['dist'])
else:
    p['PROBDENSITY'] = result

# Write output to FITS file.
fits.write_sky_map(
    opts.output, p, nest=True,
    creator=parser.prog, objid=opts.objid, gps_time=samples['time'].mean())
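pixstats can count the samples in any coarse pixel with two binary searches because, in the nested HEALPix scheme, a coarse pixel's descendants at a finer resolution occupy one contiguous index range. A self-contained toy demonstration of that counting trick (the resolutions and pixel index are chosen arbitrarily):

import healpy as hp
import numpy as np

# Toy sky positions, uniform on the sphere.
rng = np.random.RandomState(0)
n_samples = 10000
theta = np.arccos(rng.uniform(-1, 1, n_samples))  # colatitude
phi = rng.uniform(0, 2 * np.pi, n_samples)

# Index every sample at the finest resolution and sort, as the script does.
max_nside = 64
ipix = np.sort(hp.ang2pix(max_nside, theta, phi, nest=True))

# Count the samples in coarse pixel `pix` at resolution `nside`: its
# descendants at max_nside fill the range [step * pix, step * (pix + 1)).
nside, pix = 8, 42
step = (max_nside // nside) ** 2
i0 = np.searchsorted(ipix, step * pix)
i1 = np.searchsorted(ipix, step * (pix + 1))
print(i1 - i0, (i1 - i0) / n_samples / hp.nside2pixarea(nside))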
#
# Copyright (C) 2015 Leo Singer
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Create a contours for the credible levels of an all-sky probability map.
The input is a HEALPix probability map.
The output is a GeoJSON FeatureCollection (http://geojson.org/).
"""
from lalinference.bayestar.deprecation import warn
warn('ligo-skymap-contour')
# Command line interface
import argparse
from lalinference.bayestar import command

parser = command.ArgumentParser()
parser.add_argument(
    '-o', '--output', metavar='FILE.geojson',
    default='-', type=argparse.FileType('w'),
    help='name of output file [default: stdout]')
parser.add_argument(
    '--contour', metavar='PERCENT', type=float, nargs='+', required=True,
    help='plot contour enclosing this percentage of probability mass')
parser.add_argument(
    '-i', '--interpolate',
    choices='nearest nested bilinear'.split(), default='nearest',
    help='resampling interpolation method [default: %(default)s]')
parser.add_argument(
    '-s', '--simplify', default=False, action='store_true',
    help='simplify contour paths [default: %(default)s]')
parser.add_argument(
    '-n', '--nside', metavar='NSIDE', type=int,
    help='optionally resample to the specified resolution '
         'before generating contours [default: no downsampling]')
parser.add_argument(
    'input', metavar='INPUT.fits[.gz]', type=argparse.FileType('rb'),
    default='-', nargs='?', help='Input FITS file [default: stdin]')
opts = parser.parse_args()
# Late imports
from lalinference.io import fits
from lalinference.bayestar import postprocess
import healpy as hp
import numpy as np
import json
# Read input file
prob, _ = fits.read_sky_map(opts.input.name, nest=True)
# Resample if requested
if opts.nside is not None and opts.interpolate in ('nearest', 'nested'):
    prob = hp.ud_grade(prob, opts.nside, order_in='NESTED', power=-2)
elif opts.nside is not None and opts.interpolate == 'bilinear':
    prob = postprocess.smooth_ud_grade(prob, opts.nside, nest=True)
if opts.interpolate == 'nested':
    prob = postprocess.interpolate_nested(prob, nest=True)
# Find credible levels
i = np.flipud(np.argsort(prob))
cumsum = np.cumsum(prob[i])
cls = np.empty_like(prob)
cls[i] = cumsum * 100
# Generate contours
paths = list(postprocess.contour(
    cls, opts.contour, nest=True, degrees=True, simplify=opts.simplify))
json.dump({
    'type': 'FeatureCollection',
    'features': [
        {
            'type': 'Feature',
            'properties': {
                'credible_level': contour
            },
            'geometry': {
                'type': 'MultiLineString',
                'coordinates': path
            }
        }
        for contour, path in zip(opts.contour, paths)
    ]
}, opts.output)
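The credible-level assignment above (rank pixels by descending probability, then take the running sum) is compact enough to check by hand. A toy four-pixel example, using only numpy:

import numpy as np

# A toy four-pixel probability map that sums to 1.
prob = np.array([0.1, 0.4, 0.2, 0.3])

# Rank pixels from most to least probable and accumulate.
i = np.flipud(np.argsort(prob))  # [1, 3, 2, 0]
cumsum = np.cumsum(prob[i])      # [0.4, 0.7, 0.9, 1.0]
cls = np.empty_like(prob)
cls[i] = cumsum * 100

# Each pixel now holds the smallest credible level (in percent) whose
# contour includes it.
print(cls)  # [100.  40.  90.  70.]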
#
# Copyright (C) 2013-2016 Leo Singer
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
"""
Create a template bank that samples a regular lattice in (tau0, tau3), starting
from an initial (mass1, mass2), with lattice points spaced according to the
metric at the initial point.
"""
from __future__ import division
# Command line interface.
import argparse
from lalinference.bayestar import command

parser = command.ArgumentParser()
parser.add_argument(
    '--initial-mass1', metavar='Msun', type=float, default=1.4,
    help='Mass of first component of an initial lattice point '
         '[default: %(default)s]')
parser.add_argument(
    '--initial-mass2', metavar='Msun', type=float, default=1.4,
    help='Mass of second component of an initial lattice point '
         '[default: %(default)s]')
parser.add_argument(
    '--min-mass', metavar='Msun', type=float, default=1.,
    help='Minimum component mass [default: %(default)s]')
parser.add_argument(
    '--max-mass', metavar='Msun', type=float, default=3.,
    help='Maximum component mass [default: %(default)s]')
parser.add_argument(
    '-o', '--output', metavar='OUT.xml[.gz]', type=argparse.FileType('wb'),
    default='-', help='Name of output file [default: stdout]')
opts = parser.parse_args()
# Python standard library imports.
import os
# LIGO-LW XML imports.
from glue.ligolw import ligolw
from glue.ligolw.utils import process as ligolw_process
from glue.ligolw import utils as ligolw_utils
from glue.ligolw import lsctables
# glue and LAL imports.
from glue.text_progress_bar import ProgressBar
import lal
from lalinspiral.sbank.tau0tau3 import m1m2_to_mchirp
import lalsimulation
# BAYESTAR imports.
from lalinference.bayestar import filter
# Other imports.
import numpy as np
import scipy.linalg
# Open output file.
xmldoc = ligolw.Document()
xmldoc.appendChild(ligolw.LIGO_LW())

# Write process metadata to output file.
process = command.register_to_xmldoc(xmldoc, parser, opts)

# Create a SnglInspiral table and initialize its row ID counter.
sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
xmldoc.childNodes[0].appendChild(sngl_inspiral_table)
sngl_inspiral_table.set_next_id(lsctables.SnglInspiralID(0))

f_low = 10.
f_high = 2048.
df = 0.1

initial_mchirp = m1m2_to_mchirp(opts.initial_mass1, opts.initial_mass2)
initial_mtotal = opts.initial_mass1 + opts.initial_mass2
initial_eta = opts.initial_mass1 * opts.initial_mass2 / initial_mtotal**2
initial_chi = 0.
initial_chirp_times = \
    lalsimulation.SimInspiralTaylorF2RedSpinChirpTimesFromMchirpEtaChi(
        initial_mchirp, initial_eta, initial_chi, f_low)
initial_theta0_theta3 = initial_chirp_times[:2]
# Sampled PSD.
S = lal.CreateREAL8Vector(int(f_high // df))
S.data = [lalsimulation.SimNoisePSDaLIGOZeroDetHighPower(i * df)
          for i in range(len(S.data))]

# Allocate noise moments.
moments = [lal.CreateREAL8Vector(int((f_high - f_low) // df))
           for _ in range(29)]

# Compute noise moments.
lalsimulation.SimInspiralTaylorF2RedSpinComputeNoiseMoments(
    *(moments + [S, f_low, df]))

# Compute Fisher information matrix. Note the factor of 2: the function
# SimInspiralTaylorF2RedSpinFisherMatrixChirpTimes returns the Fisher
# matrix for an SNR of 1/sqrt(2), so doubling it gives the Fisher matrix
# at unit SNR. The list() call guards against the chirp times arriving as
# a tuple from the SWIG wrapper, which cannot be concatenated with a list.
I = lalsimulation.SimInspiralTaylorF2RedSpinFisherMatrixChirpTimes(
    *(list(initial_chirp_times) + [f_low, df] + moments)).data * 2
# Blockwise separation of the Fisher matrix. Parameters are in the
# following order: theta0, theta3, theta3S, t0, phi0.
IA = I[0:2, 0:2]  # (theta0, theta3) block
IB = I[0:2, 3:5]  # cross block
ID = I[3:5, 3:5]  # (time, phase) block
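The excerpt ends here, but the purpose of the blockwise split is the standard one: the metric on (theta0, theta3) alone is obtained by projecting out the nuisance time and phase parameters, which is the Schur complement IA - IB ID^-1 IB^T of the (time, phase) block. A hedged numpy sketch of that projection, on a toy matrix rather than the script's actual Fisher matrix (the original file continues past this excerpt, so this is illustrative, not the script's verbatim continuation):

import numpy as np
import scipy.linalg

# Toy 5x5 symmetric positive-definite stand-in for the Fisher matrix,
# with parameters ordered (theta0, theta3, theta3S, t0, phi0).
rng = np.random.RandomState(0)
A = rng.standard_normal((5, 5))
I = np.dot(A, A.T) + 5 * np.eye(5)

IA = I[0:2, 0:2]  # (theta0, theta3) block
IB = I[0:2, 3:5]  # cross block
ID = I[3:5, 3:5]  # (time, phase) block

# Maximizing the match over time and phase projects the Fisher matrix
# onto the (theta0, theta3) subspace: the Schur complement of ID.
metric = IA - np.dot(IB, scipy.linalg.solve(ID, IB.T, assume_a='pos'))
print(metric)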