Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (45)
Showing with 552 additions and 418 deletions
......@@ -71,6 +71,25 @@ basic-3.10:
<<: *test-python
image: python:3.10
.test-samplers-import: &test-samplers-import
stage: initial
script:
- python -m pip install .
- python -m pip list installed
- python test/test_samplers_import.py
import-samplers-3.8:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
import-samplers-3.9:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
import-samplers-3.10:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.precommits: &precommits
stage: initial
script:
......@@ -97,13 +116,12 @@ precommits-py3.9:
CACHE_DIR: ".pip39"
PYVERSION: "python39"
# FIXME: when image builds for 3.10 change this back.
#precommits-py3.10:
# <<: *precommits
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# variables:
# CACHE_DIR: ".pip310"
# PYVERSION: "python310"
precommits-py3.10:
<<: *precommits
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
variables:
CACHE_DIR: ".pip310"
PYVERSION: "python310"
install:
stage: initial
......@@ -146,19 +164,16 @@ python-3.9:
- htmlcov/
expire_in: 30 days
# add back when 3.10 image is available
#python-3.10:
# <<: *unit-test
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10:
<<: *unit-test
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.test-sampler: &test-sampler
stage: test
script:
- python -m pip install .
- python -m pip install schwimmbad
- python -m pip list installed
- pytest test/integration/sampler_run_test.py --durations 10 -v
python-3.8-samplers:
......@@ -171,11 +186,10 @@ python-3.9-samplers:
needs: ["basic-3.9", "precommits-py3.9"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
# add back when 3.10 image is available
#python-3.10-samplers:
# <<: *test-sampler
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10-samplers:
<<: *test-sampler
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
integration-tests-python-3.9:
stage: test
......@@ -209,11 +223,10 @@ plotting-python-3.9:
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
needs: ["basic-3.9", "precommits-py3.9"]
# add back when 3.10 image is available
#plotting-python-3.10:
# <<: *plotting
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# needs: ["basic-3.10", "precommits-py3.10"]
plotting-python-3.10:
<<: *plotting
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
needs: ["basic-3.10", "precommits-py3.10"]
# ------------------- Docs stage -------------------------------------------
......@@ -257,8 +270,11 @@ pages:
stage: deploy
image: docker:20.10.12
needs: ["containers"]
only:
- schedules
except:
refs:
- schedules
changes:
- containers/*
script:
- cd containers
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
......@@ -276,11 +292,10 @@ build-python39-container:
variables:
PYVERSION: "python39"
# add back when 3.10 image is available
#build-python310-container:
# <<: *build-container
# variables:
# PYVERSION: "python310"
build-python310-container:
<<: *build-container
variables:
PYVERSION: "python310"
pypi-release:
stage: deploy
......@@ -289,7 +304,7 @@ pypi-release:
TWINE_USERNAME: $PYPI_USERNAME
TWINE_PASSWORD: $PYPI_PASSWORD
before_script:
- pip install twine
- pip install twine setuptools_scm
- python setup.py sdist
script:
- twine upload dist/*
......
......@@ -29,6 +29,7 @@ Hector Estelles
Ignacio Magaña Hernandez
Isobel Marguarethe Romero-Shaw
Jack Heinzel
Jacob Golomb
Jade Powell
James A Clark
Jeremy G Baier
......@@ -61,7 +62,7 @@ Paul Easter
Paul Lasky
Philip Relton
Rhys Green
Richard Udall
Rhiannon Udall
Rico Lo
Roberto Cotesta
Rory Smith
......
# All notable changes will be documented in this file
## [1.3.0] 2022-10-23
Version 1.3.0 release of Bilby
This release has a major change to a sampler interface: `pymc3` is no longer supported and users should switch to `pymc>=4`.
This release also adds a new top-level dependency, `bilby-cython`.
This release also contains various documentation improvements.
### Added
- Improved logging of likelihood information when starting sampling (!1148)
- Switch some geometric calculations to use optimized bilby-cython package (!1053)
- Directly specify the starting point for `bilby_mcmc` (!1155)
- Allow a signal to be specified to only be present in a specific `Interferometer` (!1164)
- Store time domain model function in CBCResult metadata (!1165)
### Changes
- Switch from `pymc3` to `pymc` (!1117)
- Relax equality check for distance marginalization lookup to allow cross-platform use (!1150)
- Fix to deal with non-checkpointing `bilby_mcmc` analyses (!1151)
- Allow result objects with different analysis configurations to be combined (!1153)
- Improve the storing of environment information (!166)
- Fix issue when specifying distance and redshift independently (!1154)
- Fix a bug in the storage of likelihood/prior samples for `bilby_mcmc` (!1156)
## [1.2.1] 2022-09-05
Version 1.2.1 release of Bilby
......
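As the 1.3.0 notes above state, the `pymc3` sampler has been replaced by `pymc`. A minimal migration sketch, assuming an existing bilby likelihood and priors (the `draws` keyword is one of the `pymc.sample` arguments that bilby propagates):

import bilby

# assumes `likelihood` is any bilby Likelihood and `priors` a PriorDict
result = bilby.run_sampler(
    likelihood=likelihood,
    priors=priors,
    sampler="pymc",  # previously sampler="pymc3"
    draws=1000,      # forwarded to pymc.sample
)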
......@@ -547,6 +547,7 @@ class GMMProposal(DensityEstimateProposal):
def _sample(self, nsamples=None):
return np.squeeze(self.density.sample(n_samples=nsamples)[0])
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("sklearn") is None:
if warn:
......@@ -593,12 +594,15 @@ class NormalizingFlowProposal(DensityEstimateProposal):
fallback=fallback,
scale_fits=scale_fits,
)
self.setup_flow()
self.setup_optimizer()
self.initialised = False
self.max_training_epochs = max_training_epochs
self.js_factor = js_factor
def initialise(self):
self.setup_flow()
self.setup_optimizer()
self.initialised = True
def setup_flow(self):
if self.ndim < 3:
self.setup_basic_flow()
......@@ -699,6 +703,9 @@ class NormalizingFlowProposal(DensityEstimateProposal):
self.trained = True
def propose(self, chain):
if self.initialised is False:
self.initialise()
import torch
self.steps_since_refit += 1
......@@ -728,6 +735,7 @@ class NormalizingFlowProposal(DensityEstimateProposal):
return theta, float(log_factor)
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("nflows") is None:
if warn:
......@@ -1094,10 +1102,6 @@ def get_proposal_cycle(string, priors, L1steps=1, warn=True):
]
if GMMProposal.check_dependencies(warn=warn):
plist.append(GMMProposal(priors, weight=big_weight, scale_fits=L1steps))
if NormalizingFlowProposal.check_dependencies(warn=warn):
plist.append(
NormalizingFlowProposal(priors, weight=big_weight, scale_fits=L1steps)
)
plist = remove_proposals_using_string(plist, string)
return ProposalCycle(plist)
......
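The proposal change above defers building the normalizing flow and optimizer from construction time to the first call to `propose`. A minimal sketch of that lazy-initialization pattern, with illustrative names rather than the bilby classes:

class LazyProposal:
    def __init__(self):
        # heavy setup is deferred until the proposal is first used
        self.initialised = False

    def initialise(self):
        self.state = list(range(10))  # stand-in for flow/optimizer setup
        self.initialised = True

    def propose(self, step):
        if not self.initialised:
            self.initialise()
        return self.state[step % len(self.state)]

proposal = LazyProposal()
print(proposal.propose(3))  # triggers initialise() on first use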
......@@ -2,9 +2,11 @@ import datetime
import os
import time
from collections import Counter
from pathlib import Path
import numpy as np
import pandas as pd
from scipy.optimize import differential_evolution
from ..core.result import rejection_sample
from ..core.sampler.base_sampler import (
......@@ -113,6 +115,13 @@ class Bilby_MCMC(MCMCSampler):
evidence_method: str, [stepping_stone, thermodynamic]
The evidence calculation method to use. Defaults to stepping_stone, but
the results of all available methods are stored in the ln_z_dict.
initial_sample_method: str
Method to draw the initial sample. Either "prior" (a random draw
from the prior) or "maximize" (use an optimization approach to attempt
to find the maximum posterior estimate).
initial_sample_dict: dict
A dictionary of initial sample values. If given, these overwrite the
corresponding entries of the initial sample drawn using
initial_sample_method; the dictionary may be incomplete.
verbose: bool
Whether to print diagnostic output during the run.
......@@ -143,6 +152,8 @@ class Bilby_MCMC(MCMCSampler):
fixed_tau=None,
tau_window=None,
evidence_method="stepping_stone",
initial_sample_method="prior",
initial_sample_dict=None,
)
def __init__(
......@@ -187,6 +198,8 @@ class Bilby_MCMC(MCMCSampler):
self.proposal_cycle = self.kwargs["proposal_cycle"]
self.pt_rejection_sample = self.kwargs["pt_rejection_sample"]
self.evidence_method = self.kwargs["evidence_method"]
self.initial_sample_method = self.kwargs["initial_sample_method"]
self.initial_sample_dict = self.kwargs["initial_sample_dict"]
self.printdt = self.kwargs["printdt"]
check_directory_exists_and_if_not_mkdir(self.outdir)
......@@ -238,8 +251,8 @@ class Bilby_MCMC(MCMCSampler):
@staticmethod
def add_data_to_result(result, ptsampler, outdir, label, make_plots):
result.samples = ptsampler.samples
result.log_likelihood_evaluations = result.samples[LOGLKEY]
result.log_prior_evaluations = result.samples[LOGPKEY]
result.log_likelihood_evaluations = result.samples[LOGLKEY].to_numpy()
result.log_prior_evaluations = result.samples[LOGPKEY].to_numpy()
ptsampler.compute_evidence(
outdir=outdir,
label=label,
......@@ -286,6 +299,8 @@ class Bilby_MCMC(MCMCSampler):
pool=self.pool,
use_ratio=self.use_ratio,
evidence_method=self.evidence_method,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
def get_setup_string(self):
......@@ -387,9 +402,10 @@ class Bilby_MCMC(MCMCSampler):
logger.info("Written checkpoint file {}".format(self.resume_file))
else:
logger.warning(
"Cannot write pickle resume file! "
"Job will not resume if interrupted."
"Cannot write pickle resume file! Job may not resume if interrupted."
)
# Touch the file to postpone next check-point attempt
Path(self.resume_file).touch(exist_ok=True)
self.ptsampler.pool = _pool
def print_long_progress(self):
......@@ -520,9 +536,13 @@ class BilbyPTMCMCSampler(object):
pool,
use_ratio,
evidence_method,
initial_sample_method,
initial_sample_dict,
):
self.set_pt_inputs(pt_inputs)
self.use_ratio = use_ratio
self.initial_sample_method = initial_sample_method
self.initial_sample_dict = initial_sample_dict
self.setup_sampler_dictionary(convergence_inputs, proposal_cycle)
self.set_convergence_inputs(convergence_inputs)
self.pt_rejection_sample = pt_rejection_sample
......@@ -570,10 +590,12 @@ class BilbyPTMCMCSampler(object):
betas = self.get_initial_betas()
logger.info(
f"Initializing BilbyPTMCMCSampler with:"
f"ntemps={self.ntemps},"
f"nensemble={self.nensemble},"
f"pt_ensemble={self.pt_ensemble},"
f"initial_betas={betas}\n"
f"ntemps={self.ntemps}, "
f"nensemble={self.nensemble}, "
f"pt_ensemble={self.pt_ensemble}, "
f"initial_betas={betas}, "
f"initial_sample_method={self.initial_sample_method}, "
f"initial_sample_dict={self.initial_sample_dict}\n"
)
self.sampler_dictionary = dict()
for Tindex, beta in enumerate(betas):
......@@ -589,6 +611,8 @@ class BilbyPTMCMCSampler(object):
convergence_inputs=convergence_inputs,
proposal_cycle=proposal_cycle,
use_ratio=self.use_ratio,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
for Eindex in range(n)
]
......@@ -1075,6 +1099,8 @@ class BilbyMCMCSampler(object):
Tindex=0,
Eindex=0,
use_ratio=False,
initial_sample_method="prior",
initial_sample_dict=None,
):
self.beta = beta
self.Tindex = Tindex
......@@ -1084,12 +1110,24 @@ class BilbyMCMCSampler(object):
self.parameters = _sampling_convenience_dump.priors.non_fixed_keys
self.ndim = len(self.parameters)
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
if initial_sample_method.lower() == "prior":
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
elif initial_sample_method.lower() in ["maximize", "maximise", "maximum"]:
initial_sample = get_initial_maximimum_posterior_sample(self.beta)
else:
raise ValueError(
f"initial sample method {initial_sample_method} not understood"
)
if initial_sample_dict is not None:
initial_sample.update(initial_sample_dict)
logger.info(f"Using initial sample {initial_sample}")
initial_sample = Sample(initial_sample)
initial_sample[LOGLKEY] = self.log_likelihood(initial_sample)
initial_sample[LOGPKEY] = self.log_prior(initial_sample)
......@@ -1264,6 +1302,42 @@ class BilbyMCMCSampler(object):
return samples
def get_initial_maximimum_posterior_sample(beta):
"""A method to attempt optimization of the maximum likelihood
This uses a simple scipy optimization approach, starting from a number
of draws from the prior to avoid problems with local optimization.
"""
logger.info("Finding initial maximum posterior estimate")
likelihood = _sampling_convenience_dump.likelihood
priors = _sampling_convenience_dump.priors
search_parameter_keys = _sampling_convenience_dump.search_parameter_keys
bounds = []
for key in search_parameter_keys:
bounds.append((priors[key].minimum, priors[key].maximum))
def neg_log_post(x):
sample = {key: val for key, val in zip(search_parameter_keys, x)}
ln_prior = priors.ln_prob(sample)
if np.isinf(ln_prior):
return np.inf  # point outside the prior support: +inf so the minimizer avoids it
likelihood.parameters.update(sample)
return -beta * likelihood.log_likelihood() - ln_prior
res = differential_evolution(neg_log_post, bounds, popsize=100, init="sobol")
if res.success:
sample = {key: val for key, val in zip(search_parameter_keys, res.x)}
logger.info(f"Initial maximum posterior estimate {sample}")
return sample
else:
raise ValueError("Failed to find initial maximum posterior estimate")
# Methods used to aid parallelisation:
......
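A usage sketch for the new keyword arguments introduced above, assuming a standard bilby setup (the parameter name in initial_sample_dict is illustrative):

import bilby

result = bilby.run_sampler(
    likelihood=likelihood,
    priors=priors,
    sampler="bilby_mcmc",
    initial_sample_method="maximize",          # or the default "prior"
    initial_sample_dict={"chirp_mass": 35.0},  # overrides this entry of the drawn sample
)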
......@@ -1710,11 +1710,28 @@ class ResultList(list):
else:
raise TypeError("Could not append a non-Result type")
def combine(self, shuffle=False):
def combine(self, shuffle=False, consistency_level="error"):
"""
Return the combined results in a :class:bilby.core.result.Result`
object.
Parameters
----------
shuffle: bool
If true, shuffle the samples when combining, otherwise they are concatenated.
consistency_level: str, [warning, error]
If warning, print a warning if inconsistencies are discovered between the results before combining.
If error, raise an error if inconsistencies are discovered between the results before combining.
Returns
-------
result: bilby.core.result.Result
The combined result file
"""
self.consistency_level = consistency_level
if len(self) == 0:
return Result()
elif len(self) == 1:
......@@ -1844,24 +1861,37 @@ class ResultList(list):
except ValueError:
raise ResultListError("Not all results contain nested samples")
def _error_or_warning_consistency(self, msg):
if self.consistency_level == "error":
raise ResultListError(msg)
elif self.consistency_level == "warning":
logger.warning(msg)
else:
raise ValueError(f"Input consistency_level {self.consistency_level} not understood")
def check_consistent_priors(self):
for res in self:
for p in self[0].priors.keys():
if not self[0].priors[p] == res.priors[p] or len(self[0].priors) != len(res.priors):
raise ResultListError("Inconsistent priors between results")
msg = "Inconsistent priors between results"
self._error_or_warning_consistency(msg)
def check_consistent_parameters(self):
if not np.all([set(self[0].search_parameter_keys) == set(res.search_parameter_keys) for res in self]):
raise ResultListError("Inconsistent parameters between results")
msg = "Inconsistent parameters between results"
self._error_or_warning_consistency(msg)
def check_consistent_data(self):
if not np.allclose([res.log_noise_evidence for res in self], self[0].log_noise_evidence, atol=1e-8, rtol=0.0)\
and not np.all([np.isnan(res.log_noise_evidence) for res in self]):
raise ResultListError("Inconsistent data between results")
msg = "Inconsistent data between results"
self._error_or_warning_consistency(msg)
def check_consistent_sampler(self):
if not np.all([res.sampler == self[0].sampler for res in self]):
raise ResultListError("Inconsistent samplers between results")
msg = "Inconsistent samplers between results"
self._error_or_warning_consistency(msg)
@latex_plot_format
......
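A sketch of the new consistency_level option, assuming two result files from comparable analyses (the file names are illustrative):

from bilby.core.result import ResultList, read_in_result

results = ResultList([
    read_in_result("outdir/run1_result.json"),
    read_in_result("outdir/run2_result.json"),
])
# warn, rather than raise, if e.g. the priors differ slightly between runs
combined = results.combine(shuffle=True, consistency_level="warning")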
......@@ -6,7 +6,7 @@ import bilby
from bilby.bilby_mcmc import Bilby_MCMC
from ..prior import DeltaFunction, PriorDict
from ..utils import command_line_args, loaded_modules_dict, logger
from ..utils import command_line_args, env_package_list, loaded_modules_dict, logger
from . import proposal
from .base_sampler import Sampler, SamplingMarginalisedParameterError
from .cpnest import Cpnest
......@@ -21,7 +21,7 @@ from .nestle import Nestle
from .polychord import PyPolyChord
from .ptemcee import Ptemcee
from .ptmcmc import PTMCMCSampler
from .pymc3 import Pymc3
from .pymc import Pymc
from .pymultinest import Pymultinest
from .ultranest import Ultranest
from .zeus import Zeus
......@@ -38,7 +38,7 @@ IMPLEMENTED_SAMPLERS = {
"nestle": Nestle,
"ptemcee": Ptemcee,
"ptmcmcsampler": PTMCMCSampler,
"pymc3": Pymc3,
"pymc": Pymc,
"pymultinest": Pymultinest,
"pypolychord": PyPolyChord,
"ultranest": Ultranest,
......@@ -175,6 +175,7 @@ def run_sampler(
likelihood.outdir = outdir
meta_data["likelihood"] = likelihood.meta_data
meta_data["loaded_modules"] = loaded_modules_dict()
meta_data["environment_packages"] = env_package_list(as_dataframe=True)
if command_line_args.bilby_zero_likelihood_mode:
from bilby.core.likelihood import ZeroLikelihood
......
......@@ -243,6 +243,7 @@ class Sampler(object):
self._fixed_parameter_keys = list()
self._constraint_parameter_keys = list()
self._initialise_parameters()
self._log_information_about_priors_and_likelihood()
self.exit_code = exit_code
......@@ -353,11 +354,16 @@ class Sampler(object):
self.likelihood.parameters[key] = self.priors[key].sample()
self._fixed_parameter_keys.append(key)
logger.info("Search parameters:")
def _log_information_about_priors_and_likelihood(self):
logger.info("Analysis priors:")
for key in self._search_parameter_keys + self._constraint_parameter_keys:
logger.info(f" {key} = {self.priors[key]}")
logger.info(f"{key}={self.priors[key]}")
for key in self._fixed_parameter_keys:
logger.info(f" {key} = {self.priors[key].peak}")
logger.info(f"{key}={self.priors[key].peak}")
logger.info(f"Analysis likelihood class: {self.likelihood.__class__}")
logger.info(
f"Analysis likelihood noise evidence: {self.likelihood.noise_log_likelihood()}"
)
def _initialise_result(self, result_class):
"""
......
......@@ -14,13 +14,13 @@ from ..utils import derivatives, infer_args_from_method
from .base_sampler import MCMCSampler
class Pymc3(MCMCSampler):
"""bilby wrapper of the PyMC3 sampler (https://docs.pymc.io/)
class Pymc(MCMCSampler):
"""bilby wrapper of the PyMC sampler (https://www.pymc.io/)
All keyword arguments (i.e., the kwargs) passed to `run_sampler` will be
propagated to `pymc3.sample` where appropriate, see documentation for that
propagated to `pymc.sample` where appropriate, see documentation for that
class for further help. Under Other Parameters, we list commonly used
kwargs and the bilby, or where appropriate, PyMC3 defaults.
kwargs and the bilby, or where appropriate, PyMC defaults.
Parameters
==========
......@@ -40,11 +40,11 @@ class Pymc3(MCMCSampler):
particular variable names (these are case insensitive). If passing a
dictionary of methods, the values keyed on particular variables can be
lists of methods to form compound steps. If no method is provided for
any particular variable then PyMC3 will automatically decide upon a
any particular variable then PyMC will automatically decide upon a
default, with the first option being the NUTS sampler. The currently
allowed methods are 'NUTS', 'HamiltonianMC', 'Metropolis',
'BinaryMetropolis', 'BinaryGibbsMetropolis', 'Slice', and
'CategoricalGibbsMetropolis'. Note: you cannot provide a PyMC3 step
'CategoricalGibbsMetropolis'. Note: you cannot provide a PyMC step
method function itself here as it is outside of the model context
manager.
step_kwargs: dict
......@@ -59,9 +59,8 @@ class Pymc3(MCMCSampler):
step=None,
init="auto",
n_init=200000,
start=None,
initvals=None,
trace=None,
chain_idx=0,
chains=2,
cores=1,
tune=500,
......@@ -109,7 +108,7 @@ class Pymc3(MCMCSampler):
self.default_step_kwargs = {m.__name__.lower(): None for m in STEP_METHODS}
self.default_kwargs.update(self.default_step_kwargs)
super(Pymc3, self).__init__(
super(Pymc, self).__init__(
likelihood=likelihood,
priors=priors,
outdir=outdir,
......@@ -124,24 +123,24 @@ class Pymc3(MCMCSampler):
@staticmethod
def _import_external_sampler():
import pymc3
from pymc3.sampling import STEP_METHODS
from pymc3.theanof import floatX
import pymc
from pymc.aesaraf import floatX
from pymc.step_methods import STEP_METHODS
return pymc3, STEP_METHODS, floatX
return pymc, STEP_METHODS, floatX
@staticmethod
def _import_theano():
import theano # noqa
import theano.tensor as tt
from theano.compile.ops import as_op # noqa
def _import_aesara():
import aesara # noqa
import aesara.tensor as tt
from aesara.compile.ops import as_op # noqa
return theano, tt, as_op
return aesara, tt, as_op
def _verify_parameters(self):
"""
Change `_verify_parameters()` to just pass, i.e., don't try and
evaluate the likelihood for PyMC3.
evaluate the likelihood for PyMC.
"""
pass
......@@ -154,64 +153,64 @@ class Pymc3(MCMCSampler):
def setup_prior_mapping(self):
"""
Set the mapping between predefined bilby priors and the equivalent
PyMC3 distributions.
PyMC distributions.
"""
prior_map = {}
self.prior_map = prior_map
# predefined PyMC3 distributions
# predefined PyMC distributions
prior_map["Gaussian"] = {
"pymc3": "Normal",
"argmap": {"mu": "mu", "sigma": "sd"},
"pymc": "Normal",
"argmap": {"mu": "mu", "sigma": "sigma"},
}
prior_map["TruncatedGaussian"] = {
"pymc3": "TruncatedNormal",
"pymc": "TruncatedNormal",
"argmap": {
"mu": "mu",
"sigma": "sd",
"sigma": "sigma",
"minimum": "lower",
"maximum": "upper",
},
}
prior_map["HalfGaussian"] = {"pymc3": "HalfNormal", "argmap": {"sigma": "sd"}}
prior_map["HalfGaussian"] = {"pymc": "HalfNormal", "argmap": {"sigma": "sigma"}}
prior_map["Uniform"] = {
"pymc3": "Uniform",
"pymc": "Uniform",
"argmap": {"minimum": "lower", "maximum": "upper"},
}
prior_map["LogNormal"] = {
"pymc3": "Lognormal",
"argmap": {"mu": "mu", "sigma": "sd"},
"pymc": "Lognormal",
"argmap": {"mu": "mu", "sigma": "sigma"},
}
prior_map["Exponential"] = {
"pymc3": "Exponential",
"pymc": "Exponential",
"argmap": {"mu": "lam"},
"argtransform": {"mu": lambda mu: 1.0 / mu},
}
prior_map["StudentT"] = {
"pymc3": "StudentT",
"argmap": {"df": "nu", "mu": "mu", "scale": "sd"},
"pymc": "StudentT",
"argmap": {"df": "nu", "mu": "mu", "scale": "sigma"},
}
prior_map["Beta"] = {
"pymc3": "Beta",
"pymc": "Beta",
"argmap": {"alpha": "alpha", "beta": "beta"},
}
prior_map["Logistic"] = {
"pymc3": "Logistic",
"pymc": "Logistic",
"argmap": {"mu": "mu", "scale": "s"},
}
prior_map["Cauchy"] = {
"pymc3": "Cauchy",
"pymc": "Cauchy",
"argmap": {"alpha": "alpha", "beta": "beta"},
}
prior_map["Gamma"] = {
"pymc3": "Gamma",
"pymc": "Gamma",
"argmap": {"k": "alpha", "theta": "beta"},
"argtransform": {"theta": lambda theta: 1.0 / theta},
}
prior_map["ChiSquared"] = {"pymc3": "ChiSquared", "argmap": {"nu": "nu"}}
prior_map["ChiSquared"] = {"pymc": "ChiSquared", "argmap": {"nu": "nu"}}
prior_map["Interped"] = {
"pymc3": "Interpolated",
"pymc": "Interpolated",
"argmap": {"xx": "x_points", "yy": "pdf_points"},
}
prior_map["Normal"] = prior_map["Gaussian"]
......@@ -237,7 +236,7 @@ class Pymc3(MCMCSampler):
def _deltafunction_prior(self, key, **kwargs):
"""
Map the bilby delta function prior to a single value for PyMC3.
Map the bilby delta function prior to a single value for PyMC.
"""
# check prior is a DeltaFunction
......@@ -248,15 +247,15 @@ class Pymc3(MCMCSampler):
def _sine_prior(self, key):
"""
Map the bilby Sine prior to a PyMC3 style function
Map the bilby Sine prior to a PyMC style function
"""
# check prior is a Sine
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
theano, tt, as_op = self._import_theano()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
aesara, tt, as_op = self._import_aesara()
if isinstance(self.priors[key], Sine):
class Pymc3Sine(pymc3.Continuous):
class PymcSine(pymc.Continuous):
def __init__(self, lower=0.0, upper=np.pi):
if lower >= upper:
raise ValueError("Lower bound is above upper bound!")
......@@ -272,20 +271,20 @@ class Pymc3(MCMCSampler):
- upper * tt.cos(upper)
) / self.norm
transform = pymc3.distributions.transforms.interval(lower, upper)
transform = pymc.distributions.transforms.interval(lower, upper)
super(Pymc3Sine, self).__init__(transform=transform)
super(PymcSine, self).__init__(transform=transform)
def logp(self, value):
upper = self.upper
lower = self.lower
return pymc3.distributions.dist_math.bound(
return pymc.distributions.dist_math.bound(
tt.log(tt.sin(value) / self.norm),
lower <= value,
value <= upper,
)
return Pymc3Sine(
return PymcSine(
key, lower=self.priors[key].minimum, upper=self.priors[key].maximum
)
else:
......@@ -293,15 +292,15 @@ class Pymc3(MCMCSampler):
def _cosine_prior(self, key):
"""
Map the bilby Cosine prior to a PyMC3 style function
Map the bilby Cosine prior to a PyMC style function
"""
# check prior is a Cosine
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
theano, tt, as_op = self._import_theano()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
aesara, tt, as_op = self._import_aesara()
if isinstance(self.priors[key], Cosine):
class Pymc3Cosine(pymc3.Continuous):
class PymcCosine(pymc.Continuous):
def __init__(self, lower=-np.pi / 2.0, upper=np.pi / 2.0):
if lower >= upper:
raise ValueError("Lower bound is above upper bound!")
......@@ -316,20 +315,20 @@ class Pymc3(MCMCSampler):
- tt.cos(lower)
) / self.norm
transform = pymc3.distributions.transforms.interval(lower, upper)
transform = pymc.distributions.transforms.interval(lower, upper)
super(Pymc3Cosine, self).__init__(transform=transform)
super(PymcCosine, self).__init__(transform=transform)
def logp(self, value):
upper = self.upper
lower = self.lower
return pymc3.distributions.dist_math.bound(
return pymc.distributions.dist_math.bound(
tt.log(tt.cos(value) / self.norm),
lower <= value,
value <= upper,
)
return Pymc3Cosine(
return PymcCosine(
key, lower=self.priors[key].minimum, upper=self.priors[key].maximum
)
else:
......@@ -337,12 +336,12 @@ class Pymc3(MCMCSampler):
def _powerlaw_prior(self, key):
"""
Map the bilby PowerLaw prior to a PyMC3 style function
Map the bilby PowerLaw prior to a PyMC style function
"""
# check prior is a PowerLaw
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
theano, tt, as_op = self._import_theano()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
aesara, tt, as_op = self._import_aesara()
if isinstance(self.priors[key], PowerLaw):
# check power law is set
......@@ -353,12 +352,12 @@ class Pymc3(MCMCSampler):
# use Pareto distribution
palpha = -(1.0 + self.priors[key].alpha)
return pymc3.Bound(pymc3.Pareto, upper=self.priors[key].minimum)(
return pymc.Bound(pymc.Pareto, upper=self.priors[key].minimum)(
key, alpha=palpha, m=self.priors[key].maximum
)
else:
class Pymc3PowerLaw(pymc3.Continuous):
class PymcPowerLaw(pymc.Continuous):
def __init__(self, lower, upper, alpha, testval=1):
falpha = alpha
self.lower = lower = tt.as_tensor_variable(floatX(lower))
......@@ -374,11 +373,9 @@ class Pymc3(MCMCSampler):
* (tt.pow(self.upper, beta) - tt.pow(self.lower, beta))
)
transform = pymc3.distributions.transforms.interval(
lower, upper
)
transform = pymc.distributions.transforms.interval(lower, upper)
super(Pymc3PowerLaw, self).__init__(
super(PymcPowerLaw, self).__init__(
transform=transform, testval=testval
)
......@@ -387,13 +384,13 @@ class Pymc3(MCMCSampler):
lower = self.lower
alpha = self.alpha
return pymc3.distributions.dist_math.bound(
return pymc.distributions.dist_math.bound(
alpha * tt.log(value) + tt.log(self.norm),
lower <= value,
value <= upper,
)
return Pymc3PowerLaw(
return PymcPowerLaw(
key,
lower=self.priors[key].minimum,
upper=self.priors[key].maximum,
......@@ -404,12 +401,12 @@ class Pymc3(MCMCSampler):
def _multivariate_normal_prior(self, key):
"""
Map the bilby MultivariateNormal prior to a PyMC3 style function.
Map the bilby MultivariateNormal prior to a PyMC style function.
"""
# check prior is a PowerLaw
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
theano, tt, as_op = self._import_theano()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
aesara, tt, as_op = self._import_aesara()
if isinstance(self.priors[key], MultivariateGaussian):
# get names of multivariate Gaussian parameters
mvpars = self.priors[key].mvg.names
......@@ -447,7 +444,7 @@ class Pymc3(MCMCSampler):
upper[i] = maxmu[i] + 100.0 * maxsigma[i]
# create a bounded MultivariateNormal distribution
BoundedMvN = pymc3.Bound(pymc3.MvNormal, lower=lower, upper=upper)
BoundedMvN = pymc.Bound(pymc.MvNormal, lower=lower, upper=upper)
comp_dists = [] # list of any component modes
for i in range(mvg.nmodes):
......@@ -462,7 +459,7 @@ class Pymc3(MCMCSampler):
# create a Mixture model
setname = f"mixture{self.multivariate_normal_num_sets}"
mix = pymc3.Mixture(
mix = pymc.Mixture(
setname,
w=mvg.weights,
comp_dists=comp_dists,
......@@ -486,7 +483,7 @@ class Pymc3(MCMCSampler):
def run_sampler(self):
# set the step method
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
step_methods = {m.__name__.lower(): m.__name__ for m in STEP_METHODS}
if "step" in self._kwargs:
self.step_method = self._kwargs.pop("step")
......@@ -527,15 +524,15 @@ class Pymc3(MCMCSampler):
else:
self.step_method = None
# initialise the PyMC3 model
self.pymc3_model = pymc3.Model()
# initialise the PyMC model
self.pymc_model = pymc.Model()
# set the prior
self.set_prior()
# if a custom log_likelihood function requires a `sampler` argument
# then use that log_likelihood function, with the assumption that it
# takes in a Pymc3 Sampler, with a pymc3_model attribute, and defines
# takes in a Pymc Sampler, with a pymc_model attribute, and defines
# the likelihood within that context manager
likeargs = infer_args_from_method(self.likelihood.log_likelihood)
if "sampler" in likeargs:
......@@ -580,7 +577,7 @@ class Pymc3(MCMCSampler):
if isinstance(self.step_method, dict):
# create list of step methods (any not given will default to NUTS)
self.kwargs["step"] = []
with self.pymc3_model:
with self.pymc_model:
for key in self.step_method:
# check for a compound step list
if isinstance(self.step_method[key], list):
......@@ -591,7 +588,7 @@ class Pymc3(MCMCSampler):
curmethod,
key,
nuts_kwargs,
pymc3,
pymc,
step_kwargs,
step_methods,
)
......@@ -602,12 +599,12 @@ class Pymc3(MCMCSampler):
curmethod,
key,
nuts_kwargs,
pymc3,
pymc,
step_kwargs,
step_methods,
)
else:
with self.pymc3_model:
with self.pymc_model:
# check for a compound step list
if isinstance(self.step_method, list):
compound = []
......@@ -617,7 +614,7 @@ class Pymc3(MCMCSampler):
args, nuts_kwargs = self._create_args_and_nuts_kwargs(
curmethod, nuts_kwargs, step_kwargs
)
compound.append(pymc3.__dict__[step_methods[curmethod]](**args))
compound.append(pymc.__dict__[step_methods[curmethod]](**args))
self.kwargs["step"] = compound
else:
self.kwargs["step"] = None
......@@ -627,32 +624,32 @@ class Pymc3(MCMCSampler):
args, nuts_kwargs = self._create_args_and_nuts_kwargs(
curmethod, nuts_kwargs, step_kwargs
)
self.kwargs["step"] = pymc3.__dict__[step_methods[curmethod]](
self.kwargs["step"] = pymc.__dict__[step_methods[curmethod]](
**args
)
else:
# re-add step_kwargs if no step methods are set
if len(step_kwargs) > 0 and StrictVersion(
pymc3.__version__
pymc.__version__
) < StrictVersion("3.7"):
self.kwargs["step_kwargs"] = step_kwargs
# check whether only NUTS step method has been assigned
if np.all([sm.lower() == "nuts" for sm in methodslist]):
# in this case we can let PyMC3 autoinitialise NUTS, so remove the step methods and re-add nuts_kwargs
# in this case we can let PyMC autoinitialise NUTS, so remove the step methods and re-add nuts_kwargs
self.kwargs["step"] = None
if len(nuts_kwargs) > 0 and StrictVersion(
pymc3.__version__
) < StrictVersion("3.7"):
if len(nuts_kwargs) > 0 and StrictVersion(pymc.__version__) < StrictVersion(
"3.7"
):
self.kwargs["nuts_kwargs"] = nuts_kwargs
elif len(nuts_kwargs) > 0:
# add NUTS kwargs to standard kwargs
self.kwargs.update(nuts_kwargs)
with self.pymc3_model:
with self.pymc_model:
# perform the sampling
trace = pymc3.sample(**self.kwargs, return_inferencedata=True)
trace = pymc.sample(**self.kwargs)
posterior = trace.posterior.to_dataframe().reset_index()
self.result.samples = posterior[self.search_parameter_keys]
......@@ -674,7 +671,7 @@ class Pymc3(MCMCSampler):
return args, nuts_kwargs
def _create_nuts_kwargs(
self, curmethod, key, nuts_kwargs, pymc3, step_kwargs, step_methods
self, curmethod, key, nuts_kwargs, pymc, step_kwargs, step_methods
):
if curmethod == "nuts":
args, nuts_kwargs = self._get_nuts_args(nuts_kwargs, step_kwargs)
......@@ -684,9 +681,7 @@ class Pymc3(MCMCSampler):
else:
args = {}
self.kwargs["step"].append(
pymc3.__dict__[step_methods[curmethod]](
vars=[self.pymc3_priors[key]], **args
)
pymc.__dict__[step_methods[curmethod]](vars=[self.pymc_priors[key]], **args)
)
return nuts_kwargs
......@@ -702,57 +697,55 @@ class Pymc3(MCMCSampler):
args = {}
return args, nuts_kwargs
def _pymc3_version(self):
pymc3, _, _ = self._import_external_sampler()
return pymc3.__version__
def _pymc_version(self):
pymc, _, _ = self._import_external_sampler()
return pymc.__version__
def set_prior(self):
"""
Set the PyMC3 prior distributions.
Set the PyMC prior distributions.
"""
self.setup_prior_mapping()
self.pymc3_priors = dict()
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
self.pymc_priors = dict()
pymc, STEP_METHODS, floatX = self._import_external_sampler()
# initialise a dictionary of multivariate Gaussian parameters
self.multivariate_normal_sets = {}
self.multivariate_normal_num_sets = 0
# set the parameter prior distributions (in the model context manager)
with self.pymc3_model:
with self.pymc_model:
for key in self.priors:
# if the prior contains ln_prob method that takes a 'sampler' argument
# then try using that
lnprobargs = infer_args_from_method(self.priors[key].ln_prob)
if "sampler" in lnprobargs:
try:
self.pymc3_priors[key] = self.priors[key].ln_prob(sampler=self)
self.pymc_priors[key] = self.priors[key].ln_prob(sampler=self)
except RuntimeError:
raise RuntimeError(
("Problem setting PyMC3 prior for ", f"'{key}'")
)
raise RuntimeError(f"Problem setting PyMC prior for '{key}'")
else:
# use Prior distribution name
distname = self.priors[key].__class__.__name__
if distname in self.prior_map:
# check if we have a predefined PyMC3 distribution
# check if we have a predefined PyMC distribution
if (
"pymc3" in self.prior_map[distname]
"pymc" in self.prior_map[distname]
and "argmap" in self.prior_map[distname]
):
# check the required arguments for the PyMC3 distribution
pymc3distname = self.prior_map[distname]["pymc3"]
# check the required arguments for the PyMC distribution
pymcdistname = self.prior_map[distname]["pymc"]
if pymc3distname not in pymc3.__dict__:
if pymcdistname not in pymc.__dict__:
raise ValueError(
f"Prior '{pymc3distname}' is not a known PyMC3 distribution."
f"Prior '{pymcdistname}' is not a known PyMC distribution."
)
reqargs = infer_args_from_method(
pymc3.__dict__[pymc3distname].__init__
pymc.__dict__[pymcdistname].dist
)
# set keyword arguments
......@@ -790,11 +783,11 @@ class Pymc3(MCMCSampler):
else:
if parg in reqargs:
priorkwargs[parg] = None
self.pymc3_priors[key] = pymc3.__dict__[pymc3distname](
self.pymc_priors[key] = pymc.__dict__[pymcdistname](
key, **priorkwargs
)
elif "internal" in self.prior_map[distname]:
self.pymc3_priors[key] = self.prior_map[distname][
self.pymc_priors[key] = self.prior_map[distname][
"internal"
](key)
else:
......@@ -808,12 +801,12 @@ class Pymc3(MCMCSampler):
def set_likelihood(self):
"""
Convert any bilby likelihoods to PyMC3 distributions.
Convert any bilby likelihoods to PyMC distributions.
"""
# create theano Op for the log likelihood if not using a predefined model
pymc3, STEP_METHODS, floatX = self._import_external_sampler()
theano, tt, as_op = self._import_theano()
# create aesara Op for the log likelihood if not using a predefined model
pymc, STEP_METHODS, floatX = self._import_external_sampler()
aesara, tt, as_op = self._import_aesara()
class LogLike(tt.Op):
......@@ -845,7 +838,7 @@ class Pymc3(MCMCSampler):
(theta,) = inputs
return [g[0] * self.logpgrad(theta)]
# create theano Op for calculating the gradient of the log likelihood
# create aesara Op for calculating the gradient of the log likelihood
class LogLikeGrad(tt.Op):
itypes = [tt.dvector]
......@@ -878,7 +871,7 @@ class Pymc3(MCMCSampler):
outputs[0][0] = grads
with self.pymc3_model:
with self.pymc_model:
# check if it is a predefined likelihood function
if isinstance(self.likelihood, GaussianLikelihood):
# check required attributes exist
......@@ -891,24 +884,24 @@ class Pymc3(MCMCSampler):
"Gaussian Likelihood does not have all the correct attributes!"
)
if "sigma" in self.pymc3_priors:
if "sigma" in self.pymc_priors:
# if sigma is supplied use that value
if self.likelihood.sigma is None:
self.likelihood.sigma = self.pymc3_priors.pop("sigma")
self.likelihood.sigma = self.pymc_priors.pop("sigma")
else:
del self.pymc3_priors["sigma"]
del self.pymc_priors["sigma"]
for key in self.pymc3_priors:
for key in self.pymc_priors:
if key not in self.likelihood.function_keys:
raise ValueError(f"Prior key '{key}' is not a function key!")
model = self.likelihood.func(self.likelihood.x, **self.pymc3_priors)
model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)
# set the distribution
pymc3.Normal(
pymc.Normal(
"likelihood",
mu=model,
sd=self.likelihood.sigma,
sigma=self.likelihood.sigma,
observed=self.likelihood.y,
)
elif isinstance(self.likelihood, PoissonLikelihood):
......@@ -920,15 +913,15 @@ class Pymc3(MCMCSampler):
"Poisson Likelihood does not have all the correct attributes!"
)
for key in self.pymc3_priors:
for key in self.pymc_priors:
if key not in self.likelihood.function_keys:
raise ValueError(f"Prior key '{key}' is not a function key!")
# get rate function
model = self.likelihood.func(self.likelihood.x, **self.pymc3_priors)
model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)
# set the distribution
pymc3.Poisson("likelihood", mu=model, observed=self.likelihood.y)
pymc.Poisson("likelihood", mu=model, observed=self.likelihood.y)
elif isinstance(self.likelihood, ExponentialLikelihood):
# check required attributes exist
if not hasattr(self.likelihood, "x") or not hasattr(
......@@ -938,15 +931,15 @@ class Pymc3(MCMCSampler):
"Exponential Likelihood does not have all the correct attributes!"
)
for key in self.pymc3_priors:
for key in self.pymc_priors:
if key not in self.likelihood.function_keys:
raise ValueError(f"Prior key '{key}' is not a function key!")
# get mean function
model = self.likelihood.func(self.likelihood.x, **self.pymc3_priors)
model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)
# set the distribution
pymc3.Exponential(
pymc.Exponential(
"likelihood", lam=1.0 / model, observed=self.likelihood.y
)
elif isinstance(self.likelihood, StudentTLikelihood):
......@@ -961,25 +954,25 @@ class Pymc3(MCMCSampler):
"StudentT Likelihood does not have all the correct attributes!"
)
if "nu" in self.pymc3_priors:
if "nu" in self.pymc_priors:
# if nu is supplied use that value
if self.likelihood.nu is None:
self.likelihood.nu = self.pymc3_priors.pop("nu")
self.likelihood.nu = self.pymc_priors.pop("nu")
else:
del self.pymc3_priors["nu"]
del self.pymc_priors["nu"]
for key in self.pymc3_priors:
for key in self.pymc_priors:
if key not in self.likelihood.function_keys:
raise ValueError(f"Prior key '{key}' is not a function key!")
model = self.likelihood.func(self.likelihood.x, **self.pymc3_priors)
model = self.likelihood.func(self.likelihood.x, **self.pymc_priors)
# set the distribution
pymc3.StudentT(
pymc.StudentT(
"likelihood",
nu=self.likelihood.nu,
mu=model,
sd=self.likelihood.sigma,
sigma=self.likelihood.sigma,
observed=self.likelihood.y,
)
elif isinstance(
......@@ -988,22 +981,22 @@ class Pymc3(MCMCSampler):
):
# set theano Op - pass _search_parameter_keys, which only contains non-fixed variables
logl = LogLike(
self._search_parameter_keys, self.likelihood, self.pymc3_priors
self._search_parameter_keys, self.likelihood, self.pymc_priors
)
parameters = dict()
for key in self._search_parameter_keys:
try:
parameters[key] = self.pymc3_priors[key]
parameters[key] = self.pymc_priors[key]
except KeyError:
raise KeyError(
f"Unknown key '{key}' when setting GravitationalWaveTransient likelihood"
)
# convert to theano tensor variable
# convert to aesara tensor variable
values = tt.as_tensor_variable(list(parameters.values()))
pymc3.DensityDist(
pymc.DensityDist(
"likelihood", lambda v: logl(v), observed={"v": values}
)
else:
......
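For reference, the prior map above translates bilby priors into PyMC distributions via the listed argmap entries; for example, a bilby Uniform maps minimum/maximum to lower/upper. A standalone sketch of that correspondence:

import pymc
import bilby

bilby_prior = bilby.core.prior.Uniform(minimum=0.0, maximum=1.0, name="x")

with pymc.Model():
    # equivalent PyMC distribution under the argmap {"minimum": "lower", "maximum": "upper"}
    x = pymc.Uniform("x", lower=bilby_prior.minimum, upper=bilby_prior.maximum)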
......@@ -292,8 +292,6 @@ def encode_for_hdf5(key, item):
output = json.dumps(item._get_json_dict())
elif isinstance(item, pd.DataFrame):
output = item.to_dict(orient="list")
elif isinstance(item, pd.Series):
output = item.to_dict()
elif inspect.isfunction(item) or inspect.isclass(item):
output = dict(
__module__=item.__module__, __name__=item.__name__, __class__=True
......
import json
import logging
from pathlib import Path
import subprocess
import sys
logger = logging.getLogger('bilby')
......@@ -70,3 +72,60 @@ def loaded_modules_dict():
if "." not in str(key):
vdict[key] = str(getattr(sys.modules[key], "__version__", "N/A"))
return vdict
def env_package_list(as_dataframe=False):
"""Get the list of packages installed in the system prefix.
If it is detected that the system prefix is part of a Conda environment,
a call to ``conda list --prefix {sys.prefix}`` will be made, otherwise
the call will be to ``{sys.executable} -m pip list installed``.
Parameters
----------
as_dataframe: bool
return output as a `pandas.DataFrame`
Returns
-------
pkgs : `list` of `dict`, or `pandas.DataFrame`
If ``as_dataframe=False`` is given, the output is a `list` of `dict`,
one for each package, at least with ``'name'`` and ``'version'`` keys
(more if `conda` is used).
If ``as_dataframe=True`` is given, the output is a `DataFrame`
created from the `list` of `dicts`.
"""
prefix = sys.prefix
# if a conda-meta directory exists, this is a conda environment, so
# use conda to print the package list
if (Path(prefix) / "conda-meta").is_dir():
pkgs = json.loads(subprocess.check_output([
"conda",
"list",
"--prefix", prefix,
"--json"
]))
# otherwise try and use Pip
else:
try:
import pip # noqa: F401
except ModuleNotFoundError: # no pip?
# not a conda environment, and no pip, so just return
# the list of loaded modules
modules = loaded_modules_dict()
pkgs = [{"name": x, "version": y} for x, y in modules.items()]
else:
pkgs = json.loads(subprocess.check_output([
sys.executable,
"-m", "pip",
"list", "installed",
"--format", "json",
]))
# optionally convert to a DataFrame for storage
if as_dataframe:
from pandas import DataFrame
return DataFrame(pkgs)
return pkgs
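A usage sketch of the helper defined above, assuming it is exposed from bilby.core.utils as the sampler-module import earlier in this diff suggests:

from bilby.core.utils import env_package_list

pkgs = env_package_list()                 # list of dicts with at least "name" and "version"
df = env_package_list(as_dataframe=True)  # pandas.DataFrame, as stored in run metadata
print(df.head())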
......@@ -192,14 +192,14 @@ def convert_to_lal_binary_black_hole_parameters(parameters):
converted_parameters = parameters.copy()
original_keys = list(converted_parameters.keys())
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
if 'luminosity_distance' not in original_keys:
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
for key in original_keys:
if key[-7:] == '_source':
......@@ -1006,6 +1006,8 @@ def generate_component_masses(sample, require_add=False, source=False):
Add the component masses to the dataframe/dictionary
We add:
mass_1, mass_2
Or if source=True
mass_1_source, mass_2_source
We also add any other masses which may be necessary for
intermediate steps, i.e. typically the total mass is necessary, along
with the mass ratio, so these will usually be added to the dictionary
......@@ -1167,7 +1169,9 @@ def generate_mass_parameters(sample, source=False):
not recompute keys already present in the dictionary
We add, potentially:
chirp mass, total mass, symmetric mass ratio, mass ratio, mass_1, mass_2
chirp_mass, total_mass, symmetric_mass_ratio, mass_ratio, mass_1, mass_2
Or if source=True:
chirp_mass_source, total_mass_source, symmetric_mass_ratio, mass_ratio, mass_1_source, mass_2_source
Parameters
==========
......@@ -1175,6 +1179,10 @@ def generate_mass_parameters(sample, source=False):
The input dictionary with two "spanning" mass parameters
e.g. (mass_1, mass_2), or (chirp_mass, mass_ratio), but not e.g. only
(mass_ratio, symmetric_mass_ratio)
source : bool, default False
If True, then perform the conversions for source mass parameters
i.e. mass_1_source instead of mass_1
Returns
=======
dict: The updated dictionary
......
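A sketch of the source=True behaviour documented above (the parameter values are illustrative):

from bilby.gw.conversion import generate_mass_parameters

sample = {"chirp_mass": 30.0, "mass_ratio": 0.8}
print(generate_mass_parameters(sample))  # adds total_mass, symmetric_mass_ratio, mass_1, mass_2

source_sample = {"chirp_mass_source": 30.0, "mass_ratio": 0.8}
print(generate_mass_parameters(source_sample, source=True))  # adds the *_source equivalents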
import numpy as np
from bilby_cython.geometry import calculate_arm, detector_tensor
from .. import utils as gwutils
......@@ -263,7 +264,7 @@ class InterferometerGeometry(object):
if not self._x_updated or not self._y_updated:
_, _ = self.x, self.y # noqa
if not self._detector_tensor_updated:
self._detector_tensor = 0.5 * (np.einsum('i,j->ij', self.x, self.x) - np.einsum('i,j->ij', self.y, self.y))
self._detector_tensor = detector_tensor(x=self.x, y=self.y)
self._detector_tensor_updated = True
return self._detector_tensor
......@@ -288,19 +289,18 @@ class InterferometerGeometry(object):
"""
if arm == 'x':
return self._calculate_arm(self._xarm_tilt, self._xarm_azimuth)
return calculate_arm(
arm_tilt=self._xarm_tilt,
arm_azimuth=self._xarm_azimuth,
longitude=self._longitude,
latitude=self._latitude
)
elif arm == 'y':
return self._calculate_arm(self._yarm_tilt, self._yarm_azimuth)
return calculate_arm(
arm_tilt=self._yarm_tilt,
arm_azimuth=self._yarm_azimuth,
longitude=self._longitude,
latitude=self._latitude
)
else:
raise ValueError("Arm must either be 'x' or 'y'.")
def _calculate_arm(self, arm_tilt, arm_azimuth):
e_long = np.array([-np.sin(self._longitude), np.cos(self._longitude), 0])
e_lat = np.array([-np.sin(self._latitude) * np.cos(self._longitude),
-np.sin(self._latitude) * np.sin(self._longitude), np.cos(self._latitude)])
e_h = np.array([np.cos(self._latitude) * np.cos(self._longitude),
np.cos(self._latitude) * np.sin(self._longitude), np.sin(self._latitude)])
return (np.cos(arm_tilt) * np.cos(arm_azimuth) * e_long +
np.cos(arm_tilt) * np.sin(arm_azimuth) * e_lat +
np.sin(arm_tilt) * e_h)
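The arm vectors are now computed by bilby-cython using the keyword arguments shown above. A direct-call sketch (the angles are illustrative, in radians):

import numpy as np
from bilby_cython.geometry import calculate_arm

arm = calculate_arm(
    arm_tilt=0.0,
    arm_azimuth=2.199,
    longitude=np.radians(-119.41),
    latitude=np.radians(46.45),
)
print(arm)  # unit vector along the arm in geocentric coordinates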
import os
import numpy as np
from bilby_cython.geometry import (
get_polarization_tensor,
three_by_three_matrix_contraction,
time_delay_from_geocenter,
)
from ...core import utils
from ...core.utils import docstring, logger, PropertyAccessor
......@@ -264,15 +269,21 @@ class Interferometer(object):
psi: float
binary polarisation angle counter-clockwise about the direction of propagation
mode: str
polarisation mode (e.g. 'plus', 'cross')
polarisation mode (e.g. 'plus', 'cross') or the name of a specific detector.
If mode == self.name, return 1
Returns
=======
array_like: A 3x3 array representation of the antenna response for the specified mode
float: The antenna response for the specified mode and time/location
"""
polarization_tensor = gwutils.get_polarization_tensor(ra, dec, time, psi, mode)
return np.einsum('ij,ij->', self.geometry.detector_tensor, polarization_tensor)
if mode in ["plus", "cross", "x", "y", "breathing", "longitudinal"]:
polarization_tensor = get_polarization_tensor(ra, dec, time, psi, mode)
return three_by_three_matrix_contraction(self.geometry.detector_tensor, polarization_tensor)
elif mode == self.name:
return 1
else:
return 0
def get_detector_response(self, waveform_polarizations, parameters):
""" Get the detector response for a particular waveform
......@@ -527,7 +538,7 @@ class Interferometer(object):
=======
float: The time delay from geocenter in seconds
"""
return gwutils.time_delay_geocentric(self.geometry.vertex, np.array([0, 0, 0]), ra, dec, time)
return time_delay_from_geocenter(self.geometry.vertex, ra, dec, time)
def vertex_position_geocentric(self):
"""
......
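A sketch of the extended antenna_response behaviour (the sky location and time are illustrative):

import bilby

ifo = bilby.gw.detector.get_empty_interferometer("H1")
f_plus = ifo.antenna_response(ra=1.0, dec=0.5, time=1126259642.4, psi=0.3, mode="plus")
unity = ifo.antenna_response(ra=1.0, dec=0.5, time=1126259642.4, psi=0.3, mode="H1")  # returns 1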
......@@ -897,8 +897,9 @@ class GravitationalWaveTransient(Likelihood):
for key in pairs:
if key not in loaded_file:
return False, key
elif not np.array_equal(np.atleast_1d(loaded_file[key]),
np.atleast_1d(pairs[key])):
elif not np.allclose(np.atleast_1d(loaded_file[key]),
np.atleast_1d(pairs[key]),
rtol=1e-15):
return False, key
return True, None
......@@ -1088,6 +1089,7 @@ class GravitationalWaveTransient(Likelihood):
waveform_generator_class=self.waveform_generator.__class__,
waveform_arguments=self.waveform_generator.waveform_arguments,
frequency_domain_source_model=self.waveform_generator.frequency_domain_source_model,
time_domain_source_model=self.waveform_generator.time_domain_source_model,
parameter_conversion=self.waveform_generator.parameter_conversion,
sampling_frequency=self.waveform_generator.sampling_frequency,
duration=self.waveform_generator.duration,
......
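The relaxed check above accepts lookup tables whose cached values differ only at floating-point precision, e.g. when generated on another platform. A self-contained illustration of why np.array_equal is too strict:

import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = a * (1.0 + np.finfo(float).eps)  # differs in the last bit

print(np.array_equal(a, b))           # False: exact comparison fails
print(np.allclose(a, b, rtol=1e-15))  # True: within the relaxed tolerance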
......@@ -105,6 +105,12 @@ class CompactBinaryCoalescenceResult(CoreResult):
return self.__get_from_nested_meta_data(
'likelihood', 'frequency_domain_source_model')
@property
def time_domain_source_model(self):
""" The time domain source model (function)"""
return self.__get_from_nested_meta_data(
'likelihood', 'time_domain_source_model')
@property
def parameter_conversion(self):
""" The frequency domain source model (function)"""
......@@ -381,6 +387,7 @@ class CompactBinaryCoalescenceResult(CoreResult):
duration=self.duration, sampling_frequency=self.sampling_frequency,
start_time=self.start_time,
frequency_domain_source_model=self.frequency_domain_source_model,
time_domain_source_model=self.time_domain_source_model,
parameter_conversion=self.parameter_conversion,
waveform_arguments=self.waveform_arguments)
......@@ -589,8 +596,8 @@ class CompactBinaryCoalescenceResult(CoreResult):
plot_frequencies,
np.percentile(fd_waveforms, lower_percentile, axis=0),
np.percentile(fd_waveforms, upper_percentile, axis=0),
color=WAVEFORM_COLOR, label=r'{}\% credible interval'.format(
int(upper_percentile - lower_percentile)),
color=WAVEFORM_COLOR,
label=r'{}% credible interval'.format(int(upper_percentile - lower_percentile)),
alpha=0.3)
axs[1].plot(
plot_times, np.mean(td_waveforms, axis=0),
......
import json
import os
from functools import lru_cache
from math import fmod
import numpy as np
from scipy.interpolate import interp1d
from scipy.special import i0e
from bilby_cython.geometry import (
zenith_azimuth_to_theta_phi as _zenith_azimuth_to_theta_phi,
)
from bilby_cython.time import greenwich_mean_sidereal_time
from ..core.utils import (ra_dec_to_theta_phi,
speed_of_light, logger, run_commandline,
from ..core.utils import (logger, run_commandline,
check_directory_exists_and_if_not_mkdir,
SamplesSummary, theta_phi_to_ra_dec)
from ..core.utils.constants import solar_mass
......@@ -53,90 +55,6 @@ def psd_from_freq_series(freq_data, df):
return np.power(asd_from_freq_series(freq_data, df), 2)
def time_delay_geocentric(detector1, detector2, ra, dec, time):
"""
Calculate time delay between two detectors in geocentric coordinates based on XLALArrivalTimeDiff in TimeDelay.c
Parameters
==========
detector1: array_like
Cartesian coordinate vector for the first detector in the geocentric frame
generated by the Interferometer class as self.vertex.
detector2: array_like
Cartesian coordinate vector for the second detector in the geocentric frame.
To get time delay from Earth center, use detector2 = np.array([0,0,0])
ra: float
Right ascension of the source in radians
dec: float
Declination of the source in radians
time: float
GPS time in the geocentric frame
Returns
=======
float: Time delay between the two detectors in the geocentric frame
"""
gmst = fmod(greenwich_mean_sidereal_time(time), 2 * np.pi)
theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
omega = np.array([np.sin(theta) * np.cos(phi), np.sin(theta) * np.sin(phi), np.cos(theta)])
delta_d = detector2 - detector1
return np.dot(omega, delta_d) / speed_of_light
def get_polarization_tensor(ra, dec, time, psi, mode):
"""
Calculate the polarization tensor for a given sky location and time
See Nishizawa et al. (2009) arXiv:0903.0528 for definitions of the polarisation tensors.
[u, v, w] represent the Earth-frame
[m, n, omega] represent the wave-frame
Note: there is a typo in the definition of the wave-frame in Nishizawa et al.
Parameters
==========
ra: float
right ascension in radians
dec: float
declination in radians
time: float
geocentric GPS time
psi: float
binary polarisation angle counter-clockwise about the direction of propagation
mode: str
polarisation mode
Returns
=======
array_like: A 3x3 representation of the polarization_tensor for the specified mode.
"""
gmst = fmod(greenwich_mean_sidereal_time(time), 2 * np.pi)
theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
u = np.array([np.cos(phi) * np.cos(theta), np.cos(theta) * np.sin(phi), -np.sin(theta)])
v = np.array([-np.sin(phi), np.cos(phi), 0])
m = -u * np.sin(psi) - v * np.cos(psi)
n = -u * np.cos(psi) + v * np.sin(psi)
if mode.lower() == 'plus':
return np.einsum('i,j->ij', m, m) - np.einsum('i,j->ij', n, n)
elif mode.lower() == 'cross':
return np.einsum('i,j->ij', m, n) + np.einsum('i,j->ij', n, m)
elif mode.lower() == 'breathing':
return np.einsum('i,j->ij', m, m) + np.einsum('i,j->ij', n, n)
# Calculating omega here to avoid calculation when mode is in [plus, cross, breathing]
omega = np.cross(m, n)
if mode.lower() == 'longitudinal':
return np.einsum('i,j->ij', omega, omega)
elif mode.lower() == 'x':
return np.einsum('i,j->ij', m, omega) + np.einsum('i,j->ij', omega, m)
elif mode.lower() == 'y':
return np.einsum('i,j->ij', n, omega) + np.einsum('i,j->ij', omega, n)
else:
raise ValueError("{} not a polarization mode!".format(mode))
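# A sketch of how these tensors are used downstream: contracting with a
# detector tensor d_ij gives the antenna response F = d_ij e_ij. The
# detector_tensor below is a placeholder; in practice it comes from
# InterferometerGeometry.detector_tensor.
plus_tensor = get_polarization_tensor(ra=1.2, dec=-0.3, time=1126259642.413, psi=0.5, mode='plus')
detector_tensor = np.zeros((3, 3))
response = np.einsum('ij,ij->', detector_tensor, plus_tensor)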
def get_vertex_position_geocentric(latitude, longitude, elevation):
"""
Calculate the position of the IFO vertex in geocentric coordinates in meters.
......@@ -310,56 +228,6 @@ def overlap(signal_a, signal_b, power_spectral_density=None, delta_frequency=Non
return sum(integral).real
__cached_euler_matrix = None
__cached_delta_x = None
def euler_rotation(delta_x):
"""
Calculate the rotation matrix mapping the vector (0, 0, 1) to delta_x
while preserving the origin of the azimuthal angle.
This is decomposed into three Euler angles, alpha, beta, gamma, which rotate
about the z-, y-, and z- axes respectively.
Parameters
==========
delta_x: array-like (3,)
Vector onto which (0, 0, 1) should be mapped.
Returns
=======
total_rotation: array-like (3,3)
Rotation matrix which maps vectors from the frame in which delta_x is
aligned with the z-axis to the target frame.
"""
global __cached_delta_x
global __cached_euler_matrix
delta_x = delta_x / np.sum(delta_x**2)**0.5
if np.array_equal(delta_x, __cached_delta_x):
return __cached_euler_matrix
else:
__cached_delta_x = delta_x
alpha = np.arctan(- delta_x[1] * delta_x[2] / delta_x[0])
beta = np.arccos(delta_x[2])
gamma = np.arctan(delta_x[1] / delta_x[0])
rotation_1 = np.array([
[np.cos(alpha), -np.sin(alpha), 0], [np.sin(alpha), np.cos(alpha), 0],
[0, 0, 1]])
rotation_2 = np.array([
[np.cos(beta), 0, - np.sin(beta)], [0, 1, 0],
[np.sin(beta), 0, np.cos(beta)]])
rotation_3 = np.array([
[np.cos(gamma), -np.sin(gamma), 0], [np.sin(gamma), np.cos(gamma), 0],
[0, 0, 1]])
total_rotation = np.einsum(
'ij,jk,kl->il', rotation_3, rotation_2, rotation_1)
__cached_delta_x = delta_x
__cached_euler_matrix = total_rotation
return total_rotation
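# A quick sanity check for euler_rotation: as a product of rotation matrices,
# the result must be orthogonal with unit determinant.
rotation = euler_rotation(np.array([3.0, 4.0, 12.0]))
assert np.allclose(rotation @ rotation.T, np.eye(3))
assert np.isclose(np.linalg.det(rotation), 1.0)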
def zenith_azimuth_to_theta_phi(zenith, azimuth, ifos):
"""
Convert from the 'detector frame' to the Earth frame.
......@@ -379,15 +247,7 @@ def zenith_azimuth_to_theta_phi(zenith, azimuth, ifos):
The zenith and azimuthal angles in the earth frame.
"""
delta_x = ifos[0].geometry.vertex - ifos[1].geometry.vertex
omega_prime = np.array([
np.sin(zenith) * np.cos(azimuth),
np.sin(zenith) * np.sin(azimuth),
np.cos(zenith)])
rotation_matrix = euler_rotation(delta_x)
omega = np.dot(rotation_matrix, omega_prime)
theta = np.arccos(omega[2])
phi = np.arctan2(omega[1], omega[0]) % (2 * np.pi)
return theta, phi
return _zenith_azimuth_to_theta_phi(zenith, azimuth, delta_x)
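# The rotation now happens in bilby_cython.geometry; the calling convention is
# unchanged. A usage sketch (the wrapper function name is illustrative only):
def _example_detector_frame_conversion():
    from bilby.gw.detector import InterferometerList
    ifos = InterferometerList(["H1", "L1"])
    return zenith_azimuth_to_theta_phi(zenith=0.3, azimuth=1.1, ifos=ifos)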
def zenith_azimuth_to_ra_dec(zenith, azimuth, geocent_time, ifos):
......@@ -1005,27 +865,6 @@ def plot_spline_pos(log_freqs, samples, nfreqs=100, level=0.9, color='k', label=
plt.xlim(freq_points.min() - .5, freq_points.max() + 50)
def greenwich_mean_sidereal_time(time):
"""
Compute the Greenwich mean sidereal time from a GPS time.
This is just a wrapper around :code:`lal.GreenwichMeanSiderealTime`.
Parameters
----------
time: float
The GPS time to convert.
Returns
-------
float
The sidereal time.
"""
from lal import GreenwichMeanSiderealTime
time = float(time)
return GreenwichMeanSiderealTime(time)
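# The LAL wrapper above is superseded by the bilby_cython.time import at the
# top of this module; usage is unchanged, e.g. (the GPS time is illustrative):
gmst = greenwich_mean_sidereal_time(1126259642.413)
gmst_mod = fmod(gmst, 2 * np.pi)  # reduced to [0, 2*pi), as done throughout this module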
def ln_i0(value):
"""
A numerically stable method to evaluate ln(I_0), a modified Bessel function
......
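The truncated `ln_i0` above pairs with the new `scipy.special.i0e` import: since `i0e(x) = exp(-|x|) * I_0(x)`, the identity `ln(I_0(x)) = ln(i0e(x)) + |x|` can be evaluated without overflow. A sketch of that identity (the actual function body is elided above):

import numpy as np
from scipy.special import i0e

def ln_i0_sketch(value):
    # ln(I_0(x)) = ln(i0e(x)) + |x| stays finite where I_0(x) itself overflows
    value = np.asarray(value)
    return np.log(i0e(value)) + np.abs(value)

print(ln_i0_sketch(700.0))  # ~695.7, whereas np.log(np.i0(700.0)) overflows to inf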
......@@ -89,6 +89,11 @@ def setup_command_line_args():
action="store_true",
help="Merge the set of runs, output saved using the outdir and label",
)
action_parser.add_argument(
"--ignore-inconsistent",
action="store_true",
help="If true, ignore inconsistency errors in the merge process, but print a warning",
)
action_parser.add_argument(
"-b", "--bayes", action="store_true", help="Print all Bayes factors."
)
......@@ -208,7 +213,11 @@ def main():
save(result, args)
if args.merge:
result = results_list.combine()
if args.ignore_inconsistent:
consistency_level = "warning"
else:
consistency_level = "error"
result = results_list.combine(consistency_level=consistency_level)
if args.label is not None:
result.label = args.label
if args.outdir is not None:
......
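For reference, the API the new flag toggles, sketched directly in Python; the result file names here are hypothetical:

from bilby.core.result import ResultList, read_in_result

# Hypothetical result files from two runs of the same analysis.
results = ResultList([read_in_result("run1_result.json"),
                      read_in_result("run2_result.json")])
# The script passes consistency_level="error" unless --ignore-inconsistent is given.
merged = results.combine(consistency_level="warning")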
......@@ -30,15 +30,7 @@ RUN conda install -n ${{conda_env}} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${{conda_env}} -c conda-forge dynesty emcee nestle ptemcee
RUN conda install -n ${{conda_env}} -c conda-forge pymultinest ultranest
RUN conda install -n ${{conda_env}} -c conda-forge cpnest kombine dnest4 zeus-mcmc
RUN conda install -n ${{conda_env}} -c conda-forge ptmcmcsampler
RUN conda install -n ${{conda_env}} -c conda-forge pytorch
RUN conda install -n ${{conda_env}} -c conda-forge theano-pymc
RUN conda install -n ${{conda_env}} -c conda-forge pymc3
RUN conda install -n ${{conda_env}} -c conda-forge pymc pymc-base
RUN pip install nessai
RUN conda install -n ${{conda_env}} {conda_samplers} -c conda-forge -c pytorch
# Install PolyChord
RUN apt-get update --allow-releaseinfo-change
......
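The doubled braces (`${{conda_env}}`) mark the file above as a Python `str.format` template, with single-braced `{conda_samplers}` as the substitution slot; the file below is the rendered output. A sketch of how such a render could work (the file names and generation script are assumptions, not the actual tooling):

# Hypothetical renderer for the template above.
samplers = ("dynesty emcee nestle ptemcee pymultinest ultranest cpnest "
            "kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler")
with open("dockerfile-template") as f:
    template = f.read()
# str.format collapses ${{conda_env}} to ${conda_env} and fills {conda_samplers}.
with open("v3-dockerfile-test-suite-python310", "w") as f:
    f.write(template.format(conda_samplers=samplers))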
# This dockerfile is written automatically and should not be modified by hand.
FROM containers.ligo.org/docker/base:conda
LABEL name="bilby CI testing" \
maintainer="Gregory Ashton <gregory.ashton@ligo.org>"
RUN conda update -n base -c defaults conda
ENV conda_env python310
RUN conda create -n ${conda_env} python=3.10
RUN echo "source activate ${conda_env}" > ~/.bashrc
ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH
RUN /bin/bash -c "source activate ${conda_env}"
RUN conda info
RUN python --version
# Install conda-installable programs
RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8
RUN conda install -n ${conda_env} -c anaconda coverage configargparse future dill
RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
# Install pip-requirements
RUN pip install --upgrade pip
RUN pip install --upgrade setuptools coverage-badge parameterized
# Install documentation requirements
RUN pip install sphinx numpydoc nbsphinx sphinx_rtd_theme sphinx-tabs autodoc
# Install testing requirements
RUN conda install -n ${conda_env} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${conda_env} dynesty emcee nestle ptemcee pymultinest ultranest cpnest kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler -c conda-forge -c pytorch
# Install PolyChord
RUN apt-get update --allow-releaseinfo-change
RUN apt-get install -y build-essential
RUN apt-get install -y libblas3 libblas-dev
RUN apt-get install -y liblapack3 liblapack-dev
RUN apt-get install -y libatlas3-base libatlas-base-dev
RUN apt-get install -y gfortran
RUN git clone https://github.com/PolyChord/PolyChordLite.git \
&& (cd PolyChordLite && python setup.py --no-mpi install)
# Install GW packages
RUN conda install -n ${conda_env} -c conda-forge python-lalsimulation bilby.cython
RUN pip install ligo-gracedb gwpy ligo.skymap
# Add the ROQ data to the image
RUN mkdir roq_basis \
&& cd roq_basis \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_addcal.hdf5 \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_multiband_addcal.hdf5