
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (45)
Showing 552 additions and 418 deletions
@@ -71,6 +71,25 @@ basic-3.10:
<<: *test-python
image: python:3.10
.test-samplers-import: &test-samplers-import
stage: initial
script:
- python -m pip install .
- python -m pip list installed
- python test/test_samplers_import.py
import-samplers-3.8:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
import-samplers-3.9:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
import-samplers-3.10:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.precommits: &precommits
stage: initial
script:
@@ -97,13 +116,12 @@ precommits-py3.9:
CACHE_DIR: ".pip39"
PYVERSION: "python39"
# FIXME: when image builds for 3.10 change this back.
#precommits-py3.10:
# <<: *precommits
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# variables:
# CACHE_DIR: ".pip310"
# PYVERSION: "python310"
precommits-py3.10:
<<: *precommits
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
variables:
CACHE_DIR: ".pip310"
PYVERSION: "python310"
install:
stage: initial
@@ -146,19 +164,16 @@ python-3.9:
- htmlcov/
expire_in: 30 days
# add back when 3.10 image is available
#python-3.10:
# <<: *unit-test
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10:
<<: *unit-test
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.test-sampler: &test-sampler
stage: test
script:
- python -m pip install .
- python -m pip install schwimmbad
- python -m pip list installed
- pytest test/integration/sampler_run_test.py --durations 10 -v
python-3.8-samplers:
@@ -171,11 +186,10 @@ python-3.9-samplers:
needs: ["basic-3.9", "precommits-py3.9"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
# add back when 3.10 image is available
#python-3.10-samplers:
# <<: *test-sampler
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10-samplers:
<<: *test-sampler
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
integration-tests-python-3.9:
stage: test
@@ -209,11 +223,10 @@ plotting-python-3.9:
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
needs: ["basic-3.9", "precommits-py3.9"]
# add back when 3.10 image is available
#plotting-python-3.10:
# <<: *plotting
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# needs: ["basic-3.10", "precommits-py3.10"]
plotting-python-3.10:
<<: *plotting
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
needs: ["basic-3.10", "precommits-py3.10"]
# ------------------- Docs stage -------------------------------------------
@@ -257,8 +270,11 @@ pages:
stage: deploy
image: docker:20.10.12
needs: ["containers"]
only:
- schedules
except:
refs:
- schedules
changes:
- containers/*
script:
- cd containers
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
@@ -276,11 +292,10 @@ build-python39-container:
variables:
PYVERSION: "python39"
# add back when 3.10 image is available
#build-python310-container:
# <<: *build-container
# variables:
# PYVERSION: "python310"
build-python310-container:
<<: *build-container
variables:
PYVERSION: "python310"
pypi-release:
stage: deploy
@@ -289,7 +304,7 @@ pypi-release:
TWINE_USERNAME: $PYPI_USERNAME
TWINE_PASSWORD: $PYPI_PASSWORD
before_script:
- pip install twine
- pip install twine setuptools_scm
- python setup.py sdist
script:
- twine upload dist/*
......
@@ -29,6 +29,7 @@ Hector Estelles
Ignacio Magaña Hernandez
Isobel Marguarethe Romero-Shaw
Jack Heinzel
Jacob Golomb
Jade Powell
James A Clark
Jeremy G Baier
@@ -61,7 +62,7 @@ Paul Easter
Paul Lasky
Philip Relton
Rhys Green
Richard Udall
Rhiannon Udall
Rico Lo
Roberto Cotesta
Rory Smith
......
# All notable changes will be documented in this file
## [1.3.0] 2022-10-23
Version 1.3.0 release of Bilby
This release has a major change to a sampler interface: `pymc3` is no longer supported; users should switch to `pymc>=4`.
This release also adds a new top-level dependency, `bilby-cython`.
This release also contains various documentation improvements.
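
For migrating users, a minimal sketch of selecting the new sampler (a toy Gaussian-mean problem; the keyword values are illustrative, not defaults):

import bilby
import numpy as np

# Toy data: recover the mean of Gaussian noise
x = np.arange(100)
y = np.random.normal(3, 1, 100)
likelihood = bilby.core.likelihood.GaussianLikelihood(
    x, y, func=lambda x, mu: mu * np.ones_like(x), sigma=1
)
priors = dict(mu=bilby.core.prior.Uniform(0, 5, "mu"))

# sampler="pymc" replaces the removed sampler="pymc3" (requires pymc>=4)
result = bilby.run_sampler(
    likelihood=likelihood, priors=priors, sampler="pymc", draws=500
)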
### Added
- Improved logging of likelihood information when starting sampling (!1148)
- Switch some geometric calculations to use optimized bilby-cython package (!1053)
- Directly specify the starting point for `bilby_mcmc` (!1155)
- Allow a signal to be specified to only be present in a specific `Interferometer` (!1164)
- Store time domain model function in CBCResult metadata (!1165)
### Changes
- Switch from `pymc3` to `pymc` (!1117)
- Relax equality check for distance marginalization lookup to allow cross-platform use (!1150)
- Fix to deal with non-checkpointing `bilby_mcmc` analyses (!1151)
- Allow result objects with different analysis configurations to be combined (!1153)
- Improve the storing of environment information (!166)
- Fix issue when specifying distance and redshift independently (!1154)
- Fix a bug in the storage of likelihood/prior samples for `bilby_mcmc` (!1156)
## [1.2.1] 2022-09-05
Version 1.2.1 release of Bilby
......
@@ -547,6 +547,7 @@ class GMMProposal(DensityEstimateProposal):
def _sample(self, nsamples=None):
return np.squeeze(self.density.sample(n_samples=nsamples)[0])
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("sklearn") is None:
if warn:
@@ -593,12 +594,15 @@ class NormalizingFlowProposal(DensityEstimateProposal):
fallback=fallback,
scale_fits=scale_fits,
)
self.setup_flow()
self.setup_optimizer()
self.initialised = False
self.max_training_epochs = max_training_epochs
self.js_factor = js_factor
def initialise(self):
self.setup_flow()
self.setup_optimizer()
self.initialised = True
def setup_flow(self):
if self.ndim < 3:
self.setup_basic_flow()
@@ -699,6 +703,9 @@ class NormalizingFlowProposal(DensityEstimateProposal):
self.trained = True
def propose(self, chain):
if self.initialised is False:
self.initialise()
import torch
self.steps_since_refit += 1
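
The change above defers the expensive flow/optimizer construction from __init__ to the first propose call. A minimal self-contained sketch of that lazy-initialisation pattern (a hypothetical class, not the bilby API):

class LazyProposal:
    def __init__(self):
        # cheap construction; heavy setup deferred until first use
        self.initialised = False

    def _expensive_setup(self):
        return object()  # stand-in for setup_flow()/setup_optimizer()

    def initialise(self):
        self.state = self._expensive_setup()
        self.initialised = True

    def propose(self):
        if self.initialised is False:
            self.initialise()
        return self.state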
@@ -728,6 +735,7 @@ class NormalizingFlowProposal(DensityEstimateProposal):
return theta, float(log_factor)
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("nflows") is None:
if warn:
@@ -1094,10 +1102,6 @@ def get_proposal_cycle(string, priors, L1steps=1, warn=True):
]
if GMMProposal.check_dependencies(warn=warn):
plist.append(GMMProposal(priors, weight=big_weight, scale_fits=L1steps))
if NormalizingFlowProposal.check_dependencies(warn=warn):
plist.append(
NormalizingFlowProposal(priors, weight=big_weight, scale_fits=L1steps)
)
plist = remove_proposals_using_string(plist, string)
return ProposalCycle(plist)
......
@@ -2,9 +2,11 @@ import datetime
import os
import time
from collections import Counter
from pathlib import Path
import numpy as np
import pandas as pd
from scipy.optimize import differential_evolution
from ..core.result import rejection_sample
from ..core.sampler.base_sampler import (
@@ -113,6 +115,13 @@ class Bilby_MCMC(MCMCSampler):
evidence_method: str, [stepping_stone, thermodynamic]
The evidence calculation method to use. Defaults to stepping_stone, but
the results of all available methods are stored in the ln_z_dict.
initial_sample_method: str
Method to draw the initial sample. Either "prior" (a random draw
from the prior) or "maximize" (use an optimization approach to attempt
to find the maximum posterior estimate).
initial_sample_dict: dict
A dictionary of initial sample values. May be incomplete; any values
given here overwrite the corresponding entries of the initial sample
drawn using initial_sample_method.
verbose: bool
Whether to print diagnostic output during the run.
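
A hedged usage sketch of the two new keywords documented above (toy likelihood and priors; the values are illustrative):

import bilby
import numpy as np

x = np.arange(100)
y = np.random.normal(3, 1, 100)
likelihood = bilby.core.likelihood.GaussianLikelihood(
    x, y, func=lambda x, mu: mu * np.ones_like(x), sigma=1
)
priors = dict(mu=bilby.core.prior.Uniform(0, 5, "mu"))

result = bilby.run_sampler(
    likelihood=likelihood,
    priors=priors,
    sampler="bilby_mcmc",
    nsamples=500,
    initial_sample_method="maximize",  # or "prior" (the default)
    initial_sample_dict=dict(mu=3.0),  # overwrites the drawn/optimized value
)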
@@ -143,6 +152,8 @@ class Bilby_MCMC(MCMCSampler):
fixed_tau=None,
tau_window=None,
evidence_method="stepping_stone",
initial_sample_method="prior",
initial_sample_dict=None,
)
def __init__(
@@ -187,6 +198,8 @@
self.proposal_cycle = self.kwargs["proposal_cycle"]
self.pt_rejection_sample = self.kwargs["pt_rejection_sample"]
self.evidence_method = self.kwargs["evidence_method"]
self.initial_sample_method = self.kwargs["initial_sample_method"]
self.initial_sample_dict = self.kwargs["initial_sample_dict"]
self.printdt = self.kwargs["printdt"]
check_directory_exists_and_if_not_mkdir(self.outdir)
@@ -238,8 +251,8 @@
@staticmethod
def add_data_to_result(result, ptsampler, outdir, label, make_plots):
result.samples = ptsampler.samples
result.log_likelihood_evaluations = result.samples[LOGLKEY]
result.log_prior_evaluations = result.samples[LOGPKEY]
result.log_likelihood_evaluations = result.samples[LOGLKEY].to_numpy()
result.log_prior_evaluations = result.samples[LOGPKEY].to_numpy()
ptsampler.compute_evidence(
outdir=outdir,
label=label,
@@ -286,6 +299,8 @@
pool=self.pool,
use_ratio=self.use_ratio,
evidence_method=self.evidence_method,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
def get_setup_string(self):
@@ -387,9 +402,10 @@
logger.info("Written checkpoint file {}".format(self.resume_file))
else:
logger.warning(
"Cannot write pickle resume file! "
"Job will not resume if interrupted."
"Cannot write pickle resume file! Job may not resume if interrupted."
)
# Touch the file to postpone the next checkpoint attempt
Path(self.resume_file).touch(exist_ok=True)
self.ptsampler.pool = _pool
def print_long_progress(self):
@@ -520,9 +536,13 @@ class BilbyPTMCMCSampler(object):
pool,
use_ratio,
evidence_method,
initial_sample_method,
initial_sample_dict,
):
self.set_pt_inputs(pt_inputs)
self.use_ratio = use_ratio
self.initial_sample_method = initial_sample_method
self.initial_sample_dict = initial_sample_dict
self.setup_sampler_dictionary(convergence_inputs, proposal_cycle)
self.set_convergence_inputs(convergence_inputs)
self.pt_rejection_sample = pt_rejection_sample
@@ -570,10 +590,12 @@
betas = self.get_initial_betas()
logger.info(
f"Initializing BilbyPTMCMCSampler with:"
f"ntemps={self.ntemps},"
f"nensemble={self.nensemble},"
f"pt_ensemble={self.pt_ensemble},"
f"initial_betas={betas}\n"
f"ntemps={self.ntemps}, "
f"nensemble={self.nensemble}, "
f"pt_ensemble={self.pt_ensemble}, "
f"initial_betas={betas}, "
f"initial_sample_method={self.initial_sample_method}, "
f"initial_sample_dict={self.initial_sample_dict}\n"
)
self.sampler_dictionary = dict()
for Tindex, beta in enumerate(betas):
@@ -589,6 +611,8 @@
convergence_inputs=convergence_inputs,
proposal_cycle=proposal_cycle,
use_ratio=self.use_ratio,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
for Eindex in range(n)
]
@@ -1075,6 +1099,8 @@
Tindex=0,
Eindex=0,
use_ratio=False,
initial_sample_method="prior",
initial_sample_dict=None,
):
self.beta = beta
self.Tindex = Tindex
@@ -1084,12 +1110,24 @@
self.parameters = _sampling_convenience_dump.priors.non_fixed_keys
self.ndim = len(self.parameters)
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
if initial_sample_method.lower() == "prior":
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
elif initial_sample_method.lower() in ["maximize", "maximise", "maximum"]:
initial_sample = get_initial_maximimum_posterior_sample(self.beta)
else:
raise ValueError(
f"initial sample method {initial_sample_method} not understood"
)
if initial_sample_dict is not None:
initial_sample.update(initial_sample_dict)
logger.info(f"Using initial sample {initial_sample}")
initial_sample = Sample(initial_sample)
initial_sample[LOGLKEY] = self.log_likelihood(initial_sample)
initial_sample[LOGPKEY] = self.log_prior(initial_sample)
@@ -1264,6 +1302,42 @@
return samples
def get_initial_maximimum_posterior_sample(beta):
"""A method to attempt optimization of the maximum likelihood
This uses a simple scipy optimization approach, starting from a number
of draws from the prior to avoid problems with local optimization.
"""
logger.info("Finding initial maximum posterior estimate")
likelihood = _sampling_convenience_dump.likelihood
priors = _sampling_convenience_dump.priors
search_parameter_keys = _sampling_convenience_dump.search_parameter_keys
bounds = []
for key in search_parameter_keys:
bounds.append((priors[key].minimum, priors[key].maximum))
def neg_log_post(x):
sample = {key: val for key, val in zip(search_parameter_keys, x)}
ln_prior = priors.ln_prob(sample)
if np.isinf(ln_prior):
# Outside the prior support: return +inf so the minimizer avoids this point
return np.inf
likelihood.parameters.update(sample)
return -beta * likelihood.log_likelihood() - ln_prior
res = differential_evolution(neg_log_post, bounds, popsize=100, init="sobol")
if res.success:
sample = {key: val for key, val in zip(search_parameter_keys, res.x)}
logger.info(f"Initial maximum posterior estimate {sample}")
return sample
else:
raise ValueError("Failed to find initial maximum posterior estimate")
# Methods used to aid parallelisation:
......
@@ -1710,11 +1710,28 @@ class ResultList(list):
else:
raise TypeError("Could not append a non-Result type")
def combine(self, shuffle=False):
def combine(self, shuffle=False, consistency_level="error"):
"""
Return the combined results in a :class:`bilby.core.result.Result`
object.
Parameters
----------
shuffle: bool
If true, shuffle the samples when combining, otherwise they are concatenated.
consistency_level: str, [warning, error]
If warning, print a warning if inconsistencies are discovered between the results before combining.
If error, raise an error if inconsistencies are discovered between the results before combining.
Returns
-------
result: bilby.core.result.Result
The combined result file
"""
self.consistency_level = consistency_level
if len(self) == 0:
return Result()
elif len(self) == 1:
@@ -1844,24 +1861,37 @@
except ValueError:
raise ResultListError("Not all results contain nested samples")
def _error_or_warning_consistency(self, msg):
if self.consistency_level == "error":
raise ResultListError(msg)
elif self.consistency_level == "warning":
logger.warning(msg)
else:
raise ValueError(f"Input consistency_level {self.consistency_level} not understood")
def check_consistent_priors(self):
for res in self:
for p in self[0].priors.keys():
if not self[0].priors[p] == res.priors[p] or len(self[0].priors) != len(res.priors):
raise ResultListError("Inconsistent priors between results")
msg = "Inconsistent priors between results"
self._error_or_warning_consistency(msg)
def check_consistent_parameters(self):
if not np.all([set(self[0].search_parameter_keys) == set(res.search_parameter_keys) for res in self]):
raise ResultListError("Inconsistent parameters between results")
msg = "Inconsistent parameters between results"
self._error_or_warning_consistency(msg)
def check_consistent_data(self):
if not np.allclose([res.log_noise_evidence for res in self], self[0].log_noise_evidence, atol=1e-8, rtol=0.0)\
and not np.all([np.isnan(res.log_noise_evidence) for res in self]):
raise ResultListError("Inconsistent data between results")
msg = "Inconsistent data between results"
self._error_or_warning_consistency(msg)
def check_consistent_sampler(self):
if not np.all([res.sampler == self[0].sampler for res in self]):
raise ResultListError("Inconsistent samplers between results")
msg = "Inconsistent samplers between results"
self._error_or_warning_consistency(msg)
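
A usage sketch of the new keyword (hypothetical result file names):

from bilby.core.result import ResultList, read_in_result

results = ResultList(
    [read_in_result("run1_result.json"), read_in_result("run2_result.json")]
)
# "warning" logs inconsistencies instead of raising ResultListError
combined = results.combine(shuffle=True, consistency_level="warning")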
@latex_plot_format
......
@@ -6,7 +6,7 @@ import bilby
from bilby.bilby_mcmc import Bilby_MCMC
from ..prior import DeltaFunction, PriorDict
from ..utils import command_line_args, loaded_modules_dict, logger
from ..utils import command_line_args, env_package_list, loaded_modules_dict, logger
from . import proposal
from .base_sampler import Sampler, SamplingMarginalisedParameterError
from .cpnest import Cpnest
@@ -21,7 +21,7 @@ from .nestle import Nestle
from .polychord import PyPolyChord
from .ptemcee import Ptemcee
from .ptmcmc import PTMCMCSampler
from .pymc3 import Pymc3
from .pymc import Pymc
from .pymultinest import Pymultinest
from .ultranest import Ultranest
from .zeus import Zeus
@@ -38,7 +38,7 @@ IMPLEMENTED_SAMPLERS = {
"nestle": Nestle,
"ptemcee": Ptemcee,
"ptmcmcsampler": PTMCMCSampler,
"pymc3": Pymc3,
"pymc": Pymc,
"pymultinest": Pymultinest,
"pypolychord": PyPolyChord,
"ultranest": Ultranest,
@@ -175,6 +175,7 @@ def run_sampler(
likelihood.outdir = outdir
meta_data["likelihood"] = likelihood.meta_data
meta_data["loaded_modules"] = loaded_modules_dict()
meta_data["environment_packages"] = env_package_list(as_dataframe=True)
if command_line_args.bilby_zero_likelihood_mode:
from bilby.core.likelihood import ZeroLikelihood
......
@@ -243,6 +243,7 @@ class Sampler(object):
self._fixed_parameter_keys = list()
self._constraint_parameter_keys = list()
self._initialise_parameters()
self._log_information_about_priors_and_likelihood()
self.exit_code = exit_code
@@ -353,11 +354,16 @@
self.likelihood.parameters[key] = self.priors[key].sample()
self._fixed_parameter_keys.append(key)
logger.info("Search parameters:")
def _log_information_about_priors_and_likelihood(self):
logger.info("Analysis priors:")
for key in self._search_parameter_keys + self._constraint_parameter_keys:
logger.info(f" {key} = {self.priors[key]}")
logger.info(f"{key}={self.priors[key]}")
for key in self._fixed_parameter_keys:
logger.info(f" {key} = {self.priors[key].peak}")
logger.info(f"{key}={self.priors[key].peak}")
logger.info(f"Analysis likelihood class: {self.likelihood.__class__}")
logger.info(
f"Analysis likelihood noise evidence: {self.likelihood.noise_log_likelihood()}"
)
def _initialise_result(self, result_class):
"""
......
@@ -292,8 +292,6 @@ def encode_for_hdf5(key, item):
output = json.dumps(item._get_json_dict())
elif isinstance(item, pd.DataFrame):
output = item.to_dict(orient="list")
elif isinstance(item, pd.Series):
output = item.to_dict()
elif inspect.isfunction(item) or inspect.isclass(item):
output = dict(
__module__=item.__module__, __name__=item.__name__, __class__=True
......
import json
import logging
from pathlib import Path
import subprocess
import sys
logger = logging.getLogger('bilby')
@@ -70,3 +72,60 @@ def loaded_modules_dict():
if "." not in str(key):
vdict[key] = str(getattr(sys.modules[key], "__version__", "N/A"))
return vdict
def env_package_list(as_dataframe=False):
"""Get the list of packages installed in the system prefix.
If it is detected that the system prefix is part of a Conda environment,
a call to ``conda list --prefix {sys.prefix}`` will be made, otherwise
the call will be to ``{sys.executable} -m pip list installed``.
Parameters
----------
as_dataframe: bool
return output as a `pandas.DataFrame`
Returns
-------
pkgs : `list` of `dict`, or `pandas.DataFrame`
If ``as_dataframe=False`` is given, the output is a `list` of `dict`,
one for each package, at least with ``'name'`` and ``'version'`` keys
(more if `conda` is used).
If ``as_dataframe=True`` is given, the output is a `DataFrame`
created from the `list` of `dicts`.
"""
prefix = sys.prefix
# if a conda-meta directory exists, this is a conda environment, so
# use conda to print the package list
if (Path(prefix) / "conda-meta").is_dir():
pkgs = json.loads(subprocess.check_output([
"conda",
"list",
"--prefix", prefix,
"--json"
]))
# otherwise try and use Pip
else:
try:
import pip # noqa: F401
except ModuleNotFoundError: # no pip?
# not a conda environment, and no pip, so just return
# the list of loaded modules
modules = loaded_modules_dict()
pkgs = [{"name": x, "version": y} for x, y in modules.items()]
else:
pkgs = json.loads(subprocess.check_output([
sys.executable,
"-m", "pip",
"list", "installed",
"--format", "json",
]))
# optionally convert to a DataFrame for storage
if as_dataframe:
from pandas import DataFrame
return DataFrame(pkgs)
return pkgs
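
A usage sketch (assuming the import path bilby.core.utils, where loaded_modules_dict lives):

from bilby.core.utils import env_package_list

pkgs = env_package_list()                 # list of dicts with "name" and "version"
df = env_package_list(as_dataframe=True)  # the same rows as a pandas.DataFrame
print(df[df["name"] == "numpy"])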
@@ -192,14 +192,14 @@ def convert_to_lal_binary_black_hole_parameters(parameters):
converted_parameters = parameters.copy()
original_keys = list(converted_parameters.keys())
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
if 'luminosity_distance' not in original_keys:
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
for key in original_keys:
if key[-7:] == '_source':
@@ -1006,6 +1006,8 @@ def generate_component_masses(sample, require_add=False, source=False):
Add the component masses to the dataframe/dictionary
We add:
mass_1, mass_2
Or if source=True
mass_1_source, mass_2_source
We also add any other masses which may be necessary for
intermediate steps, i.e. typically the total mass is necessary, along
with the mass ratio, so these will usually be added to the dictionary
@@ -1167,7 +1169,9 @@ def generate_mass_parameters(sample, source=False):
not recompute keys already present in the dictionary
We add, potentially:
chirp mass, total mass, symmetric mass ratio, mass ratio, mass_1, mass_2
chirp_mass, total_mass, symmetric_mass_ratio, mass_ratio, mass_1, mass_2
Or if source=True:
chirp_mass_source, total_mass_source, symmetric_mass_ratio, mass_ratio, mass_1_source, mass_2_source
Parameters
==========
@@ -1175,6 +1179,10 @@
The input dictionary with two "spanning" mass parameters
e.g. (mass_1, mass_2), or (chirp_mass, mass_ratio), but not e.g. only
(mass_ratio, symmetric_mass_ratio)
source : bool, default False
If True, then perform the conversions for source mass parameters
i.e. mass_1_source instead of mass_1
Returns
=======
dict: The updated dictionary
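
A sketch of the source-frame behaviour documented above (hypothetical input values):

from bilby.gw.conversion import generate_mass_parameters

sample = dict(chirp_mass_source=25.0, mass_ratio=0.8)
out = generate_mass_parameters(sample, source=True)
# out additionally contains total_mass_source, symmetric_mass_ratio,
# mass_1_source and mass_2_source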
......
import numpy as np
from bilby_cython.geometry import calculate_arm, detector_tensor
from .. import utils as gwutils
@@ -263,7 +264,7 @@ class InterferometerGeometry(object):
if not self._x_updated or not self._y_updated:
_, _ = self.x, self.y # noqa
if not self._detector_tensor_updated:
self._detector_tensor = 0.5 * (np.einsum('i,j->ij', self.x, self.x) - np.einsum('i,j->ij', self.y, self.y))
self._detector_tensor = detector_tensor(x=self.x, y=self.y)
self._detector_tensor_updated = True
return self._detector_tensor
@@ -288,19 +289,18 @@
"""
if arm == 'x':
return self._calculate_arm(self._xarm_tilt, self._xarm_azimuth)
return calculate_arm(
arm_tilt=self._xarm_tilt,
arm_azimuth=self._xarm_azimuth,
longitude=self._longitude,
latitude=self._latitude
)
elif arm == 'y':
return self._calculate_arm(self._yarm_tilt, self._yarm_azimuth)
return calculate_arm(
arm_tilt=self._yarm_tilt,
arm_azimuth=self._yarm_azimuth,
longitude=self._longitude,
latitude=self._latitude
)
else:
raise ValueError("Arm must either be 'x' or 'y'.")
def _calculate_arm(self, arm_tilt, arm_azimuth):
e_long = np.array([-np.sin(self._longitude), np.cos(self._longitude), 0])
e_lat = np.array([-np.sin(self._latitude) * np.cos(self._longitude),
-np.sin(self._latitude) * np.sin(self._longitude), np.cos(self._latitude)])
e_h = np.array([np.cos(self._latitude) * np.cos(self._longitude),
np.cos(self._latitude) * np.sin(self._longitude), np.sin(self._latitude)])
return (np.cos(arm_tilt) * np.cos(arm_azimuth) * e_long +
np.cos(arm_tilt) * np.sin(arm_azimuth) * e_lat +
np.sin(arm_tilt) * e_h)
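
A sketch of the bilby-cython replacements called above (toy geometry at the equator/prime meridian; keyword names as used in the diff):

import numpy as np
from bilby_cython.geometry import calculate_arm, detector_tensor

x = calculate_arm(arm_tilt=0.0, arm_azimuth=np.pi / 2, longitude=0.0, latitude=0.0)
y = calculate_arm(arm_tilt=0.0, arm_azimuth=0.0, longitude=0.0, latitude=0.0)
d = detector_tensor(x=x, y=y)  # 3x3 array, equivalent to 0.5 * (outer(x, x) - outer(y, y))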
import os
import numpy as np
from bilby_cython.geometry import (
get_polarization_tensor,
three_by_three_matrix_contraction,
time_delay_from_geocenter,
)
from ...core import utils
from ...core.utils import docstring, logger, PropertyAccessor
@@ -264,15 +269,21 @@ class Interferometer(object):
psi: float
binary polarisation angle counter-clockwise about the direction of propagation
mode: str
polarisation mode (e.g. 'plus', 'cross')
polarisation mode (e.g. 'plus', 'cross') or the name of a specific detector.
If mode == self.name, return 1
Returns
=======
array_like: A 3x3 array representation of the antenna response for the specified mode
float: The antenna response for the specified mode and time/location
"""
polarization_tensor = gwutils.get_polarization_tensor(ra, dec, time, psi, mode)
return np.einsum('ij,ij->', self.geometry.detector_tensor, polarization_tensor)
if mode in ["plus", "cross", "x", "y", "breathing", "longitudinal"]:
polarization_tensor = get_polarization_tensor(ra, dec, time, psi, mode)
return three_by_three_matrix_contraction(self.geometry.detector_tensor, polarization_tensor)
elif mode == self.name:
return 1
else:
return 0
def get_detector_response(self, waveform_polarizations, parameters):
""" Get the detector response for a particular waveform
@@ -527,7 +538,7 @@
=======
float: The time delay from geocenter in seconds
"""
return gwutils.time_delay_geocentric(self.geometry.vertex, np.array([0, 0, 0]), ra, dec, time)
return time_delay_from_geocenter(self.geometry.vertex, ra, dec, time)
def vertex_position_geocentric(self):
"""
......
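
A sketch of the extended antenna_response behaviour (the GPS time of GW150914 as a toy value; sky location and psi are illustrative):

import bilby

ifo = bilby.gw.detector.get_empty_interferometer("H1")
t = 1126259462.4
fp = ifo.antenna_response(ra=1.0, dec=0.5, time=t, psi=0.3, mode="plus")
assert ifo.antenna_response(ra=1.0, dec=0.5, time=t, psi=0.3, mode="H1") == 1
assert ifo.antenna_response(ra=1.0, dec=0.5, time=t, psi=0.3, mode="L1") == 0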
@@ -897,8 +897,9 @@ class GravitationalWaveTransient(Likelihood):
for key in pairs:
if key not in loaded_file:
return False, key
elif not np.array_equal(np.atleast_1d(loaded_file[key]),
np.atleast_1d(pairs[key])):
elif not np.allclose(np.atleast_1d(loaded_file[key]),
np.atleast_1d(pairs[key]),
rtol=1e-15):
return False, key
return True, None
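
The switch to np.allclose addresses bitwise float differences across platforms; a minimal demonstration (atol set to zero here to isolate the relative tolerance, unlike the call above, which keeps the default atol):

import numpy as np

a = np.array([0.1 + 0.2])  # 0.30000000000000004
b = np.array([0.3])
print(np.array_equal(a, b))                     # False: bitwise comparison
print(np.allclose(a, b, rtol=1e-15, atol=0.0))  # True: tolerant comparison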
@@ -1088,6 +1089,7 @@ class GravitationalWaveTransient(Likelihood):
waveform_generator_class=self.waveform_generator.__class__,
waveform_arguments=self.waveform_generator.waveform_arguments,
frequency_domain_source_model=self.waveform_generator.frequency_domain_source_model,
time_domain_source_model=self.waveform_generator.time_domain_source_model,
parameter_conversion=self.waveform_generator.parameter_conversion,
sampling_frequency=self.waveform_generator.sampling_frequency,
duration=self.waveform_generator.duration,
......
@@ -105,6 +105,12 @@ class CompactBinaryCoalescenceResult(CoreResult):
return self.__get_from_nested_meta_data(
'likelihood', 'frequency_domain_source_model')
@property
def time_domain_source_model(self):
""" The time domain source model (function)"""
return self.__get_from_nested_meta_data(
'likelihood', 'time_domain_source_model')
@property
def parameter_conversion(self):
""" The frequency domain source model (function)"""
@@ -381,6 +387,7 @@ class CompactBinaryCoalescenceResult(CoreResult):
duration=self.duration, sampling_frequency=self.sampling_frequency,
start_time=self.start_time,
frequency_domain_source_model=self.frequency_domain_source_model,
time_domain_source_model=self.time_domain_source_model,
parameter_conversion=self.parameter_conversion,
waveform_arguments=self.waveform_arguments)
@@ -589,8 +596,8 @@ class CompactBinaryCoalescenceResult(CoreResult):
plot_frequencies,
np.percentile(fd_waveforms, lower_percentile, axis=0),
np.percentile(fd_waveforms, upper_percentile, axis=0),
color=WAVEFORM_COLOR, label=r'{}\% credible interval'.format(
int(upper_percentile - lower_percentile)),
color=WAVEFORM_COLOR,
label=r'{}% credible interval'.format(int(upper_percentile - lower_percentile)),
alpha=0.3)
axs[1].plot(
plot_times, np.mean(td_waveforms, axis=0),
......
import json
import os
from functools import lru_cache
from math import fmod
import numpy as np
from scipy.interpolate import interp1d
from scipy.special import i0e
from bilby_cython.geometry import (
zenith_azimuth_to_theta_phi as _zenith_azimuth_to_theta_phi,
)
from bilby_cython.time import greenwich_mean_sidereal_time
from ..core.utils import (ra_dec_to_theta_phi,
speed_of_light, logger, run_commandline,
from ..core.utils import (logger, run_commandline,
check_directory_exists_and_if_not_mkdir,
SamplesSummary, theta_phi_to_ra_dec)
from ..core.utils.constants import solar_mass
@@ -53,90 +55,6 @@ def psd_from_freq_series(freq_data, df):
return np.power(asd_from_freq_series(freq_data, df), 2)
def time_delay_geocentric(detector1, detector2, ra, dec, time):
"""
Calculate time delay between two detectors in geocentric coordinates based on XLALArrivalTimeDiff in TimeDelay.c
Parameters
==========
detector1: array_like
Cartesian coordinate vector for the first detector in the geocentric frame
generated by the Interferometer class as self.vertex.
detector2: array_like
Cartesian coordinate vector for the second detector in the geocentric frame.
To get time delay from Earth center, use detector2 = np.array([0,0,0])
ra: float
Right ascension of the source in radians
dec: float
Declination of the source in radians
time: float
GPS time in the geocentric frame
Returns
=======
float: Time delay between the two detectors in the geocentric frame
"""
gmst = fmod(greenwich_mean_sidereal_time(time), 2 * np.pi)
theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
omega = np.array([np.sin(theta) * np.cos(phi), np.sin(theta) * np.sin(phi), np.cos(theta)])
delta_d = detector2 - detector1
return np.dot(omega, delta_d) / speed_of_light
def get_polarization_tensor(ra, dec, time, psi, mode):
"""
Calculate the polarization tensor for a given sky location and time
See Nishizawa et al. (2009) arXiv:0903.0528 for definitions of the polarisation tensors.
[u, v, w] represent the Earth-frame
[m, n, omega] represent the wave-frame
Note: there is a typo in the definition of the wave-frame in Nishizawa et al.
Parameters
==========
ra: float
right ascension in radians
dec: float
declination in radians
time: float
geocentric GPS time
psi: float
binary polarisation angle counter-clockwise about the direction of propagation
mode: str
polarisation mode
Returns
=======
array_like: A 3x3 representation of the polarization_tensor for the specified mode.
"""
gmst = fmod(greenwich_mean_sidereal_time(time), 2 * np.pi)
theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
u = np.array([np.cos(phi) * np.cos(theta), np.cos(theta) * np.sin(phi), -np.sin(theta)])
v = np.array([-np.sin(phi), np.cos(phi), 0])
m = -u * np.sin(psi) - v * np.cos(psi)
n = -u * np.cos(psi) + v * np.sin(psi)
if mode.lower() == 'plus':
return np.einsum('i,j->ij', m, m) - np.einsum('i,j->ij', n, n)
elif mode.lower() == 'cross':
return np.einsum('i,j->ij', m, n) + np.einsum('i,j->ij', n, m)
elif mode.lower() == 'breathing':
return np.einsum('i,j->ij', m, m) + np.einsum('i,j->ij', n, n)
# Calculating omega here to avoid calculation when model in [plus, cross, breathing]
omega = np.cross(m, n)
if mode.lower() == 'longitudinal':
return np.einsum('i,j->ij', omega, omega)
elif mode.lower() == 'x':
return np.einsum('i,j->ij', m, omega) + np.einsum('i,j->ij', omega, m)
elif mode.lower() == 'y':
return np.einsum('i,j->ij', n, omega) + np.einsum('i,j->ij', omega, n)
else:
raise ValueError("{} not a polarization mode!".format(mode))
def get_vertex_position_geocentric(latitude, longitude, elevation):
"""
Calculate the position of the IFO vertex in geocentric coordinates in meters.
@@ -310,56 +228,6 @@ def overlap(signal_a, signal_b, power_spectral_density=None, delta_frequency=Non
return sum(integral).real
__cached_euler_matrix = None
__cached_delta_x = None
def euler_rotation(delta_x):
"""
Calculate the rotation matrix mapping the vector (0, 0, 1) to delta_x
while preserving the origin of the azimuthal angle.
This is decomposed into three Euler angles, alpha, beta, gamma, which rotate
about the z-, y-, and z- axes respectively.
Parameters
==========
delta_x: array-like (3,)
Vector onto which (0, 0, 1) should be mapped.
Returns
=======
total_rotation: array-like (3,3)
Rotation matrix which maps vectors from the frame in which delta_x is
aligned with the z-axis to the target frame.
"""
global __cached_delta_x
global __cached_euler_matrix
delta_x = delta_x / np.sum(delta_x**2)**0.5
if np.array_equal(delta_x, __cached_delta_x):
return __cached_euler_matrix
else:
__cached_delta_x = delta_x
alpha = np.arctan(- delta_x[1] * delta_x[2] / delta_x[0])
beta = np.arccos(delta_x[2])
gamma = np.arctan(delta_x[1] / delta_x[0])
rotation_1 = np.array([
[np.cos(alpha), -np.sin(alpha), 0], [np.sin(alpha), np.cos(alpha), 0],
[0, 0, 1]])
rotation_2 = np.array([
[np.cos(beta), 0, - np.sin(beta)], [0, 1, 0],
[np.sin(beta), 0, np.cos(beta)]])
rotation_3 = np.array([
[np.cos(gamma), -np.sin(gamma), 0], [np.sin(gamma), np.cos(gamma), 0],
[0, 0, 1]])
total_rotation = np.einsum(
'ij,jk,kl->il', rotation_3, rotation_2, rotation_1)
__cached_delta_x = delta_x
__cached_euler_matrix = total_rotation
return total_rotation
def zenith_azimuth_to_theta_phi(zenith, azimuth, ifos):
"""
Convert from the 'detector frame' to the Earth frame.
@@ -379,15 +247,7 @@ def zenith_azimuth_to_theta_phi(zenith, azimuth, ifos):
The zenith and azimuthal angles in the earth frame.
"""
delta_x = ifos[0].geometry.vertex - ifos[1].geometry.vertex
omega_prime = np.array([
np.sin(zenith) * np.cos(azimuth),
np.sin(zenith) * np.sin(azimuth),
np.cos(zenith)])
rotation_matrix = euler_rotation(delta_x)
omega = np.dot(rotation_matrix, omega_prime)
theta = np.arccos(omega[2])
phi = np.arctan2(omega[1], omega[0]) % (2 * np.pi)
return theta, phi
return _zenith_azimuth_to_theta_phi(zenith, azimuth, delta_x)
def zenith_azimuth_to_ra_dec(zenith, azimuth, geocent_time, ifos):
@@ -1005,27 +865,6 @@ def plot_spline_pos(log_freqs, samples, nfreqs=100, level=0.9, color='k', label=
plt.xlim(freq_points.min() - .5, freq_points.max() + 50)
def greenwich_mean_sidereal_time(time):
"""
Compute the greenwich mean sidereal time from a GPS time.
This is just a wrapper around :code:`lal.GreenwichMeanSiderealTime` .
Parameters
----------
time: float
The GPS time to convert.
Returns
-------
float
The sidereal time.
"""
from lal import GreenwichMeanSiderealTime
time = float(time)
return GreenwichMeanSiderealTime(time)
def ln_i0(value):
"""
A numerically stable method to evaluate ln(I_0) a modified Bessel function
......
@@ -89,6 +89,11 @@ def setup_command_line_args():
action="store_true",
help="Merge the set of runs, output saved using the outdir and label",
)
action_parser.add_argument(
"--ignore-inconsistent",
action="store_true",
help="If true, ignore inconsistency errors in the merge process, but print a warning",
)
action_parser.add_argument(
"-b", "--bayes", action="store_true", help="Print all Bayes factors."
)
@@ -208,7 +213,11 @@ def main():
save(result, args)
if args.merge:
result = results_list.combine()
if args.ignore_inconsistent:
consistency_level = "warning"
else:
consistency_level = "error"
result = results_list.combine(consistency_level=consistency_level)
if args.label is not None:
result.label = args.label
if args.outdir is not None:
......
@@ -30,15 +30,7 @@ RUN conda install -n ${{conda_env}} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${{conda_env}} -c conda-forge dynesty emcee nestle ptemcee
RUN conda install -n ${{conda_env}} -c conda-forge pymultinest ultranest
RUN conda install -n ${{conda_env}} -c conda-forge cpnest kombine dnest4 zeus-mcmc
RUN conda install -n ${{conda_env}} -c conda-forge ptmcmcsampler
RUN conda install -n ${{conda_env}} -c conda-forge pytorch
RUN conda install -n ${{conda_env}} -c conda-forge theano-pymc
RUN conda install -n ${{conda_env}} -c conda-forge pymc3
RUN conda install -n ${{conda_env}} -c conda-forge pymc pymc-base
RUN pip install nessai
RUN conda install -n ${{conda_env}} {conda_samplers} -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
......
# This dockerfile is written automatically and should not be modified by hand.
FROM containers.ligo.org/docker/base:conda
LABEL name="bilby CI testing" \
maintainer="Gregory Ashton <gregory.ashton@ligo.org>"
RUN conda update -n base -c defaults conda
ENV conda_env python310
RUN conda create -n ${conda_env} python=3.10
RUN echo "source activate ${conda_env}" > ~/.bashrc
ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH
RUN /bin/bash -c "source activate ${conda_env}"
RUN conda info
RUN python --version
# Install conda-installable programs
RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8
RUN conda install -n ${conda_env} -c anaconda coverage configargparse future dill
RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
# Install pip-requirements
RUN pip install --upgrade pip
RUN pip install --upgrade setuptools coverage-badge parameterized
# Install documentation requirements
RUN pip install sphinx numpydoc nbsphinx sphinx_rtd_theme sphinx-tabs autodoc
# Install testing requirements
RUN conda install -n ${conda_env} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${conda_env} dynesty emcee nestle ptemcee pymultinest ultranest cpnest kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
RUN apt-get install -y build-essential
RUN apt-get install -y libblas3 libblas-dev
RUN apt-get install -y liblapack3 liblapack-dev
RUN apt-get install -y libatlas3-base libatlas-base-dev
RUN apt-get install -y gfortran
RUN git clone https://github.com/PolyChord/PolyChordLite.git \
&& (cd PolyChordLite && python setup.py --no-mpi install)
# Install GW packages
RUN conda install -n ${conda_env} -c conda-forge python-lalsimulation bilby.cython
RUN pip install ligo-gracedb gwpy ligo.skymap
# Add the ROQ data to the image
RUN mkdir roq_basis \
&& cd roq_basis \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_addcal.hdf5 \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_multiband_addcal.hdf5