
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (20)
Showing 339 additions and 118 deletions
......@@ -71,6 +71,25 @@ basic-3.10:
<<: *test-python
image: python:3.10
.test-samplers-import: &test-samplers-import
stage: initial
script:
- python -m pip install .
- python -m pip list installed
- python test/test_samplers_import.py
import-samplers-3.8:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
import-samplers-3.9:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
import-samplers-3.10:
<<: *test-samplers-import
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.precommits: &precommits
stage: initial
script:
......@@ -97,13 +116,12 @@ precommits-py3.9:
CACHE_DIR: ".pip39"
PYVERSION: "python39"
# FIXME: when image builds for 3.10 change this back.
#precommits-py3.10:
# <<: *precommits
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# variables:
# CACHE_DIR: ".pip310"
# PYVERSION: "python310"
precommits-py3.10:
<<: *precommits
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
variables:
CACHE_DIR: ".pip310"
PYVERSION: "python310"
install:
stage: initial
......@@ -146,19 +164,16 @@ python-3.9:
- htmlcov/
expire_in: 30 days
# add back when 3.10 image is available
#python-3.10:
# <<: *unit-test
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10:
<<: *unit-test
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
.test-sampler: &test-sampler
stage: test
script:
- python -m pip install .
- python -m pip install schwimmbad
- python -m pip list installed
- pytest test/integration/sampler_run_test.py --durations 10 -v
python-3.8-samplers:
......@@ -171,11 +186,10 @@ python-3.9-samplers:
needs: ["basic-3.9", "precommits-py3.9"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
# add back when 3.10 image is available
#python-3.10-samplers:
# <<: *test-sampler
# needs: ["basic-3.10", "precommits-py3.10"]
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
python-3.10-samplers:
<<: *test-sampler
needs: ["basic-3.10", "precommits-py3.10"]
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
integration-tests-python-3.9:
stage: test
......@@ -209,11 +223,10 @@ plotting-python-3.9:
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
needs: ["basic-3.9", "precommits-py3.9"]
# add back when 3.10 image is available
#plotting-python-3.10:
# <<: *plotting
# image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
# needs: ["basic-3.10", "precommits-py3.10"]
plotting-python-3.10:
<<: *plotting
image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
needs: ["basic-3.10", "precommits-py3.10"]
# ------------------- Docs stage -------------------------------------------
......@@ -257,8 +270,11 @@ pages:
stage: deploy
image: docker:20.10.12
needs: ["containers"]
only:
- schedules
except:
refs:
- schedules
changes:
- containers/*
script:
- cd containers
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
......@@ -276,11 +292,10 @@ build-python39-container:
variables:
PYVERSION: "python39"
# add back when 3.10 image is available
#build-python310-container:
# <<: *build-container
# variables:
# PYVERSION: "python310"
build-python310-container:
<<: *build-container
variables:
PYVERSION: "python310"
pypi-release:
stage: deploy
......
......@@ -29,6 +29,7 @@ Hector Estelles
Ignacio Magaña Hernandez
Isobel Marguarethe Romero-Shaw
Jack Heinzel
Jacob Golomb
Jade Powell
James A Clark
Jeremy G Baier
......
......@@ -547,6 +547,7 @@ class GMMProposal(DensityEstimateProposal):
def _sample(self, nsamples=None):
return np.squeeze(self.density.sample(n_samples=nsamples)[0])
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("sklearn") is None:
if warn:
......@@ -593,12 +594,15 @@ class NormalizingFlowProposal(DensityEstimateProposal):
fallback=fallback,
scale_fits=scale_fits,
)
self.setup_flow()
self.setup_optimizer()
self.initialised = False
self.max_training_epochs = max_training_epochs
self.js_factor = js_factor
def initialise(self):
self.setup_flow()
self.setup_optimizer()
self.initialised = True
def setup_flow(self):
if self.ndim < 3:
self.setup_basic_flow()
......@@ -699,6 +703,9 @@ class NormalizingFlowProposal(DensityEstimateProposal):
self.trained = True
def propose(self, chain):
if self.initialised is False:
self.initialise()
import torch
self.steps_since_refit += 1
......@@ -728,6 +735,7 @@ class NormalizingFlowProposal(DensityEstimateProposal):
return theta, float(log_factor)
@staticmethod
def check_dependencies(warn=True):
if importlib.util.find_spec("nflows") is None:
if warn:
......@@ -1094,10 +1102,6 @@ def get_proposal_cycle(string, priors, L1steps=1, warn=True):
]
if GMMProposal.check_dependencies(warn=warn):
plist.append(GMMProposal(priors, weight=big_weight, scale_fits=L1steps))
if NormalizingFlowProposal.check_dependencies(warn=warn):
plist.append(
NormalizingFlowProposal(priors, weight=big_weight, scale_fits=L1steps)
)
plist = remove_proposals_using_string(plist, string)
return ProposalCycle(plist)
......
......@@ -6,6 +6,7 @@ from pathlib import Path
import numpy as np
import pandas as pd
from scipy.optimize import differential_evolution
from ..core.result import rejection_sample
from ..core.sampler.base_sampler import (
......@@ -114,6 +115,13 @@ class Bilby_MCMC(MCMCSampler):
evidence_method: str, [stepping_stone, thermodynamic]
The evidence calculation method to use. Defaults to stepping_stone, but
the results of all available methods are stored in the ln_z_dict.
initial_sample_method: str
Method to draw the initial sample. Either "prior" (a random draw
from the prior) or "maximize" (use an optimization approach to attempt
to find the maximum posterior estimate).
initial_sample_dict: dict
A dictionary of initial sample values. Entries given here overwrite the
corresponding values of the initial sample drawn using
initial_sample_method; the dictionary need not be complete.
verbose: bool
Whether to print diagnostic output during the run.
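As a usage sketch of the two new keyword arguments (the data, model, and prior below are illustrative assumptions, not taken from this diff):

import numpy as np
import bilby

def model(x, mu):
    return mu * np.ones_like(x)

x = np.linspace(0, 1, 100)
y = model(x, mu=1.0) + np.random.normal(0, 1, len(x))

likelihood = bilby.core.likelihood.GaussianLikelihood(x, y, model, sigma=1)
priors = dict(mu=bilby.core.prior.Uniform(-5, 5, "mu"))

result = bilby.run_sampler(
    likelihood=likelihood,
    priors=priors,
    sampler="bilby_mcmc",
    initial_sample_method="maximize",  # optimize for a maximum posterior start point
    initial_sample_dict=dict(mu=0.5),  # then overwrite mu in the initial sample
    outdir="outdir",
    label="initial_sample_demo",
)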
......@@ -144,6 +152,8 @@ class Bilby_MCMC(MCMCSampler):
fixed_tau=None,
tau_window=None,
evidence_method="stepping_stone",
initial_sample_method="prior",
initial_sample_dict=None,
)
def __init__(
......@@ -188,6 +198,8 @@ class Bilby_MCMC(MCMCSampler):
self.proposal_cycle = self.kwargs["proposal_cycle"]
self.pt_rejection_sample = self.kwargs["pt_rejection_sample"]
self.evidence_method = self.kwargs["evidence_method"]
self.initial_sample_method = self.kwargs["initial_sample_method"]
self.initial_sample_dict = self.kwargs["initial_sample_dict"]
self.printdt = self.kwargs["printdt"]
check_directory_exists_and_if_not_mkdir(self.outdir)
......@@ -239,8 +251,8 @@ class Bilby_MCMC(MCMCSampler):
@staticmethod
def add_data_to_result(result, ptsampler, outdir, label, make_plots):
result.samples = ptsampler.samples
result.log_likelihood_evaluations = result.samples[LOGLKEY]
result.log_prior_evaluations = result.samples[LOGPKEY]
result.log_likelihood_evaluations = result.samples[LOGLKEY].to_numpy()
result.log_prior_evaluations = result.samples[LOGPKEY].to_numpy()
ptsampler.compute_evidence(
outdir=outdir,
label=label,
......@@ -287,6 +299,8 @@ class Bilby_MCMC(MCMCSampler):
pool=self.pool,
use_ratio=self.use_ratio,
evidence_method=self.evidence_method,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
def get_setup_string(self):
......@@ -522,9 +536,13 @@ class BilbyPTMCMCSampler(object):
pool,
use_ratio,
evidence_method,
initial_sample_method,
initial_sample_dict,
):
self.set_pt_inputs(pt_inputs)
self.use_ratio = use_ratio
self.initial_sample_method = initial_sample_method
self.initial_sample_dict = initial_sample_dict
self.setup_sampler_dictionary(convergence_inputs, proposal_cycle)
self.set_convergence_inputs(convergence_inputs)
self.pt_rejection_sample = pt_rejection_sample
......@@ -572,10 +590,12 @@ class BilbyPTMCMCSampler(object):
betas = self.get_initial_betas()
logger.info(
f"Initializing BilbyPTMCMCSampler with:"
f"ntemps={self.ntemps},"
f"nensemble={self.nensemble},"
f"pt_ensemble={self.pt_ensemble},"
f"initial_betas={betas}\n"
f"ntemps={self.ntemps}, "
f"nensemble={self.nensemble}, "
f"pt_ensemble={self.pt_ensemble}, "
f"initial_betas={betas}, "
f"initial_sample_method={self.initial_sample_method}, "
f"initial_sample_dict={self.initial_sample_dict}\n"
)
self.sampler_dictionary = dict()
for Tindex, beta in enumerate(betas):
......@@ -591,6 +611,8 @@ class BilbyPTMCMCSampler(object):
convergence_inputs=convergence_inputs,
proposal_cycle=proposal_cycle,
use_ratio=self.use_ratio,
initial_sample_method=self.initial_sample_method,
initial_sample_dict=self.initial_sample_dict,
)
for Eindex in range(n)
]
......@@ -1077,6 +1099,8 @@ class BilbyMCMCSampler(object):
Tindex=0,
Eindex=0,
use_ratio=False,
initial_sample_method="prior",
initial_sample_dict=None,
):
self.beta = beta
self.Tindex = Tindex
......@@ -1086,12 +1110,24 @@ class BilbyMCMCSampler(object):
self.parameters = _sampling_convenience_dump.priors.non_fixed_keys
self.ndim = len(self.parameters)
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
if initial_sample_method.lower() == "prior":
full_sample_dict = _sampling_convenience_dump.priors.sample()
initial_sample = {
k: v
for k, v in full_sample_dict.items()
if k in _sampling_convenience_dump.priors.non_fixed_keys
}
elif initial_sample_method.lower() in ["maximize", "maximise", "maximum"]:
initial_sample = get_initial_maximum_posterior_sample(self.beta)
else:
raise ValueError(
f"initial sample method {initial_sample_method} not understood"
)
if initial_sample_dict is not None:
initial_sample.update(initial_sample_dict)
logger.info(f"Using initial sample {initial_sample}")
initial_sample = Sample(initial_sample)
initial_sample[LOGLKEY] = self.log_likelihood(initial_sample)
initial_sample[LOGPKEY] = self.log_prior(initial_sample)
......@@ -1266,6 +1302,42 @@ class BilbyMCMCSampler(object):
return samples
def get_initial_maximum_posterior_sample(beta):
"""Attempt to find the maximum a posteriori sample of the tempered posterior
This uses scipy's differential_evolution across the prior bounds, initialised
with a Sobol sequence of points, to reduce the risk of converging to a
local optimum.
"""
logger.info("Finding initial maximum posterior estimate")
likelihood = _sampling_convenience_dump.likelihood
priors = _sampling_convenience_dump.priors
search_parameter_keys = _sampling_convenience_dump.search_parameter_keys
bounds = []
for key in search_parameter_keys:
bounds.append((priors[key].minimum, priors[key].maximum))
def neg_log_post(x):
sample = {key: val for key, val in zip(search_parameter_keys, x)}
ln_prior = priors.ln_prob(sample)
if np.isinf(ln_prior):
return np.inf  # differential_evolution minimises, so excluded points must be +inf
likelihood.parameters.update(sample)
return -beta * likelihood.log_likelihood() - ln_prior
res = differential_evolution(neg_log_post, bounds, popsize=100, init="sobol")
if res.success:
sample = {key: val for key, val in zip(search_parameter_keys, res.x)}
logger.info(f"Initial maximum posterior estimate {sample}")
return sample
else:
raise ValueError("Failed to find initial maximum posterior estimate")
# Methods used to aid parallelisation:
......
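As a toy illustration of the tempered objective minimised by get_initial_maximum_posterior_sample, namely -beta * ln L - ln prior (the Gaussian likelihood and prior here are assumptions for the demo, not bilby internals):

import numpy as np
from scipy.optimize import differential_evolution

beta = 0.5

def neg_log_post(x):
    ln_like = -0.5 * (x[0] - 1.0) ** 2  # Gaussian likelihood centred at 1
    ln_prior = -0.5 * x[0] ** 2         # Gaussian prior centred at 0
    return -beta * ln_like - ln_prior

res = differential_evolution(neg_log_post, bounds=[(-5, 5)], init="sobol")
print(res.x)  # tempering pulls the optimum to beta / (1 + beta) ~ 0.33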
......@@ -6,7 +6,7 @@ import bilby
from bilby.bilby_mcmc import Bilby_MCMC
from ..prior import DeltaFunction, PriorDict
from ..utils import command_line_args, loaded_modules_dict, logger
from ..utils import command_line_args, env_package_list, loaded_modules_dict, logger
from . import proposal
from .base_sampler import Sampler, SamplingMarginalisedParameterError
from .cpnest import Cpnest
......@@ -175,6 +175,7 @@ def run_sampler(
likelihood.outdir = outdir
meta_data["likelihood"] = likelihood.meta_data
meta_data["loaded_modules"] = loaded_modules_dict()
meta_data["environment_packages"] = env_package_list(as_dataframe=True)
if command_line_args.bilby_zero_likelihood_mode:
from bilby.core.likelihood import ZeroLikelihood
......
......@@ -61,7 +61,6 @@ class Pymc(MCMCSampler):
n_init=200000,
initvals=None,
trace=None,
chain_idx=0,
chains=2,
cores=1,
tune=500,
......
......@@ -292,8 +292,6 @@ def encode_for_hdf5(key, item):
output = json.dumps(item._get_json_dict())
elif isinstance(item, pd.DataFrame):
output = item.to_dict(orient="list")
elif isinstance(item, pd.Series):
output = item.to_dict()
elif inspect.isfunction(item) or inspect.isclass(item):
output = dict(
__module__=item.__module__, __name__=item.__name__, __class__=True
......
import json
import logging
from pathlib import Path
import subprocess
import sys
logger = logging.getLogger('bilby')
......@@ -70,3 +72,60 @@ def loaded_modules_dict():
if "." not in str(key):
vdict[key] = str(getattr(sys.modules[key], "__version__", "N/A"))
return vdict
def env_package_list(as_dataframe=False):
"""Get the list of packages installed in the system prefix.
If it is detected that the system prefix is part of a Conda environment,
a call to ``conda list --prefix {sys.prefix}`` will be made, otherwise
the call will be to ``{sys.executable} -m pip list installed``.
Parameters
----------
as_dataframe: bool
return output as a `pandas.DataFrame`
Returns
-------
pkgs : `list` of `dict`, or `pandas.DataFrame`
If ``as_dataframe=False`` is given, the output is a `list` of `dict`,
one for each package, at least with ``'name'`` and ``'version'`` keys
(more if `conda` is used).
If ``as_dataframe=True`` is given, the output is a `DataFrame`
created from the `list` of `dicts`.
"""
prefix = sys.prefix
# if a conda-meta directory exists, this is a conda environment, so
# use conda to print the package list
if (Path(prefix) / "conda-meta").is_dir():
pkgs = json.loads(subprocess.check_output([
"conda",
"list",
"--prefix", prefix,
"--json"
]))
# otherwise try and use Pip
else:
try:
import pip # noqa: F401
except ModuleNotFoundError: # no pip?
# not a conda environment, and no pip, so just return
# the list of loaded modules
modules = loaded_modules_dict()
pkgs = [{"name": x, "version": y} for x, y in modules.items()]
else:
pkgs = json.loads(subprocess.check_output([
sys.executable,
"-m", "pip",
"list", "installed",
"--format", "json",
]))
# optionally convert to a pandas DataFrame for storage
if as_dataframe:
from pandas import DataFrame
return DataFrame(pkgs)
return pkgs
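A usage sketch for the new helper (the import path bilby.core.utils is inferred from the sampler import change above):

from bilby.core.utils import env_package_list

pkgs = env_package_list()
print(pkgs[0])  # e.g. {"name": "numpy", "version": "1.23.0", ...}

df = env_package_list(as_dataframe=True)
print(df[["name", "version"]].head())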
......@@ -192,14 +192,14 @@ def convert_to_lal_binary_black_hole_parameters(parameters):
converted_parameters = parameters.copy()
original_keys = list(converted_parameters.keys())
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
if 'luminosity_distance' not in original_keys:
if 'redshift' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
redshift_to_luminosity_distance(parameters['redshift'])
elif 'comoving_distance' in converted_parameters.keys():
converted_parameters['luminosity_distance'] = \
comoving_distance_to_luminosity_distance(
parameters['comoving_distance'])
for key in original_keys:
if key[-7:] == '_source':
......
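A hedged sketch of the behaviour this guard changes (the module path bilby.gw.conversion and the parameter values are illustrative): a user-supplied luminosity_distance now takes precedence over redshift or comoving_distance instead of being overwritten:

from bilby.gw.conversion import convert_to_lal_binary_black_hole_parameters

parameters = dict(mass_1=36.0, mass_2=29.0, redshift=0.05, luminosity_distance=400.0)
converted, added_keys = convert_to_lal_binary_black_hole_parameters(parameters)
print(converted["luminosity_distance"])  # 400.0, no longer recomputed from redshift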
......@@ -30,15 +30,7 @@ RUN conda install -n ${{conda_env}} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${{conda_env}} -c conda-forge dynesty emcee nestle ptemcee
RUN conda install -n ${{conda_env}} -c conda-forge pymultinest ultranest
RUN conda install -n ${{conda_env}} -c conda-forge cpnest kombine dnest4 zeus-mcmc
RUN conda install -n ${{conda_env}} -c conda-forge ptmcmcsampler
RUN conda install -n ${{conda_env}} -c conda-forge pytorch
RUN conda install -n ${{conda_env}} -c conda-forge theano-pymc
RUN conda install -n ${{conda_env}} -c conda-forge pymc3
RUN conda install -n ${{conda_env}} -c conda-forge pymc pymc-base
RUN pip install nessai
RUN conda install -n ${{conda_env}} {conda_samplers} -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
......
# This dockerfile is written automatically and should not be modified by hand.
FROM containers.ligo.org/docker/base:conda
LABEL name="bilby CI testing" \
maintainer="Gregory Ashton <gregory.ashton@ligo.org>"
RUN conda update -n base -c defaults conda
ENV conda_env python310
RUN conda create -n ${conda_env} python=3.10
RUN echo "source activate ${conda_env}" > ~/.bashrc
ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH
RUN /bin/bash -c "source activate ${conda_env}"
RUN conda info
RUN python --version
# Install conda-installable programs
RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8
RUN conda install -n ${conda_env} -c anaconda coverage configargparse future dill
RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
# Install pip-requirements
RUN pip install --upgrade pip
RUN pip install --upgrade setuptools coverage-badge parameterized
# Install documentation requirements
RUN pip install sphinx numpydoc nbsphinx sphinx_rtd_theme sphinx-tabs autodoc
# Install testing requirements
RUN conda install -n ${conda_env} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${conda_env} dynesty emcee nestle ptemcee pymultinest ultranest cpnest kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
RUN apt-get install -y build-essential
RUN apt-get install -y libblas3 libblas-dev
RUN apt-get install -y liblapack3 liblapack-dev
RUN apt-get install -y libatlas3-base libatlas-base-dev
RUN apt-get install -y gfortran
RUN git clone https://github.com/PolyChord/PolyChordLite.git \
&& (cd PolyChordLite && python setup.py --no-mpi install)
# Install GW packages
RUN conda install -n ${conda_env} -c conda-forge python-lalsimulation bilby.cython
RUN pip install ligo-gracedb gwpy ligo.skymap
# Add the ROQ data to the image
RUN mkdir roq_basis \
&& cd roq_basis \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \
&& wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_addcal.hdf5 \
&& wget https://git.ligo.org/soichiro.morisaki/roq_basis/raw/main/IMRPhenomD/16s_nospins/basis_multiband_addcal.hdf5
......@@ -32,15 +32,7 @@ RUN conda install -n ${conda_env} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${conda_env} -c conda-forge dynesty emcee nestle ptemcee
RUN conda install -n ${conda_env} -c conda-forge pymultinest ultranest
RUN conda install -n ${conda_env} -c conda-forge cpnest kombine dnest4 zeus-mcmc
RUN conda install -n ${conda_env} -c conda-forge ptmcmcsampler
RUN conda install -n ${conda_env} -c conda-forge pytorch
RUN conda install -n ${conda_env} -c conda-forge theano-pymc
RUN conda install -n ${conda_env} -c conda-forge pymc3
RUN conda install -n ${conda_env} -c conda-forge pymc pymc-base
RUN pip install nessai
RUN conda install -n ${conda_env} dynesty emcee nestle ptemcee pymultinest ultranest cpnest kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
......
......@@ -32,15 +32,7 @@ RUN conda install -n ${conda_env} -c conda-forge scikit-image celerite george
# Install dependencies and samplers
RUN pip install corner healpy cython tables
RUN conda install -n ${conda_env} -c conda-forge dynesty emcee nestle ptemcee
RUN conda install -n ${conda_env} -c conda-forge pymultinest ultranest
RUN conda install -n ${conda_env} -c conda-forge cpnest kombine dnest4 zeus-mcmc
RUN conda install -n ${conda_env} -c conda-forge ptmcmcsampler
RUN conda install -n ${conda_env} -c conda-forge pytorch
RUN conda install -n ${conda_env} -c conda-forge theano-pymc
RUN conda install -n ${conda_env} -c conda-forge pymc3
RUN conda install -n ${conda_env} -c conda-forge pymc pymc-base
RUN pip install nessai
RUN conda install -n ${conda_env} dynesty emcee nestle ptemcee pymultinest ultranest cpnest kombine dnest4 zeus-mcmc pytorch pymc nessai ptmcmcsampler -c conda-forge -c pytorch
# Install Polychord
RUN apt-get update --allow-releaseinfo-change
......
......@@ -3,13 +3,31 @@ from datetime import date
with open("dockerfile-template", "r") as ff:
template = ff.read()
python_versions = [(3, 8), (3, 9)]
python_versions = [(3, 8), (3, 9), (3, 10)]
today = date.today().strftime("%Y%m%d")
conda_sampler_dict = dict(
python38=[
"dynesty", "emcee", "nestle", "ptemcee", "pymultinest", "ultranest",
"cpnest", "kombine", "dnest4", "zeus-mcmc",
"pytorch", "pymc", "nessai", "ptmcmcsampler",
],
python39=[
"dynesty", "emcee", "nestle", "ptemcee", "pymultinest", "ultranest",
"cpnest", "kombine", "dnest4", "zeus-mcmc",
"pytorch", "pymc", "nessai", "ptmcmcsampler",
],
python310=[
"dynesty", "emcee", "nestle", "ptemcee", "pymultinest", "ultranest",
"cpnest", "kombine", "dnest4", "zeus-mcmc",
"pytorch", "pymc", "nessai", "ptmcmcsampler",
]
)
for python_major_version, python_minor_version in python_versions:
key = f"python{python_major_version}{python_minor_version}"
with open(
"v3-dockerfile-test-suite-python"
f"{python_major_version}{python_minor_version}",
f"v3-dockerfile-test-suite-{key}",
"w"
) as ff:
ff.write(
......@@ -19,5 +37,6 @@ for python_major_version, python_minor_version in python_versions:
ff.write(template.format(
date=today,
python_major_version=python_major_version,
python_minor_version=python_minor_version
python_minor_version=python_minor_version,
conda_samplers=" ".join(conda_sampler_dict[key])
))
......@@ -10,7 +10,7 @@ Installation
$ conda install -c conda-forge bilby
Supported python versions: 3.6+.
Supported python versions: 3.8-3.10.
.. tab:: Pip
......@@ -18,7 +18,7 @@ Installation
$ pip install bilby
Supported python versions: 3.6+.
Supported python versions: 3.8-3.10.
This will install all requirements for running :code:`bilby` for general
......@@ -47,7 +47,7 @@ wave inference, please additionally run the following commands.
Install bilby from source
-------------------------
:code:`bilby` is developed and tested with Python 3.6+. In the
:code:`bilby` is developed and tested with Python 3.8-3.10. In the
following, we assume you have a working python installation, `python pip
<https://packaging.python.org/tutorials/installing-packages/#use-pip-for-installing>`_,
and `git <https://git-scm.com/>`_. See :ref:`installing-python` for our
......@@ -65,6 +65,15 @@ Clone the repository, install the requirements, and then install the software:
Once you have run these steps, you have :code:`bilby` installed. You can now
try to run the examples.
.. note::
The bilby version is determined by the tags in the git repository. Therefore, you may need to run
.. code-block:: console
git fetch --all --tags
to fetch the tags so that your version information is up to date when you install from source.
.. note::
If you do not have a git.ligo account, and receive an error message:
......@@ -116,7 +125,7 @@ file, you can do this from the link above, or run the command
$ wget https://repo.anaconda.com/archive/Anaconda3-5.2.0-Linux-x86_64.sh
this will download an installer for python 3.6, for other versions check
this will download a Linux installer for Python; for other versions check
the `anaconda page <https://www.anaconda.com/download/#linux>`_.
Then, `run the command
<https://conda.io/docs/user-guide/install/linux.html>`_
......
......@@ -87,6 +87,7 @@ setup(
classifiers=[
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
......
......@@ -165,29 +165,33 @@ class TestProposals(TestBaseProposals):
def test_NF_proposal(self):
priors = self.create_priors()
chain = self.create_chain(10000)
prop = proposals.NormalizingFlowProposal(priors, first_fit=10000)
prop.steps_since_refit = 9999
start = time.time()
p, w = prop(chain)
dt = time.time() - start
print(f"Training for {prop.__class__.__name__} took dt~{dt:0.2g} [s]")
self.assertTrue(prop.trained)
self.proposal_check(prop)
if proposals.NormalizingFlowProposal.check_dependencies():
prop = proposals.NormalizingFlowProposal(priors, first_fit=10000)
prop.steps_since_refit = 9999
start = time.time()
p, w = prop(chain)
dt = time.time() - start
print(f"Training for {prop.__class__.__name__} took dt~{dt:0.2g} [s]")
self.assertTrue(prop.trained)
self.proposal_check(prop)
else:
print("nflows not installed, unable to test NormalizingFlowProposal")
def test_NF_proposal_15D(self):
ndim = 15
priors = self.create_priors(ndim)
chain = self.create_chain(10000, ndim=ndim)
prop = proposals.NormalizingFlowProposal(priors, first_fit=10000)
prop.steps_since_refit = 9999
start = time.time()
p, w = prop(chain)
dt = time.time() - start
print(f"Training for {prop.__class__.__name__} took dt~{dt:0.2g} [s]")
self.assertTrue(prop.trained)
self.proposal_check(prop, ndim=ndim)
if proposals.NormalizingFlowProposal.check_dependencies():
prop = proposals.NormalizingFlowProposal(priors, first_fit=10000)
prop.steps_since_refit = 9999
start = time.time()
p, w = prop(chain)
dt = time.time() - start
print(f"Training for {prop.__class__.__name__} took dt~{dt:0.2g} [s]")
self.assertTrue(prop.trained)
self.proposal_check(prop, ndim=ndim)
else:
print("nflows not installed, unable to test NormalizingFlowProposal")
if __name__ == "__main__":
......
......@@ -21,6 +21,7 @@ class TestNessai(unittest.TestCase):
plot=False,
skip_import_verification=True,
sampling_seed=150914,
npool=None, # TODO: remove when support for nessai<0.7.0 is dropped
)
self.expected = self.sampler.default_kwargs
self.expected['output'] = 'outdir/label_nessai/'
......@@ -59,10 +60,13 @@ class TestNessai(unittest.TestCase):
assert self.expected["seed"] == 150914
def test_npool_max_threads(self):
# TODO: remove when support for nessai<0.7.0 is dropped
expected = self.expected.copy()
expected["n_pool"] = None
expected["max_threads"] = 1
new_kwargs = self.sampler.kwargs.copy()
new_kwargs["n_pool"] = 1
new_kwargs["max_threads"] = 1
self.sampler.kwargs = new_kwargs
self.assertDictEqual(expected, self.sampler.kwargs)
......
......@@ -33,7 +33,6 @@ class TestPyMC(unittest.TestCase):
n_init=200000,
initvals=None,
trace=None,
chain_idx=0,
chains=2,
cores=1,
tune=500,
......@@ -57,7 +56,6 @@ class TestPyMC(unittest.TestCase):
n_init=200000,
initvals=None,
trace=None,
chain_idx=0,
chains=2,
cores=1,
tune=500,
......
......@@ -104,7 +104,7 @@ class TestGWUtils(unittest.TestCase):
strain = gwutils.read_frame_file(
filename, start_time=None, end_time=None, channel=channel
)
self.assertEqual(strain.channel.name, channel)
self.assertEqual(strain.name, channel)
self.assertTrue(np.all(strain.value == data[:-1]))
# Check reading with time limits
......