Commit 5a8eda66 authored by Gregory Ashton's avatar Gregory Ashton

Merge branch 'change_sampled_parameters' into 'master'

Change sampled parameters

See merge request Monash/tupak!31
parents 04de105a 5bb71c9d
Pipeline #19560 passed with stages
in 5 minutes and 38 seconds
#!/bin/python
"""
Tutorial to demonstrate running parameter estimation sampling in non-standard
parameters for an injected signal.

The masses are estimated using a uniform prior in chirp mass and mass ratio,
and the distance using a uniform-in-comoving-volume prior on luminosity
distance between 100 Mpc and 5 Gpc; the cosmology is WMAP7.
"""
from __future__ import division, print_function
import tupak
import numpy as np

tupak.utils.setup_logger(log_level="info")

# Duration (s) and sampling frequency (Hz) of the simulated data segment.
time_duration = 4.
sampling_frequency = 2048.
outdir = 'outdir'  # all output (data dumps, result files, plots) goes here

# Fix the seed so the simulated noise realisation is reproducible.
np.random.seed(151226)

# Parameters of the injected binary-black-hole signal.
injection_parameters = dict(mass_1=36., mass_2=29., a_1=0.4, a_2=0.3, tilt_1=0.5, tilt_2=1.0, phi_12=1.7, phi_jl=0.3,
                            luminosity_distance=3000., iota=0.4, psi=2.659, phase=1.3, geocent_time=1126259642.413,
                            waveform_approximant='IMRPhenomPv2', reference_frequency=50., ra=1.375, dec=-1.2108)

# Create the waveform_generator using a LAL BinaryBlackHole source function.
# Sampling happens in chirp_mass, mass_ratio and cos_iota rather than the
# source model's native mass_1, mass_2 and iota; the conversion function maps
# between the two parameterisations.
waveform_generator = tupak.waveform_generator.WaveformGenerator(
    sampling_frequency=sampling_frequency, time_duration=time_duration,
    frequency_domain_source_model=tupak.source.lal_binary_black_hole,
    parameter_conversion=tupak.conversion.convert_to_lal_binary_black_hole_parameters,
    non_standard_sampling_parameter_keys=['chirp_mass', 'mass_ratio', 'cos_iota'],
    parameters=injection_parameters)
hf_signal = waveform_generator.frequency_domain_strain()

# Set up interferometers: simulated noise plus the injected signal.
IFOs = [tupak.detector.get_interferometer_with_fake_noise_and_injection(
    name, injection_polarizations=hf_signal, injection_parameters=injection_parameters, time_duration=time_duration,
    sampling_frequency=sampling_frequency, outdir=outdir) for name in ['H1', 'L1', 'V1']]

# Set up prior
priors = dict()
# These parameters will not be sampled; they are fixed to the injected values.
for key in ['a_1', 'a_2', 'tilt_1', 'tilt_2', 'phi_12', 'phi_jl', 'phase', 'psi', 'ra', 'dec', 'geocent_time']:
    priors[key] = injection_parameters[key]
# Distance gets its default prior (uniform in comoving volume).
priors['luminosity_distance'] = tupak.prior.create_default_prior(name='luminosity_distance')

# Initialise Likelihood
likelihood = tupak.likelihood.Likelihood(interferometers=IFOs, waveform_generator=waveform_generator)

# Run sampler; the conversion_function fills derived parameters (component
# masses, component spins, SNRs, ...) into the output posterior.
result = tupak.sampler.run_sampler(likelihood=likelihood, priors=priors, sampler='dynesty',
                                   injection_parameters=injection_parameters, label='DifferentParameters',
                                   outdir=outdir, conversion_function=tupak.conversion.generate_all_bbh_parameters)
result.plot_corner()
result.plot_walks()
result.plot_distributions()
print(result)
......@@ -15,12 +15,12 @@ sampling_frequency = 4096.
simulation_parameters = dict(
mass_1=36.,
mass_2=29.,
a_1=0,
a_2=0,
tilt_1=0,
tilt_2=0,
phi_12=0,
phi_jl=0,
a_1=0.,
a_2=0.,
tilt_1=0.,
tilt_2=0.,
phi_12=0.,
phi_jl=0.,
luminosity_distance=100.,
iota=0.4,
phase=1.3,
......
......@@ -55,7 +55,7 @@ class TestPriorLatexLabel(unittest.TestCase):
self.assertEqual(test_label, self.prior.latex_label)
def test_default_label_assignment(self):
self.prior.name = 'mchirp'
self.prior.name = 'chirp_mass'
self.prior.latex_label = None
self.assertEqual(self.prior.latex_label, '$\mathcal{M}$')
......
......@@ -9,3 +9,4 @@ from . import likelihood
from . import waveform_generator
from . import result
from . import sampler
from . import conversion
import tupak
import numpy as np
import lalsimulation as lalsim
import pandas as pd
import logging
def convert_to_lal_binary_black_hole_parameters(parameters, search_keys, remove=True):
    """
    Convert parameters we have into parameters we need.

    This is defined by the parameters of tupak.source.lal_binary_black_hole()
    Mass: mass_1, mass_2
    Spin: a_1, a_2, tilt_1, tilt_2, phi_12, phi_jl
    Extrinsic: luminosity_distance, theta_jn, phase, ra, dec, geocent_time, psi

    This involves popping a lot of things from parameters.
    The keys in ignored_keys should be popped after evaluating the waveform.

    Parameters
    ----------
    parameters: dict
        dictionary of parameter values to convert into the required parameters
    search_keys: list
        parameters which are needed for the waveform generation
    remove: bool, optional
        Whether or not to remove the extra keys, necessary for sampling, default=True.

    Return
    ------
    ignored_keys: list
        keys which are added to parameters during function call
    """
    ignored_keys = []
    # Only derive component masses if the caller is not sampling in them.
    if 'mass_1' not in search_keys and 'mass_2' not in search_keys:
        if 'chirp_mass' in parameters.keys():
            if 'total_mass' in parameters.keys():
                # chirp_mass, total_mass to total_mass, symmetric_mass_ratio:
                # eta = (Mc / M) ** (5 / 3)
                parameters['symmetric_mass_ratio'] = (parameters['chirp_mass'] / parameters['total_mass'])**(5 / 3)
                if remove:
                    parameters.pop('chirp_mass')
            if 'symmetric_mass_ratio' in parameters.keys():
                # symmetric_mass_ratio to mass_ratio, taking the q <= 1 root.
                temp = (1 / parameters['symmetric_mass_ratio'] / 2 - 1)
                parameters['mass_ratio'] = temp - (temp**2 - 1)**0.5
                if remove:
                    parameters.pop('symmetric_mass_ratio')
            if 'mass_ratio' in parameters.keys():
                if 'total_mass' not in parameters.keys():
                    # chirp_mass, mass_ratio to total_mass:
                    # M = Mc * (1 + q) ** 1.2 / q ** 0.6
                    parameters['total_mass'] = parameters['chirp_mass'] * (1 + parameters['mass_ratio'])**1.2 \
                        / parameters['mass_ratio']**0.6
                    # BUGFIX: this pop was previously unconditional, deleting
                    # chirp_mass even when remove=False (e.g. when converting
                    # posterior samples in place).
                    if remove:
                        parameters.pop('chirp_mass')
                # total_mass, mass_ratio to component masses
                parameters['mass_1'] = parameters['total_mass'] / (1 + parameters['mass_ratio'])
                parameters['mass_2'] = parameters['mass_1'] * parameters['mass_ratio']
                if remove:
                    parameters.pop('total_mass')
                    parameters.pop('mass_ratio')
                ignored_keys.append('mass_1')
                ignored_keys.append('mass_2')
        elif 'total_mass' in parameters.keys():
            if 'symmetric_mass_ratio' in parameters.keys():
                # symmetric_mass_ratio to mass_ratio, taking the q <= 1 root.
                temp = (1 / parameters['symmetric_mass_ratio'] / 2 - 1)
                parameters['mass_ratio'] = temp - (temp**2 - 1)**0.5
                if remove:
                    parameters.pop('symmetric_mass_ratio')
            if 'mass_ratio' in parameters.keys():
                # total_mass, mass_ratio to component masses
                parameters['mass_1'] = parameters['total_mass'] / (1 + parameters['mass_ratio'])
                parameters['mass_2'] = parameters['mass_1'] * parameters['mass_ratio']
                if remove:
                    parameters.pop('total_mass')
                    parameters.pop('mass_ratio')
                ignored_keys.append('mass_1')
                ignored_keys.append('mass_2')
    # Angle parameters sampled in their cosines are mapped back with arccos.
    if 'cos_tilt_1' in parameters.keys():
        ignored_keys.append('tilt_1')
        parameters['tilt_1'] = np.arccos(parameters['cos_tilt_1'])
        if remove:
            parameters.pop('cos_tilt_1')
    if 'cos_tilt_2' in parameters.keys():
        ignored_keys.append('tilt_2')
        parameters['tilt_2'] = np.arccos(parameters['cos_tilt_2'])
        if remove:
            parameters.pop('cos_tilt_2')
    if 'cos_iota' in parameters.keys():
        parameters['iota'] = np.arccos(parameters['cos_iota'])
        if remove:
            parameters.pop('cos_iota')
    return ignored_keys
def generate_all_bbh_parameters(sample, likelihood=None, priors=None):
    """
    From either a single sample or a set of samples fill in all missing BBH parameters, in place.

    Parameters
    ----------
    sample: dict or pandas.DataFrame
        Samples to fill in with extra parameters, this may be either an injection or posterior samples.
    likelihood: tupak.likelihood.Likelihood
        Likelihood used for sampling, used for waveform and likelihood.interferometers.
    priors: dict, optional
        Dictionary of prior objects, used to fill in non-sampled parameters.
    """
    if likelihood is not None:
        # Waveform metadata is not sampled, so copy it over from the generator.
        generator_parameters = likelihood.waveform_generator.parameters
        sample['reference_frequency'] = generator_parameters['reference_frequency']
        sample['waveform_approximant'] = generator_parameters['waveform_approximant']
    fill_from_fixed_priors(sample, priors)
    # remove=False keeps the sampled parameterisation alongside the derived one.
    convert_to_lal_binary_black_hole_parameters(sample, list(sample.keys()), remove=False)
    generate_non_standard_parameters(sample)
    generate_component_spins(sample)
    compute_snrs(sample, likelihood)
def fill_from_fixed_priors(sample, priors):
    """Add parameters with delta function prior to the data frame/dictionary."""
    if priors is None:
        return
    for name, prior in priors.items():
        # Delta-function priors pin the parameter to a single value.
        if isinstance(prior, tupak.prior.DeltaFunction):
            sample[name] = prior.peak
def generate_non_standard_parameters(sample):
    """
    Add the known non-standard parameters to the data frame/dictionary.

    We add:
        chirp mass, total mass, symmetric mass ratio, mass ratio,
        cos tilt 1, cos tilt 2, cos iota
    """
    mass_1 = sample['mass_1']
    mass_2 = sample['mass_2']
    total_mass = mass_1 + mass_2
    sample['chirp_mass'] = (mass_1 * mass_2)**0.6 / total_mass**0.2
    sample['total_mass'] = total_mass
    sample['symmetric_mass_ratio'] = (mass_1 * mass_2) / total_mass**2
    sample['mass_ratio'] = mass_2 / mass_1
    # Cosines of the angular parameters used by the non-standard sampling keys.
    for angle in ('tilt_1', 'tilt_2', 'iota'):
        sample['cos_' + angle] = np.cos(sample[angle])
def generate_component_spins(sample):
    """
    Add the component spins to the data frame/dictionary.

    This function uses a lalsimulation function to transform the spins.
    The transformed inclination and Cartesian component spins are written
    back into `sample`, together with the in-plane angles phi_1/phi_2.
    """
    spin_conversion_parameters = ['iota', 'phi_jl', 'tilt_1', 'tilt_2', 'phi_12', 'a_1', 'a_2', 'mass_1',
                                  'mass_2', 'reference_frequency', 'phase']
    if all(key in sample.keys() for key in spin_conversion_parameters) and isinstance(sample, dict):
        sample['iota'], sample['spin_1x'], sample['spin_1y'], sample['spin_1z'], sample['spin_2x'], \
            sample['spin_2y'], sample['spin_2z'] = \
            lalsim.SimInspiralTransformPrecessingNewInitialConditions(
                sample['iota'], sample['phi_jl'], sample['tilt_1'], sample['tilt_2'], sample['phi_12'], sample['a_1'],
                sample['a_2'], sample['mass_1'] * tupak.utils.solar_mass, sample['mass_2'] * tupak.utils.solar_mass,
                sample['reference_frequency'], sample['phase'])
        # NOTE(review): np.arctan loses the quadrant; np.arctan2(y, x) may be
        # intended here -- kept as-is to preserve existing behaviour.
        sample['phi_1'] = np.arctan(sample['spin_1y'] / sample['spin_1x'])
        sample['phi_2'] = np.arctan(sample['spin_2y'] / sample['spin_2x'])
    elif all(key in sample.keys() for key in spin_conversion_parameters) and isinstance(sample, pd.DataFrame):
        logging.info('Extracting component spins.')
        # BUGFIX: the transformed iota is now collected per sample and written
        # back into the frame, matching the dict branch above. Previously it
        # was assigned to a scalar dict entry and silently discarded.
        new_parameters = ['iota', 'spin_1x', 'spin_1y', 'spin_1z', 'spin_2x', 'spin_2y', 'spin_2z']
        new_samples = {name: np.zeros(len(sample)) for name in new_parameters}
        for ii in range(len(sample)):
            new_samples['iota'][ii], new_samples['spin_1x'][ii], new_samples['spin_1y'][ii], \
                new_samples['spin_1z'][ii], new_samples['spin_2x'][ii], new_samples['spin_2y'][ii], \
                new_samples['spin_2z'][ii] = \
                lalsim.SimInspiralTransformPrecessingNewInitialConditions(
                    sample['iota'][ii], sample['phi_jl'][ii], sample['tilt_1'][ii], sample['tilt_2'][ii],
                    sample['phi_12'][ii], sample['a_1'][ii], sample['a_2'][ii],
                    sample['mass_1'][ii] * tupak.utils.solar_mass, sample['mass_2'][ii] * tupak.utils.solar_mass,
                    sample['reference_frequency'][ii], sample['phase'][ii])
        for name in new_parameters:
            sample[name] = new_samples[name]
        sample['phi_1'] = np.arctan(sample['spin_1y'] / sample['spin_1x'])
        sample['phi_2'] = np.arctan(sample['spin_2y'] / sample['spin_2x'])
    else:
        logging.warning("Component spin extraction failed.")
def compute_snrs(sample, likelihood):
    """
    Compute the optimal and matched filter snrs of all posterior samples.

    Entries named '{IFO}_matched_filter_snr' and '{IFO}_optimal_snr' are
    added to `sample` in place, one per interferometer.

    Parameters
    ----------
    sample: dict or pandas.DataFrame
        A single sample (dict) or a set of posterior samples (DataFrame).
    likelihood: tupak.likelihood.Likelihood or None
        Likelihood used for the sampling; if None, nothing is computed.
    """
    if likelihood is None:
        logging.info('Not computing SNRs.')
        return
    if isinstance(sample, dict):
        # Single sample: load it into the waveform generator and evaluate once.
        for key in likelihood.waveform_generator.parameters.keys():
            likelihood.waveform_generator.parameters[key] = sample[key]
        signal_polarizations = likelihood.waveform_generator.frequency_domain_strain()
        for interferometer in likelihood.interferometers:
            signal = interferometer.get_detector_response(signal_polarizations,
                                                          likelihood.waveform_generator.parameters)
            sample['{}_matched_filter_snr'.format(interferometer.name)] = \
                tupak.utils.matched_filter_snr_squared(signal, interferometer,
                                                       likelihood.waveform_generator.time_duration)**0.5
            sample['{}_optimal_snr'.format(interferometer.name)] = tupak.utils.optimal_snr_squared(
                signal, interferometer, likelihood.waveform_generator.time_duration) ** 0.5
    else:
        logging.info('Computing SNRs for every sample, this may take some time.')
        matched_filter_snrs = {interferometer.name: [] for interferometer in likelihood.interferometers}
        optimal_snrs = {interferometer.name: [] for interferometer in likelihood.interferometers}
        for ii in range(len(sample)):
            # Load the ii-th sample into the waveform generator, including any
            # non-standard sampling parameters.
            for key in set(sample.keys()).intersection(likelihood.waveform_generator.parameters.keys()):
                likelihood.waveform_generator.parameters[key] = sample[key][ii]
            for key in likelihood.waveform_generator.non_standard_sampling_parameter_keys:
                likelihood.waveform_generator.parameters[key] = sample[key][ii]
            signal_polarizations = likelihood.waveform_generator.frequency_domain_strain()
            for interferometer in likelihood.interferometers:
                signal = interferometer.get_detector_response(signal_polarizations,
                                                              likelihood.waveform_generator.parameters)
                matched_filter_snrs[interferometer.name].append(tupak.utils.matched_filter_snr_squared(
                    signal, interferometer, likelihood.waveform_generator.time_duration)**0.5)
                optimal_snrs[interferometer.name].append(tupak.utils.optimal_snr_squared(
                    signal, interferometer, likelihood.waveform_generator.time_duration) ** 0.5)
        for interferometer in likelihood.interferometers:
            sample['{}_matched_filter_snr'.format(interferometer.name)] = matched_filter_snrs[interferometer.name]
            sample['{}_optimal_snr'.format(interferometer.name)] = optimal_snrs[interferometer.name]
        # Removed: leftover debug print of interferometer names and the no-op
        # reassignment of likelihood.interferometers to itself.
......@@ -18,6 +18,7 @@ class Likelihood(object):
self.interferometers = interferometers
self.waveform_generator = waveform_generator
self.parameters = self.waveform_generator.parameters
self.non_standard_sampling_parameter_keys = self.waveform_generator.non_standard_sampling_parameter_keys
self.distance_marginalization = distance_marginalization
self.phase_marginalization = phase_marginalization
self.prior = prior
......@@ -151,13 +152,10 @@ def get_binary_black_hole_likelihood(interferometers):
likelihood: tupak.likelihood.Likelihood
The likelihood to pass to `run_sampler`
"""
waveform_generator = tupak.waveform_generator.WaveformGenerator(time_duration=interferometers[0].duration,
sampling_frequency=interferometers[
0].sampling_frequency,
frequency_domain_source_model=tupak.source.lal_binary_black_hole,
parameters={'waveform_approximant': 'IMRPhenomPv2',
'reference_frequency': 50})
likelihood = tupak.likelihood.Likelihood(
interferometers, waveform_generator)
waveform_generator = tupak.waveform_generator.WaveformGenerator(
time_duration=interferometers[0].duration, sampling_frequency=interferometers[0].sampling_frequency,
frequency_domain_source_model=tupak.source.lal_binary_black_hole,
parameters={'waveform_approximant': 'IMRPhenomPv2', 'reference_frequency': 50})
likelihood = tupak.likelihood.Likelihood(interferometers, waveform_generator)
return likelihood
......@@ -95,21 +95,25 @@ class Prior(object):
default_labels = {
'mass_1': '$m_1$',
'mass_2': '$m_2$',
'mchirp': '$\mathcal{M}$',
'q': '$q$',
'total_mass': '$M$',
'chirp_mass': '$\mathcal{M}$',
'mass_ratio': '$q$',
'symmetric_mass_ratio': '$\eta$',
'a_1': '$a_1$',
'a_2': '$a_2$',
'tilt_1': '$\\theta_1$',
'tilt_2': '$\\theta_2$',
'cos_tilt_1': '$\cos\\theta_1$',
'cos_tilt_2': '$\cos\\theta_2$',
'phi_12': '$\Delta\phi$',
'phi_jl': '$\phi_{JL}$',
'luminosity_distance': '$d_L$',
'dec': '$\mathrm{DEC}$',
'ra': '$\mathrm{RA}$',
'iota': '$\iota$',
'cos_iota': '$\cos\iota$',
'psi': '$\psi$',
'phase': '$\phi$',
'tc': '$t_c$',
'geocent_time': '$t_c$'
}
if self.name in default_labels.keys():
......@@ -302,7 +306,7 @@ class Interped(Prior):
self.xx = np.linspace(self.minimum, self.maximum, len(xx))
self.yy = all_interpolated(self.xx)
if np.trapz(self.yy, self.xx) != 1:
logging.info('Supplied PDF is not normalised, normalising.')
logging.info('Supplied PDF for {} is not normalised, normalising.'.format(self.name))
self.yy /= np.trapz(self.yy, self.xx)
self.YY = cumtrapz(self.yy, self.xx, initial=0)
# Need last element of cumulative distribution to be exactly one.
......@@ -376,20 +380,25 @@ def create_default_prior(name):
Default prior distribution for that parameter, if unknown None is returned.
"""
default_priors = {
'mass_1': PowerLaw(name=name, alpha=0, minimum=5, maximum=100),
'mass_2': PowerLaw(name=name, alpha=0, minimum=5, maximum=100),
'mchirp': Uniform(name=name, minimum=5, maximum=100),
'q': Uniform(name=name, minimum=0, maximum=1),
'mass_1': Uniform(name=name, minimum=5, maximum=100),
'mass_2': Uniform(name=name, minimum=5, maximum=100),
'chirp_mass': Uniform(name=name, minimum=5, maximum=100),
'total_mass': Uniform(name=name, minimum=10, maximum=200),
'mass_ratio': Uniform(name=name, minimum=0.125, maximum=1),
'symmetric_mass_ratio': Uniform(name=name, minimum=8 / 81, maximum=0.25),
'a_1': Uniform(name=name, minimum=0, maximum=0.8),
'a_2': Uniform(name=name, minimum=0, maximum=0.8),
'tilt_1': Sine(name=name),
'tilt_2': Sine(name=name),
'cos_tilt_1': Uniform(name=name, minimum=-1, maximum=1),
'cos_tilt_2': Uniform(name=name, minimum=-1, maximum=1),
'phi_12': Uniform(name=name, minimum=0, maximum=2 * np.pi),
'phi_jl': Uniform(name=name, minimum=0, maximum=2 * np.pi),
'luminosity_distance': UniformComovingVolume(name=name, minimum=1e2, maximum=5e3),
'dec': Cosine(name=name),
'ra': Uniform(name=name, minimum=0, maximum=2 * np.pi),
'iota': Sine(name=name),
'cos_iota': Uniform(name=name, minimum=-1, maximum=1),
'psi': Uniform(name=name, minimum=0, maximum=2 * np.pi),
'phase': Uniform(name=name, minimum=0, maximum=2 * np.pi)
}
......@@ -402,7 +411,28 @@ def create_default_prior(name):
return prior
def fill_priors(prior, likelihood):
def parse_floats_to_fixed_priors(old_parameters):
    """
    Convert numeric parameter values into DeltaFunction priors.

    Parameters
    ----------
    old_parameters: dict
        Mapping from parameter name to a float/int value or a Prior object.

    Returns
    -------
    dict: A copy of the input in which every float/int value has been
        replaced by a DeltaFunction prior peaked at that value. Existing
        Prior instances are passed through unchanged; anything else is
        logged and left as-is.
    """
    parameters = old_parameters.copy()
    for key in parameters:
        value = parameters[key]
        # BUGFIX: use isinstance rather than `type(...) is Prior`, which is
        # False for every Prior subclass (i.e. all concrete priors) and made
        # them fall through to the warning below.
        if isinstance(value, Prior):
            continue
        # bool is a subclass of int but was rejected by the original exact
        # type checks; keep rejecting it explicitly.
        if isinstance(value, bool) or not isinstance(value, (int, float)):
            logging.info("Expected parameter " + str(key) + " to be a float or int but was "
                         + str(type(value)) + " instead. Will not be converted.")
            continue
        parameters[key] = DeltaFunction(name=key, latex_label=None, peak=old_parameters[key])
    return parameters
def parse_keys_to_parameters(keys):
    """Map each parameter name onto its default prior distribution."""
    return {key: create_default_prior(key) for key in keys}
def fill_priors(prior, likelihood, parameters=None):
"""
Fill dictionary of priors based on required parameters of likelihood
......@@ -415,6 +445,9 @@ def fill_priors(prior, likelihood):
dictionary of prior objects and floats
likelihood: tupak.likelihood.Likelihood instance
Used to infer the set of parameters to fill the prior with
parameters: list
list of parameters to be sampled in, this can override the default
priors for the waveform generator
Returns
-------
......@@ -436,6 +469,10 @@ def fill_priors(prior, likelihood):
missing_keys = set(likelihood.parameters) - set(prior.keys())
if parameters is not None:
for parameter in parameters:
prior[parameter] = create_default_prior(parameter)
for missing_key in missing_keys:
default_prior = create_default_prior(missing_key)
if default_prior is None:
......@@ -445,11 +482,64 @@ def fill_priors(prior, likelihood):
" not be sampled and may cause an error."
.format(missing_key, set_val))
else:
prior[missing_key] = default_prior
if not test_redundancy(missing_key, prior):
prior[missing_key] = default_prior
for key in prior:
test_redundancy(key, prior)
return prior
def test_redundancy(key, prior):
"""
Test whether adding the key would add be redundant.
Parameters
----------
key: str
The string to test.
prior: dict
Current prior dictionary.
Return
------
redundant: bool
Whether the key is redundant
"""
redundant = False
mass_parameters = {'mass_1', 'mass_2', 'chirp_mass', 'total_mass', 'mass_ratio', 'symmetric_mass_ratio'}
spin_magnitude_parameters = {'a_1', 'a_2'}
spin_tilt_1_parameters = {'tilt_1', 'cos_tilt_1'}
spin_tilt_2_parameters = {'tilt_2', 'cos_tilt_2'}
spin_azimuth_parameters = {'phi_1', 'phi_2', 'phi_12', 'phi_jl'}
inclination_parameters = {'iota', 'cos_iota'}
distance_parameters = {'luminosity_distance', 'comoving_distance', 'redshift'}
for parameter_set in [mass_parameters, spin_magnitude_parameters, spin_azimuth_parameters]:
if key in parameter_set:
if len(parameter_set.intersection(prior.keys())) > 2:
redundant = True
logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
parameter_set.intersection(prior.keys())))
break
elif len(parameter_set.intersection(prior.keys())) == 2:
redundant = True
break
for parameter_set in [inclination_parameters, distance_parameters, spin_tilt_1_parameters, spin_tilt_2_parameters]:
if key in parameter_set:
if len(parameter_set.intersection(prior.keys())) > 1:
redundant = True
logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
parameter_set.intersection(prior.keys())))
break
elif len(parameter_set.intersection(prior.keys())) == 1:
redundant = True
break
return redundant
def write_priors_to_file(priors, outdir):
"""
Write the prior distribution to file.
......
......@@ -3,6 +3,7 @@ import os
import numpy as np
import deepdish
import pandas as pd
import tupak
try:
from chainconsumer import ChainConsumer
......@@ -204,16 +205,24 @@ class Result(dict):
for key in self.prior:
prior_file.write(self.prior[key])
def samples_to_data_frame(self):
def samples_to_data_frame(self, likelihood=None, priors=None, conversion_function=None):
"""
Convert array of samples to data frame.
:return:
Parameters
----------
likelihood: tupak.likelihood.Likelihood
Likelihood used for sampling.
priors: dict
Dictionary of prior object, used to fill in delta function priors.
conversion_function: function
Function which adds in extra parameters to the data frame,
should take the data_frame, likelihood and prior as arguments.
"""
data_frame = pd.DataFrame(self.samples, columns=self.search_parameter_keys)
if conversion_function is not None:
conversion_function(data_frame, likelihood, priors)
self.posterior = data_frame
for key in self.fixed_parameter_keys:
self.posterior[key] = self.priors[key].sample(len(self.posterior))
def construct_cbc_derived_parameters(self):
"""
......
......@@ -142,11 +142,16 @@ class Sampler(object):
return result
def verify_parameters(self):
required_keys = self.priors
unmatched_keys = [r for r in required_keys if r not in self.likelihood.parameters]
if len(unmatched_keys) > 0:
raise KeyError(
"Source model does not contain keys {}".format(unmatched_keys))
for key in self.priors:
try:
self.likelihood.parameters[key] = self.priors[key].sample()
except AttributeError as e:
logging.warning('Cannot sample from {}, {}'.format(key, e))
try:
self.likelihood.log_likelihood_ratio()
except TypeError:
raise TypeError('Likelihood evaluation failed. Have you definitely specified all the parameters?\n{}'.format(
self.likelihood.parameters))
def prior_transform(self, theta):
return [self.priors[key].rescale(t) for key, t in zip(self.__search_parameter_keys, theta)]
......@@ -249,6 +254,7 @@ class Nestle(Sampler):
loglikelihood=self.log_likelihood,
prior_transform=self.prior_transform,
ndim=self.ndim, **self.kwargs)
print("")
self.result.sampler_output = out
self.result.samples = nestle.resample_equal(out.samples, out.weights)
......@@ -294,6 +300,7 @@ class Dynesty(Sampler):
prior_transform=self.prior_transform,
ndim=self.ndim, **self.kwargs)
nested_sampler.run_nested(print_progress=self.kwargs['verbose'])
print("")
out = nested_sampler.results
# self.result.sampler_output = out
......@@ -395,7 +402,7 @@ class Ptemcee(Sampler):
def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
sampler='nestle', use_ratio=True, injection_parameters=None,