diff --git a/examples/injection_examples/change_sampled_parameters.py b/examples/injection_examples/change_sampled_parameters.py
new file mode 100644
index 0000000000000000000000000000000000000000..364c2eb00da1876b9c7297b52e526e51aa929cfd
--- /dev/null
+++ b/examples/injection_examples/change_sampled_parameters.py
@@ -0,0 +1,55 @@
+#!/bin/python
+"""
+Tutorial demonstrating parameter estimation sampling in non-standard parameters for an injected signal.
+
+This example estimates the masses using priors uniform in chirp mass and mass ratio, and the distance using a prior
+uniform in comoving volume over luminosity distances between 100 Mpc and 5 Gpc; the cosmology is WMAP7.
+"""
+from __future__ import division, print_function
+import tupak
+import numpy as np
+
+tupak.utils.setup_logger(log_level="info")
+
+time_duration = 4.
+sampling_frequency = 2048.
+outdir = 'outdir'
+
+np.random.seed(151226)
+
+injection_parameters = dict(mass_1=36., mass_2=29., a_1=0.4, a_2=0.3, tilt_1=0.5, tilt_2=1.0, phi_12=1.7, phi_jl=0.3,
+                            luminosity_distance=3000., iota=0.4, psi=2.659, phase=1.3, geocent_time=1126259642.413,
+                            waveform_approximant='IMRPhenomPv2', reference_frequency=50., ra=1.375, dec=-1.2108)
+
+# Create the waveform_generator using a LAL BinaryBlackHole source function
+waveform_generator = tupak.waveform_generator.WaveformGenerator(
+    sampling_frequency=sampling_frequency, time_duration=time_duration,
+    frequency_domain_source_model=tupak.source.lal_binary_black_hole,
+    parameter_conversion=tupak.conversion.convert_to_lal_binary_black_hole_parameters,
+    non_standard_sampling_parameter_keys=['chirp_mass', 'mass_ratio', 'cos_iota'],
+    parameters=injection_parameters)
+hf_signal = waveform_generator.frequency_domain_strain()
+
+# Set up interferometers.
+IFOs = [tupak.detector.get_interferometer_with_fake_noise_and_injection(
+    name, injection_polarizations=hf_signal, injection_parameters=injection_parameters, time_duration=time_duration,
+    sampling_frequency=sampling_frequency, outdir=outdir) for name in ['H1', 'L1', 'V1']]
+
+# Set up prior
+priors = dict()
+# These parameters will not be sampled
+for key in ['a_1', 'a_2', 'tilt_1', 'tilt_2', 'phi_12', 'phi_jl', 'phase', 'psi', 'ra', 'dec', 'geocent_time']:
+    priors[key] = injection_parameters[key]
+priors['luminosity_distance'] = tupak.prior.create_default_prior(name='luminosity_distance')
+
+# Initialise Likelihood
+likelihood = tupak.likelihood.Likelihood(interferometers=IFOs, waveform_generator=waveform_generator)
+
+# Run sampler
+result = tupak.sampler.run_sampler(likelihood=likelihood, priors=priors, sampler='dynesty',
+                                   injection_parameters=injection_parameters, label='DifferentParameters',
+                                   outdir=outdir, conversion_function=tupak.conversion.generate_all_bbh_parameters)
+result.plot_corner()
+result.plot_walks()
+result.plot_distributions()
+print(result)
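Reviewer note: the run above relies on fill_priors to attach default priors for the three non-standard keys. A minimal sketch (not part of the patch; the ranges are taken from create_default_prior in tupak/prior.py, changed later in this diff) of how the same priors could be declared explicitly:

import tupak

# Editor's sketch: explicit equivalents of the defaults that fill_priors
# will create for the non_standard_sampling_parameter_keys used above.
priors = dict()
priors['chirp_mass'] = tupak.prior.Uniform(name='chirp_mass', minimum=5, maximum=100)
priors['mass_ratio'] = tupak.prior.Uniform(name='mass_ratio', minimum=0.125, maximum=1)
priors['cos_iota'] = tupak.prior.Uniform(name='cos_iota', minimum=-1, maximum=1)
# Equivalently, via the helper used for luminosity_distance above:
priors['cos_iota'] = tupak.prior.create_default_prior(name='cos_iota')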
diff --git a/test/make_standard_data.py b/test/make_standard_data.py
index 67b395f44ecc750588c51867544d541350a43834..e01f86c9af429d126f4c81aef233a568cc28f76a 100644
--- a/test/make_standard_data.py
+++ b/test/make_standard_data.py
@@ -15,12 +15,12 @@ sampling_frequency = 4096.
 simulation_parameters = dict(
     mass_1=36.,
     mass_2=29.,
-    a_1=0,
-    a_2=0,
-    tilt_1=0,
-    tilt_2=0,
-    phi_12=0,
-    phi_jl=0,
+    a_1=0.,
+    a_2=0.,
+    tilt_1=0.,
+    tilt_2=0.,
+    phi_12=0.,
+    phi_jl=0.,
     luminosity_distance=100.,
     iota=0.4,
     phase=1.3,
diff --git a/test/prior_tests.py b/test/prior_tests.py
index 6bc4853296cc826c978dec8a1607aa65eb8482eb..86a8a90d69d22f95150a996e535d958ca9e2d139 100644
--- a/test/prior_tests.py
+++ b/test/prior_tests.py
@@ -55,7 +55,7 @@ class TestPriorLatexLabel(unittest.TestCase):
         self.assertEqual(test_label, self.prior.latex_label)
 
     def test_default_label_assignment(self):
-        self.prior.name = 'mchirp'
+        self.prior.name = 'chirp_mass'
         self.prior.latex_label = None
         self.assertEqual(self.prior.latex_label, '$\mathcal{M}$')
 
diff --git a/tupak/__init__.py b/tupak/__init__.py
index e91008d3a0b33c5985e77a218ccf0e0dc0979b6c..0f57bbb4f2261d33feaebeef81f1cface0277a5c 100644
--- a/tupak/__init__.py
+++ b/tupak/__init__.py
@@ -9,3 +9,4 @@ from . import likelihood
 from . import waveform_generator
 from . import result
 from . import sampler
+from . import conversion
diff --git a/tupak/conversion.py b/tupak/conversion.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fd08fdc8c8de20316f0fa24c970983596045017
--- /dev/null
+++ b/tupak/conversion.py
@@ -0,0 +1,237 @@
+import tupak
+import numpy as np
+import lalsimulation as lalsim
+import pandas as pd
+import logging
+
+
+def convert_to_lal_binary_black_hole_parameters(parameters, search_keys, remove=True):
+    """
+    Convert the parameters we have into the parameters we need.
+
+    The target set is defined by the arguments of tupak.source.lal_binary_black_hole():
+
+    Mass: mass_1, mass_2
+    Spin: a_1, a_2, tilt_1, tilt_2, phi_12, phi_jl
+    Extrinsic: luminosity_distance, theta_jn, phase, ra, dec, geocent_time, psi
+
+    This involves popping a number of keys from parameters.
+    The keys in ignored_keys should be popped after evaluating the waveform.
+
+    Parameters
+    ----------
+    parameters: dict
+        dictionary of parameter values to convert into the required parameters
+    search_keys: list
+        parameters which are needed for the waveform generation
+    remove: bool, optional
+        Whether to remove the extra keys used only for sampling (default=True).
+
+    Returns
+    -------
+    ignored_keys: list
+        keys which are added to parameters during the function call
+    """
+
+    ignored_keys = []
+
+    if 'mass_1' not in search_keys and 'mass_2' not in search_keys:
+        if 'chirp_mass' in parameters.keys():
+            if 'total_mass' in parameters.keys():
+                # chirp_mass, total_mass to total_mass, symmetric_mass_ratio
+                parameters['symmetric_mass_ratio'] = (parameters['chirp_mass'] / parameters['total_mass'])**(5 / 3)
+                if remove:
+                    parameters.pop('chirp_mass')
+            if 'symmetric_mass_ratio' in parameters.keys():
+                # symmetric_mass_ratio to mass_ratio
+                temp = (1 / parameters['symmetric_mass_ratio'] / 2 - 1)
+                parameters['mass_ratio'] = temp - (temp**2 - 1)**0.5
+                if remove:
+                    parameters.pop('symmetric_mass_ratio')
+            if 'mass_ratio' in parameters.keys():
+                if 'total_mass' not in parameters.keys():
+                    parameters['total_mass'] = parameters['chirp_mass'] * (1 + parameters['mass_ratio'])**1.2 \
+                        / parameters['mass_ratio']**0.6
+                    parameters.pop('chirp_mass')
+                # total_mass, mass_ratio to component masses
+                parameters['mass_1'] = parameters['total_mass'] / (1 + parameters['mass_ratio'])
+                parameters['mass_2'] = parameters['mass_1'] * parameters['mass_ratio']
+                if remove:
+                    parameters.pop('total_mass')
+                    parameters.pop('mass_ratio')
+                ignored_keys.append('mass_1')
+                ignored_keys.append('mass_2')
+        elif 'total_mass' in parameters.keys():
+            if 'symmetric_mass_ratio' in parameters.keys():
+                # symmetric_mass_ratio to mass_ratio
+                temp = (1 / parameters['symmetric_mass_ratio'] / 2 - 1)
+                parameters['mass_ratio'] = temp - (temp**2 - 1)**0.5
+                if remove:
+                    parameters.pop('symmetric_mass_ratio')
+            if 'mass_ratio' in parameters.keys():
+                # total_mass, mass_ratio to component masses
+                parameters['mass_1'] = parameters['total_mass'] / (1 + parameters['mass_ratio'])
+                parameters['mass_2'] = parameters['mass_1'] * parameters['mass_ratio']
+                if remove:
+                    parameters.pop('total_mass')
+                    parameters.pop('mass_ratio')
+                ignored_keys.append('mass_1')
+                ignored_keys.append('mass_2')
+
+    if 'cos_tilt_1' in parameters.keys():
+        ignored_keys.append('tilt_1')
+        parameters['tilt_1'] = np.arccos(parameters['cos_tilt_1'])
+        if remove:
+            parameters.pop('cos_tilt_1')
+    if 'cos_tilt_2' in parameters.keys():
+        ignored_keys.append('tilt_2')
+        parameters['tilt_2'] = np.arccos(parameters['cos_tilt_2'])
+        if remove:
+            parameters.pop('cos_tilt_2')
+
+    if 'cos_iota' in parameters.keys():
+        parameters['iota'] = np.arccos(parameters['cos_iota'])
+        if remove:
+            parameters.pop('cos_iota')
+
+    return ignored_keys
+
+
+def generate_all_bbh_parameters(sample, likelihood=None, priors=None):
+    """
+    From either a single sample or a set of samples fill in all missing BBH parameters, in place.
+
+    Parameters
+    ----------
+    sample: dict or pandas.DataFrame
+        Samples to fill in with extra parameters, this may be either an injection or posterior samples.
+    likelihood: tupak.likelihood.Likelihood
+        Likelihood used for sampling, used for waveform and likelihood.interferometers.
+    priors: dict, optional
+        Dictionary of prior objects, used to fill in non-sampled parameters.
+ """ + + if likelihood is not None: + sample['reference_frequency'] = likelihood.waveform_generator.parameters['reference_frequency'] + sample['waveform_approximant'] = likelihood.waveform_generator.parameters['waveform_approximant'] + + fill_from_fixed_priors(sample, priors) + convert_to_lal_binary_black_hole_parameters(sample, [key for key in sample.keys()], remove=False) + generate_non_standard_parameters(sample) + generate_component_spins(sample) + compute_snrs(sample, likelihood) + + +def fill_from_fixed_priors(sample, priors): + """Add parameters with delta function prior to the data frame/dictionary.""" + if priors is not None: + for name in priors: + if isinstance(priors[name], tupak.prior.DeltaFunction): + sample[name] = priors[name].peak + + +def generate_non_standard_parameters(sample): + """ + Add the known non-standard parameters to the data frame/dictionary. + + We add: + chirp mass, total mass, symmetric mass ratio, mass ratio, cos tilt 1, cos tilt 2, cos iota + """ + sample['chirp_mass'] = (sample['mass_1'] * sample['mass_2'])**0.6 / (sample['mass_1'] + sample['mass_2'])**0.2 + sample['total_mass'] = sample['mass_1'] + sample['mass_2'] + sample['symmetric_mass_ratio'] = (sample['mass_1'] * sample['mass_2']) / (sample['mass_1'] + sample['mass_2'])**2 + sample['mass_ratio'] = sample['mass_2'] / sample['mass_1'] + + sample['cos_tilt_1'] = np.cos(sample['tilt_1']) + sample['cos_tilt_2'] = np.cos(sample['tilt_2']) + sample['cos_iota'] = np.cos(sample['iota']) + + +def generate_component_spins(sample): + """ + Add the component spins to the data frame/dictionary. + + This function uses a lalsimulation function to transform the spins. + """ + spin_conversion_parameters = ['iota', 'phi_jl', 'tilt_1', 'tilt_2', 'phi_12', 'a_1', 'a_2', 'mass_1', + 'mass_2', 'reference_frequency', 'phase'] + if all(key in sample.keys() for key in spin_conversion_parameters) and isinstance(sample, dict): + sample['iota'], sample['spin_1x'], sample['spin_1y'], sample['spin_1z'], sample['spin_2x'], \ + sample['spin_2y'], sample['spin_2z'] = \ + lalsim.SimInspiralTransformPrecessingNewInitialConditions( + sample['iota'], sample['phi_jl'], sample['tilt_1'], sample['tilt_2'], sample['phi_12'], sample['a_1'], + sample['a_2'], sample['mass_1'] * tupak.utils.solar_mass, sample['mass_2'] * tupak.utils.solar_mass, + sample['reference_frequency'], sample['phase']) + + sample['phi_1'] = np.arctan(sample['spin_1y'] / sample['spin_1x']) + sample['phi_2'] = np.arctan(sample['spin_2y'] / sample['spin_2x']) + + elif all(key in sample.keys() for key in spin_conversion_parameters) and isinstance(sample, pd.DataFrame): + logging.info('Extracting component spins.') + new_spin_parameters = ['spin_1x', 'spin_1y', 'spin_1z', 'spin_2x', 'spin_2y', 'spin_2z'] + new_spins = {name: np.zeros(len(sample)) for name in new_spin_parameters} + + for ii in range(len(sample)): + new_spins['iota'], new_spins['spin_1x'][ii], new_spins['spin_1y'][ii], new_spins['spin_1z'][ii], \ + new_spins['spin_2x'][ii], new_spins['spin_2y'][ii], new_spins['spin_2z'][ii] = \ + lalsim.SimInspiralTransformPrecessingNewInitialConditions( + sample['iota'][ii], sample['phi_jl'][ii], sample['tilt_1'][ii], sample['tilt_2'][ii], + sample['phi_12'][ii], sample['a_1'][ii], sample['a_2'][ii], + sample['mass_1'][ii] * tupak.utils.solar_mass, sample['mass_2'][ii] * tupak.utils.solar_mass, + sample['reference_frequency'][ii], sample['phase'][ii]) + + for name in new_spin_parameters: + sample[name] = new_spins[name] + + sample['phi_1'] = np.arctan(sample['spin_1y'] / 
+        sample['phi_2'] = np.arctan(sample['spin_2y'] / sample['spin_2x'])
+
+    else:
+        logging.warning("Component spin extraction failed.")
+
+
+def compute_snrs(sample, likelihood):
+    """Compute the optimal and matched filter snrs of all posterior samples."""
+    temp_sample = sample
+    if likelihood is not None:
+        if isinstance(temp_sample, dict):
+            for key in likelihood.waveform_generator.parameters.keys():
+                likelihood.waveform_generator.parameters[key] = temp_sample[key]
+            signal_polarizations = likelihood.waveform_generator.frequency_domain_strain()
+            for interferometer in likelihood.interferometers:
+                signal = interferometer.get_detector_response(signal_polarizations,
+                                                              likelihood.waveform_generator.parameters)
+                sample['{}_matched_filter_snr'.format(interferometer.name)] = \
+                    tupak.utils.matched_filter_snr_squared(signal, interferometer,
+                                                           likelihood.waveform_generator.time_duration)**0.5
+                sample['{}_optimal_snr'.format(interferometer.name)] = tupak.utils.optimal_snr_squared(
+                    signal, interferometer, likelihood.waveform_generator.time_duration)**0.5
+        else:
+            logging.info('Computing SNRs for every sample, this may take some time.')
+            all_interferometers = likelihood.interferometers
+            matched_filter_snrs = {interferometer.name: [] for interferometer in all_interferometers}
+            optimal_snrs = {interferometer.name: [] for interferometer in all_interferometers}
+            for ii in range(len(temp_sample)):
+                for key in set(temp_sample.keys()).intersection(likelihood.waveform_generator.parameters.keys()):
+                    likelihood.waveform_generator.parameters[key] = temp_sample[key][ii]
+                for key in likelihood.waveform_generator.non_standard_sampling_parameter_keys:
+                    likelihood.waveform_generator.parameters[key] = temp_sample[key][ii]
+                signal_polarizations = likelihood.waveform_generator.frequency_domain_strain()
+                for interferometer in all_interferometers:
+                    signal = interferometer.get_detector_response(signal_polarizations,
+                                                                  likelihood.waveform_generator.parameters)
+                    matched_filter_snrs[interferometer.name].append(tupak.utils.matched_filter_snr_squared(
+                        signal, interferometer, likelihood.waveform_generator.time_duration)**0.5)
+                    optimal_snrs[interferometer.name].append(tupak.utils.optimal_snr_squared(
+                        signal, interferometer, likelihood.waveform_generator.time_duration)**0.5)
+
+            for interferometer in likelihood.interferometers:
+                sample['{}_matched_filter_snr'.format(interferometer.name)] = matched_filter_snrs[interferometer.name]
+                sample['{}_optimal_snr'.format(interferometer.name)] = optimal_snrs[interferometer.name]
+
+            likelihood.interferometers = all_interferometers
+
+    else:
+        logging.info('Not computing SNRs.')
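The mass reparameterisations in convert_to_lal_binary_black_hole_parameters and generate_non_standard_parameters are mutual inverses; a self-contained sketch (plain numpy, no tupak install required) checking the round trip with the same formulas:

import numpy as np

# Editor's sketch: round-trip check of the mass conversions above.
mass_1, mass_2 = 36., 29.
chirp_mass = (mass_1 * mass_2)**0.6 / (mass_1 + mass_2)**0.2
mass_ratio = mass_2 / mass_1

# chirp_mass, mass_ratio -> total_mass -> component masses,
# as in convert_to_lal_binary_black_hole_parameters.
total_mass = chirp_mass * (1 + mass_ratio)**1.2 / mass_ratio**0.6
m_1 = total_mass / (1 + mass_ratio)
m_2 = m_1 * mass_ratio
assert np.allclose([m_1, m_2], [mass_1, mass_2])

# symmetric_mass_ratio -> mass_ratio, inverting eta = q / (1 + q)**2.
eta = (mass_1 * mass_2) / (mass_1 + mass_2)**2
temp = 1 / eta / 2 - 1
assert np.isclose(temp - (temp**2 - 1)**0.5, mass_ratio)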
diff --git a/tupak/likelihood.py b/tupak/likelihood.py
index 16082db600601d622bb72da3b7a381e1fd8332e1..91391977a775364ee86ae640dc75cd660ac795a3 100644
--- a/tupak/likelihood.py
+++ b/tupak/likelihood.py
@@ -18,6 +18,7 @@ class Likelihood(object):
         self.interferometers = interferometers
         self.waveform_generator = waveform_generator
         self.parameters = self.waveform_generator.parameters
+        self.non_standard_sampling_parameter_keys = self.waveform_generator.non_standard_sampling_parameter_keys
         self.distance_marginalization = distance_marginalization
         self.phase_marginalization = phase_marginalization
         self.prior = prior
@@ -151,13 +152,10 @@ def get_binary_black_hole_likelihood(interferometers):
     likelihood: tupak.likelihood.Likelihood
         The likelihood to pass to `run_sampler`
     """
-    waveform_generator = tupak.waveform_generator.WaveformGenerator(time_duration=interferometers[0].duration,
-                                                                    sampling_frequency=interferometers[
-                                                                        0].sampling_frequency,
-                                                                    frequency_domain_source_model=tupak.source.lal_binary_black_hole,
-                                                                    parameters={'waveform_approximant': 'IMRPhenomPv2',
-                                                                                'reference_frequency': 50})
-    likelihood = tupak.likelihood.Likelihood(
-        interferometers, waveform_generator)
+    waveform_generator = tupak.waveform_generator.WaveformGenerator(
+        time_duration=interferometers[0].duration, sampling_frequency=interferometers[0].sampling_frequency,
+        frequency_domain_source_model=tupak.source.lal_binary_black_hole,
+        parameters={'waveform_approximant': 'IMRPhenomPv2', 'reference_frequency': 50})
+    likelihood = tupak.likelihood.Likelihood(interferometers, waveform_generator)
     return likelihood
diff --git a/tupak/prior.py b/tupak/prior.py
index 99f7f6fd48829d4d5132461cac47930f353e1a5a..274e43dab76f30a8a7d123218bb62f148471f8e8 100644
--- a/tupak/prior.py
+++ b/tupak/prior.py
@@ -95,21 +95,25 @@ class Prior(object):
         default_labels = {
             'mass_1': '$m_1$',
             'mass_2': '$m_2$',
-            'mchirp': '$\mathcal{M}$',
-            'q': '$q$',
+            'total_mass': '$M$',
+            'chirp_mass': '$\mathcal{M}$',
+            'mass_ratio': '$q$',
+            'symmetric_mass_ratio': '$\eta$',
             'a_1': '$a_1$',
             'a_2': '$a_2$',
             'tilt_1': '$\\theta_1$',
             'tilt_2': '$\\theta_2$',
+            'cos_tilt_1': '$\cos\\theta_1$',
+            'cos_tilt_2': '$\cos\\theta_2$',
             'phi_12': '$\Delta\phi$',
             'phi_jl': '$\phi_{JL}$',
             'luminosity_distance': '$d_L$',
             'dec': '$\mathrm{DEC}$',
             'ra': '$\mathrm{RA}$',
             'iota': '$\iota$',
+            'cos_iota': '$\cos\iota$',
             'psi': '$\psi$',
             'phase': '$\phi$',
-            'tc': '$t_c$',
             'geocent_time': '$t_c$'
         }
         if self.name in default_labels.keys():
@@ -302,7 +306,7 @@ class Interped(Prior):
         self.xx = np.linspace(self.minimum, self.maximum, len(xx))
         self.yy = all_interpolated(self.xx)
         if np.trapz(self.yy, self.xx) != 1:
-            logging.info('Supplied PDF is not normalised, normalising.')
+            logging.info('Supplied PDF for {} is not normalised, normalising.'.format(self.name))
         self.yy /= np.trapz(self.yy, self.xx)
         self.YY = cumtrapz(self.yy, self.xx, initial=0)
         # Need last element of cumulative distribution to be exactly one.
@@ -376,20 +380,25 @@ def create_default_prior(name):
         Default prior distribution for that parameter, if unknown None is returned.
""" default_priors = { - 'mass_1': PowerLaw(name=name, alpha=0, minimum=5, maximum=100), - 'mass_2': PowerLaw(name=name, alpha=0, minimum=5, maximum=100), - 'mchirp': Uniform(name=name, minimum=5, maximum=100), - 'q': Uniform(name=name, minimum=0, maximum=1), + 'mass_1': Uniform(name=name, minimum=5, maximum=100), + 'mass_2': Uniform(name=name, minimum=5, maximum=100), + 'chirp_mass': Uniform(name=name, minimum=5, maximum=100), + 'total_mass': Uniform(name=name, minimum=10, maximum=200), + 'mass_ratio': Uniform(name=name, minimum=0.125, maximum=1), + 'symmetric_mass_ratio': Uniform(name=name, minimum=8 / 81, maximum=0.25), 'a_1': Uniform(name=name, minimum=0, maximum=0.8), 'a_2': Uniform(name=name, minimum=0, maximum=0.8), 'tilt_1': Sine(name=name), 'tilt_2': Sine(name=name), + 'cos_tilt_1': Uniform(name=name, minimum=-1, maximum=1), + 'cos_tilt_2': Uniform(name=name, minimum=-1, maximum=1), 'phi_12': Uniform(name=name, minimum=0, maximum=2 * np.pi), 'phi_jl': Uniform(name=name, minimum=0, maximum=2 * np.pi), 'luminosity_distance': UniformComovingVolume(name=name, minimum=1e2, maximum=5e3), 'dec': Cosine(name=name), 'ra': Uniform(name=name, minimum=0, maximum=2 * np.pi), 'iota': Sine(name=name), + 'cos_iota': Uniform(name=name, minimum=-1, maximum=1), 'psi': Uniform(name=name, minimum=0, maximum=2 * np.pi), 'phase': Uniform(name=name, minimum=0, maximum=2 * np.pi) } @@ -402,7 +411,28 @@ def create_default_prior(name): return prior -def fill_priors(prior, likelihood): +def parse_floats_to_fixed_priors(old_parameters): + parameters = old_parameters.copy() + for key in parameters: + if type(parameters[key]) is not float and type(parameters[key]) is not int \ + and type(parameters[key]) is not Prior: + logging.info("Expected parameter " + str(key) + " to be a float or int but was " + + str(type(parameters[key])) + " instead. Will not be converted.") + continue + elif type(parameters[key]) is Prior: + continue + parameters[key] = DeltaFunction(name=key, latex_label=None, peak=old_parameters[key]) + return parameters + + +def parse_keys_to_parameters(keys): + parameters = {} + for key in keys: + parameters[key] = create_default_prior(key) + return parameters + + +def fill_priors(prior, likelihood, parameters=None): """ Fill dictionary of priors based on required parameters of likelihood @@ -415,6 +445,9 @@ def fill_priors(prior, likelihood): dictionary of prior objects and floats likelihood: tupak.likelihood.Likelihood instance Used to infer the set of parameters to fill the prior with + parameters: list + list of parameters to be sampled in, this can override the default + priors for the waveform generator Returns ------- @@ -436,6 +469,10 @@ def fill_priors(prior, likelihood): missing_keys = set(likelihood.parameters) - set(prior.keys()) + if parameters is not None: + for parameter in parameters: + prior[parameter] = create_default_prior(parameter) + for missing_key in missing_keys: default_prior = create_default_prior(missing_key) if default_prior is None: @@ -445,11 +482,64 @@ def fill_priors(prior, likelihood): " not be sampled and may cause an error." .format(missing_key, set_val)) else: - prior[missing_key] = default_prior + if not test_redundancy(missing_key, prior): + prior[missing_key] = default_prior + + for key in prior: + test_redundancy(key, prior) return prior +def test_redundancy(key, prior): + """ + Test whether adding the key would add be redundant. + + Parameters + ---------- + key: str + The string to test. + prior: dict + Current prior dictionary. 
+
+    Returns
+    -------
+    redundant: bool
+        Whether the key is redundant
+    """
+    redundant = False
+    mass_parameters = {'mass_1', 'mass_2', 'chirp_mass', 'total_mass', 'mass_ratio', 'symmetric_mass_ratio'}
+    spin_magnitude_parameters = {'a_1', 'a_2'}
+    spin_tilt_1_parameters = {'tilt_1', 'cos_tilt_1'}
+    spin_tilt_2_parameters = {'tilt_2', 'cos_tilt_2'}
+    spin_azimuth_parameters = {'phi_1', 'phi_2', 'phi_12', 'phi_jl'}
+    inclination_parameters = {'iota', 'cos_iota'}
+    distance_parameters = {'luminosity_distance', 'comoving_distance', 'redshift'}
+
+    for parameter_set in [mass_parameters, spin_magnitude_parameters, spin_azimuth_parameters]:
+        if key in parameter_set:
+            if len(parameter_set.intersection(prior.keys())) > 2:
+                redundant = True
+                logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
+                    parameter_set.intersection(prior.keys())))
+                break
+            elif len(parameter_set.intersection(prior.keys())) == 2:
+                redundant = True
+                break
+    for parameter_set in [inclination_parameters, distance_parameters, spin_tilt_1_parameters, spin_tilt_2_parameters]:
+        if key in parameter_set:
+            if len(parameter_set.intersection(prior.keys())) > 1:
+                redundant = True
+                logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
+                    parameter_set.intersection(prior.keys())))
+                break
+            elif len(parameter_set.intersection(prior.keys())) == 1:
+                redundant = True
+                break
+
+    return redundant
+
+
 def write_priors_to_file(priors, outdir):
     """
     Write the prior distribution to file.
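test_redundancy is easiest to read from an example; a sketch (editor's illustration, assuming the module-level helpers above are exposed as tupak.prior.*) of the intended semantics:

import tupak

priors = {'chirp_mass': tupak.prior.create_default_prior('chirp_mass'),
          'mass_ratio': tupak.prior.create_default_prior('mass_ratio')}

# Two of the six mass parameters already fix the masses, so a third is redundant.
assert tupak.prior.test_redundancy('total_mass', priors)
# No inclination parameter is present yet, so iota is not redundant.
assert not tupak.prior.test_redundancy('iota', priors)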
""" data_frame = pd.DataFrame(self.samples, columns=self.search_parameter_keys) + if conversion_function is not None: + conversion_function(data_frame, likelihood, priors) self.posterior = data_frame - for key in self.fixed_parameter_keys: - self.posterior[key] = self.priors[key].sample(len(self.posterior)) def construct_cbc_derived_parameters(self): """ diff --git a/tupak/sampler.py b/tupak/sampler.py index 651dc6d593ee035ac0cd61bb5bf9be0084a211dc..e0c4b703d31b409e4361a6079d156405b35092d9 100644 --- a/tupak/sampler.py +++ b/tupak/sampler.py @@ -142,11 +142,16 @@ class Sampler(object): return result def verify_parameters(self): - required_keys = self.priors - unmatched_keys = [r for r in required_keys if r not in self.likelihood.parameters] - if len(unmatched_keys) > 0: - raise KeyError( - "Source model does not contain keys {}".format(unmatched_keys)) + for key in self.priors: + try: + self.likelihood.parameters[key] = self.priors[key].sample() + except AttributeError as e: + logging.warning('Cannot sample from {}, {}'.format(key, e)) + try: + self.likelihood.log_likelihood_ratio() + except TypeError: + raise TypeError('Likelihood evaluation failed. Have you definitely specified all the parameters?\n{}'.format( + self.likelihood.parameters)) def prior_transform(self, theta): return [self.priors[key].rescale(t) for key, t in zip(self.__search_parameter_keys, theta)] @@ -249,6 +254,7 @@ class Nestle(Sampler): loglikelihood=self.log_likelihood, prior_transform=self.prior_transform, ndim=self.ndim, **self.kwargs) + print("") self.result.sampler_output = out self.result.samples = nestle.resample_equal(out.samples, out.weights) @@ -294,6 +300,7 @@ class Dynesty(Sampler): prior_transform=self.prior_transform, ndim=self.ndim, **self.kwargs) nested_sampler.run_nested(print_progress=self.kwargs['verbose']) + print("") out = nested_sampler.results # self.result.sampler_output = out @@ -395,7 +402,7 @@ class Ptemcee(Sampler): def run_sampler(likelihood, priors=None, label='label', outdir='outdir', sampler='nestle', use_ratio=True, injection_parameters=None, - **kwargs): + conversion_function=None, **kwargs): """ The primary interface to easy parameter estimation @@ -416,12 +423,15 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir', samplers use_ratio: bool (False) If True, use the likelihood's loglikelihood_ratio, rather than just - the loglikelhood. + the log likelhood. injection_parameters: dict A dictionary of injection parameters used in creating the data (if using simulated data). Appended to the result object and saved. + + conversion_function: function, optional + Function to apply to posterior to generate additional parameters. 
diff --git a/tupak/sampler.py b/tupak/sampler.py
index 651dc6d593ee035ac0cd61bb5bf9be0084a211dc..e0c4b703d31b409e4361a6079d156405b35092d9 100644
--- a/tupak/sampler.py
+++ b/tupak/sampler.py
@@ -142,11 +142,16 @@ class Sampler(object):
         return result
 
     def verify_parameters(self):
-        required_keys = self.priors
-        unmatched_keys = [r for r in required_keys if r not in self.likelihood.parameters]
-        if len(unmatched_keys) > 0:
-            raise KeyError(
-                "Source model does not contain keys {}".format(unmatched_keys))
+        for key in self.priors:
+            try:
+                self.likelihood.parameters[key] = self.priors[key].sample()
+            except AttributeError as e:
+                logging.warning('Cannot sample from {}, {}'.format(key, e))
+        try:
+            self.likelihood.log_likelihood_ratio()
+        except TypeError:
+            raise TypeError('Likelihood evaluation failed. Have you definitely specified all the parameters?\n{}'.format(
+                self.likelihood.parameters))
 
     def prior_transform(self, theta):
         return [self.priors[key].rescale(t) for key, t in zip(self.__search_parameter_keys, theta)]
@@ -249,6 +254,7 @@ class Nestle(Sampler):
             loglikelihood=self.log_likelihood,
             prior_transform=self.prior_transform,
             ndim=self.ndim, **self.kwargs)
+        print("")
         self.result.sampler_output = out
         self.result.samples = nestle.resample_equal(out.samples, out.weights)
@@ -294,6 +300,7 @@ class Dynesty(Sampler):
             prior_transform=self.prior_transform,
             ndim=self.ndim, **self.kwargs)
         nested_sampler.run_nested(print_progress=self.kwargs['verbose'])
+        print("")
         out = nested_sampler.results
 
         # self.result.sampler_output = out
@@ -395,7 +402,7 @@ class Ptemcee(Sampler):
 
 def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
                 sampler='nestle', use_ratio=True, injection_parameters=None,
-                **kwargs):
+                conversion_function=None, **kwargs):
     """
     The primary interface to easy parameter estimation
@@ -416,12 +423,15 @@
        samplers
     use_ratio: bool (False)
         If True, use the likelihood's loglikelihood_ratio, rather than just
-        the loglikelhood.
+        the log likelihood.
     injection_parameters: dict
         A dictionary of injection parameters used in creating the data (if
         using simulated data). Appended to the result object and saved.
+
+    conversion_function: function, optional
+        Function to apply to posterior to generate additional parameters.
     **kwargs:
-        All kwargs are passed directly to the samplers `run` functino
+        All kwargs are passed directly to the samplers `run` function
 
     Returns
     ------
@@ -434,7 +444,7 @@
 
     if priors is None:
         priors = dict()
-    priors = fill_priors(priors, likelihood)
+    priors = fill_priors(priors, likelihood, parameters=likelihood.non_standard_sampling_parameter_keys)
     tupak.prior.write_priors_to_file(priors, outdir)
 
     if implemented_samplers.__contains__(sampler.title()):
@@ -442,6 +452,7 @@
         sampler = sampler_class(likelihood, priors, sampler, outdir=outdir,
                                 label=label, use_ratio=use_ratio,
                                 **kwargs)
+
         if sampler.cached_result:
             logging.info("Using cached result")
             return sampler.cached_result
@@ -453,8 +464,13 @@
             result.logz = result.log_bayes_factor + result.noise_logz
         else:
             result.log_bayes_factor = result.logz - result.noise_logz
-        result.injection_parameters = injection_parameters
-        result.samples_to_data_frame()
+        if injection_parameters is not None:
+            result.injection_parameters = injection_parameters
+            tupak.conversion.generate_all_bbh_parameters(result.injection_parameters)
+        result.fixed_parameter_keys = [key for key in priors if isinstance(priors[key], prior.DeltaFunction)]
+        # result.prior = prior  # Removed as this breaks the saving of the data
+        result.samples_to_data_frame(likelihood=likelihood, priors=priors, conversion_function=conversion_function)
+        result.kwargs = sampler.kwargs
         result.save_to_file(outdir=outdir, label=label)
         return result
     else:
diff --git a/tupak/source.py b/tupak/source.py
index c5d21660a56a73f97c93016eb0276e18855bf4cd..1a1d1b0642f1a00c58fa8a0fa329d0dcdb5fba53 100644
--- a/tupak/source.py
+++ b/tupak/source.py
@@ -13,8 +13,7 @@ from . import utils
 def lal_binary_black_hole(
         frequency_array, mass_1, mass_2, luminosity_distance, a_1, tilt_1, phi_12, a_2, tilt_2, phi_jl,
-        iota, phase, waveform_approximant, reference_frequency, ra, dec,
-        geocent_time, psi):
+        iota, phase, waveform_approximant, reference_frequency, ra, dec, geocent_time, psi):
     """ A Binary Black Hole waveform model using lalsimulation """
     if mass_2 > mass_1:
         return None
diff --git a/tupak/waveform_generator.py b/tupak/waveform_generator.py
index 119547a760909e4015bf45dbf0d47396c3f0465c..843ae839d3e9c9953efda399fc0f4621022265e0 100644
--- a/tupak/waveform_generator.py
+++ b/tupak/waveform_generator.py
@@ -1,5 +1,6 @@
 import inspect
 
+import tupak
 from . import utils
 import numpy as np
 
@@ -9,58 +10,84 @@ class WaveformGenerator(object):
     Parameters
     ----------
     sampling_frequency: float
-        The sampling frequency to sample at
+        The sampling frequency
     time_duration: float
         Time duration of data
-    source_model: func
+    frequency_domain_source_model: func
         A python function taking some arguments and returning the frequency
         domain strain. Note the first argument must be the frequencies at
         which to compute the strain
-
-    Note: the arguments of source_model (except the first, which is the
+    time_domain_source_model: func
+        A python function taking some arguments and returning the time
+        domain strain. Note the first argument must be the times at
+        which to compute the strain
+    parameters: dict
+        Initial values for the parameters
+    parameter_conversion: func
+        Function to convert from sampled parameters to parameters of the waveform generator
+    non_standard_sampling_parameter_keys: list
+        List of parameter names for *non-standard* sampling parameters.
+
+    Note: the arguments of frequency_domain_source_model (except the first, which is the
     frequencies at which to compute the strain) will be added to the
     WaveformGenerator object and initialised to `None`.
-
     """
 
     def __init__(self, time_duration, sampling_frequency, frequency_domain_source_model=None,
-                 time_domain_source_model=None, parameters=None):
+                 time_domain_source_model=None, parameters=None, parameter_conversion=None,
+                 non_standard_sampling_parameter_keys=None):
         self.time_duration = time_duration
         self.sampling_frequency = sampling_frequency
         self.frequency_domain_source_model = frequency_domain_source_model
         self.time_domain_source_model = time_domain_source_model
+        self.parameter_conversion = parameter_conversion
+        self.non_standard_sampling_parameter_keys = non_standard_sampling_parameter_keys
         self.parameters = parameters
         self.__frequency_array_updated = False
         self.__time_array_updated = False
 
     def frequency_domain_strain(self):
         """ Wrapper to source_model """
+        if self.parameter_conversion is not None:
+            added_keys = self.parameter_conversion(self.parameters, self.non_standard_sampling_parameter_keys)
+
         if self.frequency_domain_source_model is not None:
-            return self.frequency_domain_source_model(self.frequency_array, **self.parameters)
+            model_frequency_strain = self.frequency_domain_source_model(self.frequency_array, **self.parameters)
         elif self.time_domain_source_model is not None:
-            fft_data = dict()
+            model_frequency_strain = dict()
             time_domain_strain = self.time_domain_source_model(self.time_array, **self.parameters)
             if isinstance(time_domain_strain, np.ndarray):
                 return utils.nfft(time_domain_strain, self.sampling_frequency)
             for key in time_domain_strain:
-                fft_data[key], self.frequency_array = utils.nfft(time_domain_strain[key], self.sampling_frequency)
-            return fft_data
+                model_frequency_strain[key], self.frequency_array = utils.nfft(time_domain_strain[key],
+                                                                               self.sampling_frequency)
         else:
             raise RuntimeError("No source model given")
+        if self.parameter_conversion is not None:
+            for key in added_keys:
+                self.parameters.pop(key)
+        return model_frequency_strain
 
     def time_domain_strain(self):
+        if self.parameter_conversion is not None:
+            added_keys = self.parameter_conversion(self.parameters, self.non_standard_sampling_parameter_keys)
         if self.time_domain_source_model is not None:
-            return self.time_domain_source_model(self.time_array, **self.parameters)
+            model_time_series = self.time_domain_source_model(self.time_array, **self.parameters)
         elif self.frequency_domain_source_model is not None:
-            ifft_data = dict()
+            model_time_series = dict()
             frequency_domain_strain = self.frequency_domain_source_model(self.frequency_array, **self.parameters)
             if isinstance(frequency_domain_strain, np.ndarray):
                 return utils.infft(frequency_domain_strain, self.sampling_frequency)
             for key in frequency_domain_strain:
-                ifft_data = utils.infft(frequency_domain_strain[key], self.sampling_frequency)
-            return ifft_data
+                model_time_series[key] = utils.infft(frequency_domain_strain[key], self.sampling_frequency)
         else:
             raise RuntimeError("No source model given")
+        if self.parameter_conversion is not None:
+            for key in added_keys:
+                self.parameters.pop(key)
+        return model_time_series
 
     @property
     def frequency_array(self):
@@ -91,16 +118,10 @@
 
     @parameters.setter
     def parameters(self, parameters):
-        if parameters is None:
-            self.__parameters_from_source_model()
-        elif isinstance(parameters, dict):
-            if not hasattr(self, '_WaveformGenerator__parameters'):
-                self.__parameters_from_source_model()
-            for key in self.__parameters.keys():
-                if key in parameters.keys():
-                    self.__parameters[key] = parameters[key]
-        else:
-            raise TypeError('Parameters must be a dictionary of key-value pairs.')
+        self.__parameters_from_source_model()
+        if isinstance(parameters, dict):
+            for key in parameters.keys():
+                self.__parameters[key] = parameters[key]
 
     def __parameters_from_source_model(self):
         if self.frequency_domain_source_model is not None:
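Finally, the parameter_conversion contract that frequency_domain_strain() and time_domain_strain() now depend on: the hook receives the parameter dict and the non-standard keys, adds whatever standard keys the source model needs, and returns the keys it added so they can be popped after the waveform is evaluated. A toy, self-contained sketch of that contract (convert_to_lal_binary_black_hole_parameters in tupak/conversion.py implements it for the full BBH parameter set):

import numpy as np

def toy_conversion(parameters, search_keys, remove=True):
    # Editor's sketch: add the standard key the source model expects and
    # report it, so the waveform generator can pop it again afterwards.
    added_keys = []
    if 'cos_iota' in parameters:
        parameters['iota'] = np.arccos(parameters['cos_iota'])
        added_keys.append('iota')
        if remove:
            parameters.pop('cos_iota')
    return added_keys

parameters = dict(cos_iota=np.cos(0.4))
added_keys = toy_conversion(parameters, ['cos_iota'])
assert np.isclose(parameters['iota'], 0.4) and added_keys == ['iota']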