Commit b331d8a5 authored by Colm Talbot's avatar Colm Talbot

Merge branch 'docstring_update' into 'master'

Docstring update

See merge request Monash/tupak!63
parents 38754ddb 39c7b7ae
Pipeline #22129 passed with stages
in 15 minutes and 51 seconds
......@@ -294,10 +294,8 @@ class TestDetector(unittest.TestCase):
self.assertEqual(self.ifo.data, np.array([1]))
def test_unit_vector_along_arm_default(self):
with mock.patch('logging.warning') as m:
m.side_effect = KeyError('foo')
with self.assertRaises(KeyError):
self.ifo.unit_vector_along_arm('z')
with self.assertRaises(ValueError):
self.ifo.unit_vector_along_arm('z')
def test_unit_vector_along_arm_x(self):
with mock.patch('numpy.array') as m:
......
......@@ -80,10 +80,12 @@ class TestSampler(unittest.TestCase):
expected_result = Result()
expected_result.search_parameter_keys = ['c']
expected_result.fixed_parameter_keys = ['a']
expected_result.parameter_labels = ['c']
expected_result.parameter_labels = [None]
expected_result.label = 'label'
expected_result.outdir = 'outdir'
expected_result.outdir = 'test_directory'
expected_result.kwargs = {}
print(self.sampler.result.__dict__)
print(expected_result.__dict__)
self.assertDictEqual(self.sampler.result.__dict__, expected_result.__dict__)
def test_make_outdir_if_no_outdir_exists(self):
......
......@@ -11,18 +11,41 @@ except ImportError:
class Likelihood(object):
    """ Base likelihood class intended to be subclassed by concrete likelihoods. """

    def __init__(self, parameters=None):
        """ Store the (possibly absent) parameter container for the likelihood.

        Parameters
        ----------
        parameters:
            Container of model parameters; stored on the instance as-is
            (default None).
        """
        self.parameters = parameters

    def log_likelihood(self):
        """ Placeholder log likelihood; subclasses should override.

        Returns
        -------
        float
        """
        return np.nan

    def noise_log_likelihood(self):
        """ Placeholder noise log likelihood; subclasses should override.

        Returns
        -------
        float
        """
        return np.nan

    def log_likelihood_ratio(self):
        """ Difference between log likelihood and noise log likelihood.

        Returns
        -------
        float
        """
        signal_logl = self.log_likelihood()
        noise_logl = self.noise_log_likelihood()
        return signal_logl - noise_logl
......
This diff is collapsed.
......@@ -7,7 +7,19 @@ import corner
def result_file_name(outdir, label):
    """ Return the standard filename used for a result file.

    Parameters
    ----------
    outdir: str
        Name of the output directory
    label: str
        Naming scheme of the output file

    Returns
    -------
    str: File name of the output file
    """
    # NOTE(review): builds the path with '/' rather than os.path.join —
    # consistent with the rest of this module, which assumes POSIX paths.
    return '{}/{}_result.h5'.format(outdir, label)
......@@ -21,8 +33,14 @@ def read_in_result(outdir=None, label=None, filename=None):
filename: str
If given, try to load from this filename
Returns:
result: tupak.result.Result instance
Returns
-------
result: tupak.core.result.Result
Raises
-------
ValueError: If no filename is given and either outdir or label is None
If no tupak.core.result.Result is found in the path
"""
if filename is None:
......@@ -37,9 +55,18 @@ def read_in_result(outdir=None, label=None, filename=None):
class Result(dict):
def __init__(self, dictionary=None):
""" A class to save the results of the sampling run.
Parameters
----------
dictionary: dict
A dictionary containing values to be set in this instance
"""
dict.__init__(self)
if type(dictionary) is dict:
for key in dictionary:
val = self._standardise_strings(dictionary[key], key)
val = self._standardise_a_string(dictionary[key])
setattr(self, key, val)
def __getattr__(self, name):
......@@ -69,22 +96,41 @@ class Result(dict):
else:
return ''
def _standardise_a_string(self, item):
""" When reading in data, ensure all strings are decoded correctly """
@staticmethod
def _standardise_a_string(item):
""" When reading in data, ensure all strings are decoded correctly
Parameters
----------
item: str
Returns
-------
str: decoded string
"""
if type(item) in [bytes]:
return item.decode()
else:
return item
def _standardise_strings(self, item, name=None):
@staticmethod
def _standardise_strings(item):
"""
Parameters
----------
item: list
List of strings to be decoded
Returns
-------
list: list of decoded strings in item
"""
if type(item) in [list]:
item = [self._standardise_a_string(i) for i in item]
# logging.debug("Unable to decode item {}".format(name))
item = [_standardise_a_string(i) for i in item]
return item
def get_result_dictionary(self):
return dict(self)
def save_to_file(self):
""" Writes the Result to a deepdish h5 file """
file_name = result_file_name(self.outdir, self.label)
......@@ -98,28 +144,40 @@ class Result(dict):
logging.debug("Saving result to {}".format(file_name))
try:
deepdish.io.save(file_name, self.get_result_dictionary())
deepdish.io.save(file_name, dict(self))
except Exception as e:
logging.error(
"\n\n Saving the data has failed with the following message:\n {} \n\n"
.format(e))
logging.error("\n\n Saving the data has failed with the "
"following message:\n {} \n\n".format(e))
def save_posterior_samples(self):
"""Saves posterior samples to a file"""
filename = '{}/{}_posterior_samples.txt'.format(self.outdir, self.label)
self.posterior.to_csv(filename, index=False, header=True)
def get_latex_labels_from_parameter_keys(self, keys):
return_list = []
""" Returns a list of latex_labels corresponding to the given keys
Parameters
----------
keys: list
List of strings corresponding to the desired latex_labels
Returns
-------
list: The desired latex_labels
"""
latex_labels = []
for k in keys:
if k in self.search_parameter_keys:
idx = self.search_parameter_keys.index(k)
return_list.append(self.parameter_labels[idx])
latex_labels.append(self.parameter_labels[idx])
elif k in self.parameter_labels:
return_list.append(k)
latex_labels.append(k)
else:
raise ValueError('key {} not a parameter label or latex label'
.format(k))
return return_list
return latex_labels
def plot_corner(self, parameters=None, save=True, dpi=300, **kwargs):
""" Plot a corner-plot using corner
......@@ -128,10 +186,12 @@ class Result(dict):
Parameters
----------
parameters: list
parameters: list, optional
If given, a list of the parameter names to include
save: bool
save: bool, optional
If true, save the image using the given label and outdir
dpi: int, optional
Dots per inch resolution of the plot
**kwargs:
Other keyword arguments are passed to `corner.corner`. We set some
defaults to improve the basic look and feel, but these can all be
......@@ -189,13 +249,11 @@ class Result(dict):
return fig
def plot_walks(self, save=True, **kwargs):
"""
"""
"""DEPRECATED"""
logging.warning("plot_walks deprecated")
def plot_distributions(self, save=True, **kwargs):
"""
"""
"""DEPRECATED"""
logging.warning("plot_distributions deprecated")
def samples_to_posterior(self, likelihood=None, priors=None,
......@@ -205,11 +263,11 @@ class Result(dict):
Parameters
----------
likelihood: tupak.likelihood.GravitationalWaveTransient
GravitationalWaveTransient used for sampling.
priors: dict
likelihood: tupak.likelihood.GravitationalWaveTransient, optional
GravitationalWaveTransient likelihood used for sampling.
priors: dict, optional
Dictionary of prior object, used to fill in delta function priors.
conversion_function: function
conversion_function: function, optional
Function which adds in extra parameters to the data frame,
should take the data_frame, likelihood and prior as arguments.
"""
......@@ -220,11 +278,7 @@ class Result(dict):
self.posterior = data_frame
def construct_cbc_derived_parameters(self):
"""
Construct widely used derived parameters of CBCs
:return:
"""
""" Construct widely used derived parameters of CBCs """
self.posterior['mass_chirp'] = (self.posterior.mass_1 * self.posterior.mass_2) ** 0.6 / (
self.posterior.mass_1 + self.posterior.mass_2) ** 0.2
self.posterior['q'] = self.posterior.mass_2 / self.posterior.mass_1
......@@ -238,8 +292,21 @@ class Result(dict):
(4 * self.posterior.q + 3) / (3 * self.posterior.q + 4) * self.posterior.q
* self.posterior.a_2 * np.sin(self.posterior.tilt_2))
def _check_attribute_match_to_other_object(self, name, other_object):
""" Check attribute name exists in other_object and is the same """
def check_attribute_match_to_other_object(self, name, other_object):
""" Check attribute name exists in other_object and is the same
Parameters
----------
name: str
Name of the attribute in this instance
other_object: object
Other object with attributes to compare with
Returns
-------
bool: True if attribute name matches with an attribute of other_object, False otherwise
"""
A = getattr(self, name, False)
B = getattr(other_object, name, False)
logging.debug('Checking {} value: {}=={}'.format(name, A, B))
......
This diff is collapsed.
......@@ -16,6 +16,15 @@ solar_mass = 1.98855 * 1e30
def get_sampling_frequency(time_series):
"""
Calculate sampling frequency from a time series
Returns
-------
float: Sampling frequency of the time series
Raises
-------
ValueError: If the time series is not evenly sampled.
"""
tol = 1e-10
if np.ptp(np.diff(time_series)) > tol:
......@@ -25,19 +34,39 @@ def get_sampling_frequency(time_series):
def create_time_series(sampling_frequency, duration, starting_time=0.):
    """ Build an equidistant array of sample times.

    Parameters
    ----------
    sampling_frequency: float
        Samples per second; the spacing of the series is its reciprocal.
    duration: float
        Stop value of the series (exclusive).  NOTE(review): ``np.arange``
        treats this as the absolute end time, so with a non-zero
        ``starting_time`` the series spans less than ``duration`` seconds —
        confirm this is the intended contract.
    starting_time: float, optional
        First sample time (default 0).

    Returns
    -------
    array_like: An equidistant time series given the parameters
    """
    step = 1. / sampling_frequency
    return np.arange(starting_time, duration, step)
def ra_dec_to_theta_phi(ra, dec, gmst):
"""
Convert from RA and DEC to polar coordinates on celestial sphere
Input:
ra - right ascension in radians
dec - declination in radians
gmst - Greenwich mean sidereal time of arrival of the signal in radians
Output:
theta - zenith angle in radians
phi - azimuthal angle in radians
""" Convert from RA and DEC to polar coordinates on celestial sphere
Parameters
-------
ra: float
right ascension in radians
dec: float
declination in radians
gmst: float
Greenwich mean sidereal time of arrival of the signal in radians
Returns
-------
float: zenith angle in radians
float: azimuthal angle in radians
"""
phi = ra - gmst
theta = np.pi / 2 - dec
......@@ -52,10 +81,15 @@ def gps_time_to_gmst(gps_time):
A correction has been applied to give the exact correct value for 00:00:00, 1 Jan. 2018
Error accumulates at a rate of ~0.0001 radians/decade.
Input:
time - gps time
Output:
gmst - Greenwich mean sidereal time in radians
Parameters
-------
gps_time: float
gps time
Returns
-------
float: Greenwich mean sidereal time in radians
"""
omega_earth = 2 * np.pi * (1 / 365.2425 + 1) / 86400.
gps_2000 = 630720013.
......@@ -67,12 +101,18 @@ def gps_time_to_gmst(gps_time):
def create_frequency_series(sampling_frequency, duration):
"""
Create a frequency series with the correct length and spacing.
""" Create a frequency series with the correct length and spacing.
Parameters
-------
sampling_frequency: float
duration: float
duration of data
Returns
-------
array_like: frequency series
:param sampling_frequency: sampling frequency
:param duration: duration of data
:return: frequencies, frequency series
"""
number_of_samples = duration * sampling_frequency
number_of_samples = int(np.round(number_of_samples))
......@@ -93,12 +133,18 @@ def create_frequency_series(sampling_frequency, duration):
def create_white_noise(sampling_frequency, duration):
"""
Create white_noise which is then coloured by a given PSD
""" Create white_noise which is then coloured by a given PSD
:param sampling_frequency: sampling frequency
:param duration: duration of data
Parameters
-------
sampling_frequency: float
duration: float
duration of the data
Returns
-------
array_like: white noise
array_like: frequency array
"""
number_of_samples = duration * sampling_frequency
......@@ -131,16 +177,20 @@ def create_white_noise(sampling_frequency, duration):
def nfft(ht, Fs):
"""
performs an FFT while keeping track of the frequency bins
assumes input time series is real (positive frequencies only)
ht = time series
Fs = sampling frequency
""" Performs an FFT while keeping track of the frequency bins assumes input time series is real
(positive frequencies only)
returns
hf = single-sided FFT of ft normalised to units of strain / sqrt(Hz)
f = frequencies associated with hf
Parameters
-------
ht: array_like
Time series
Fs: float
Sampling frequency
Returns
-------
array_like: Single-sided FFT of ft normalised to units of strain / sqrt(Hz) (hf)
array_like: Frequencies associated with hf
"""
# add one zero padding if time series does not have even number of sampling times
if np.mod(len(ht), 2) == 1:
......@@ -160,14 +210,18 @@ def nfft(ht, Fs):
def infft(hf, Fs):
"""
inverse FFT for use in conjunction with nfft
eric.thrane@ligo.org
input:
hf = single-side FFT calculated by fft_eht
Fs = sampling frequency
output:
h = time series
""" Inverse FFT for use in conjunction with nfft (eric.thrane@ligo.org)
Parameters
-------
hf: array_like
single-side FFT calculated by fft_eht
Fs: float
sampling frequency
Returns
-------
array_like: time series
"""
# use irfft to work with positive frequencies only
h = np.fft.irfft(hf)
......@@ -228,6 +282,9 @@ def setup_logger(outdir=None, label=None, log_level=None):
def get_progress_bar(module='tqdm'):
"""
TODO: Write proper docstring
"""
if module in ['tqdm']:
try:
from tqdm import tqdm
......@@ -245,19 +302,34 @@ def get_progress_bar(module='tqdm'):
def spherical_to_cartesian(radius, theta, phi):
    """ Convert from spherical coordinates to cartesian.

    Parameters
    ----------
    radius: float
        radial coordinate
    theta: float
        axial (polar) coordinate
    phi: float
        azimuthal coordinate

    Returns
    -------
    list: cartesian vector [x, y, z]
    """
    # Hoist the common factor; physics convention: theta measured from +z.
    sin_theta = np.sin(theta)
    cartesian = [radius * sin_theta * np.cos(phi),
                 radius * sin_theta * np.sin(phi),
                 radius * np.cos(theta)]
    return cartesian
def check_directory_exists_and_if_not_mkdir(directory):
    """ Checks if the given directory exists and creates it if it does not exist

    Parameters
    ----------
    directory: str
        Name of the directory
    """
    if not os.path.exists(directory):
        # makedirs also creates any missing intermediate directories;
        # the debug message is only emitted when a directory was created
        os.makedirs(directory)
        logging.debug('Making directory {}'.format(directory))
......@@ -266,6 +338,13 @@ def check_directory_exists_and_if_not_mkdir(directory):
def set_up_command_line_arguments():
""" Sets up some command line arguments that can be used to modify how scripts are run.
Returns
-------
list: A list of the used arguments
"""
parser = argparse.ArgumentParser(
description="Command line interface for tupak scripts")
parser.add_argument("-v", "--verbose", action="store_true",
......
......@@ -92,8 +92,8 @@ def convert_to_lal_binary_black_hole_parameters(parameters, search_keys, remove=
converted_parameters['mass_ratio'] =\
mass_1_and_chirp_mass_to_mass_ratio(parameters['mass_1'], parameters['chirp_mass'])
temp = (parameters['chirp_mass'] / parameters['mass_1'])**5
parameters['mass_ratio'] = (2 * temp / 3 / ((51 * temp**2 - 12 * temp**3)**0.5 + 9 * temp))**(1 / 3)\
+ (((51 * temp**2 - 12 * temp**3)**0.5 + 9 * temp) / 9 / 2**0.5)**(1 / 3)
parameters['mass_ratio'] = (2 * temp / 3 / ((51 * temp**2 - 12 * temp**3)**0.5 + 9 * temp))**(1 / 3) + \
(((51 * temp**2 - 12 * temp**3)**0.5 + 9 * temp) / 9 / 2**0.5)**(1 / 3)
if remove:
converted_parameters.pop('chirp_mass')
elif 'symmetric_mass_ratio' in converted_parameters.keys():
......@@ -307,8 +307,8 @@ def mass_1_and_chirp_mass_to_mass_ratio(mass_1, chirp_mass):
Mass ratio of the binary
"""
temp = (chirp_mass / mass_1)**5
mass_ratio = (2 / 3 / (3**0.5 * (27 * temp**2 - 4 * temp**3)**0.5 + 9 * temp))**(1 / 3) * temp \
+ ((3**0.5 * (27 * temp**2 - 4 * temp ** 3)**0.5 + 9 * temp) / (2 * 3**2)) ** (1 / 3)
mass_ratio = (2 / 3 / (3**0.5 * (27 * temp**2 - 4 * temp**3)**0.5 + 9 * temp))**(1 / 3) * temp + \
((3**0.5 * (27 * temp**2 - 4 * temp ** 3)**0.5 + 9 * temp) / (2 * 3**2)) ** (1 / 3)
return mass_ratio
......@@ -320,7 +320,7 @@ def generate_all_bbh_parameters(sample, likelihood=None, priors=None):
----------
sample: dict or pandas.DataFrame
Samples to fill in with extra parameters, this may be either an injection or posterior samples.
likelihood: tupak.GravitationalWaveTransient
likelihood: tupak.gw.likelihood.GravitationalWaveTr, optional
GravitationalWaveTransient used for sampling, used for waveform and likelihood.interferometers.
priors: dict, optional
Dictionary of prior objects, used to fill in non-sampled parameters.
......@@ -339,7 +339,19 @@ def generate_all_bbh_parameters(sample, likelihood=None, priors=None):
def fill_from_fixed_priors(sample, priors):
"""Add parameters with delta function prior to the data frame/dictionary."""
"""Add parameters with delta function prior to the data frame/dictionary.
Parameters
----------
sample: dict
A dictionary or data frame
priors: dict
A dictionary of priors
Returns
-------
dict:
"""
output_sample = sample.copy()
if priors is not None:
for name in priors:
......@@ -354,6 +366,16 @@ def generate_non_standard_parameters(sample):
We add:
chirp mass, total mass, symmetric mass ratio, mass ratio, cos tilt 1, cos tilt 2, cos iota
Parameters
----------
sample : dict
The input dictionary with component masses 'mass_1' and 'mass_2'
Returns
-------
dict: The updated dictionary
"""
output_sample = sample.copy()
output_sample['chirp_mass'] = component_masses_to_chirp_mass(sample['mass_1'], sample['mass_2'])
......@@ -372,17 +394,32 @@ def generate_component_spins(sample):
Add the component spins to the data frame/dictionary.
This function uses a lalsimulation function to transform the spins.
Parameters
----------
sample: A dictionary with the necessary spin conversion parameters:
'iota', 'phi_jl', 'tilt_1', 'tilt_2', 'phi_12', 'a_1', 'a_2', 'mass_1', 'mass_2', 'reference_frequency', 'phase'
Returns
-------
dict: The updated dictionary
"""
output_sample = sample.copy()
spin_conversion_parameters = ['iota', 'phi_jl', 'tilt_1', 'tilt_2', 'phi_12', 'a_1', 'a_2', 'mass_1',
'mass_2', 'reference_frequency', 'phase']
if all(key in output_sample.keys() for key in spin_conversion_parameters) and isinstance(output_sample, dict):
output_sample['iota'], output_sample['spin_1x'], output_sample['spin_1y'], output_sample['spin_1z'], output_sample['spin_2x'], \
output_sample['spin_2y'], output_sample['spin_2z'] = \
lalsim.SimInspiralTransformPrecessingNewInitialConditions(
output_sample['iota'], output_sample['phi_jl'], output_sample['tilt_1'], output_sample['tilt_2'], output_sample['phi_12'], output_sample['a_1'],
output_sample['a_2'], output_sample['mass_1'] * tupak.core.utils.solar_mass, output_sample['mass_2'] * tupak.core.utils.solar_mass,
output_sample['reference_frequency'], output_sample['phase'])
output_sample['iota'], output_sample['spin_1x'], output_sample['spin_1y'], output_sample['spin_1z'], \
output_sample['spin_2x'], output_sample['spin_2y'], output_sample['spin_2z'] = \
lalsim.SimInspiralTransformPrecessingNewInitialConditions(output_sample['iota'], output_sample['phi_jl'],
output_sample['tilt_1'], output_sample['tilt_2'],
output_sample['phi_12'], output_sample['a_1'],
output_sample['a_2'], output_sample['mass_1']
* tupak.core.utils.solar_mass,
output_sample['mass_2']
* tupak.core.utils.solar_mass,
output_sample['reference_frequency'],
output_sample['phase'])
output_sample['phi_1'] = np.arctan(output_sample['spin_1y'] / output_sample['spin_1x'])
output_sample['phi_2'] = np.arctan(output_sample['spin_2y'] / output_sample['spin_2x'])
......@@ -414,7 +451,16 @@ def generate_component_spins(sample):
def compute_snrs(sample, likelihood):
"""Compute the optimal and matched filter snrs of all posterior samples."""
"""Compute the optimal and matched filter snrs of all posterior samples and print it out.
Parameters
----------
sample: dict or array_like
likelihood: tupak.gw.likelihood.GravitationalWaveTransient
Likelihood function to be applied on the posterior
"""
temp_sample = sample
if likelihood is not None:
if isinstance(temp_sample, dict):
......@@ -426,7 +472,7 @@ def compute_snrs(sample, likelihood):
likelihood.waveform_generator.parameters)
sample['{}_matched_filter_snr'.format(interferometer.name)] = \
tupak.gw.utils.matched_filter_snr_squared(signal, interferometer,
likelihood.waveform_generator.time_duration) ** 0.5
likelihood.waveform_generator.time_duration) ** 0.5
sample['{}_optimal_snr'.format(interferometer.name)] = tupak.gw.utils.optimal_snr_squared(
signal, interferometer, likelihood.waveform_generator.time_duration) ** 0.5
else:
......
This diff is collapsed.
......@@ -32,16 +32,16 @@ class GravitationalWaveTransient(likelihood.Likelihood):
waveform_generator: `tupak.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal,
given some set of parameters
distance_marginalization: bool
distance_marginalization: bool, optional
If true, marginalize over distance in the likelihood.
This uses a look up table calculated at run time.
time_marginalization: bool
time_marginalization: bool, optional
If true, marginalize over time in the likelihood.
This uses a FFT.
phase_marginalization: bool
phase_marginalization: bool, optional
If true, marginalize over phase in the likelihood.
This is done analytically using a Bessel function.
prior: dict
prior: dict, optional
If given, used in the distance and phase marginalization.
Returns
......@@ -243,34 +243,39 @@ class GravitationalWaveTransient(likelihood.Likelihood):
class BasicGravitationalWaveTransient(likelihood.Likelihood):
""" A basic gravitational wave transient likelihood
The simplest frequency-domain gravitational wave transient likelihood. Does
not include distance/phase marginalization.
Parameters
----------
interferometers: list
A list of `tupak.detector.Interferometer` instances - contains the
detector data and power spectral densities
waveform_generator: `tupak.waveform_generator.WaveformGenerator`
An object which computes the frequency-domain strain of the signal,
given some set of parameters
def __init__(self, interferometers, waveform_generator):
"""
Returns
-------
Likelihood: `tupak.gw.likelihood.BasicGravitationalWaveTransient`
A likelihood object, able to compute the likelihood of the data given
some model parameters
"""
The simplest frequency-domain gravitational wave transient likelihood. Does
not include distance/phase marginalization.
def __init__(self, interferometers, waveform_generator):