Skip to content
Snippets Groups Projects
Commit ff191b79 authored by Gregory Ashton's avatar Gregory Ashton
Browse files

Fix bug in likelihood docs and examples

Previously I was setting the noise_log_likelihood to nan in a confusing
way. This removes that and also renames all the likelihoods for clarity.
parent 911fced0
No related branches found
No related tags found
No related merge requests found
Pipeline #
......@@ -36,7 +36,7 @@ Then, the likelihood for all :math:`N` data points is
In practice, we implement the log-likelihood to avoid numerical overflow
errors. To code this up in :code:`tupak`, we would write a class like this::
class GaussianLikelihood(tupak.Likelihood):
class SimpleGaussianLikelihood(tupak.Likelihood):
def __init__(self, data):
"""
A very simple Gaussian likelihood
......@@ -105,7 +105,7 @@ the likelihood for each data point.
In :code:`tupak`, we can code this up as a likelihood in the following way::
class GaussianLikelihood(tupak.Likelihood):
class GaussianLikelihoodKnownNoise(tupak.Likelihood):
def __init__(self, x, y, sigma, function):
"""
A general Gaussian likelihood - the parameters are inferred from the
......@@ -139,10 +139,6 @@ In :code:`tupak`, we can code this up as a likelihood in the following way::
return -0.5 * (np.sum((res / self.sigma)**2)
+ self.N*np.log(2*np.pi*self.sigma**2))
def noise_log_likelihood(self):
    """Log-likelihood of the data under the noise-only (zero-signal) model.

    Uses the known, fixed noise standard deviation ``self.sigma`` and the
    ``self.N`` data points in ``self.y``.
    """
    # Gaussian normalisation term, summed over all N points.
    normalisation = self.N * np.log(2 * np.pi * self.sigma ** 2)
    # Residual of the data against a zero signal, whitened by sigma.
    chi_squared = np.sum((self.y / self.sigma) ** 2)
    return -0.5 * (chi_squared + normalisation)
This likelihood can be given any python function, the data (in the form of
:code:`x` and :code:`y`) and the standard deviation of the noise. The
......@@ -214,11 +210,6 @@ instantiating the likelihood::
return -0.5 * (np.sum((res / sigma)**2)
+ self.N*np.log(2*np.pi*sigma**2))
def noise_log_likelihood(self):
    """Log-likelihood of the data under the noise-only (zero-signal) model.

    Returns
    -------
    float
        The Gaussian log-likelihood of ``self.y`` assuming zero signal,
        with the noise standard deviation read from
        ``self.parameters['sigma']``.
    """
    # Bug fix: the original body began with `return np.nan`, which made the
    # actual computation below unreachable and returned nan to any sampler
    # asking for the noise evidence. The dead return is removed.
    sigma = self.parameters['sigma']
    return -0.5 * (np.sum((self.y / sigma)**2)
                   + self.N * np.log(2 * np.pi * sigma**2))
An example using this likelihood can be found `on this page <https://git.ligo.org/Monash/tupak/blob/master/examples/other_examples/linear_regression_unknown_noise.py>`_.
......
......@@ -22,7 +22,7 @@ outdir = 'outdir'
data = np.random.normal(3, 4, 100)
class GaussianLikelihood(tupak.Likelihood):
class SimpleGaussianLikelihood(tupak.Likelihood):
def __init__(self, data):
"""
A very simple Gaussian likelihood
......@@ -44,7 +44,7 @@ class GaussianLikelihood(tupak.Likelihood):
+ self.N*np.log(2*np.pi*sigma**2))
likelihood = GaussianLikelihood(data)
likelihood = SimpleGaussianLikelihood(data)
priors = dict(mu=tupak.core.prior.Uniform(0, 5, 'mu'),
sigma=tupak.core.prior.Uniform(0, 10, 'sigma'))
......
......@@ -50,7 +50,7 @@ fig.savefig('{}/{}_data.png'.format(outdir, label))
# our model.
class GaussianLikelihood(tupak.Likelihood):
class GaussianLikelihoodKnownNoise(tupak.Likelihood):
def __init__(self, x, y, sigma, function):
"""
A general Gaussian likelihood - the parameters are inferred from the
......@@ -84,14 +84,10 @@ class GaussianLikelihood(tupak.Likelihood):
return -0.5 * (np.sum((res / self.sigma)**2)
+ self.N*np.log(2*np.pi*self.sigma**2))
def noise_log_likelihood(self):
    """Return the Gaussian log-likelihood of the data given zero signal.

    The noise standard deviation is the fixed ``self.sigma`` attribute;
    ``self.N`` is the number of samples in ``self.y``.
    """
    whitened = self.y / self.sigma
    log_norm = np.log(2 * np.pi * self.sigma ** 2)
    return -0.5 * (np.sum(whitened ** 2) + self.N * log_norm)
# Now lets instantiate a version of our GaussianLikelihood, giving it
# the time, data and signal model
likelihood = GaussianLikelihood(time, data, sigma, model)
likelihood = GaussianLikelihoodKnownNoise(time, data, sigma, model)
# From hereon, the syntax is exactly equivalent to other tupak examples
# We make a prior
......
......@@ -81,20 +81,18 @@ class GaussianLikelihood(tupak.Likelihood):
parameters.pop(0)
self.parameters = dict.fromkeys(parameters)
self.function_keys = self.parameters.keys()
self.parameters['sigma'] = None
if sigma is None:
self.parameters['sigma'] = None
self.sigma = self.parameters['sigma']
else:
self.sigma = sigma
def log_likelihood(self):
    """Gaussian log-likelihood of the data given the current parameters.

    Evaluates ``self.function(self.x, **model_parameters)``, where the
    model parameters are taken from ``self.parameters`` (keyed by
    ``self.function_keys``), and compares against ``self.y`` with noise
    standard deviation ``self.parameters['sigma']``.

    Returns
    -------
    float
        The log-likelihood summed over all ``self.N`` data points.
    """
    # Fix: the original both mutated `self.sigma` and re-read the same
    # value into a local `sigma`; evaluating a likelihood should not
    # side-effect the object, so a single local read is used instead.
    sigma = self.parameters['sigma']
    model_parameters = {k: self.parameters[k] for k in self.function_keys}
    res = self.y - self.function(self.x, **model_parameters)
    return -0.5 * (np.sum((res / sigma)**2)
                   + self.N * np.log(2 * np.pi * sigma**2))
def noise_log_likelihood(self):
    """Log-likelihood of the data under the noise-only (zero-signal) model.

    Returns
    -------
    float
        The Gaussian log-likelihood of ``self.y`` with zero mean and
        standard deviation ``self.parameters['sigma']``.
    """
    # Fix: the original opened with `return np.nan`, leaving the real
    # computation (and two stray lines referencing an undefined `res`)
    # unreachable. The dead return and the stray lines are removed so the
    # noise evidence is actually computed.
    sigma = self.parameters['sigma']
    return -0.5 * (np.sum((self.y / sigma)**2)
                   + self.N * np.log(2 * np.pi * sigma**2))
# Now lets instantiate a version of our GaussianLikelihood, giving it
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment