diff --git a/docs/likelihood.txt b/docs/likelihood.txt
index 7e5ecbed04a85e8775da86bef57ab33da0df70dd..17ffaf3dde3d15d45343ddba36ce30ffdb7b4703 100644
--- a/docs/likelihood.txt
+++ b/docs/likelihood.txt
@@ -36,7 +36,7 @@ Then, the likelihood for all :math:`N` data points is
 In practice, we implement the log-likelihood to avoid numerical overflow
 errors. To code this up in :code:`tupak`, we would write a class like this::
 
-   class GaussianLikelihood(tupak.Likelihood):
+   class SimpleGaussianLikelihood(tupak.Likelihood):
        def __init__(self, data):
            """
            A very simple Gaussian likelihood
@@ -105,7 +105,7 @@ the likelihood for each data point.
 
 In :code:`tupak`, we can code this up as a likelihood in the following way::
 
-   class GaussianLikelihood(tupak.Likelihood):
+   class GaussianLikelihoodKnownNoise(tupak.Likelihood):
        def __init__(self, x, y, sigma, function):
            """
            A general Gaussian likelihood - the parameters are inferred from the
@@ -139,10 +139,6 @@ In :code:`tupak`, we can code this up as a likelihood in the following way::
            return -0.5 * (np.sum((res / self.sigma)**2)
                           + self.N*np.log(2*np.pi*self.sigma**2))
 
-       def noise_log_likelihood(self):
-           return -0.5 * (np.sum((self.y / self.sigma)**2)
-                          + self.N*np.log(2*np.pi*self.sigma**2))
-
 
 This likelihood can be given any Python function, the data (in the form of
 :code:`x` and :code:`y`), and the standard deviation of the noise. The
@@ -214,11 +210,13 @@ instantiating the likelihood::
            return -0.5 * (np.sum((res / sigma)**2)
                           + self.N*np.log(2*np.pi*sigma**2))
 
-       def noise_log_likelihood(self):
-           return np.nan
-           sigma = self.parameters['sigma']
-           return -0.5 * (np.sum((self.y / sigma)**2)
-                          + self.N*np.log(2*np.pi*sigma**2))
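+When :code:`sigma` is omitted (this sketch assumes it defaults to
+:code:`None`), the noise level is inferred from the data, so the priors
+must include an entry for :code:`sigma`. With :code:`model` and
+:code:`priors` defined as in the earlier examples::
+
+   likelihood = GaussianLikelihood(x, y, function=model)
+   priors['sigma'] = tupak.core.prior.Uniform(0, 10, 'sigma')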
 
 An example using this likelihood can be found `on this page <https://git.ligo.org/Monash/tupak/blob/master/examples/other_examples/linear_regression_unknown_noise.py>`_.
 
diff --git a/examples/other_examples/gaussian_example.py b/examples/other_examples/gaussian_example.py
index e6f505ed53c10478dd27414dfd545a6005e5b522..1cf4a691b20574e46fb5458aa282e4360204c2a4 100644
--- a/examples/other_examples/gaussian_example.py
+++ b/examples/other_examples/gaussian_example.py
@@ -22,7 +22,7 @@ outdir = 'outdir'
 data = np.random.normal(3, 4, 100)
 
 
-class GaussianLikelihood(tupak.Likelihood):
+class SimpleGaussianLikelihood(tupak.Likelihood):
     def __init__(self, data):
         """
         A very simple Gaussian likelihood
@@ -44,7 +44,9 @@ class GaussianLikelihood(tupak.Likelihood):
                        + self.N*np.log(2*np.pi*sigma**2))
 
 
-likelihood = GaussianLikelihood(data)
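+# Instantiate the likelihood: it needs only the data, since mu and sigma
+# are set by the sampler through the likelihood's parameters dict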
+likelihood = SimpleGaussianLikelihood(data)
 priors = dict(mu=tupak.core.prior.Uniform(0, 5, 'mu'),
               sigma=tupak.core.prior.Uniform(0, 10, 'sigma'))
 
diff --git a/examples/other_examples/linear_regression.py b/examples/other_examples/linear_regression.py
index 0deb0571bcf6b5020a9500e9061f346bdbdaa68f..6df18c9304b31da6bc6fa654b64f8d130dfc498b 100644
--- a/examples/other_examples/linear_regression.py
+++ b/examples/other_examples/linear_regression.py
@@ -50,7 +50,7 @@ fig.savefig('{}/{}_data.png'.format(outdir, label))
 # our model.
 
 
-class GaussianLikelihood(tupak.Likelihood):
+class GaussianLikelihoodKnownNoise(tupak.Likelihood):
     def __init__(self, x, y, sigma, function):
         """
         A general Gaussian likelihood - the parameters are inferred from the
@@ -84,14 +84,10 @@ class GaussianLikelihood(tupak.Likelihood):
         return -0.5 * (np.sum((res / self.sigma)**2)
                        + self.N*np.log(2*np.pi*self.sigma**2))
 
-    def noise_log_likelihood(self):
-        return -0.5 * (np.sum((self.y / self.sigma)**2)
-                       + self.N*np.log(2*np.pi*self.sigma**2))
-
 
-# Now lets instantiate a version of our GaussianLikelihood, giving it
-# the time, data and signal model
+# Now let's instantiate a version of our GaussianLikelihoodKnownNoise,
+# giving it the time, data and signal model
-likelihood = GaussianLikelihood(time, data, sigma, model)
+likelihood = GaussianLikelihoodKnownNoise(time, data, sigma, model)
 
 # From here on, the syntax is exactly equivalent to other tupak examples
 # We make a prior
diff --git a/examples/other_examples/linear_regression_unknown_noise.py b/examples/other_examples/linear_regression_unknown_noise.py
index 3389de13e2a55e807d3bf4f57e2851b69efcb6cc..0b1cacdebd9a3642731e67bde4955699e44c2ec1 100644
--- a/examples/other_examples/linear_regression_unknown_noise.py
+++ b/examples/other_examples/linear_regression_unknown_noise.py
@@ -81,20 +81,25 @@ class GaussianLikelihood(tupak.Likelihood):
         parameters.pop(0)
         self.parameters = dict.fromkeys(parameters)
-        self.function_keys = self.parameters.keys()
+        # Copy the keys: under Python 3, dict.keys() returns a live view
+        # that would otherwise pick up the 'sigma' entry added below
+        self.function_keys = list(self.parameters.keys())
-        self.parameters['sigma'] = None
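+        # If sigma is not given, treat it as a free parameter to be
+        # inferred alongside the parameters of the model function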
+        if sigma is None:
+            self.parameters['sigma'] = None
+        self.sigma = sigma
 
     def log_likelihood(self):
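+        # Use the sampled sigma when it is a free parameter; otherwise
+        # fall back on the fixed value given at instantiation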
+        self.sigma = self.parameters.get('sigma', self.sigma)
         model_parameters = {k: self.parameters[k] for k in self.function_keys}
         res = self.y - self.function(self.x, **model_parameters)
-        sigma = self.parameters['sigma']
-        return -0.5 * (np.sum((res / sigma)**2)
-                       + self.N*np.log(2*np.pi*sigma**2))
-
-    def noise_log_likelihood(self):
-        return np.nan
-        sigma = self.parameters['sigma']
-        return -0.5 * (np.sum((self.y / sigma)**2)
-                       + self.N*np.log(2*np.pi*sigma**2))
+        return -0.5 * (np.sum((res / self.sigma)**2)
+                       + self.N*np.log(2*np.pi*self.sigma**2))
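+
+# This class handles both cases: pass a numeric sigma to fix the noise
+# level, or leave it as None to infer the noise from the data.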
 
 
-# Now lets instantiate a version of our GaussianLikelihood, giving it
+# Now let's instantiate a version of our GaussianLikelihood, giving it