diff --git a/docs/likelihood.txt b/docs/likelihood.txt
index 17ffaf3dde3d15d45343ddba36ce30ffdb7b4703..869b1383f57713fbc44cff8b80410f86cb3226c2 100644
--- a/docs/likelihood.txt
+++ b/docs/likelihood.txt
@@ -167,48 +167,10 @@ In the last example, we considered only cases with known noise (e.g., a
 prespecified standard deviation. We now present a general function which can
 handle unknown noise (in which case you need to specify a prior for
 :math:`\sigma`, or known noise (in which case you pass the known noise in when
-instatiating the likelihood::
+instantiating the likelihood
 
-   class GaussianLikelihood(tupak.Likelihood):
-       def __init__(self, x, y, function, sigma=None):
-           """
-           A general Gaussian likelihood for known or unknown noise - the model
-           parameters are inferred from the arguments of function
-
-           Parameters
-           ----------
-           x, y: array_like
-               The data to analyse
-           function:
-               The python function to fit to the data. Note, this must take the
-               dependent variable as its first argument. The other arguments are
-               will require a prior and will be sampled over (unless a fixed
-               value is given).
-           sigma: None, float, array_like
-               If None, the standard deviation of the noise is unknown and will be
-               estimated (note: this requires a prior to be given for sigma). If
-               not None, this defined the standard-deviation of the data points.
-               This can either be a single float, or an array with length equal
-               to that for `x` and `y`.
-           """
-           self.x = x
-           self.y = y
-           self.N = len(x)
-           self.function = function
-
-           # These lines of code infer the parameters from the provided function
-           parameters = inspect.getargspec(function).args
-           parameters.pop(0)
-           self.parameters = dict.fromkeys(parameters)
-           self.function_keys = self.parameters.keys()
-           self.parameters['sigma'] = None
-
-       def log_likelihood(self):
-           model_parameters = {k: self.parameters[k] for k in self.function_keys}
-           res = self.y - self.function(self.x, **model_parameters)
-           sigma = self.parameters['sigma']
-           return -0.5 * (np.sum((res / sigma)**2)
-                          + self.N*np.log(2*np.pi*sigma**2))
+.. literalinclude:: ../examples/other_examples/linear_regression_unknown_noise.py
+   :lines: 52-94
 
 
 An example using this likelihood can be found `on this page <https://git.ligo.org/Monash/tupak/blob/master/examples/other_examples/linear_regression_unknown_noise.py>`_.
diff --git a/examples/other_examples/linear_regression_unknown_noise.py b/examples/other_examples/linear_regression_unknown_noise.py
index 0b1cacdebd9a3642731e67bde4955699e44c2ec1..1f21be4cee7eb3bbe401e2a1f2914a0914b4e175 100644
--- a/examples/other_examples/linear_regression_unknown_noise.py
+++ b/examples/other_examples/linear_regression_unknown_noise.py
@@ -74,6 +74,7 @@ class GaussianLikelihood(tupak.Likelihood):
         self.x = x
         self.y = y
         self.N = len(x)
+        self.sigma = sigma
         self.function = function
 
         # These lines of code infer the parameters from the provided function
@@ -81,18 +82,15 @@ class GaussianLikelihood(tupak.Likelihood):
         parameters.pop(0)
         self.parameters = dict.fromkeys(parameters)
         self.function_keys = self.parameters.keys()
-        if sigma is None:
+        if self.sigma is None:
             self.parameters['sigma'] = None
-            self.sigma = self.parameters['sigma']
-        else:
-            self.sigma = sigma
 
     def log_likelihood(self):
-        self.sigma = self.parameters['sigma']
+        sigma = self.parameters.get('sigma', self.sigma)
         model_parameters = {k: self.parameters[k] for k in self.function_keys}
         res = self.y - self.function(self.x, **model_parameters)
-        return -0.5 * (np.sum((res / self.sigma)**2)
-                       + self.N*np.log(2*np.pi*self.sigma**2))
+        return -0.5 * (np.sum((res / sigma)**2)
+                       + self.N*np.log(2*np.pi*sigma**2))
 
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it