diff --git a/docs/likelihood.txt b/docs/likelihood.txt
index 412238720388d7d6cd5f4fe0775bfec34149ac53..2158aa92375b9f705c0816fba6e17b5a4f6deb44 100644
--- a/docs/likelihood.txt
+++ b/docs/likelihood.txt
@@ -167,11 +167,55 @@ In the last example, we considered only cases with known noise (e.g., a
 prespecified standard deviation). We now present a general function which can
 handle unknown noise (in which case you need to specify a prior for
 :math:`\sigma`), or known noise (in which case you pass the known noise in when
-instatiating the likelihood
-
-.. literalinclude:: ../examples/other_examples/linear_regression_unknown_noise.py
-   :lines: 52-94
-
+instantiating the likelihood)::
+
+  class GaussianLikelihood(tupak.Likelihood):
+      def __init__(self, x, y, function, sigma=None):
+          """
+          A general Gaussian likelihood for known or unknown noise - the model
+          parameters are inferred from the arguments of function
+
+          Parameters
+          ----------
+          x, y: array_like
+              The data to analyse
+          function:
+              The python function to fit to the data. Note, this must take the
+              independent variable as its first argument. The other arguments
+              will require a prior and will be sampled over (unless a fixed
+              value is given).
+          sigma: None, float, array_like
+              If None, the standard deviation of the noise is unknown and will be
+              estimated (note: this requires a prior to be given for sigma). If
+              not None, this defines the standard deviation of the data points.
+              This can either be a single float, or an array with length equal
+              to that of `x` and `y`.
+          """
+          self.x = x
+          self.y = y
+          self.N = len(x)
+          self.sigma = sigma
+          self.function = function
+
+          # Infer the model parameters from the argument names of the
+          # provided function, dropping the first (independent) variable
+          parameters = inspect.getargspec(function).args
+          parameters.pop(0)
+          self.parameters = dict.fromkeys(parameters)
+          # Take a copy of the keys: in python 3, dict.keys() returns a view,
+          # which would otherwise pick up 'sigma' when it is added below
+          self.function_keys = list(self.parameters.keys())
+          if self.sigma is None:
+              self.parameters['sigma'] = None
+
+      def log_likelihood(self):
+          # If sigma is being sampled it lives in self.parameters; otherwise
+          # fall back to the fixed value given at instantiation
+          sigma = self.parameters.get('sigma', self.sigma)
+          model_parameters = {k: self.parameters[k] for k in self.function_keys}
+          res = self.y - self.function(self.x, **model_parameters)
+          # Broadcast sigma to the data length so the normalisation term sums
+          # correctly whether sigma is a single float or a per-point array
+          sigma = sigma * np.ones(self.N)
+          return -0.5 * np.sum((res / sigma)**2
+                               + np.log(2 * np.pi * sigma**2))
+
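+The log-likelihood implemented here is
+
+.. math::
+
+   \log L = -\frac{1}{2}\sum_{i=1}^{N}\left[\frac{(y_i - f(x_i; \theta))^2}{\sigma_i^2} + \log(2\pi\sigma_i^2)\right],
+
+where :math:`f` is the fitted function and :math:`\theta` its sampled
+arguments.
+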
+We provide this general-purpose class as part of tupak:
+
+.. autoclass:: tupak.core.likelihood.GaussianLikelihood
+   :members:
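+
+For instance, assuming data arrays ``x`` and ``y`` and a model function
+``model(x, m, c)``, a minimal sketch of the two use cases is::
+
+  # Unknown noise: no sigma is passed, so a prior for sigma must be supplied
+  likelihood = tupak.core.likelihood.GaussianLikelihood(x, y, model)
+
+  # Known noise: pass the standard deviation (a float or per-point array)
+  likelihood = tupak.core.likelihood.GaussianLikelihood(x, y, model, sigma=0.1)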
 
 An example using this likelihood can be found `on this page <https://git.ligo.org/Monash/tupak/blob/master/examples/other_examples/linear_regression_unknown_noise.py>`_.
 
diff --git a/examples/other_examples/hyper_parameter_example.py b/examples/other_examples/hyper_parameter_example.py
index ec0758c59b1ec7f2612533b2e269a2792d49023e..17dc87d7150bb9e2dd2103f568012626b1774e9b 100644
--- a/examples/other_examples/hyper_parameter_example.py
+++ b/examples/other_examples/hyper_parameter_example.py
@@ -108,7 +108,7 @@ fig2.savefig('outdir/hyper_parameter_combined_posteriors.png')
 run_prior = tupak.core.prior.Uniform(minimum=-10, maximum=10, name='mu_c0')
 hyper_prior = tupak.core.prior.Gaussian(mu=0, sigma=1, name='hyper')
 
-hp_likelihood = tupak.gw.likelihood.HyperparameterLikelihood(
+hp_likelihood = tupak.core.likelihood.HyperparameterLikelihood(
         samples, hyper_prior, run_prior)
 
 hp_priors = dict(
diff --git a/examples/other_examples/linear_regression_unknown_noise.py b/examples/other_examples/linear_regression_unknown_noise.py
index 1f21be4cee7eb3bbe401e2a1f2914a0914b4e175..719bed045e062ae04e94e3683140392e4e501133 100644
--- a/examples/other_examples/linear_regression_unknown_noise.py
+++ b/examples/other_examples/linear_regression_unknown_noise.py
@@ -9,7 +9,6 @@ from __future__ import division
 import tupak
 import numpy as np
 import matplotlib.pyplot as plt
-import inspect
 
 # A few simple setup steps
 tupak.core.utils.setup_logger()
@@ -45,60 +44,11 @@ ax.set_ylabel('y')
 ax.legend()
 fig.savefig('{}/{}_data.png'.format(outdir, label))
 
+# Now let's instantiate the built-in GaussianLikelihood, giving it
+# the time, data and signal model. Note that, because we do not pass a value
+# for sigma, the noise is treated as unknown and sampled over (so sigma
+# requires its own prior)
+likelihood = tupak.core.likelihood.GaussianLikelihood(time, data, model)
 
-# Parameter estimation: we now define a Gaussian Likelihood class relevant for
-# our model.
-
-class GaussianLikelihood(tupak.Likelihood):
-    def __init__(self, x, y, function, sigma=None):
-        """
-        A general Gaussian likelihood for known or unknown noise - the model
-        parameters are inferred from the arguments of function
-
-        Parameters
-        ----------
-        x, y: array_like
-            The data to analyse
-        function:
-            The python function to fit to the data. Note, this must take the
-            dependent variable as its first argument. The other arguments are
-            will require a prior and will be sampled over (unless a fixed
-            value is given).
-        sigma: None, float, array_like
-            If None, the standard deviation of the noise is unknown and will be
-            estimated (note: this requires a prior to be given for sigma). If
-            not None, this defined the standard-deviation of the data points.
-            This can either be a single float, or an array with length equal
-            to that for `x` and `y`.
-        """
-        self.x = x
-        self.y = y
-        self.N = len(x)
-        self.sigma = sigma
-        self.function = function
-
-        # These lines of code infer the parameters from the provided function
-        parameters = inspect.getargspec(function).args
-        parameters.pop(0)
-        self.parameters = dict.fromkeys(parameters)
-        self.function_keys = self.parameters.keys()
-        if self.sigma is None:
-            self.parameters['sigma'] = None
-
-    def log_likelihood(self):
-        sigma = self.parameters.get('sigma', self.sigma)
-        model_parameters = {k: self.parameters[k] for k in self.function_keys}
-        res = self.y - self.function(self.x, **model_parameters)
-        return -0.5 * (np.sum((res / sigma)**2)
-                       + self.N*np.log(2*np.pi*sigma**2))
-
-
-# Now lets instantiate a version of our GaussianLikelihood, giving it
-# the time, data and signal model
-likelihood = GaussianLikelihood(time, data, model)
-
-# From hereon, the syntax is exactly equivalent to other tupak examples
-# We make a prior
 priors = {}
 priors['m'] = tupak.core.prior.Uniform(0, 5, 'm')
 priors['c'] = tupak.core.prior.Uniform(-2, 2, 'c')
diff --git a/tupak/core/likelihood.py b/tupak/core/likelihood.py
index 02e442240d3a135757abd2b693cc9376ca5de538..ad817a65e0565a98260deb013aa7159d96068cae 100644
--- a/tupak/core/likelihood.py
+++ b/tupak/core/likelihood.py
@@ -1,5 +1,7 @@
 from __future__ import division, print_function
 
+import inspect
+import logging
 import numpy as np
 
 try:
@@ -24,3 +26,96 @@ class Likelihood(object):
         return self.log_likelihood() - self.noise_log_likelihood()
 
 
+class GaussianLikelihood(Likelihood):
+    def __init__(self, x, y, function, sigma=None):
+        """
+        A general Gaussian likelihood for known or unknown noise - the model
+        parameters are inferred from the arguments of function
+
+        Parameters
+        ----------
+        x, y: array_like
+            The data to analyse
+        function:
+            The python function to fit to the data. Note, this must take the
+            independent variable as its first argument. The other arguments
+            will require a prior and will be sampled over (unless a fixed
+            value is given).
+        sigma: None, float, array_like
+            If None, the standard deviation of the noise is unknown and will be
+            estimated (note: this requires a prior to be given for sigma). If
+            not None, this defines the standard deviation of the data points.
+            This can either be a single float, or an array with length equal
+            to that of `x` and `y`.
+        """
+        self.x = x
+        self.y = y
+        self.N = len(x)
+        self.sigma = sigma
+        self.function = function
+
+        # Infer the model parameters from the argument names of the
+        # provided function, dropping the first (independent) variable
+        parameters = inspect.getargspec(function).args
+        parameters.pop(0)
+        self.parameters = dict.fromkeys(parameters)
+        # Take a copy of the keys: in python 3, dict.keys() returns a view,
+        # which would otherwise pick up 'sigma' when it is added below
+        self.function_keys = list(self.parameters.keys())
+        if self.sigma is None:
+            self.parameters['sigma'] = None
+
+    def log_likelihood(self):
+        # If sigma is being sampled it lives in self.parameters; otherwise
+        # fall back to the fixed value given at instantiation
+        sigma = self.parameters.get('sigma', self.sigma)
+        model_parameters = {k: self.parameters[k] for k in self.function_keys}
+        res = self.y - self.function(self.x, **model_parameters)
+        # Broadcast sigma to the data length so the normalisation term sums
+        # correctly whether sigma is a single float or a per-point array
+        sigma = sigma * np.ones(self.N)
+        return -0.5 * np.sum((res / sigma)**2
+                             + np.log(2 * np.pi * sigma**2))
+
+
+class HyperparameterLikelihood(Likelihood):
+    """ A likelihood for infering hyperparameter posterior distributions
+
+    See Eq. (1) of https://arxiv.org/abs/1801.02699 for a definition.
+
+    Parameters
+    ----------
+    samples: list
+        A list of sets of samples, one set per individual inference run. Each
+        set may have a different size.
+    hyper_prior: `tupak.prior.Prior`
+        A prior distribution with a `parameters` argument pointing to the
+        hyperparameters to infer from the samples. These may need to be
+        initialized to any arbitrary value, but this will not affect the
+        result.
+    run_prior: `tupak.prior.Prior`
+        The prior distribution used in the individual inferences which resulted
+        in the set of samples.
+
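+    Examples
+    --------
+    A minimal sketch, assuming ``samples``, ``hyper_prior`` and ``run_prior``
+    are defined as described above::
+
+        hp_likelihood = HyperparameterLikelihood(
+            samples, hyper_prior, run_prior)
+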
+    """
+
+    def __init__(self, samples, hyper_prior, run_prior):
+        Likelihood.__init__(self, parameters=hyper_prior.__dict__)
+        self.samples = samples
+        self.hyper_prior = hyper_prior
+        self.run_prior = run_prior
+        if hasattr(hyper_prior, 'lnprob') and hasattr(run_prior, 'lnprob'):
+            logging.info("Using log-probabilities in likelihood")
+            self.log_likelihood = self.log_likelihood_using_lnprob
+        else:
+            logging.info("Using probabilities in likelihood")
+            self.log_likelihood = self.log_likelihood_using_prob
+
+    def log_likelihood_using_lnprob(self):
+        L = []
+        self.hyper_prior.__dict__.update(self.parameters)
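+        # Following Eq. (1) of arXiv:1801.02699, each set of samples
+        # contributes the log of the summed ratio hyper_prior/run_prior over
+        # its samples; logsumexp evaluates this stably in log space. The
+        # constant normalisation over the number of samples is omitted as it
+        # only shifts the evidence, not the shape of the posterior.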
+        for samp in self.samples:
+            f = self.hyper_prior.lnprob(samp) - self.run_prior.lnprob(samp)
+            L.append(logsumexp(f))
+        return np.sum(L)
+
+    def log_likelihood_using_prob(self):
+        L = []
+        self.hyper_prior.__dict__.update(self.parameters)
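+        # Same ratio as above, evaluated in linear space; this can underflow
+        # for very small probabilities, which is why the lnprob branch is
+        # preferred when available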
+        for samp in self.samples:
+            L.append(
+                np.sum(self.hyper_prior.prob(samp) /
+                       self.run_prior.prob(samp)))
+        return np.sum(np.log(L))
diff --git a/tupak/gw/likelihood.py b/tupak/gw/likelihood.py
index 08b70bfaab10b1abc35428ae39c2eaeafba89a2c..15c3259c1b60b646713efdfd84d4e12c73c67466 100644
--- a/tupak/gw/likelihood.py
+++ b/tupak/gw/likelihood.py
@@ -322,53 +322,3 @@ def get_binary_black_hole_likelihood(interferometers):
     likelihood = tupak.gw.likelihood.GravitationalWaveTransient(interferometers, waveform_generator)
     return likelihood
 
-
-class HyperparameterLikelihood(likelihood.Likelihood):
-    """ A likelihood for infering hyperparameter posterior distributions
-
-    See Eq. (1) of https://arxiv.org/abs/1801.02699 for a definition.
-
-    Parameters
-    ----------
-    samples: list
-        An N-dimensional list of individual sets of samples. Each set may have
-        a different size.
-    hyper_prior: `tupak.prior.Prior`
-        A prior distribution with a `parameters` argument pointing to the
-        hyperparameters to infer from the samples. These may need to be
-        initialized to any arbitrary value, but this will not effect the
-        result.
-    run_prior: `tupak.prior.Prior`
-        The prior distribution used in the inidivudal inferences which resulted
-        in the set of samples.
-
-    """
-
-    def __init__(self, samples, hyper_prior, run_prior):
-        likelihood.Likelihood.__init__(self, parameters=hyper_prior.__dict__)
-        self.samples = samples
-        self.hyper_prior = hyper_prior
-        self.run_prior = run_prior
-        if hasattr(hyper_prior, 'lnprob') and hasattr(run_prior, 'lnprob'):
-            logging.info("Using log-probabilities in likelihood")
-            self.log_likelihood = self.log_likelihood_using_lnprob
-        else:
-            logging.info("Using probabilities in likelihood")
-            self.log_likelihood = self.log_likelihood_using_prob
-
-    def log_likelihood_using_lnprob(self):
-        L = []
-        self.hyper_prior.__dict__.update(self.parameters)
-        for samp in self.samples:
-            f = self.hyper_prior.lnprob(samp) - self.run_prior.lnprob(samp)
-            L.append(logsumexp(f))
-        return np.sum(L)
-
-    def log_likelihood_using_prob(self):
-        L = []
-        self.hyper_prior.__dict__.update(self.parameters)
-        for samp in self.samples:
-            L.append(
-                np.sum(self.hyper_prior.prob(samp) /
-                       self.run_prior.prob(samp)))
-        return np.sum(np.log(L))