diff --git a/examples/other_examples/hyper_parameter_example.py b/examples/other_examples/hyper_parameter_example.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed98f3fff134b7099322efcdd08d1e008c28a3a3
--- /dev/null
+++ b/examples/other_examples/hyper_parameter_example.py
@@ -0,0 +1,82 @@
+#!/bin/python
+"""
+An example of how to use tupak to perform parameter estimation for hyperparameters
+"""
+from __future__ import division
+import tupak
+import numpy as np
+
+tupak.utils.setup_logger()
+outdir = 'outdir'
+
+
+class GaussianLikelihood(tupak.likelihood.Likelihood):
+    def __init__(self, x, y, waveform_generator):
+        self.x = x
+        self.y = y
+        self.N = len(x)
+        self.waveform_generator = waveform_generator
+        self.parameters = waveform_generator.parameters
+
+    def log_likelihood(self):
+        sigma = 1
+        res = self.y - self.waveform_generator.time_domain_strain()
+        return -0.5 * (np.sum((res / sigma)**2)
+                       + self.N*np.log(2*np.pi*sigma**2))
+
+
+def model(time, m):
+    return m * time
+
+
+sampling_frequency = 10
+time_duration = 100
+time = np.arange(0, time_duration, 1/sampling_frequency)
+
+true_mu_m = 5
+true_sigma_m = 0.1
+sigma = 0.1
+Nevents = 10
+samples = []
+
+# Make the sample sets
+for i in range(Nevents):
+    m = np.random.normal(true_mu_m, true_sigma_m)
+    injection_parameters = dict(m=m)
+
+    N = len(time)
+    data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
+
+    waveform_generator = tupak.waveform_generator.WaveformGenerator(
+        time_duration=time_duration, sampling_frequency=sampling_frequency,
+        time_domain_source_model=model)
+
+    likelihood = GaussianLikelihood(time, data, waveform_generator)
+
+    priors = dict(m=tupak.prior.Uniform(-10, 10, 'm'))
+
+    result = tupak.sampler.run_sampler(
+        likelihood=likelihood, priors=priors, sampler='dynesty', npoints=1000,
+        injection_parameters=injection_parameters, outdir=outdir,
+        verbose=False, label='individual_{}'.format(i), use_ratio=False,
+        sample='unif')
+    result.plot_corner()
+    samples.append(result.samples)
+
+# Now run the hyperparameter inference
+run_prior = tupak.prior.Uniform(minimum=-10, maximum=10, name='mu_m')
+hyper_prior = tupak.prior.Gaussian(mu=0, sigma=1, name='hyper')
+
+hp_likelihood = tupak.likelihood.HyperparameterLikelihood(
+        samples, hyper_prior, run_prior)
+
+hp_priors = dict(
+    mu=tupak.prior.Uniform(-10, 10, 'mu', r'$\mu_m$'),
+    sigma=tupak.prior.Uniform(0, 10, 'sigma', r'$\sigma_m$'))
+
+# And run sampler
+result = tupak.sampler.run_sampler(
+    likelihood=hp_likelihood, priors=hp_priors, sampler='dynesty',
+    npoints=1000, outdir=outdir, label='hyperparameter', use_ratio=False,
+    sample='unif', verbose=True)
+result.plot_corner(truth=dict(mu=true_mu_m, sigma=true_sigma_m))
diff --git a/tupak/likelihood.py b/tupak/likelihood.py
index 382c1aea8cddabdb010ee507f0855f06154ea749..5449e90ebe68939372b8e373eec3880be499277c 100644
--- a/tupak/likelihood.py
+++ b/tupak/likelihood.py
@@ -230,3 +230,53 @@ def get_binary_black_hole_likelihood(interferometers):
     likelihood = tupak.likelihood.GravitationalWaveTransient(interferometers, waveform_generator)
     return likelihood
 
+
+class HyperparameterLikelihood(Likelihood):
+    """ A likelihood for infering hyperparameter posterior distributions
+
+    See Eq. (1) of https://arxiv.org/abs/1801.02699 for a definition.
+
+    Parameters
+    ----------
+    samples: list
+        An N-dimensional list of individual sets of samples. Each set may have
+        a different size.
+    hyper_prior: `tupak.prior.Prior`
+        A prior distribution with a `parameters` argument pointing to the
+        hyperparameters to infer from the samples. These may need to be
+        initialized to any arbitrary value, but this will not affect the
+        result.
+    run_prior: `tupak.prior.Prior`
+        The prior distribution used in the individual inferences which resulted
+        in the set of samples.
+
+    """
+
+    def __init__(self, samples, hyper_prior, run_prior):
+        self.samples = samples
+        self.hyper_prior = hyper_prior
+        self.run_prior = run_prior
+        self.parameters = hyper_prior.__dict__
+        if hasattr(hyper_prior, 'lnprob') and hasattr(run_prior, 'lnprob'):
+            logging.info("Using log-probabilities in likelihood")
+            self.log_likelihood = self.log_likelihood_using_lnprob
+        else:
+            logging.info("Using probabilities in likelihood")
+            self.log_likelihood = self.log_likelihood_using_prob
+
+    def log_likelihood_using_lnprob(self):
+        L = []
+        self.hyper_prior.__dict__.update(self.parameters)
+        for samp in self.samples:
+            f = self.hyper_prior.lnprob(samp) - self.run_prior.lnprob(samp)
+            L.append(logsumexp(f))
+        return np.sum(L)
+
+    def log_likelihood_using_prob(self):
+        L = []
+        self.hyper_prior.__dict__.update(self.parameters)
+        for samp in self.samples:
+            L.append(
+                np.sum(self.hyper_prior.prob(samp) /
+                       self.run_prior.prob(samp)))
+        return np.sum(np.log(L))
diff --git a/tupak/prior.py b/tupak/prior.py
index 12f9f6a12dda556393114ac360357a1e96617493..4c141735226148649589d1cd35daf87bdcc25360 100644
--- a/tupak/prior.py
+++ b/tupak/prior.py
@@ -173,6 +173,12 @@ class PowerLaw(Prior):
             return np.nan_to_num(val ** self.alpha * (1 + self.alpha) / (self.maximum ** (1 + self.alpha)
                                                                          - self.minimum ** (1 + self.alpha))) * in_prior
 
+    def lnprob(self, val):
+        in_prior = (val >= self.minimum) & (val <= self.maximum)
+        normalising = (1+self.alpha)/(self.maximum ** (1 + self.alpha)
+                                      - self.minimum ** (1 + self.alpha))
+        return (self.alpha * np.log(val) + np.log(normalising)) * in_prior
+
 
 class Uniform(PowerLaw):
     """Uniform prior"""
@@ -254,6 +260,9 @@ class Gaussian(Prior):
         """Return the prior probability of val"""
         return np.exp(-(self.mu - val)**2 / (2 * self.sigma**2)) / (2 * np.pi)**0.5 / self.sigma
 
+    def lnprob(self, val):
+        return -0.5*((self.mu - val)**2 / self.sigma**2 + np.log(2 * np.pi * self.sigma**2))
+
 
 class TruncatedGaussian(Prior):
     """