diff --git a/peyote/detector.py b/peyote/detector.py
index 9bf1f84def42f5c71970b0ee093dfbb92154bdeb..383b44697867c9130aaf6a3285b7b044e72b56a0 100644
--- a/peyote/detector.py
+++ b/peyote/detector.py
@@ -323,6 +323,14 @@ class Interferometer(object):
     def whitened_data(self):
         return self.data / self.amplitude_spectral_density_array
 
+    def save_data(self, outdir):
+        np.savetxt('{}/{}_frequency_domain_data.dat'.format(outdir, self.name),
+                   np.array([self.frequency_array, self.data.real, self.data.imag]).T,
+                   header='f real_h(f) imag_h(f)')
+        np.savetxt('{}/{}_psd.dat'.format(outdir, self.name),
+                   np.array([self.frequency_array, self.amplitude_spectral_density_array]).T,
+                   header='f h(f)')
+
 
 class PowerSpectralDensity:
 
@@ -458,7 +466,8 @@ GEO600 = get_empty_interferometer('GEO600')
 
 def get_inteferometer(
         name, center_time, T=4, alpha=0.25, psd_offset=-1024, psd_duration=100,
-        cache=True, outdir='outdir', plot=True, filter_freq=1024, **kwargs):
+        cache=True, outdir='outdir', plot=True, filter_freq=1024,
+        raw_data_file=None, **kwargs):
     """
     Helper function to obtain an Interferometer instance with appropriate
     PSD and data, given an center_time
@@ -497,10 +506,11 @@ def get_inteferometer(
 
     strain = get_open_strain_data(
             name, center_time-T/2, center_time+T/2, outdir=outdir, cache=cache,
-            **kwargs)
+            raw_data_file=raw_data_file, **kwargs)
 
     strain_psd = get_open_strain_data(
             name, center_time+psd_offset, center_time+psd_offset+psd_duration,
+            raw_data_file=raw_data_file,
             outdir=outdir, cache=cache, **kwargs)
 
     sampling_frequency = int(strain.sample_rate.value)
@@ -555,9 +565,80 @@ def get_inteferometer(
     return interferometer, sampling_frequency, time_duration
 
 
-def get_open_strain_data(name, t1, t2, outdir, cache=False, **kwargs):
+def get_inteferometer_with_fake_noise_and_injection(
+        name, injection_polarizations, injection_parameters, sampling_frequency=4096, time_duration=4,
+        outdir='outdir', plot=True, save=True):
+    """
+    Helper function to obtain an Interferometer instance with simulated
+    Gaussian noise and an injected signal
+
+    Parameters
+    ----------
+    name: str
+        Detector name, e.g., 'H1'.
+    injection_polarizations: dict
+        polarizations of waveform to inject, output of waveform_generator.get_frequency_domain_signal
+    injection_parameters: dict
+        injection parameters, needed for sky position and timing
+    sampling_frequency: float
+        sampling frequency for data, should match injection signal
+    time_duration: float
+        length of data, should be the same as used for signal generation
+    outdir: str
+        directory in which to store output
+    plot: bool
+        If true, create an ASD + strain plot
+    save: bool
+        If true, save frequency domain data and PSD to file
+
+    Returns
+    -------
+    interferometer: `peyote.detector.Interferometer`
+        An Interferometer instance with a PSD and frequency-domain strain data.
+    """
+
+    utils.check_directory_exists_and_if_not_mkdir(outdir)
+
+    interferometer = get_empty_interferometer(name)
+    interferometer.set_data(sampling_frequency=sampling_frequency, duration=time_duration,
+                            from_power_spectral_density=True)
+    interferometer.inject_signal(waveform_polarizations=injection_polarizations, parameters=injection_parameters)
+
+    interferometer_signal = interferometer.get_detector_response(injection_polarizations, injection_parameters)
+
+    if plot:
+        fig, ax = plt.subplots()
+        ax.loglog(interferometer.frequency_array, np.abs(interferometer.data),
+                  '-C0', label=name)
+        ax.loglog(interferometer.frequency_array,
+                  interferometer.amplitude_spectral_density_array,
+                  '-C1', lw=0.5, label=name+' ASD')
+        ax.loglog(interferometer.frequency_array, abs(interferometer_signal), label='Signal')
+        ax.grid('on')
+        ax.set_ylabel(r'strain [strain/$\sqrt{\rm Hz}$]')
+        ax.set_xlabel(r'frequency [Hz]')
+        ax.set_xlim(20, 2000)
+        ax.legend(loc='best')
+        fig.savefig('{}/{}_frequency_domain_data.png'.format(outdir, name))
+
+    if save:
+        interferometer.save_data(outdir)
+
+    return interferometer
+
+
+def get_open_strain_data(name, t1, t2, outdir, cache=False, raw_data_file=None,
+                         **kwargs):
     filename = '{}/{}_{}_{}.txt'.format(outdir, name, t1, t2)
-    if os.path.isfile(filename) and cache:
+    if raw_data_file:
+        logging.info('Attempting to use raw_data_file {}'.format(raw_data_file))
+        strain = TimeSeries.read(raw_data_file)
+        if (t1 >= strain.times[0].value) and (t2 <= strain.times[-1].value):
+            logging.info('Using supplied raw data file')
+            strain = strain.crop(t1, t2)
+        else:
+            raise ValueError('Supplied file does not contain requested data')
+    elif os.path.isfile(filename) and cache:
         logging.info('Using cached data from {}'.format(filename))
         strain = TimeSeries.read(filename)
     else:
@@ -566,4 +647,3 @@ def get_open_strain_data(name, t1, t2, outdir, cache=False, **kwargs):
         logging.info('Saving data to {}'.format(filename))
         strain.write(filename)
     return strain
-
diff --git a/peyote/prior.py b/peyote/prior.py
index 326353648aadf501f52ef9601955d933300db047..d2ba21041f455e7d92d77cb079a8ec7efb300d36 100644
--- a/peyote/prior.py
+++ b/peyote/prior.py
@@ -4,6 +4,9 @@ from __future__ import division
 import numpy as np
 from scipy.interpolate import interp1d
 from scipy.integrate import cumtrapz
+from scipy.special import erf, erfinv
+import logging
+import os
 
 
 class Prior(object):
@@ -22,11 +25,17 @@ class Prior(object):
 
     def rescale(self, val):
         """
-        'Rescale' a sample from the unit line element to the prior, does nothing.
+        'Rescale' a sample from the unit line element to the prior.
 
-        This maps to the inverse CDF.
+        This should be overwritten by each subclass.
         """
-        return val
+        return None
+
+    @staticmethod
+    def test_valid_for_rescaling(val):
+        """Raise a ValueError if val is outside the closed interval [0, 1]"""
+        if (val < 0) or (val > 1):
+            raise ValueError("Number to be rescaled should be in [0, 1]")
 
     def __repr__(self):
         prior_name = self.__class__.__name__
@@ -59,18 +68,18 @@ class Prior(object):
             return '$\mathcal{M}$'
         elif self.name == 'q':
             return '$q$'
-        elif self.name == 'a1':
+        elif self.name == 'a_1':
             return '$a_1$'
-        elif self.name == 'a2':
+        elif self.name == 'a_2':
             return '$a_2$'
-        elif self.name == 'tilt1':
-            return '$\\text{tilt}_1$'
-        elif self.name == 'tilt2':
-            return '$\\text{tilt}_2$'
-        elif self.name == 'phi1':
-            return '$\phi_1$'
-        elif self.name == 'phi2':
-            return '$\phi_2$'
+        elif self.name == 'tilt_1':
+            return '$\\theta_1$'
+        elif self.name == 'tilt_2':
+            return '$\\theta_2$'
+        elif self.name == 'phi_12':
+            return '$\Delta\phi$'
+        elif self.name == 'phi_jl':
+            return '$\phi_{JL}$'
         elif self.name == 'luminosity_distance':
             return '$d_L$'
         elif self.name == 'dec':
@@ -94,18 +103,19 @@ class Prior(object):
 class Uniform(Prior):
     """Uniform prior"""
 
-    def __init__(self, lower, upper, name=None, latex_label=None):
+    def __init__(self, minimum, maximum, name=None, latex_label=None):
         Prior.__init__(self, name, latex_label)
-        self.lower = lower
-        self.upper = upper
-        self.support = upper - lower
+        self.minimum = minimum
+        self.maximum = maximum
+        self.support = maximum - minimum
 
     def rescale(self, val):
-        return self.lower + val * self.support
+        Prior.test_valid_for_rescaling(val)
+        return self.minimum + val * self.support
 
     def prob(self, val):
         """Return the prior probability of val"""
-        if (self.lower < val) and (val < self.upper):
+        if (self.minimum < val) and (val < self.maximum):
             return 1 / self.support
         else:
             return 0
@@ -120,6 +130,7 @@ class DeltaFunction(Prior):
 
     def rescale(self, val):
         """Rescale everything to the peak with the correct shape."""
+        Prior.test_valid_for_rescaling(val)
         return self.peak * val ** 0
 
     def prob(self, val):
@@ -133,11 +144,12 @@ class DeltaFunction(Prior):
 class PowerLaw(Prior):
     """Power law prior distribution"""
 
-    def __init__(self, alpha, bounds, name=None, latex_label=None):
+    def __init__(self, alpha, minimum, maximum, name=None, latex_label=None):
         """Power law with bounds and alpha, spectral index"""
         Prior.__init__(self, name, latex_label)
         self.alpha = alpha
-        self.low, self.high = bounds
+        self.minimum = minimum
+        self.maximum = maximum
 
     def rescale(self, val):
         """
@@ -145,20 +157,21 @@ class PowerLaw(Prior):
 
         This maps to the inverse CDF. This has been analytically solved for this case.
         """
+        Prior.test_valid_for_rescaling(val)
         if self.alpha == -1:
-            return self.low * np.exp(val * np.log(self.high / self.low))
+            return self.minimum * np.exp(val * np.log(self.maximum / self.minimum))
         else:
-            return (self.low ** (1 + self.alpha) + val *
-                    (self.high ** (1 + self.alpha) - self.low ** (1 + self.alpha))) ** (1. / (1 + self.alpha))
+            return (self.minimum ** (1 + self.alpha) + val *
+                    (self.maximum ** (1 + self.alpha) - self.minimum ** (1 + self.alpha))) ** (1. / (1 + self.alpha))
 
     def prob(self, val):
         """Return the prior probability of val"""
-        if (val > self.low) and (val < self.high):
+        if (val > self.minimum) and (val < self.maximum):
             if self.alpha == -1:
-                return 1 / val / np.log(self.high / self.low)
+                return 1 / val / np.log(self.maximum / self.minimum)
             else:
-                return val ** self.alpha * (1 + self.alpha) / (self.high ** (1 + self.alpha)
-                                                               - self.low ** (1 + self.alpha))
+                return val ** self.alpha * (1 + self.alpha) / (self.maximum ** (1 + self.alpha)
+                                                               - self.minimum ** (1 + self.alpha))
         else:
             return 0
 
@@ -174,12 +187,16 @@ class Cosine(Prior):
 
         This maps to the inverse CDF. This has been analytically solved for this case.
         """
+        Prior.test_valid_for_rescaling(val)
         return np.arcsin(-1 + val * 2)
 
     @staticmethod
     def prob(val):
-        """Return the prior probability of val"""
-        return np.cos(val) / 2
+        """Return the prior probability of val, defined over [-pi/2, pi/2]"""
+        if (val > -np.pi / 2) and (val < np.pi / 2):
+            return np.cos(val) / 2
+        else:
+            return 0
 
 
 class Sine(Prior):
@@ -193,100 +210,173 @@ class Sine(Prior):
 
         This maps to the inverse CDF. This has been analytically solved for this case.
         """
+        Prior.test_valid_for_rescaling(val)
         return np.arccos(-1 + val * 2)
 
     @staticmethod
     def prob(val):
+        """Return the prior probability of val, defined over [0, pi]"""
+        if (val > 0) and (val < np.pi):
+            return np.sin(val) / 2
+        else:
+            return 0
+
+
+class Gaussian(Prior):
+    """Gaussian prior"""
+
+    def __init__(self, mu, sigma, name=None, latex_label=None):
+        """Gaussian prior with mean mu and standard deviation sigma"""
+        Prior.__init__(self, name, latex_label)
+        self.mu = mu
+        self.sigma = sigma
+
+    def rescale(self, val):
+        """
+        'Rescale' a sample from the unit line element to the appropriate Gaussian prior.
+
+        This maps to the inverse CDF. This has been analytically solved for this case.
+        """
+        Prior.test_valid_for_rescaling(val)
+        return self.mu + erfinv(2 * val - 1) * 2**0.5 * self.sigma
+
+    def prob(self, val):
+        """Return the prior probability of val"""
+        return np.exp(-(self.mu - val)**2 / (2 * self.sigma**2)) / (2 * np.pi)**0.5 / self.sigma
+
+
+class TruncatedGaussian(Prior):
+    """
+    Truncated Gaussian prior
+
+    https://en.wikipedia.org/wiki/Truncated_normal_distribution
+    """
+
+    def __init__(self, mu, sigma, minimum, maximum, name=None, latex_label=None):
+        """Gaussian prior with mean mu and standard deviation sigma, truncated to [minimum, maximum]"""
+        Prior.__init__(self, name, latex_label)
+        self.mu = mu
+        self.sigma = sigma
+        self.minimum = minimum
+        self.maximum = maximum
+
+        self.normalisation = (erf((self.maximum - self.mu) / 2 ** 0.5 / self.sigma) - erf(
+            (self.minimum - self.mu) / 2 ** 0.5 / self.sigma)) / 2
+
+    def rescale(self, val):
+        """
+        'Rescale' a sample from the unit line element to the appropriate truncated Gaussian prior.
+
+        This maps to the inverse CDF. This has been analytically solved for this case.
+        """
+        Prior.test_valid_for_rescaling(val)
+        return erfinv(2 * val * self.normalisation + erf(
+            (self.minimum - self.mu) / 2 ** 0.5 / self.sigma)) * 2 ** 0.5 * self.sigma + self.mu
+
+    def prob(self, val):
         """Return the prior probability of val"""
-        return np.sin(val) / 2
+        if (val > self.minimum) & (val < self.maximum):
+            return np.exp(-(self.mu - val) ** 2 / (2 * self.sigma ** 2)) / (
+                        2 * np.pi) ** 0.5 / self.sigma / self.normalisation
+        else:
+            return 0
 
 
 class Interped(Prior):
 
-    def __init__(self, xx, yy, name=None, latex_label=None):
+    def __init__(self, xx, yy, minimum=None, maximum=None, name=None, latex_label=None):
         """Initialise object from arrays of x and y=p(x)"""
         Prior.__init__(self, name, latex_label)
-        self.xx = xx
-        self.low = min(self.xx)
-        self.high = max(self.xx)
-        self.yy = yy
-        if np.trapz(self.yy, self.xx) != 0:
-            print('Supplied PDF is not normalised, normalising.')
+        if minimum is None or minimum < min(xx):
+            self.minimum = min(xx)
+        else:
+            self.minimum = minimum
+        if maximum is None or maximum > max(xx):
+            self.maximum = max(xx)
+        else:
+            self.maximum = maximum
+        self.xx = xx[(xx >= self.minimum) & (xx <= self.maximum)]
+        self.yy = yy[(xx >= self.minimum) & (xx <= self.maximum)]
+        if np.trapz(self.yy, self.xx) != 1:
+            logging.info('Supplied PDF is not normalised, normalising.')
         self.yy /= np.trapz(self.yy, self.xx)
         self.YY = cumtrapz(self.yy, self.xx, initial=0)
-        self.probability_density = interp1d(x=self.xx, y=self.yy, bounds_error=False, fill_value=min(self.yy))
+        self.probability_density = interp1d(x=self.xx, y=self.yy, bounds_error=False, fill_value=0)
         self.cumulative_distribution = interp1d(x=self.xx, y=self.YY, bounds_error=False, fill_value=0)
-        self.invervse_cumulative_distribution = interp1d(x=self.YY, y=self.xx, bounds_error=False,
-                                                         fill_value=(min(self.xx), max(self.xx)))
+        self.inverse_cumulative_distribution = interp1d(x=self.YY, y=self.xx, bounds_error=True)
 
     def prob(self, val):
         """Return the prior probability of val"""
-        return self.probability_density(val)
+        if (val > self.minimum) & (val < self.maximum):
+            return self.probability_density(val)
+        else:
+            return 0
 
-    def rescale(self, x):
+    def rescale(self, val):
         """
         'Rescale' a sample from the unit line element to the prior.
 
         This maps to the inverse CDF. This is done using interpolation.
         """
-        return self.invervse_cumulative_distribution(x)
+        Prior.test_valid_for_rescaling(val)
+        return self.inverse_cumulative_distribution(val)
 
 
 class FromFile(Interped):
 
-    def __init__(self, file_name):
+    def __init__(self, file_name, minimum=None, maximum=None, name=None, latex_label=None):
         try:
             self.id = file_name
-            xx, yy = np.genfromtxt(file_name).T
-            Interped.__init__(self, xx, yy)
+            if '/' not in self.id:
+                self.id = '{}/peyote/prior_files/{}'.format(os.getcwd(), self.id)
+            xx, yy = np.genfromtxt(self.id).T
+            Interped.__init__(self, xx=xx, yy=yy, minimum=minimum, maximum=maximum, name=name, latex_label=latex_label)
         except IOError:
-            print("Can't load {}.".format(file_name))
-            print("Format should be:")
-            print(r"x\tp(x)")
+            logging.warning("Can't load {}.".format(self.id))
+            logging.warning("Format should be:")
+            logging.warning(r"x\tp(x)")
 
 
 def fix(prior, value=None):
     if value is None or np.isnan(value):
         raise ValueError("You can't fix the value to be np.nan. You need to assign it a legal value")
-    prior = DeltaFunction(name=prior.name,
-                             latex_label=prior.latex_label,
-                             peak=value)
+    prior = DeltaFunction(name=prior.name, latex_label=prior.latex_label, peak=value)
     return prior
 
 
 def create_default_prior(name):
     if name == 'mass_1':
-        prior = PowerLaw(name=name, alpha=0, bounds=(5, 100))
+        prior = PowerLaw(name=name, alpha=0, minimum=5, maximum=100)
     elif name == 'mass_2':
-        prior = PowerLaw(name=name, alpha=0, bounds=(5, 100))
+        prior = PowerLaw(name=name, alpha=0, minimum=5, maximum=100)
     elif name == 'mchirp':
-        prior = PowerLaw(name=name, alpha=0, bounds=(5, 100))
+        prior = Uniform(name=name, minimum=5, maximum=100)
     elif name == 'q':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 1))
+        prior = Uniform(name=name, minimum=0, maximum=1)
     elif name == 'a_1':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 1))
+        prior = Uniform(name=name, minimum=0, maximum=0.8)
     elif name == 'a_2':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 1))
+        prior = Uniform(name=name, minimum=0, maximum=0.8)
     elif name == 'tilt_1':
         prior = Sine(name=name)
     elif name == 'tilt_2':
         prior = Sine(name=name)
-    elif name == 'phi_1':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 2 * np.pi))
-    elif name == 'phi_2':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 2 * np.pi))
+    elif name == 'phi_12':
+        prior = Uniform(name=name, minimum=0, maximum=2 * np.pi)
+    elif name == 'phi_jl':
+        prior = Uniform(name=name, minimum=0, maximum=2 * np.pi)
     elif name == 'luminosity_distance':
-        prior = PowerLaw(name=name, alpha=2, bounds=(1e2, 5e3))
+        prior = PowerLaw(name=name, alpha=2, minimum=1e2, maximum=5e3)
     elif name == 'dec':
         prior = Cosine(name=name)
     elif name == 'ra':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 2 * np.pi))
+        prior = Uniform(name=name, minimum=0, maximum=2 * np.pi)
     elif name == 'iota':
         prior = Sine(name=name)
     elif name == 'psi':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 2 * np.pi))
+        prior = Uniform(name=name, minimum=0, maximum=2 * np.pi)
     elif name == 'phase':
-        prior = PowerLaw(name=name, alpha=0, bounds=(0, 2 * np.pi))
+        prior = Uniform(name=name, minimum=0, maximum=2 * np.pi)
     else:
         prior = None
     return prior
@@ -297,8 +387,8 @@ def parse_floats_to_fixed_priors(old_parameters):
     for key in parameters:
         if type(parameters[key]) is not float and type(parameters[key]) is not int \
                 and type(parameters[key]) is not Prior:
-            print("Expected parameter " + str(key) + " to be a float or int but was " + str(type(parameters[key]))
-                  + " instead. Will not be converted.")
+            logging.info("Expected parameter " + str(key) + " to be a float or int but was "
+                         + str(type(parameters[key])) + " instead. Will not be converted.")
             continue
         elif type(parameters[key]) is Prior:
             continue
@@ -311,3 +401,61 @@ def parse_keys_to_parameters(keys):
     for key in keys:
         parameters[key] = create_default_prior(key)
     return parameters
+
+
+def fill_priors(prior, waveform_generator):
+    """
+    Fill dictionary of priors based on required parameters for waveform generator
+
+    Any floats in prior will be converted to delta function prior.
+    Any required, non-specified parameters will use the default.
+    Parameters
+    ----------
+    prior: dict
+        dictionary of prior objects and floats
+    waveform_generator: WaveformGenerator
+        waveform generator to be used for inference
+    """
+    bad_keys = []
+    for key in prior:
+        if isinstance(prior[key], Prior):
+            continue
+        elif isinstance(prior[key], float) or isinstance(prior[key], int):
+            prior[key] = DeltaFunction(prior[key])
+            logging.info("{} converted to delta function prior.".format(key))
+        else:
+            logging.warning("{} cannot be converted to delta function prior.".format(key))
+            logging.warning("If required the default prior will be used.")
+            bad_keys.append(key)
+
+    missing_keys = set(waveform_generator.parameters) - set(prior.keys())
+
+    for missing_key in missing_keys:
+        prior[missing_key] = create_default_prior(missing_key)
+        if prior[missing_key] is None:
+            logging.warning("No default prior found for unspecified variable {}.".format(missing_key))
+            logging.warning("This variable will NOT be sampled.")
+            bad_keys.append(missing_key)
+
+    for key in bad_keys:
+        prior.pop(key)
+
+
+def write_priors_to_file(priors, outdir):
+    """
+    Write the prior distribution to file.
+
+    Parameters
+    ----------
+    priors: dict
+        priors used
+    outdir: str
+        output directory
+    """
+    if outdir[-1] != "/":
+        outdir += "/"
+    prior_file = outdir + "prior.txt"
+    logging.info("Writing priors to {}".format(prior_file))
+    with open(prior_file, "w") as outfile:
+        for key in priors:
+            outfile.write("prior['{}'] = {}\n".format(key, priors[key]))
diff --git a/peyote/result.py b/peyote/result.py
index fb12a82ebfdf36650ea05020a1ecd49564ec8182..f3fbc41d46a5ad8ce70b7cbe3a731abc208a8327 100644
--- a/peyote/result.py
+++ b/peyote/result.py
@@ -1,6 +1,7 @@
 import logging
 import os
 import deepdish
+from chainconsumer import ChainConsumer
 
 
 class Result(dict):
@@ -35,3 +36,81 @@ class Result(dict):
 
         logging.info("Saving result to {}".format(file_name))
         deepdish.io.save(file_name, self)
+
+    def plot_corner(self, save=True, **kwargs):
+        """ Plot a corner-plot using chain-consumer
+
+        Parameters
+        ----------
+        save: bool
+            If true, save the image using the given label and outdir
+
+        Returns
+        -------
+        fig:
+            A matplotlib figure instance
+        """
+
+        # Set some defaults (unless already set)
+        kwargs['figsize'] = kwargs.get('figsize', 'GROW')
+        if save:
+            kwargs['filename'] = '{}/{}_corner.png'.format(self.outdir, self.label)
+            logging.info('Saving corner plot to {}'.format(kwargs['filename']))
+        if self.injection_parameters is not None:
+            kwargs['truth'] = [self.injection_parameters[key] for key in self.search_parameter_keys]
+        c = ChainConsumer()
+        c.add_chain(self.samples, parameters=self.parameter_labels)
+        fig = c.plotter.plot(**kwargs)
+        return fig
+
+    def plot_walks(self, save=True, **kwargs):
+        """ Plot the chain walks using chain-consumer
+
+        Parameters
+        ----------
+        save: bool
+            If true, save the image using the given label and outdir
+
+        Returns
+        -------
+        fig:
+            A matplotlib figure instance
+        """
+
+        # Set some defaults (unless already set)
+        if save:
+            kwargs['filename'] = '{}/{}_walks.png'.format(self.outdir, self.label)
+            logging.info('Saving walker plot to {}'.format(kwargs['filename']))
+        if self.injection_parameters is not None:
+            kwargs['truth'] = [self.injection_parameters[key] for key in self.search_parameter_keys]
+        c = ChainConsumer()
+        c.add_chain(self.samples, parameters=self.parameter_labels)
+        fig = c.plotter.plot_walks(**kwargs)
+        return fig
+
+    def plot_distributions(self, save=True, **kwargs):
+        """ Plot the parameter distributions using chain-consumer
+
+        Parameters
+        ----------
+        save: bool
+            If true, save the image using the given label and outdir
+
+        Returns
+        -------
+        fig:
+            A matplotlib figure instance
+        """
+
+        # Set some defaults (unless already set)
+        if save:
+            kwargs['filename'] = '{}/{}_distributions.png'.format(self.outdir, self.label)
+            logging.info('Saving distributions plot to {}'.format(kwargs['filename']))
+        if self.injection_parameters is not None:
+            kwargs['truth'] = [self.injection_parameters[key] for key in self.search_parameter_keys]
+        c = ChainConsumer()
+        c.add_chain(self.samples, parameters=self.parameter_labels)
+        fig = c.plotter.plot_distributions(**kwargs)
+        return fig
+
+
diff --git a/peyote/sampler.py b/peyote/sampler.py
index 92d67028ca298310431465709499a6606c67321f..56ceadfb8bc72d03cf4a5bf27088f655196fa776 100644
--- a/peyote/sampler.py
+++ b/peyote/sampler.py
@@ -4,14 +4,14 @@ import inspect
 import logging
 import os
 import sys
-
 import numpy as np
 from chainconsumer import ChainConsumer
 import matplotlib.pyplot as plt
 
 from .result import Result
-from .prior import Prior
+from .prior import Prior, fill_priors
 from . import utils
+import peyote
 
 
 class Sampler(object):
@@ -69,7 +72,11 @@ class Sampler(object):
         if result is None:
             self.__result = Result()
             self.__result.search_parameter_keys = self.__search_parameter_keys
-            self.__result.labels = [self.priors[k].latex_label for k in self.__search_parameter_keys]
+            self.__result.parameter_labels = [
+                self.priors[k].latex_label for k in
+                self.__search_parameter_keys]
+            self.__result.label = self.label
+            self.__result.outdir = self.outdir
         elif type(result) is Result:
             self.__result = result
         else:
@@ -183,30 +190,6 @@ class Sampler(object):
         logging.info("Using sampler {} with kwargs {}".format(
             self.__class__.__name__, self.kwargs))
 
-    def plot_corner(self, save=True, **kwargs):
-        """ Plot a corner-plot using chain-consumer
-
-        Parameters
-        ----------
-        save: bool
-            If true, save the image using the given label and outdir
-
-        Returns
-        -------
-        fig:
-            A matplotlib figure instance
-        """
-
-        # Set some defaults (unless already set)
-        kwargs['figsize'] = kwargs.get('figsize', 'GROW')
-        if save:
-            kwargs['filename'] = '{}/{}_corner.png'.format(self.outdir, self.label)
-            logging.info('Saving corner plot to {}'.format(kwargs['filename']))
-        c = ChainConsumer()
-        c.add_chain(self.result.samples, parameters=self.result.labels)
-        fig = c.plotter.plot(**kwargs)
-        return fig
-
 
 class Nestle(Sampler):
 
@@ -244,21 +227,37 @@ class Nestle(Sampler):
 
 
 class Dynesty(Sampler):
+
+    @property
+    def kwargs(self):
+        return self.__kwargs
+
+    @kwargs.setter
+    def kwargs(self, kwargs):
+        self.__kwargs = dict(dlogz=0.1, sample='rwalk', walks=100, bound='multi', update_interval=6000)
+        self.__kwargs.update(kwargs)
+        if 'npoints' not in self.__kwargs:
+            for equiv in ['nlive', 'nlives', 'n_live_points', 'npoint']:
+                if equiv in self.__kwargs:
+                    self.__kwargs['npoints'] = self.__kwargs.pop(equiv)
+        if 'npoints' not in self.__kwargs:
+            self.__kwargs['npoints'] = 10000
+
     def run_sampler(self):
         dynesty = self.external_sampler
         nested_sampler = dynesty.NestedSampler(
             loglikelihood=self.log_likelihood,
             prior_transform=self.prior_transform,
             ndim=self.ndim, **self.kwargs)
-        nested_sampler.run_nested()
+        nested_sampler.run_nested(dlogz=self.kwargs['dlogz'])
         out = nested_sampler.results
 
         self.result.sampler_output = out
         weights = np.exp(out['logwt'] - out['logz'][-1])
         self.result.samples = dynesty.utils.resample_equal(
             out.samples, weights)
-        self.result.logz = out.logz
-        self.result.logzerr = out.logzerr
+        self.result.logz = out.logz[-1]
+        self.result.logzerr = out.logzerr[-1]
         return self.result
 
 
@@ -270,7 +269,7 @@ class Pymultinest(Sampler):
 
     @kwargs.setter
     def kwargs(self, kwargs):
-        outputfiles_basename = self.outdir + '/pymultinest_{}_out/'.format(self.label)
+        outputfiles_basename = self.outdir + '/pymultinest_{}/'.format(self.label)
         utils.check_directory_exists_and_if_not_mkdir(outputfiles_basename)
         self.__kwargs = dict(importance_nested_sampling=False, resume=True,
                              verbose=True, sampling_efficiency='parameter',
@@ -350,8 +349,8 @@ class Ptemcee(Sampler):
         fig.savefig(filename)
 
 
-def run_sampler(likelihood, priors, label='label', outdir='outdir',
-                sampler='nestle', use_ratio=False, injection_parameters=None,
+def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
+                sampler='nestle', use_ratio=True, injection_parameters=None,
                 **sampler_kwargs):
     """
     The primary interface to easy parameter estimation
@@ -362,8 +361,10 @@ def run_sampler(likelihood, priors, label='label', outdir='outdir',
         A `Likelihood` instance
     priors: dict
         A dictionary of the priors for each parameter - missing parameters will
-        use default priors
-    label, outdir: str
+        use default priors, if None, all priors will be default
+    label: str
+        Name for the run, used in output files
+    outdir: str
         A string used in defining output files
     sampler: str
         The name of the sampler to use - see
@@ -380,14 +381,18 @@ def run_sampler(likelihood, priors, label='label', outdir='outdir',
 
     Returns
     ------
-    result, sampler
-        An object containing the results, and the sampler instance (useful
-        for creating plots etc)
+    result
+        An object containing the results
     """
 
     utils.check_directory_exists_and_if_not_mkdir(outdir)
     implemented_samplers = get_implemented_samplers()
 
+    if priors is None:
+        priors = dict()
+    fill_priors(priors, likelihood.waveform_generator)
+    peyote.prior.write_priors_to_file(priors, outdir)
+
     if implemented_samplers.__contains__(sampler.title()):
         sampler_class = globals()[sampler.title()]
         sampler = sampler_class(likelihood, priors, sampler, outdir=outdir,
@@ -395,10 +400,14 @@ def run_sampler(likelihood, priors, label='label', outdir='outdir',
                                 **sampler_kwargs)
         result = sampler.run_sampler()
         result.noise_logz = likelihood.noise_log_likelihood()
-        result.log_bayes_factor = result.logz - result.noise_logz
+        if use_ratio:
+            result.log_bayes_factor = result.logz
+            result.logz = result.log_bayes_factor + result.noise_logz
+        else:
+            result.log_bayes_factor = result.logz - result.noise_logz
         result.injection_parameters = injection_parameters
         result.save_to_file(outdir=outdir, label=label)
-        return result, sampler
+        return result
     else:
         raise ValueError(
             "Sampler {} not yet implemented".format(sampler))
diff --git a/peyote/source.py b/peyote/source.py
index c4e777b0244a16d0e9e9b43bfe8a5c2a69332f6b..749a74d89c5b095d3ce6026dddfdb7d73fcd5bc7 100644
--- a/peyote/source.py
+++ b/peyote/source.py
@@ -10,7 +10,7 @@ from . import utils
 
 
 def lal_binary_black_hole(
-        frequency_array, mass_1, mass_2, luminosity_distance, a_1, tilt_1, phi_1, a_2, tilt_2, phi_2,
+        frequency_array, mass_1, mass_2, luminosity_distance, a_1, tilt_1, phi_12, a_2, tilt_2, phi_jl,
         iota, phase, waveform_approximant, reference_frequency, ra, dec,
         geocent_time, psi):
     """ A Binary Black Hole waveform model using lalsimulation """
@@ -21,8 +21,9 @@ def lal_binary_black_hole(
     mass_1 = mass_1 * utils.solar_mass
     mass_2 = mass_2 * utils.solar_mass
 
-    spin_1x, spin_1y, spin_1z = utils.spherical_to_cartesian(a_1, tilt_1, phi_1)
-    spin_2x, spin_2y, spin_2z = utils.spherical_to_cartesian(a_2, tilt_2, phi_2)
+    iota, spin_1x, spin_1y, spin_1z, spin_2x, spin_2y, spin_2z = \
+        lalsim.SimInspiralTransformPrecessingNewInitialConditions(iota, phi_jl, tilt_1, tilt_2, phi_12, a_1, a_2,
+                                                                  mass_1, mass_2, reference_frequency, phase)
 
     longitude_ascending_nodes = 0.0
     eccentricity = 0.0
diff --git a/peyote/waveform_generator.py b/peyote/waveform_generator.py
index ed79b66073d5db9c989046a5684b64df9c648389..0acca2ca21e76a5ea3f0947a049aa3f592623312 100644
--- a/peyote/waveform_generator.py
+++ b/peyote/waveform_generator.py
@@ -72,9 +72,9 @@ class WaveformGenerator(object):
             for key in self.__parameters.keys():
                 if key in parameters.keys():
                     self.__parameters[key] = parameters[key]
-                else:
-                    raise KeyError('The provided dictionary did not '
-                                   'contain key {}'.format(key))
+                # NOTE: missing keys are now deliberately ignored (previously
+                # this raised KeyError) so that callers may supply a partial
+                # parameter dictionary; unset parameters keep their values.
         else:
             raise TypeError('Parameters must either be set as a list of keys or'
                             ' a dictionary of key-value pairs.')
diff --git a/test/noise_realisation_tests.py b/test/noise_realisation_tests.py
index 730fc73086d9ac98c6edd77acc6defa80022884a..812629184874f06c1e263f40f3413c04642ff3a7 100644
--- a/test/noise_realisation_tests.py
+++ b/test/noise_realisation_tests.py
@@ -8,40 +8,40 @@ class TestNoiseRealisation(unittest.TestCase):
         time_duration = 1.
         sampling_frequency = 4096.
         factor = np.sqrt(2./time_duration)
-        navg = 1000
+        n_avg = 1000
         psd_avg = 0
-        H1 = peyote.detector.H1
-        for x in range(0, navg):
-            H1_hf_noise, frequencies = H1.power_spectral_density.get_noise_realisation(sampling_frequency,
-                                                                                       time_duration)
-            H1.set_data(sampling_frequency, time_duration, frequency_domain_strain=H1_hf_noise)
-            hf_tmp = H1.data
-            psd_avg += abs(hf_tmp)**2
-
-        psd_avg = psd_avg/navg
+        interferometer = peyote.detector.H1
+        for x in range(0, n_avg):
+            interferometer.set_data(sampling_frequency, time_duration, from_power_spectral_density=True)
+            psd_avg += abs(interferometer.data)**2
+
+        psd_avg = psd_avg/n_avg
         asd_avg = np.sqrt(abs(psd_avg))
 
-        a = H1.amplitude_spectral_density_array/factor
+        a = interferometer.amplitude_spectral_density_array/factor
         b = asd_avg
         self.assertTrue(np.isclose(a[2]/b[2], 1.00, atol=1e-1))
 
     def test_noise_normalisation(self):
         time_duration = 1.
         sampling_frequency = 4096.
-        number_of_samples = int(np.round(time_duration*sampling_frequency))
-        time_array = (1./sampling_frequency) * np.linspace(0, number_of_samples, number_of_samples)
+        time_array = peyote.utils.create_time_series(sampling_frequency=sampling_frequency, duration=time_duration)
 
         # generate some toy-model signal for matched filtering SNR testing
-        navg = 10000
-        snr = np.zeros(navg)
+        n_avg = 10000
+        snr = np.zeros(n_avg)
         mu = np.exp(-(time_array-time_duration/2.)**2 / (2.*0.1**2)) * np.sin(2*np.pi*100*time_array)
         muf, frequency_array = peyote.utils.nfft(mu, sampling_frequency)
-        for x in range(0, navg):
-            H1 = peyote.detector.H1
-            H1_hf_noise, frequencies = H1.power_spectral_density.get_noise_realisation(sampling_frequency, time_duration)
-            H1.set_data(sampling_frequency, time_duration,frequency_domain_strain=H1_hf_noise)
-            hf_tmp = H1.data
-            Sh = H1.power_spectral_density
-            snr[x] = peyote.utils.inner_product(hf_tmp, muf, frequency_array, Sh) / np.sqrt(peyote.utils.inner_product(muf, muf, frequency_array, Sh))
+        for x in range(0, n_avg):
+            interferometer = peyote.detector.H1
+            interferometer.set_data(sampling_frequency, time_duration, from_power_spectral_density=True)
+            hf_tmp = interferometer.data
+            psd = interferometer.power_spectral_density
+            snr[x] = peyote.utils.inner_product(hf_tmp, muf, frequency_array, psd) \
+                     / np.sqrt(peyote.utils.inner_product(muf, muf, frequency_array, psd))
 
         self.assertTrue(np.isclose(np.std(snr), 1.00, atol=1e-2))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/test/prior_tests.py b/test/prior_tests.py
index d310d54056cb6bc2ac7debce0f2484175ae35ead..a61afa78b93e4cc6eb60fff124ae49f154585450 100644
--- a/test/prior_tests.py
+++ b/test/prior_tests.py
@@ -79,7 +79,7 @@ class TestPriorIsFixed(unittest.TestCase):
         self.assertTrue(self.prior.is_fixed)
 
     def test_is_fixed_uniform_class(self):
-        self.prior = peyote.prior.Uniform(lower=0, upper=10)
+        self.prior = peyote.prior.Uniform(minimum=0, maximum=10)
         self.assertFalse(self.prior.is_fixed)
 
 
diff --git a/test/tests.py b/test/tests.py
index 8ddf7ca3f7809379ded42e479d4ff90ad9db1732..9c28c31a3fc48e2f057cd8f5ceb13c19a88a7069 100644
--- a/test/tests.py
+++ b/test/tests.py
@@ -49,15 +49,16 @@ class Test(unittest.TestCase):
         likelihood = peyote.likelihood.Likelihood(
             [self.msd['IFO']], self.msd['waveform_generator'])
 
+        priors = {}
+        for key in self.msd['simulation_parameters']:
+            priors[key] = self.msd['simulation_parameters'][key]
+
         dL = self.msd['simulation_parameters']['luminosity_distance']
-        priors = {'luminosity_distance' : peyote.prior.Uniform(
-            name='luminosity_distance', lower=dL - 10, upper=dL + 10)
-}
-
-        result, sampler = peyote.sampler.run_sampler(
-                likelihood, priors, sampler='nestle', verbose=False)
-        self.assertAlmostEqual(np.mean(result.samples), dL,
-                               delta=np.std(result.samples))
+        priors['luminosity_distance'] = peyote.prior.Uniform(
+            name='luminosity_distance', minimum=dL - 10, maximum=dL + 10)
+
+        result = peyote.sampler.run_sampler(likelihood, priors, sampler='nestle', verbose=False)
+        self.assertAlmostEqual(np.mean(result.samples), dL, delta=np.std(result.samples))
 
 
 if __name__ == '__main__':
diff --git a/tutorials/BasicTutorial.py b/tutorials/BasicTutorial.py
index bc250d9e19bcafe94fb5b9367d13b3b8d84ea5ff..9f20a813c36a765e5cd8aa8db27a7b661a23ef52 100644
--- a/tutorials/BasicTutorial.py
+++ b/tutorials/BasicTutorial.py
@@ -1,89 +1,46 @@
 import numpy as np
 import pylab as plt
-
-import peyote.prior
+import peyote
 
 peyote.utils.setup_logger()
 
-time_duration = 1.
-sampling_frequency = 4096.
+time_duration = 4.
+sampling_frequency = 2048.
+outdir = 'outdir'
 
-injection_parameters = dict(
-    mass_1=36.,
-    mass_2=29.,
-    a_1=0,
-    a_2=0,
-    tilt_1=0,
-    tilt_2=0,
-    phi_1=0,
-    phi_2=0,
-    luminosity_distance=100.,
-    iota=0.4,
-    phase=1.3,
-    waveform_approximant='IMRPhenomPv2',
-    reference_frequency=50.,
-    ra=1.375,
-    dec=-1.2108,
-    geocent_time=1126259642.413,
-    psi=2.659
-)
+injection_parameters = dict(mass_1=36., mass_2=29., a_1=0, a_2=0, tilt_1=0, tilt_2=0, phi_12=0, phi_jl=0,
+                            luminosity_distance=2500., iota=0.4, psi=2.659, phase=1.3, geocent_time=1126259642.413,
+                            waveform_approximant='IMRPhenomPv2', reference_frequency=50., ra=1.375, dec=-1.2108)
 
 # Create the waveform_generator using a LAL BinaryBlackHole source function
 waveform_generator = peyote.waveform_generator.WaveformGenerator(
-    sampling_frequency=sampling_frequency,
-    time_duration=time_duration,
+    sampling_frequency=sampling_frequency, time_duration=time_duration,
     frequency_domain_source_model=peyote.source.lal_binary_black_hole,
     parameters=injection_parameters)
 hf_signal = waveform_generator.frequency_domain_strain()
 
-sampling_parameters = peyote.prior.parse_floats_to_fixed_priors(injection_parameters)
-
-#sampling_parameters = peyote.prior.parse_keys_to_parameters(injection_parameters.keys())
-
-
-# Simulate the data in H1
-H1 = peyote.detector.H1
-H1.set_data(sampling_frequency=sampling_frequency, duration=time_duration,
-            from_power_spectral_density=True)
-H1.inject_signal(waveform_polarizations=hf_signal, parameters=injection_parameters)
-
-# Simulate the data in L1
-L1 = peyote.detector.L1
-L1.set_data(sampling_frequency=sampling_frequency, duration=time_duration,
-            from_power_spectral_density=True)
-L1.inject_signal(waveform_polarizations=hf_signal, parameters=injection_parameters)
-
-# Simulate the data in V1
-V1 = peyote.detector.V1
-V1.set_data(sampling_frequency=sampling_frequency, duration=time_duration,
-            from_power_spectral_density=True)
-V1.inject_signal(waveform_polarizations=hf_signal, parameters=injection_parameters)
-
-IFOs = [H1, L1, V1]
-
-# Plot the noise and signal
-fig, ax = plt.subplots()
-plt.loglog(H1.frequency_array, np.abs(H1.data), lw=1.5, label='H1 noise+signal')
-plt.loglog(H1.frequency_array, H1.amplitude_spectral_density_array, lw=1.5, label='H1 ASD')
-plt.loglog(L1.frequency_array, np.abs(L1.data), lw=1.5, label='L1 noise+signal')
-plt.loglog(L1.frequency_array, L1.amplitude_spectral_density_array, lw=1.5, label='H1 ASD')
-# plt.loglog(frequencies, np.abs(hf_signal['plus']), lw=0.8, label='signal')
-plt.xlim(10, 1000)
-plt.legend()
-plt.xlabel(r'frequency')
-plt.ylabel(r'strain')
-fig.savefig('data')
+# Set up interferometers.
+IFOs = [peyote.detector.get_inteferometer_with_fake_noise_and_injection(
+    name, injection_polarizations=hf_signal, injection_parameters=injection_parameters,
+    sampling_frequency=sampling_frequency, time_duration=time_duration, outdir=outdir)
+    for name in ['H1', 'L1', 'V1']]
 
+# Set up likelihood
 likelihood = peyote.likelihood.Likelihood(IFOs, waveform_generator)
 
-# New way way of doing it, still not perfect
-sampling_parameters['mass_1'] = peyote.prior.Uniform(lower=35, upper=37, name='mass1')
-sampling_parameters['luminosity_distance'] = peyote.prior.Uniform(lower=30, upper=200, name='luminosity_distance')
-#sampling_parameters["geocent_time"].prior = peyote.prior.Uniform(lower=injection_parameters["geocent_time"] - 0.1,
-#                                                                  upper=injection_parameters["geocent_time"]+0.1)
-
-result, sampler = peyote.sampler.run_sampler(
-    likelihood, priors=sampling_parameters, label='BasicTutorial',
-    sampler='nestle', verbose=True, injection_parameters=injection_parameters)
-sampler.plot_corner()
+# Set up prior
+priors = dict()
+# These parameters will not be sampled
+for key in ['a_1', 'a_2', 'tilt_1', 'tilt_2', 'phi_12', 'phi_jl', 'phase', 'psi', 'iota', 'ra', 'dec', 'geocent_time']:
+    priors[key] = injection_parameters[key]
+priors['luminosity_distance'] = peyote.prior.FromFile(file_name='dL_MDC.txt', minimum=500, maximum=5000,
+                                                      name='luminosity_distance')
+
+# Run sampler
+result = peyote.sampler.run_sampler(likelihood=likelihood, priors=priors, sampler='dynesty',
+                                    label='BasicTutorial', use_ratio=True, npoints=500, verbose=True,
+                                    injection_parameters=injection_parameters, outdir=outdir)
+result.plot_corner()
+result.plot_walks()
+result.plot_distributions()
 print(result)
diff --git a/tutorials/GW150914.py b/tutorials/GW150914.py
index 475dd8b148a05b56aea1e0877775e1219fc93ddd..52cba24b8029d20fa179514857defd36ca720f6a 100644
--- a/tutorials/GW150914.py
+++ b/tutorials/GW150914.py
@@ -15,36 +15,28 @@ L1, sampling_frequency, time_duration = peyote.detector.get_inteferometer('L1',
 IFOs = [H1, L1]
 
 maximum_posterior_estimates = dict(
-    a_1=0, a_2=0, tilt_1=0, tilt_2=0, phi_1=0, phi_2=0,
+    a_1=0, a_2=0, tilt_1=0, tilt_2=0, phi_12=0, phi_jl=0,
     luminosity_distance=410., iota=2.97305, phase=1.145,
     waveform_approximant='IMRPhenomPv2', reference_frequency=50., ra=1.375,
     dec=-1.2108, geocent_time=time_of_event, psi=2.659, mass_1=36, mass_2=29)
 
 # Define the prior
-prior = peyote.prior.parse_floats_to_fixed_priors(maximum_posterior_estimates)
-prior['ra'] = peyote.prior.create_default_prior(name='ra')
-prior['dec'] = peyote.prior.create_default_prior(name='dec')
-prior['psi'] = peyote.prior.Uniform(0, np.pi/2, 'psi')
-prior['phase'] = peyote.prior.Uniform(0, np.pi/2, 'phase')
-prior['iota'] = peyote.prior.create_default_prior(name='iota')
+prior = dict()
 prior['mass_1'] = peyote.prior.Uniform(10, 80, 'mass_1')
 prior['mass_2'] = peyote.prior.Uniform(10, 80, 'mass_2')
-prior['geocent_time'] = peyote.prior.Uniform(
-    time_of_event-0.1, time_of_event+0.1, name='geocent_time')
-prior['luminosity_distance'] = peyote.prior.create_default_prior(
-    name='luminosity_distance')
+prior['geocent_time'] = peyote.prior.Uniform(time_of_event-0.1, time_of_event+0.1, name='geocent_time')
 
 # Create the waveformgenerator
 waveformgenerator = peyote.waveform_generator.WaveformGenerator(
      peyote.source.lal_binary_black_hole, sampling_frequency,
-     time_duration, parameters=prior)
+     time_duration, parameters={'waveform_approximant': 'IMRPhenomPv2', 'reference_frequency': 50})
 
 # Define a likelihood
 likelihood = peyote.likelihood.Likelihood(IFOs, waveformgenerator)
 
 # Run the sampler
-result, sampler = peyote.sampler.run_sampler(
+result = peyote.sampler.run_sampler(
     likelihood, prior, label='GW150914', sampler='pymultinest',
     npoints=1024, resume=False, outdir=outdir, use_ratio=True)
 truth = [maximum_posterior_estimates[x] for x in result.search_parameter_keys]
-sampler.plot_corner(truth=truth)
+result.plot_corner(truth=truth)
diff --git a/tutorials/Injection.py b/tutorials/Injection.py
index 080bdace58d11f0630e17d77a95a8b117b547cd0..25e59c5df4d48ae4bd54517c911cfcfbde0418cf 100644
--- a/tutorials/Injection.py
+++ b/tutorials/Injection.py
@@ -1,44 +1,46 @@
 """
 Tutorial to show signal injection using new features of detector.py
 """
-from __future__ import print_function, division
-
-import corner
-
+from __future__ import division, print_function
+import numpy as np
 import peyote
 
-time_duration = 1.
-sampling_frequency = 4096.
-
-source = peyote.source.BinaryBlackHole('BBH', sampling_frequency, time_duration, mass_1=36., mass_2=29., spin11=0,
-                                       spin12=0, spin13=0, spin21=0, spin22=0,
-                                       spin23=0, luminosity_distance=5000., iota=0., phase=0.,
-                                       waveform_approximant='IMRPhenomPv2', reference_frequency=50., ra=0, dec=1,
-                                       geocent_time=0, psi=1)
-
-IFO_1 = peyote.detector.H1
-IFO_2 = peyote.detector.L1
-IFO_3 = peyote.detector.V1
-IFOs = [IFO_1, IFO_2, IFO_3]
-for IFO in IFOs:
-    IFO.set_data(from_power_spectral_density=True, sampling_frequency=sampling_frequency, duration=time_duration)
-    IFO.inject_signal(source)
-
-
-likelihood = peyote.likelihood.Likelihood(source=source, interferometers=IFOs)
-
-print(likelihood.noise_log_likelihood())
-print(likelihood.log_likelihood())
-print(likelihood.log_likelihood_ratio())
-
-prior = source.copy()
-prior.mass_1 = peyote.parameter.PriorFactory('mass_1', prior=peyote.prior.Uniform(lower=35, upper=37),
-                                             latex_label='$m_1$')
-prior.mass_2 = peyote.parameter.PriorFactory('mass_2', prior=peyote.prior.Uniform(lower=28, upper=30),
-                                             latex_label='$m_2$')
-
-# result = peyote.sampler.run_sampler(likelihood, prior, sampler='dynesty', npoints=100, print_progress=True)
-
-# truths = [source.__dict__[x] for x in result.search_parameter_keys]
-# fig = corner.corner(result.samples, labels=result.labels, truths=truths)
-# fig.savefig('Injection Test')
+peyote.utils.setup_logger()
+
+outdir = 'outdir'
+label = 'injection'
+
+# Create the waveform generator
+waveform_generator = peyote.waveform_generator.WaveformGenerator(
+    peyote.source.lal_binary_black_hole, sampling_frequency=4096, time_duration=4,
+    parameters={'reference_frequency': 50.0, 'waveform_approximant': 'IMRPhenomPv2'})
+
+# Define the prior
+# Merger time is some time in 2018, shame LIGO will never see it...
+time_of_event = np.random.uniform(1198800018, 1230336018)
+prior = dict()
+prior['geocent_time'] = peyote.prior.Uniform(time_of_event-0.01, time_of_event+0.01, name='geocent_time')
+peyote.prior.fill_priors(prior, waveform_generator)
+
+# Create signal injection
+injection_parameters = {name: prior[name].sample() for name in prior}
+print(injection_parameters)
+for parameter in injection_parameters:
+    waveform_generator.parameters[parameter] = injection_parameters[parameter]
+injection_polarizations = waveform_generator.frequency_domain_strain()
+
+# Create interferometers and inject signal
+IFOs = [peyote.detector.get_inteferometer_with_fake_noise_and_injection(
+    name, injection_polarizations=injection_polarizations, injection_parameters=injection_parameters,
+    sampling_frequency=4096, time_duration=4, outdir=outdir) for name in ['H1', 'L1', 'V1', 'GEO600']]
+
+# Define a likelihood
+likelihood = peyote.likelihood.Likelihood(IFOs, waveform_generator)
+
+# Run the sampler
+result = peyote.sampler.run_sampler(
+    likelihood, prior, label='injection', sampler='nestle',
+    npoints=200, resume=False, outdir=outdir, use_ratio=True, injection_parameters=injection_parameters)
+truth = [injection_parameters[parameter] for parameter in result.search_parameter_keys]
+result.plot_corner(truth=truth)
+print(result)
diff --git a/tutorials/custom_source_tutorial.py b/tutorials/custom_source_tutorial.py
index 982c550f49141139c73d35207a822093d309d9dd..e879acac222c353647d47f76c2753f29d7c7cf6b 100644
--- a/tutorials/custom_source_tutorial.py
+++ b/tutorials/custom_source_tutorial.py
@@ -81,9 +81,9 @@ IFOs = generate_and_plot_data(wg, injection_parameters)
 
 likelihood = peyote.likelihood.Likelihood(IFOs, wg)
 
-sampling_parameters['amplitude'] = peyote.prior.Uniform(lower=0.9 * 1e-21, upper=1.1 * 1e-21)
-sampling_parameters['sigma'] = peyote.prior.Uniform(lower=0, upper=10)
-sampling_parameters['mu'] = peyote.prior.Uniform(lower=50, upper=200)
+sampling_parameters['amplitude'] = peyote.prior.Uniform(minimum=0.9 * 1e-21, maximum=1.1 * 1e-21)
+sampling_parameters['sigma'] = peyote.prior.Uniform(minimum=0, maximum=10)
+sampling_parameters['mu'] = peyote.prior.Uniform(minimum=50, maximum=200)
 
 result = peyote.sampler.run_sampler(likelihood, priors=sampling_parameters, verbose=True)
 
diff --git a/tutorials/make_standard_data.py b/tutorials/make_standard_data.py
index eeee0ebdcea91f25e7f28d0bc064b7edc89aa812..2945055a0fb1b4c443091ad3c6ad91fe23029e34 100644
--- a/tutorials/make_standard_data.py
+++ b/tutorials/make_standard_data.py
@@ -19,8 +19,8 @@ simulation_parameters = dict(
     a_2=0,
     tilt_1=0,
     tilt_2=0,
-    phi_1=0,
-    phi_2=0,
+    phi_12=0,
+    phi_jl=0,
     luminosity_distance=100.,
     iota=0.4,
     phase=1.3,