diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4344928d3c515c49e8fb2f86373fc54152cf9d15..fdd2375db0378eca1b030168a754050621140be6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,12 +2,15 @@
 
 ## Unreleased
 
+## [0.3.3] 2018-11-08
+
 Changes currently on master, but not under a tag.
+
+- Removed unnecessary arguments (`ra`, `dec`, `geocent_time`, `psi`) from source functions and replaced them with `**kwargs` where appropriate.
 - Renamed `PriorSet` to `PriorDict`
 - Renamed `BBHPriorSet` to `BBHPriorDict`
 - Renamed `BNSPriorSet` to `BNSPriorDict`
 - Renamed `CalibrationPriorSet` to `CalibrationPriorDict`
-
 - Fixed a bug which caused `Interferometer.detector_tensor` not to update when `latitude`, `longitude`, `xarm_azimuth`, `yarm_azimuth`, `xarm_tilt`, `yarm_tilt` were updated.
 
 ### Changes
@@ -21,6 +24,7 @@ Changes currently on master, but not under a tag.
   compatibility were too much. Note, working in only python 2 or 3, we do not
   expect users to encounter issues.
 - Intermediate data products of samples, nested_samples are stored in the h5
+- Time marginalised GravitationalWaveTransient works with arbitrary time priors.
 
 ## [0.3.1] 2018-11-06
 
diff --git a/bilby/core/sampler/base_sampler.py b/bilby/core/sampler/base_sampler.py
index 5ee847cbb7af74a55d0f269b960c7acd7e597a4a..3a1fd97cfb94a2f4386916cc6d40109fc657053e 100644
--- a/bilby/core/sampler/base_sampler.py
+++ b/bilby/core/sampler/base_sampler.py
@@ -414,15 +414,16 @@ class NestedSampler(Sampler):
 
         Parameters
         ----------
-        sorted_samples, unsorted_samples: array_like
-            Sorted and unsorted values of the samples. These should be of the same
-            shape and contain the same sample values, but in different orders
-        unsorted_loglikelihoods: array_like
+        sorted_samples, unsorted_samples: array-like
+            Sorted and unsorted values of the samples. These should be of the
+            same shape and contain the same sample values, but in different
+            orders
+        unsorted_loglikelihoods: array-like
             The loglikelihoods corresponding to the unsorted_samples
 
         Returns
         -------
-        sorted_loglikelihoods: array_like
+        sorted_loglikelihoods: array-like
             The loglikelihoods reordered to match that of the sorted_samples
 
 
@@ -430,12 +431,13 @@ class NestedSampler(Sampler):
 
         idxs = []
         for ii in range(len(unsorted_loglikelihoods)):
-            idx = np.where(np.all(sorted_samples[ii] == unsorted_samples, axis=1))
+            idx = np.where(np.all(sorted_samples[ii] == unsorted_samples,
+                                  axis=1))[0]
             if len(idx) > 1:
-                raise ValueError(
-                    "Multiple matches found between sorted and unsorted samples")
-            else:
-                idxs.append(idx[0])
+                logger.warning(
+                    "Multiple likelihood matches found between sorted and "
+                    "unsorted samples. Taking the first match.")
+            idxs.append(idx[0])
         return unsorted_loglikelihoods[idxs]
 
 
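A minimal sketch of the row-matching logic patched above, using made-up arrays rather than bilby's API. It shows why the `[0]` is needed on the `np.where` call, which returns a tuple of index arrays:

```python
import numpy as np

unsorted_samples = np.array([[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]])
unsorted_loglikelihoods = np.array([-10.0, -5.0, -1.0])
sorted_samples = unsorted_samples[[2, 0, 1]]  # some permutation

idxs = []
for row in sorted_samples:
    # np.where returns a tuple of arrays; [0] extracts the index array
    idx = np.where(np.all(row == unsorted_samples, axis=1))[0]
    idxs.append(idx[0])

print(unsorted_loglikelihoods[idxs])  # [ -1. -10.  -5.]
```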
diff --git a/bilby/core/sampler/emcee.py b/bilby/core/sampler/emcee.py
index 8f1813e2d58e0fbd3513cc9231c65f60fa2cef94..a7e896de2be39390c8804c4ce428c84fcc3bb16e 100644
--- a/bilby/core/sampler/emcee.py
+++ b/bilby/core/sampler/emcee.py
@@ -28,8 +28,10 @@ class Emcee(MCMCSampler):
     nsteps: int, (100)
         The number of steps
     nburn: int (None)
-        If given, the fixed number of steps to discard as burn-in. Else,
-        nburn is estimated from the autocorrelation time
+        If given, the fixed number of steps to discard as burn-in. These will
+        be discarded from the total number of steps set by `nsteps` and
+        therefore the value must be less than `nsteps`. Else, nburn is
+        estimated from the autocorrelation time
     burn_in_fraction: float, (0.25)
         The fraction of steps to discard as burn-in in the event that the
         autocorrelation time cannot be calculated
@@ -89,6 +91,11 @@ class Emcee(MCMCSampler):
 
     @nburn.setter
     def nburn(self, nburn):
+        if isinstance(nburn, (float, int)):
+            if nburn > self.kwargs['iterations'] - 1:
+                raise ValueError('Number of burn-in samples must be smaller '
+                                 'than the total number of iterations')
+
         self.__nburn = nburn
 
     @property
@@ -121,7 +128,7 @@ class Emcee(MCMCSampler):
         return self.result
 
     def _set_pos0(self):
-        if self.pos0:
+        if self.pos0 is not None:
             logger.debug("Using given initial positions for walkers")
             if isinstance(self.pos0, DataFrame):
                 self.pos0 = self.pos0[self.search_parameter_keys].values
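Why `if self.pos0:` had to become `if self.pos0 is not None:` — truth-testing a pandas DataFrame (or NumPy array) of walker positions is ambiguous and raises. A toy illustration with a hypothetical `pos0`, not bilby internals:

```python
import pandas as pd

pos0 = pd.DataFrame({'m': [0.5, 0.6], 'c': [0.1, 0.2]})
try:
    if pos0:  # raises: the truth value of a DataFrame is ambiguous
        pass
except ValueError as e:
    print(e)

if pos0 is not None:  # the patched check works for any type
    print(pos0.shape)  # (2, 2)
```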
diff --git a/bilby/core/utils.py b/bilby/core/utils.py
index 9b5b4e1a561b7bc3cebb328565f40e310fb1d471..56eafe0ec8741823a88a0e80554ec36741c82c88 100644
--- a/bilby/core/utils.py
+++ b/bilby/core/utils.py
@@ -6,6 +6,7 @@ from math import fmod
 import argparse
 import traceback
 import inspect
+import subprocess
 
 import numpy as np
 
@@ -659,6 +660,38 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
     return grads
 
 
+def run_commandline(cl, log_level=20, raise_error=True, return_output=True):
+    """Run a string cmd as a subprocess, check for errors and return output.
+
+    Parameters
+    ----------
+    cl: str
+        Command to run
+    log_level: int
+        See https://docs.python.org/2/library/logging.html#logging-levels,
+        default is '20' (INFO)
+    raise_error: bool
+        If True (default), re-raise the CalledProcessError if the command
+        fails
+    return_output: bool
+        If True (default), capture the command output and return it
+
+    """
+
+    logger.log(log_level, 'Now executing: ' + cl)
+    if return_output:
+        try:
+            out = subprocess.check_output(
+                cl, stderr=subprocess.STDOUT, shell=True,
+                universal_newlines=True)
+        except subprocess.CalledProcessError as e:
+            logger.log(log_level, 'Execution failed: {}'.format(e.output))
+            if raise_error:
+                raise
+            else:
+                out = 0
+        return out
+    else:
+        process = subprocess.Popen(cl, shell=True)
+        process.communicate()
+
+
 #  Instantiate the default argument parser at runtime
 command_line_args, command_line_parser = set_up_command_line_arguments()
 #  Instantiate the default logging
@@ -680,5 +713,5 @@ else:
             matplotlib.use(backend, warn=False)
             plt.switch_backend(backend)
             break
-        except Exception as e:
+        except Exception:
             print(traceback.format_exc())
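A usage sketch for the new `run_commandline` helper, assuming this patch is applied; `echo` stands in for a real command:

```python
from bilby.core.utils import run_commandline

# Captures stdout/stderr via subprocess.check_output and returns it
out = run_commandline('echo hello', log_level=20)
print(out)  # 'hello\n'

# With return_output=False the command runs via Popen and prints
# directly to the terminal; nothing is returned
run_commandline('echo hello', return_output=False)
```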
diff --git a/bilby/gw/conversion.py b/bilby/gw/conversion.py
index 800bbe8a25692f9166d803633a8d297ab72742e6..1a442d3ddc33da546cdc4e8fd300e9ead0824004 100644
--- a/bilby/gw/conversion.py
+++ b/bilby/gw/conversion.py
@@ -1,9 +1,10 @@
 from __future__ import division
 
 import numpy as np
+from pandas import DataFrame
 
 from ..core.utils import logger, solar_mass
-from ..core.prior import DeltaFunction
+from ..core.prior import DeltaFunction, Interped
 
 try:
     from astropy.cosmology import z_at_value, Planck15
@@ -622,6 +623,11 @@ def _generate_all_cbc_parameters(sample, defaults, base_conversion,
     output_sample, _ = base_conversion(output_sample)
     output_sample = generate_mass_parameters(output_sample)
     output_sample = generate_spin_parameters(output_sample)
+    if likelihood is not None:
+        if likelihood.distance_marginalization:
+            output_sample = \
+                generate_distance_samples_from_marginalized_likelihood(
+                    output_sample, likelihood)
     output_sample = generate_source_frame_parameters(output_sample)
     compute_snrs(output_sample, likelihood)
     return output_sample
@@ -900,18 +906,12 @@ def compute_snrs(sample, likelihood):
         Likelihood function to be applied on the posterior
 
     """
-    temp_sample = sample
     if likelihood is not None:
-        if isinstance(temp_sample, dict):
-            temp = dict()
-            for key in likelihood.waveform_generator.parameters.keys():
-                temp[key] = temp_sample[key]
+        if isinstance(sample, dict):
             signal_polarizations =\
-                likelihood.waveform_generator.frequency_domain_strain(temp)
+                likelihood.waveform_generator.frequency_domain_strain(sample)
             for ifo in likelihood.interferometers:
-                signal = ifo.get_detector_response(
-                    signal_polarizations,
-                    likelihood.waveform_generator.parameters)
+                signal = ifo.get_detector_response(signal_polarizations, sample)
                 sample['{}_matched_filter_snr'.format(ifo.name)] =\
                     ifo.matched_filter_snr_squared(signal=signal) ** 0.5
                 sample['{}_optimal_snr'.format(ifo.name)] = \
@@ -922,17 +922,13 @@ def compute_snrs(sample, likelihood):
             all_interferometers = likelihood.interferometers
             matched_filter_snrs = {ifo.name: [] for ifo in all_interferometers}
             optimal_snrs = {ifo.name: [] for ifo in all_interferometers}
-            for ii in range(len(temp_sample)):
-                temp = dict()
-                for key in set(temp_sample.keys()).intersection(
-                        likelihood.waveform_generator.parameters.keys()):
-                    temp[key] = temp_sample[key][ii]
+            for ii in range(len(sample)):
                 signal_polarizations =\
-                    likelihood.waveform_generator.frequency_domain_strain(temp)
+                    likelihood.waveform_generator.frequency_domain_strain(
+                        dict(sample.iloc[ii]))
                 for ifo in all_interferometers:
                     signal = ifo.get_detector_response(
-                        signal_polarizations,
-                        likelihood.waveform_generator.parameters)
+                        signal_polarizations, sample.iloc[ii])
                     matched_filter_snrs[ifo.name].append(
                         ifo.matched_filter_snr_squared(signal=signal) ** 0.5)
                     optimal_snrs[ifo.name].append(
@@ -948,3 +944,83 @@ def compute_snrs(sample, likelihood):
 
     else:
         logger.debug('Not computing SNRs.')
+
+
+def generate_distance_samples_from_marginalized_likelihood(samples, likelihood):
+    """
+    Reconstruct the distance posterior from a run which used a likelihood
+    that explicitly marginalised over distance.
+
+    See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
+
+    Parameters
+    ----------
+    samples: DataFrame
+        Posterior from run with distance marginalisation turned on.
+    likelihood: bilby.gw.likelihood.GravitationalWaveTransient
+        Likelihood used during sampling.
+
+    Returns
+    -------
+    samples: DataFrame
+        The posterior with luminosity distance samples added.
+    """
+    if not likelihood.distance_marginalization:
+        return samples
+    if likelihood.phase_marginalization or likelihood.time_marginalization:
+        logger.warning('Cannot currently reconstruct distance posterior '
+                       'when other marginalizations are turned on.')
+        return samples
+    if isinstance(samples, dict):
+        pass  # a single sample dict is returned unchanged
+    elif isinstance(samples, DataFrame):
+        for ii in range(len(samples)):
+            temp = _generate_distance_sample_from_marginalized_likelihood(
+                dict(samples.iloc[ii]), likelihood)
+            samples.loc[samples.index[ii], 'luminosity_distance'] = \
+                temp['luminosity_distance']
+    return samples
+
+
+def _generate_distance_sample_from_marginalized_likelihood(sample, likelihood):
+    """
+    Generate a single sample from the posterior distribution for luminosity
+    distance when using a likelihood which explicitly marginalises over
+    distance.
+
+    See Eq. (C29-C32) of https://arxiv.org/abs/1809.02293
+
+    Parameters
+    ----------
+    sample: dict
+        The set of parameters used with the marginalised likelihood.
+    likelihood: bilby.gw.likelihood.GravitationalWaveTransient
+        The likelihood used.
+
+    Returns
+    -------
+    sample: dict
+        Modified dictionary with the distance sampled from the posterior.
+    """
+    signal_polarizations = \
+        likelihood.waveform_generator.frequency_domain_strain(sample)
+    rho_mf_sq = 0
+    rho_opt_sq = 0
+    for ifo in likelihood.interferometers:
+        signal = ifo.get_detector_response(signal_polarizations, sample)
+        rho_mf_sq += ifo.matched_filter_snr_squared(signal=signal)
+        rho_opt_sq += ifo.optimal_snr_squared(signal=signal)
+
+    rho_mf_sq_dist = (rho_mf_sq * sample['luminosity_distance'] /
+                      likelihood._distance_array)
+
+    rho_opt_sq_dist = (rho_opt_sq * sample['luminosity_distance']**2 /
+                       likelihood._distance_array**2)
+
+    distance_log_like = (rho_mf_sq_dist.real - rho_opt_sq_dist.real / 2)
+
+    distance_post = np.exp(distance_log_like - max(distance_log_like)) *\
+        likelihood.distance_prior_array
+
+    sample['luminosity_distance'] = Interped(
+        likelihood._distance_array, distance_post).sample()
+    return sample
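A numerical sketch of the reconstruction above, with made-up SNR values and a d² prior standing in for `likelihood.distance_prior_array`. Per Eq. (C29-C32) of arXiv:1809.02293, the matched-filter term scales as 1/d and the optimal-SNR term as 1/d²:

```python
import numpy as np

d0 = 400.0                           # sampled luminosity distance (Mpc)
distance_array = np.linspace(50, 2000, 1000)
rho_mf_sq, rho_opt_sq = 60.0, 64.0   # SNR^2 values evaluated at d0

rho_mf_sq_dist = rho_mf_sq * d0 / distance_array
rho_opt_sq_dist = rho_opt_sq * d0 ** 2 / distance_array ** 2
distance_log_like = rho_mf_sq_dist - rho_opt_sq_dist / 2

prior = distance_array ** 2          # stand-in distance prior
post = np.exp(distance_log_like - distance_log_like.max()) * prior

# Inverse-transform sampling stands in for bilby's Interped prior
cdf = np.cumsum(post) / np.sum(post)
print(np.interp(np.random.uniform(), cdf, distance_array))
```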
diff --git a/bilby/gw/detector.py b/bilby/gw/detector.py
index 20b67f79cec9a67414f9eb155c6a37ae3e5ce051..d87ce2aaf8e9d6256424271de6ea82b8b4857d7b 100644
--- a/bilby/gw/detector.py
+++ b/bilby/gw/detector.py
@@ -2125,3 +2125,44 @@ def get_event_data(
             logger.warning('No data found for {}.'.format(name))
 
     return InterferometerList(interferometers)
+
+
+def load_data_from_cache_file(
+        cache_file, trigger_time, segment_duration, psd_duration,
+        channel_name=None):
+    """ Load strain data and a PSD for a single interferometer from an
+    LDR cache file, selecting the frames which cover the analysis and PSD
+    segments. Returns the interferometer, or None if either is missing.
+    """
+    data_set = False
+    psd_set = False
+    segment_start = trigger_time - segment_duration + 1
+    psd_start = segment_start - psd_duration - 4
+    with open(cache_file, 'r') as ff:
+        lines = ff.readlines()
+        ifo_name = lines[0][0] + '1'
+        ifo = get_empty_interferometer(ifo_name)
+        for line in lines:
+            line = line.strip()
+            _, _, frame_start, frame_duration, frame_name = line.split(' ')
+            frame_start = float(frame_start)
+            frame_duration = float(frame_duration)
+            if frame_name[:4] == 'file':
+                frame_name = frame_name[16:]
+            if not data_set and (frame_start < segment_start) and \
+                    (segment_start < frame_start + frame_duration):
+                ifo.set_strain_data_from_frame_file(
+                    frame_name, 4096, segment_duration,
+                    start_time=segment_start,
+                    channel=channel_name, buffer_time=0)
+                data_set = True
+            if not psd_set and (frame_start < psd_start) and \
+                    (psd_start + psd_duration < frame_start + frame_duration):
+                ifo.power_spectral_density =\
+                    PowerSpectralDensity.from_frame_file(
+                        frame_name, psd_start_time=psd_start,
+                        psd_duration=psd_duration,
+                        channel=channel_name, sampling_frequency=4096)
+                psd_set = True
+    if data_set and psd_set:
+        return ifo
+    elif not data_set:
+        logger.warning('Data not loaded for {}'.format(ifo.name))
+    elif not psd_set:
+        logger.warning('PSD not created for {}'.format(ifo.name))
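The frame-selection conditions are written with `and` (fixed above) rather than `&`, because `not` binds more loosely than `&`, so `not flag & test` parses as `not (flag & test)`. A two-line demonstration with made-up flags:

```python
data_set = False
covers_segment = False  # this frame does not cover the segment

print(not data_set & covers_segment)    # True  -- would wrongly trigger
print(not data_set and covers_segment)  # False -- intended behaviour
```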
diff --git a/bilby/gw/likelihood.py b/bilby/gw/likelihood.py
index e775cd639b44dcf60e5a2ec7afdf3abdcc620de5..eaa6d9dadfe4a74c45f172b60f7647444d9a3d87 100644
--- a/bilby/gw/likelihood.py
+++ b/bilby/gw/likelihood.py
@@ -169,8 +169,8 @@ class GravitationalWaveTransient(likelihood.Likelihood):
             if self.time_marginalization:
                 matched_filter_snr_squared_tc_array +=\
                     4 / self.waveform_generator.duration * np.fft.fft(
-                        signal_ifo.conjugate()[0:-1] *
-                        interferometer.frequency_domain_strain[0:-1] /
+                        signal_ifo[0:-1] *
+                        interferometer.frequency_domain_strain.conjugate()[0:-1] /
                         interferometer.power_spectral_density_array[0:-1])
 
         if self.time_marginalization:
@@ -181,18 +181,21 @@ class GravitationalWaveTransient(likelihood.Likelihood):
                 if self.phase_marginalization:
                     dist_marged_log_l_tc_array = self._interp_dist_margd_loglikelihood(
                         abs(rho_mf_ref_tc_array), rho_opt_ref)
-                    log_l = logsumexp(dist_marged_log_l_tc_array) + self.tc_log_norm
+                    log_l = logsumexp(dist_marged_log_l_tc_array,
+                                      b=self.time_prior_array)
                 else:
                     dist_marged_log_l_tc_array = self._interp_dist_margd_loglikelihood(
                         rho_mf_ref_tc_array.real, rho_opt_ref)
-                    log_l = logsumexp(dist_marged_log_l_tc_array) + self.tc_log_norm
+                    log_l = logsumexp(dist_marged_log_l_tc_array,
+                                      b=self.time_prior_array)
             elif self.phase_marginalization:
-                log_l = (
-                    logsumexp(self._bessel_function_interped(abs(matched_filter_snr_squared_tc_array))) -
-                    optimal_snr_squared / 2 + self.tc_log_norm)
+                log_l = logsumexp(self._bessel_function_interped(abs(
+                    matched_filter_snr_squared_tc_array)),
+                    b=self.time_prior_array) - optimal_snr_squared / 2
             else:
-                log_l = (logsumexp(matched_filter_snr_squared_tc_array.real) +
-                         self.tc_log_norm - optimal_snr_squared / 2)
+                log_l = logsumexp(
+                    matched_filter_snr_squared_tc_array.real,
+                    b=self.time_prior_array) - optimal_snr_squared / 2
 
         elif self.distance_marginalization:
             rho_mf_ref, rho_opt_ref = self._setup_rho(matched_filter_snr_squared, optimal_snr_squared)
@@ -271,12 +274,19 @@ class GravitationalWaveTransient(likelihood.Likelihood):
 
     def _setup_phase_marginalization(self):
         self._bessel_function_interped = interp1d(
-            np.logspace(-5, 10, int(1e6)), np.log([i0e(snr) for snr in np.logspace(-5, 10, int(1e6))]) +
-            np.logspace(-5, 10, int(1e6)), bounds_error=False, fill_value=(0, np.nan))
+            np.logspace(-5, 10, int(1e6)), np.logspace(-5, 10, int(1e6)) +
+            np.log([i0e(snr) for snr in np.logspace(-5, 10, int(1e6))]),
+            bounds_error=False, fill_value=(0, np.nan))
 
     def _setup_time_marginalization(self):
         delta_tc = 2 / self.waveform_generator.sampling_frequency
-        self.tc_log_norm = np.log(delta_tc / self.waveform_generator.duration)
+        times =\
+            self.interferometers.start_time + np.linspace(
+                0, self.interferometers.duration,
+                int(self.interferometers.duration / 2 *
+                    self.waveform_generator.sampling_frequency) + 1)[1:]
+        self.time_prior_array =\
+            self.prior['geocent_time'].prob(times) * delta_tc
 
 
 class BasicGravitationalWaveTransient(likelihood.Likelihood):
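The likelihood change swaps `logsumexp(x) + tc_log_norm` for `logsumexp(x, b=time_prior_array)`. For a uniform time prior the two agree exactly, since `logsumexp(x, b=w) = log(sum(w * exp(x)))`. A quick check with toy numbers:

```python
import numpy as np
from scipy.special import logsumexp

duration, delta_tc = 4.0, 2 / 2048
x = np.random.uniform(-5, 5, 4096)       # per-bin log-likelihoods

weights = np.full_like(x, 1 / duration) * delta_tc  # time_prior_array

old = logsumexp(x) + np.log(delta_tc / duration)    # tc_log_norm form
new = logsumexp(x, b=weights)
print(np.isclose(old, new))  # True
```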
diff --git a/bilby/gw/prior.py b/bilby/gw/prior.py
index 344e193949e3a270cfd142598a97b991e53a8896..d9eb726240e5d421e35290b7ad056c5334954f91 100644
--- a/bilby/gw/prior.py
+++ b/bilby/gw/prior.py
@@ -239,7 +239,7 @@ class CalibrationPriorDict(PriorDict):
         PriorDict.__init__(self, dictionary=dictionary, filename=filename)
         self.source = None
 
-    def write_to_file(self, outdir, label):
+    def to_file(self, outdir, label):
         """
         Write the prior to file. This includes information about the source if
         possible.
@@ -251,7 +251,7 @@ class CalibrationPriorDict(PriorDict):
         label: str
             Label for prior.
         """
-        PriorDict.write_to_file(self, outdir=outdir, label=label)
+        PriorDict.to_file(self, outdir=outdir, label=label)
         if self.source is not None:
             prior_file = os.path.join(outdir, "{}.prior".format(label))
             with open(prior_file, "a") as outfile:
diff --git a/bilby/gw/source.py b/bilby/gw/source.py
index 75bf22febf755b10732b21d057d7a738826f44bc..7d927b1c15123bfefad5a66dfa9c30f33ca97487 100644
--- a/bilby/gw/source.py
+++ b/bilby/gw/source.py
@@ -15,7 +15,7 @@ except ImportError:
 
 def lal_binary_black_hole(
         frequency_array, mass_1, mass_2, luminosity_distance, a_1, tilt_1, phi_12, a_2, tilt_2, phi_jl,
-        iota, phase, ra, dec, geocent_time, psi, **kwargs):
+        iota, phase, **kwargs):
     """ A Binary Black Hole waveform model using lalsimulation
 
     Parameters
@@ -44,14 +44,6 @@ def lal_binary_black_hole(
         Orbital inclination
     phase: float
         The phase at coalescence
-    ra: float
-        The right ascension of the binary
-    dec: float
-        The declination of the object
-    geocent_time: float
-        The time at coalescence
-    psi: float
-        Orbital polarisation
     kwargs: dict
         Optional keyword arguments
 
@@ -114,8 +106,7 @@ def lal_binary_black_hole(
 
 
 def lal_eccentric_binary_black_hole_no_spins(
-        frequency_array, mass_1, mass_2, eccentricity, luminosity_distance, iota, phase, ra, dec,
-        geocent_time, psi, **kwargs):
+        frequency_array, mass_1, mass_2, eccentricity, luminosity_distance, iota, phase, **kwargs):
     """ Eccentric binary black hole waveform model using lalsimulation (EccentricFD)
 
     Parameters
@@ -134,14 +125,6 @@ def lal_eccentric_binary_black_hole_no_spins(
         Orbital inclination
     phase: float
         The phase at coalescence
-    ra: float
-        The right ascension of the binary
-    dec: float
-        The declination of the object
-    geocent_time: float
-        The time at coalescence
-    psi: float
-        Orbital polarisation
     kwargs: dict
         Optional keyword arguments
 
@@ -194,7 +177,7 @@ def lal_eccentric_binary_black_hole_no_spins(
     return {'plus': h_plus, 'cross': h_cross}
 
 
-def sinegaussian(frequency_array, hrss, Q, frequency, ra, dec, geocent_time, psi):
+def sinegaussian(frequency_array, hrss, Q, frequency, **kwargs):
     tau = Q / (np.sqrt(2.0) * np.pi * frequency)
     temp = Q / (4.0 * np.sqrt(np.pi) * frequency)
     fm = frequency_array - frequency
@@ -214,8 +197,7 @@ def sinegaussian(frequency_array, hrss, Q, frequency, ra, dec, geocent_time, psi
 
 
 def supernova(
-        frequency_array, realPCs, imagPCs, file_path, luminosity_distance, ra,
-        dec, geocent_time, psi):
+        frequency_array, realPCs, imagPCs, file_path, luminosity_distance, **kwargs):
     """ A supernova NR simulation for injections """
 
     realhplus, imaghplus, realhcross, imaghcross = np.loadtxt(
@@ -231,7 +213,7 @@ def supernova(
 
 def supernova_pca_model(
         frequency_array, pc_coeff1, pc_coeff2, pc_coeff3, pc_coeff4, pc_coeff5,
-        luminosity_distance, ra, dec, geocent_time, psi, **kwargs):
+        luminosity_distance, **kwargs):
     """ Supernova signal model """
 
     realPCs = kwargs['realPCs']
@@ -256,7 +238,7 @@ def supernova_pca_model(
 
 def lal_binary_neutron_star(
         frequency_array, mass_1, mass_2, luminosity_distance, chi_1, chi_2,
-        iota, phase, lambda_1, lambda_2, ra, dec, geocent_time, psi, **kwargs):
+        iota, phase, lambda_1, lambda_2, **kwargs):
     """ A Binary Neutron Star waveform model using lalsimulation
 
     Parameters
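A sketch of why moving the extrinsic parameters into `**kwargs` is backwards compatible: callers can keep passing `ra`, `dec`, `geocent_time` and `psi`, and the source function silently absorbs them. A toy function, not the real `lal_binary_black_hole`:

```python
import numpy as np


def toy_source(frequency_array, mass_1, mass_2, **kwargs):
    # ra, dec, geocent_time, psi land in kwargs and are unused here;
    # they are only needed later, by the detector response
    return {'plus': 0 * frequency_array, 'cross': 0 * frequency_array}


pols = toy_source(np.linspace(20, 1024, 100), mass_1=30., mass_2=29.,
                  ra=1.375, dec=-1.2108, geocent_time=1126259642.4,
                  psi=2.659)
print(sorted(pols.keys()))  # ['cross', 'plus']
```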
diff --git a/bilby/gw/utils.py b/bilby/gw/utils.py
index 4d2457ab24159df52b2697ea86eca5275b401ff0..668f42c0841019df3e70a7f109c6dfc3fa2c22f0 100644
--- a/bilby/gw/utils.py
+++ b/bilby/gw/utils.py
@@ -1,10 +1,12 @@
 from __future__ import division
 import os
+import json
 
 import numpy as np
 
 from ..core.utils import (gps_time_to_gmst, ra_dec_to_theta_phi,
-                          speed_of_light, logger)
+                          speed_of_light, logger, run_commandline,
+                          check_directory_exists_and_if_not_mkdir)
 
 try:
     from gwpy.timeseries import TimeSeries
@@ -407,6 +409,109 @@ def read_frame_file(file_name, start_time, end_time, channel=None, buffer_time=1
         return None
 
 
+def get_gracedb(gracedb, outdir, duration, calibration, detectors):
+    """ Download a GraceDB candidate and a data cache file per detector """
+    candidate = gracedb_to_json(gracedb, outdir)
+    trigger_time = candidate['gpstime']
+    gps_start_time = trigger_time - duration
+    cache_files = []
+    for det in detectors:
+        output_cache_file = gw_data_find(
+            det, gps_start_time, duration, calibration,
+            outdir=outdir)
+        cache_files.append(output_cache_file)
+    return candidate, cache_files
+
+
+def gracedb_to_json(gracedb, outdir=None):
+    """ Script to download a GraceDB candidate
+
+    Parameters
+    ----------
+    gracedb: str
+        The UID of the GraceDB candidate
+    outdir: str, optional
+        If given, a string identifying the location in which to store the json
+    """
+    logger.info(
+        'Starting routine to download GraceDb candidate {}'.format(gracedb))
+    from ligo.gracedb.rest import GraceDb
+    import urllib3
+
+    logger.info('Initialise client and attempt to download')
+    try:
+        client = GraceDb()
+    except FileNotFoundError:
+        raise ValueError(
+            'Failed to authenticate with gracedb: check your X509 '
+            'certificate is accessible and valid')
+    try:
+        candidate = client.event(gracedb)
+        logger.info('Successfully downloaded candidate')
+    except urllib3.HTTPError:
+        raise ValueError("No candidate found")
+
+    json_output = candidate.json()
+
+    if outdir is not None:
+        check_directory_exists_and_if_not_mkdir(outdir)
+        outfilepath = os.path.join(outdir, '{}.json'.format(gracedb))
+        logger.info('Writing candidate to {}'.format(outfilepath))
+        with open(outfilepath, 'w') as outfile:
+            json.dump(json_output, outfile, indent=2)
+
+    return json_output
+
+
+def gw_data_find(observatory, gps_start_time, duration, calibration,
+                 outdir='.'):
+    """ Builds a gw_data_find call and process output
+
+    Parameters
+    ----------
+    observatory: str, {H1, L1, V1}
+        Observatory description
+    gps_start_time: float
+        The start time in gps to look for data
+    duration: int
+        The duration of data to query, in seconds
+    calibration: int {1, 2}
+        Use C01 or C02 calibration
+    outdir: string
+        A path to the directory where output is stored
+
+    Returns
+    -------
+    output_cache_file: str
+        Path to the output cache file
+
+    """
+    logger.info('Building gw_data_find command line')
+
+    observatory_lookup = dict(H1='H', L1='L', V1='V')
+    observatory_code = observatory_lookup[observatory]
+
+    dtype = '{}_HOFT_C0{}'.format(observatory, calibration)
+    logger.info('Using LDRDataFind query type {}'.format(dtype))
+
+    cache_file = '{}-{}_CACHE-{}-{}.lcf'.format(
+        observatory, dtype, gps_start_time, duration)
+    output_cache_file = os.path.join(outdir, cache_file)
+
+    gps_end_time = gps_start_time + duration
+
+    cl_list = ['gw_data_find']
+    cl_list.append('--observatory {}'.format(observatory_code))
+    cl_list.append('--gps-start-time {}'.format(gps_start_time))
+    cl_list.append('--gps-end-time {}'.format(gps_end_time))
+    cl_list.append('--type {}'.format(dtype))
+    cl_list.append('--output {}'.format(output_cache_file))
+    cl_list.append('--url-type file')
+    cl_list.append('--lal-cache')
+    cl = ' '.join(cl_list)
+    run_commandline(cl)
+    return output_cache_file
+
+
 def save_to_fits(posterior, outdir, label):
     """ Generate a fits file from a posterior array """
     from astropy.io import fits
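What the `gw_data_find` builder above assembles for some illustrative inputs (running the command itself requires the LDR tools):

```python
observatory, calibration = 'H1', 1
gps_start_time, duration = 1126259640, 4

dtype = '{}_HOFT_C0{}'.format(observatory, calibration)
cache_file = '{}-{}_CACHE-{}-{}.lcf'.format(
    observatory, dtype, gps_start_time, duration)
print(' '.join([
    'gw_data_find', '--observatory H',
    '--gps-start-time {}'.format(gps_start_time),
    '--gps-end-time {}'.format(gps_start_time + duration),
    '--type {}'.format(dtype), '--output {}'.format(cache_file),
    '--url-type file', '--lal-cache']))
```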
diff --git a/examples/injection_examples/create_your_own_time_domain_source_model.py b/examples/injection_examples/create_your_own_time_domain_source_model.py
index 82ca24d187775fb8213910498fa2dddd940b24e7..f8ebcd12b1f53fc0e0da071f0c2a2207d01a5649 100644
--- a/examples/injection_examples/create_your_own_time_domain_source_model.py
+++ b/examples/injection_examples/create_your_own_time_domain_source_model.py
@@ -49,7 +49,7 @@ ifos.inject_signal(waveform_generator=waveform,
 prior = injection_parameters.copy()
 prior['amplitude'] = bilby.core.prior.LogUniform(1e-23, 1e-21, r'$h_0$')
 prior['damping_time'] = bilby.core.prior.Uniform(
-    0, 1, r'damping time', unit='$s$')
+    0.01, 1, r'damping time', unit='$s$')
 prior['frequency'] = bilby.core.prior.Uniform(0, 200, r'frequency', unit='Hz')
 prior['phase'] = bilby.core.prior.Uniform(-np.pi / 2, np.pi / 2, r'$\phi$')
 
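The damping-time lower bound moves off zero because a decaying-sinusoid model divides by `damping_time`: at exactly zero the waveform evaluates to NaN at `t=0` (and triggers divide-by-zero warnings). A toy stand-in, not the example's actual source model:

```python
import numpy as np


def damped_sinusoid(t, amplitude, damping_time, frequency):
    return (amplitude * np.exp(-t / damping_time) *
            np.sin(2 * np.pi * frequency * t))


t = np.linspace(0, 1, 100)
print(damped_sinusoid(t, 1e-22, 0.5, 50)[:3])  # fine for tau > 0
# damping_time=0 gives 0/0 = NaN at t=0 and runtime warnings elsewhere
```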
diff --git a/examples/other_examples/add_multiple_results.py b/examples/other_examples/add_multiple_results.py
deleted file mode 100644
index 83f57b1a62ca15b16735bd6677e11e96a3bb260c..0000000000000000000000000000000000000000
--- a/examples/other_examples/add_multiple_results.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/env python
-"""
-An example of running two sets of posterior sample estimations and adding them
-"""
-from __future__ import division
-import bilby
-import numpy as np
-
-outdir = 'outdir'
-
-
-def model(time, m, c):
-    return time * m + c
-
-
-injection_parameters = dict(m=0.5, c=0.2)
-sigma = 1
-sampling_frequency = 10
-time_duration = 10
-time = np.arange(0, time_duration, 1 / sampling_frequency)
-N = len(time)
-data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
-
-likelihood = bilby.core.likelihood.GaussianLikelihood(
-    time, data, model, sigma=sigma)
-
-priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 1, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
-
-resultA = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='emcee', nsteps=1000,
-    nburn=500, outdir=outdir, label='A')
-
-resultB = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='emcee', nsteps=1000,
-    nburn=500, outdir=outdir, label='B')
-
-resultA.plot_walkers()
-result = resultA + resultB
-result.plot_corner()
-print(result)
diff --git a/examples/other_examples/starting_mcmc_chains_near_to_the_peak.py b/examples/other_examples/starting_mcmc_chains_near_to_the_peak.py
new file mode 100644
index 0000000000000000000000000000000000000000..3fcbf45d32dc9f381ceb7a8961233946c63dcb1e
--- /dev/null
+++ b/examples/other_examples/starting_mcmc_chains_near_to_the_peak.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+"""
+An example of using emcee, but starting the walkers off close to the peak (or
+any other arbitrary point). This is based on the
+linear_regression_with_unknown_noise.py example.
+"""
+from __future__ import division
+import bilby
+import numpy as np
+import pandas as pd
+
+# A few simple setup steps
+label = 'starting_mcmc_chains_near_to_the_peak'
+outdir = 'outdir'
+
+
+# First, we define our "signal model", in this case a simple linear function
+def model(time, m, c):
+    return time * m + c
+
+
+# Now we define the injection parameters which we make simulated data with
+injection_parameters = dict(m=0.5, c=0.2)
+
+# For this example, we'll inject standard Gaussian noise
+sigma = 1
+
+# These lines of code generate the fake data
+sampling_frequency = 10
+time_duration = 10
+time = np.arange(0, time_duration, 1 / sampling_frequency)
+N = len(time)
+data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
+
+# Now let's instantiate the built-in GaussianLikelihood, giving it
+# the time, data and signal model. Note that, because we do not pass in
+# sigma, it is treated as unknown and marginalised over during sampling
+likelihood = bilby.core.likelihood.GaussianLikelihood(time, data, model)
+
+# Here we define the prior distribution used while sampling
+priors = bilby.core.prior.PriorDict()
+priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
+priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors['sigma'] = bilby.core.prior.Uniform(0, 10, 'sigma')
+
+# Set values to determine how to sample with emcee
+nwalkers = 100
+nsteps = 1000
+sampler = 'emcee'
+
+# Run the sampler from the default pos0 (samples drawn from the prior)
+result = bilby.run_sampler(
+    likelihood=likelihood, priors=priors, sampler=sampler, nsteps=nsteps,
+    nwalkers=nwalkers, outdir=outdir, label=label + 'default_pos0')
+result.plot_walkers()
+
+
+# Here we define a distribution to draw the walkers' starting positions from.
+start_pos = bilby.core.prior.PriorDict()
+start_pos['m'] = bilby.core.prior.Normal(injection_parameters['m'], 0.1)
+start_pos['c'] = bilby.core.prior.Normal(injection_parameters['c'], 0.1)
+start_pos['sigma'] = bilby.core.prior.Normal(sigma, 0.1)
+
+# This line generates the initial starting-position DataFrame by sampling
+# nwalkers times from the start_pos distribution. Note, you can generate
+# this any way you like, but it must be a DataFrame with a length equal
+# to the number of walkers
+pos0 = pd.DataFrame(start_pos.sample(nwalkers))
+
+# Run the sampler with our created pos0
+result = bilby.run_sampler(
+    likelihood=likelihood, priors=priors, sampler=sampler, nsteps=nsteps,
+    nwalkers=nwalkers, outdir=outdir, label=label + 'user_pos0', pos0=pos0)
+result.plot_walkers()
+
+
+# After running this script, in the outdir, you'll find two png images
+# showing the result of the runs with and without the initialisation.
diff --git a/setup.py b/setup.py
index 2ec62c6b013a0b1407da860ec5599f7db4e68f24..9d6ac48440c94bc353abc3090329e8b58c589a53 100644
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@ def readfile(filename):
     return filecontents
 
 
-VERSION = '0.3.2'
+VERSION = '0.3.3'
 version_file = write_version_file(VERSION)
 long_description = get_long_description()
 
diff --git a/test/gw_likelihood_test.py b/test/gw_likelihood_test.py
index 133fadde7eb3bad57b1b1d5bf20065664ec175d6..989aaa656c142fb5df3489f1a82ade7bf054d553 100644
--- a/test/gw_likelihood_test.py
+++ b/test/gw_likelihood_test.py
@@ -146,35 +146,27 @@ class TestTimeMarginalization(unittest.TestCase):
         self.parameters = dict(
             mass_1=31., mass_2=29., a_1=0.4, a_2=0.3, tilt_1=0.0, tilt_2=0.0,
             phi_12=1.7, phi_jl=0.3, luminosity_distance=4000., iota=0.4,
-            psi=2.659, phase=1.3, geocent_time=1126259642.413, ra=1.375,
+            psi=2.659, phase=1.3, geocent_time=1126259640, ra=1.375,
             dec=-1.2108)
 
         self.interferometers = bilby.gw.detector.InterferometerList(['H1'])
         self.interferometers.set_strain_data_from_power_spectral_densities(
-            sampling_frequency=self.sampling_frequency, duration=self.duration)
+            sampling_frequency=self.sampling_frequency, duration=self.duration,
+            start_time=1126259640)
 
         self.waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
             duration=self.duration, sampling_frequency=self.sampling_frequency,
             frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
-        )
+            start_time=1126259640)
 
         self.prior = bilby.gw.prior.BBHPriorDict()
-        self.prior['geocent_time'] = bilby.prior.Uniform(
-            minimum=self.parameters['geocent_time'] - self.duration / 2,
-            maximum=self.parameters['geocent_time'] + self.duration / 2)
 
         self.likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
             interferometers=self.interferometers,
             waveform_generator=self.waveform_generator, prior=self.prior.copy()
         )
 
-        self.time = bilby.gw.likelihood.GravitationalWaveTransient(
-            interferometers=self.interferometers,
-            waveform_generator=self.waveform_generator,
-            time_marginalization=True, prior=self.prior.copy()
-        )
-        for like in [self.likelihood, self.time]:
-            like.parameters = self.parameters.copy()
+        self.likelihood.parameters = self.parameters.copy()
 
     def tearDown(self):
         del self.duration
@@ -184,20 +176,62 @@ class TestTimeMarginalization(unittest.TestCase):
         del self.waveform_generator
         del self.prior
         del self.likelihood
-        del self.time
 
-    def test_time_marginalisation(self):
-        """Test time marginalised likelihood matches brute force version"""
-        like = []
-        times = np.linspace(self.prior['geocent_time'].minimum,
-                            self.prior['geocent_time'].maximum, 4097)[:-1]
+    def test_time_marginalisation_full_segment(self):
+        """
+        Test time marginalised likelihood matches brute force version over the
+        whole segment.
+        """
+        likes = []
+        lls = []
+        self.prior['geocent_time'] = bilby.prior.Uniform(
+            minimum=self.waveform_generator.start_time,
+            maximum=self.waveform_generator.start_time + self.duration)
+        self.time = bilby.gw.likelihood.GravitationalWaveTransient(
+            interferometers=self.interferometers,
+            waveform_generator=self.waveform_generator,
+            time_marginalization=True, prior=self.prior.copy()
+        )
+        times = self.waveform_generator.start_time + np.linspace(
+            0, self.duration, 4097)[:-1]
         for time in times:
             self.likelihood.parameters['geocent_time'] = time
-            like.append(np.exp(self.likelihood.log_likelihood_ratio()))
+            lls.append(self.likelihood.log_likelihood_ratio())
+            likes.append(np.exp(lls[-1]))
 
-        marg_like = np.log(np.trapz(like, times)
-                           / self.waveform_generator.duration)
+        marg_like = np.log(np.trapz(
+            likes * self.prior['geocent_time'].prob(times), times))
         self.time.parameters = self.parameters.copy()
+        self.time.parameters['geocent_time'] = self.waveform_generator.start_time
+        self.assertAlmostEqual(marg_like, self.time.log_likelihood_ratio(),
+                               delta=0.5)
+
+    def test_time_marginalisation_partial_segment(self):
+        """
+        Test time marginalised likelihood matches brute force version over
+        part of the segment.
+        """
+        likes = []
+        lls = []
+        self.prior['geocent_time'] = bilby.prior.Uniform(
+            minimum=self.parameters['geocent_time'] + 1 - 0.1,
+            maximum=self.parameters['geocent_time'] + 1 + 0.1)
+        self.time = bilby.gw.likelihood.GravitationalWaveTransient(
+            interferometers=self.interferometers,
+            waveform_generator=self.waveform_generator,
+            time_marginalization=True, prior=self.prior.copy()
+        )
+        times = self.waveform_generator.start_time + np.linspace(
+            0, self.duration, 4097)[:-1]
+        for time in times:
+            self.likelihood.parameters['geocent_time'] = time
+            lls.append(self.likelihood.log_likelihood_ratio())
+            likes.append(np.exp(lls[-1]))
+
+        marg_like = np.log(np.trapz(
+            likes * self.prior['geocent_time'].prob(times), times))
+        self.time.parameters = self.parameters.copy()
+        self.time.parameters['geocent_time'] = self.waveform_generator.start_time
         self.assertAlmostEqual(marg_like, self.time.log_likelihood_ratio(),
                                delta=0.5)
 
@@ -343,12 +377,13 @@ class TestTimePhaseMarginalization(unittest.TestCase):
 
         self.interferometers = bilby.gw.detector.InterferometerList(['H1'])
         self.interferometers.set_strain_data_from_power_spectral_densities(
-            sampling_frequency=self.sampling_frequency, duration=self.duration)
+            sampling_frequency=self.sampling_frequency, duration=self.duration,
+            start_time=1126259640)
 
         self.waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
             duration=self.duration, sampling_frequency=self.sampling_frequency,
             frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
-        )
+            start_time=1126259640)
 
         self.prior = bilby.gw.prior.BBHPriorDict()
         self.prior['geocent_time'] = bilby.prior.Uniform(
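The brute-force check these tests perform, in miniature: the marginalised log-likelihood should match the log of the prior-weighted trapezoid integral of the unmarginalised likelihood. Toy numbers only:

```python
import numpy as np

times = np.linspace(0, 4, 4097)[:-1]
log_l = -0.5 * ((times - 2.0) / 0.01) ** 2    # fake likelihood peak
prior = np.full_like(times, 1 / 4)            # uniform over the segment

marg_like = np.log(np.trapz(np.exp(log_l) * prior, times))
print(marg_like)  # compared against time.log_likelihood_ratio() above
```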
diff --git a/test/prior_test.py b/test/prior_test.py
index c1a9c9fc1df745a2449d80d06c16b0e4abe77d31..5d78d919bcff173cb077387006c4fd644c76d309 100644
--- a/test/prior_test.py
+++ b/test/prior_test.py
@@ -7,6 +7,7 @@ import os
 import shutil
 from collections import OrderedDict
 
+
 class TestPriorInstantiationWithoutOptionalPriors(unittest.TestCase):
 
     def setUp(self):
@@ -301,10 +302,10 @@ class TestPriorDict(unittest.TestCase):
         self.first_prior = bilby.core.prior.Uniform(name='a', minimum=0, maximum=1, unit='kg')
         self.second_prior = bilby.core.prior.PowerLaw(name='b', alpha=3, minimum=1, maximum=2, unit='m/s')
         self.third_prior = bilby.core.prior.DeltaFunction(name='c', peak=42, unit='m')
-        self.prior_dict = dict(mass=self.first_prior,
-                               speed=self.second_prior,
-                               length=self.third_prior)
-        self.prior_set_from_dict = bilby.core.prior.PriorDict(dictionary=self.prior_dict)
+        self.priors = dict(mass=self.first_prior,
+                           speed=self.second_prior,
+                           length=self.third_prior)
+        self.prior_set_from_dict = bilby.core.prior.PriorDict(dictionary=self.priors)
         self.default_prior_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                'prior_files/binary_black_holes.prior')
         self.prior_set_from_file = bilby.core.prior.PriorDict(filename=self.default_prior_file)
@@ -313,7 +314,7 @@ class TestPriorDict(unittest.TestCase):
         del self.first_prior
         del self.second_prior
         del self.third_prior
-        del self.prior_dict
+        del self.priors
         del self.prior_set_from_dict
         del self.default_prior_file
         del self.prior_set_from_file
@@ -325,26 +326,7 @@ class TestPriorDict(unittest.TestCase):
         self.assertEqual(3, len(self.prior_set_from_dict))
 
     def test_prior_set_has_expected_priors(self):
-        self.assertDictEqual(self.prior_dict, dict(self.prior_set_from_dict))
-
-    # Removed for now as it does not create the outdir in the Gitlab CI
-    # def test_write_to_file(self):
-    #     outdir = 'prior_set_test_outdir'
-    #     label = 'test_label'
-    #     label_dot_prior = label + '.prior'
-    #     current_dir_path = os.path.dirname(os.path.realpath(__file__))
-    #     outdir_path = os.path.join(current_dir_path, outdir)
-    #     outfile_path = os.path.join(outdir_path, label_dot_prior)
-    #     self.prior_set_from_dict.write_to_file(outdir=outdir, label=label)
-    #     self.assertTrue(os.path.isdir(outdir_path))
-    #     self.assertTrue(os.listdir(outdir_path)[0] == label_dot_prior)
-    #     with open(outfile_path, 'r') as f:
-    #         expected_outfile = [
-    #             'speed = PowerLaw(alpha=3, minimum=1, maximum=2, name=\'b\', latex_label=\'b\', unit=\'m/s\')\n',
-    #             'mass = Uniform(minimum=0, maximum=1, name=\'a\', latex_label=\'a\', unit=\'kg\')\n',
-    #             'length = DeltaFunction(peak=42, name=\'c\', latex_label=\'c\', unit=\'m\')\n']
-    #         self.assertListEqual(sorted(expected_outfile), sorted(f.readlines()))
-    #     shutil.rmtree(outdir_path)
+        self.assertDictEqual(self.priors, dict(self.prior_set_from_dict))
 
     def test_read_from_file(self):
         expected = dict(
@@ -373,6 +355,21 @@ class TestPriorDict(unittest.TestCase):
             )
         self.assertDictEqual(expected, self.prior_set_from_file)
 
+    def test_to_file(self):
+        expected = ["length = DeltaFunction(peak=42, name='c', latex_label='c', unit='m')\n",
+                    "speed = PowerLaw(alpha=3, minimum=1, maximum=2, name='b', latex_label='b', unit='m/s')\n",
+                    "mass = Uniform(minimum=0, maximum=1, name='a', latex_label='a', unit='kg')\n"]
+        self.prior_set_from_dict.to_file(outdir='prior_files', label='to_file_test')
+        with open('prior_files/to_file_test.prior') as f:
+            for i, line in enumerate(f.readlines()):
+                self.assertTrue(line in expected)
+
+    def test_from_dict_with_string(self):
+        string_prior = "bilby.core.prior.PowerLaw(name='b', alpha=3, minimum=1, maximum=2, unit='m/s')"
+        self.priors['speed'] = string_prior
+        from_dict = bilby.core.prior.PriorDict(dictionary=self.priors)
+        self.assertDictEqual(self.prior_set_from_dict, from_dict)
+
     def test_convert_floats_to_delta_functions(self):
         self.prior_set_from_dict['d'] = 5
         self.prior_set_from_dict['e'] = 7.3