diff --git a/CHANGELOG.md b/CHANGELOG.md
index 19cc2068fa9a69aa7c42ea02e7554921d60b30ec..737a303fb1a94d7a3679c4743740e8e1be12d70d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,18 +2,34 @@
 
 ## Unreleased
 
+### Added
+- 
+### Changed
+- 
+### Removed
+- 
+
+## [0.5.0] 2019-05-08
+
 ### Added
 - A plot_skymap method to the CBCResult object based on ligo.skymap
+- A plot_calibration_posterior method to the CBCResult object
+- Method to merge results
 
 ### Changed
+- Significant refactoring of the detector module: this should be backward compatible. The large detector.py file was broken into smaller, more manageable chunks.
 - The `periodic_boundary` option to the prior classes has been changed to `boundary`.
-This breaks backward compatibility.
+*This breaks backward compatibility*.
 The options to `boundary` are `{'periodic', 'reflective', None}`.
 Periodic boundaries are supported as before.
 Reflective boundaries are supported in `dynesty` and `cpnest`.  
+- Minor speed improvements by caching intermediate steps
 - Added state plotting for dynesty. Use `check_point_plot=True` in the `run_sampler` 
 function to create trace plots during the dynesty checkpoints
 - Dynesty now prints the progress to STDOUT rather than STDERR
+- `detector` module refactored into subpackage. Maintains backward compatibility.
+- Specifying alternative frequency bounds for the ROQ is now possible if the appropriate
+`params.dat` file is passed.
 
 ### Removed
 - Obsolete (and potentially incorrect) plot_skymap methods from gw.utils
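As a minimal sketch of the renamed prior option in the Changed list above, assuming only what the changelog states (that `boundary` replaces `periodic_boundary` and accepts 'periodic', 'reflective', or None):

    import numpy as np
    import bilby

    # 'periodic' reproduces the behaviour of the old periodic_boundary=True
    phase = bilby.core.prior.Uniform(
        minimum=0, maximum=2 * np.pi, name='phase', boundary='periodic')
    # reflective boundaries are honoured by dynesty and cpnest
    cos_tilt = bilby.core.prior.Uniform(
        minimum=-1, maximum=1, name='cos_tilt_1', boundary='reflective')
    print(phase.boundary, cos_tilt.boundary)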
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index f8c4bde71429cec0e6e6addcfaa65d31f3ce95f4..b5cad3bcd9f9fbe7447333a486fc059c764f34ad 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -33,7 +33,7 @@ when you change the code your installed version will automatically be updated.
 #### Removing previously installed versions
 
 If you have previously installed `bilby` using `pip` (or generally find buggy
-behaviour). It may be worthwhile purging your system and reinstalling. To do
+behaviour), it may be worthwhile purging your system and reinstalling. To do
 this, first find out where the module is being imported from: from any
 directory that is *not* the source directory, do the following
 
@@ -170,7 +170,7 @@ interested party) please key these three things in mind
 
 * If you open a discussion, be timely in responding to the submitter. Note, the
   reverse does not need to apply.
-* Keep your questions/comments focussed on the scope of the merge request. If
+* Keep your questions/comments focused on the scope of the merge request. If
   while reviewing the code you notice other things which could be improved, open
   a new issue.
 * Be supportive - merge requests represent a lot of hard work and effort and
diff --git a/README.rst b/README.rst
index 9a16984e99118d23035d30f8cef02cf67c03b566..3628c06384f8475a3241a7e08443e5318a1fa28e 100644
--- a/README.rst
+++ b/README.rst
@@ -48,7 +48,7 @@ as requested in their associated documentation.
 * `emcee <https://github.com/dfm/emcee>`__
 * `ptemcee <https://github.com/willvousden/ptemcee>`__
 * `ptmcmcsampler <https://github.com/jellis18/PTMCMCSampler>`__
-* `pypolychord <https://github.com/vhaasteren/pypolychord>`__
+* `pypolychord <https://github.com/PolyChord/PolyChordLite>`__
 * `PyMC3 <https://github.com/pymc-devs/pymc3>`_
 
 
diff --git a/bilby/core/result.py b/bilby/core/result.py
index 5f3d4b4503d941179be8d73296bd65085bf26a88..dc56ffcc5d3a57ca92b6ce9d64d2f55d76f5f3d9 100644
--- a/bilby/core/result.py
+++ b/bilby/core/result.py
@@ -1029,7 +1029,7 @@ class Result(object):
         ----------
         likelihood: bilby.likelihood.GravitationalWaveTransient, optional
             GravitationalWaveTransient likelihood used for sampling.
-        priors: dict, optional
+        priors: bilby.prior.PriorDict, optional
             Dictionary of prior object, used to fill in delta function priors.
         conversion_function: function, optional
             Function which adds in extra parameters to the data frame,
@@ -1044,13 +1044,9 @@ class Result(object):
                 data_frame, priors)
             data_frame['log_likelihood'] = getattr(
                 self, 'log_likelihood_evaluations', np.nan)
-            if self.log_prior_evaluations is None:
-                ln_prior = list()
-                for ii in range(len(data_frame)):
-                    ln_prior.append(
-                        self.priors.ln_prob(dict(
-                            data_frame[self.search_parameter_keys].iloc[ii])))
-                data_frame['log_prior'] = np.array(ln_prior)
+            if self.log_prior_evaluations is None and priors is not None:
+                data_frame['log_prior'] = priors.ln_prob(
+                    dict(data_frame[self.search_parameter_keys]), axis=0)
             else:
                 data_frame['log_prior'] = self.log_prior_evaluations
         if conversion_function is not None:
@@ -1447,7 +1443,8 @@ def plot_multiple(results, filename=None, labels=None, colours=None,
 
 
 def make_pp_plot(results, filename=None, save=True, confidence_interval=0.9,
-                 lines=None, legend_fontsize=9, keys=None, **kwargs):
+                 lines=None, legend_fontsize=9, keys=None, title=True,
+                 **kwargs):
     """
     Make a P-P plot for a set of runs with injected signals.
 
@@ -1515,7 +1512,16 @@ def make_pp_plot(results, filename=None, save=True, confidence_interval=0.9,
         pvalues.append(pvalue)
         logger.info("{}: {}".format(key, pvalue))
 
-    ax.legend(fontsize=legend_fontsize)
+    Pvals = namedtuple('pvals', ['combined_pvalue', 'pvalues', 'names'])
+    pvals = Pvals(combined_pvalue=scipy.stats.combine_pvalues(pvalues)[1],
+                  pvalues=pvalues,
+                  names=list(credible_levels.keys()))
+    logger.info(
+        "Combined p-value: {}".format(pvals.combined_pvalue))
+
+    if title:
+        ax.set_title("p-value = {:2.4f}".format(pvals.combined_pvalue))
+    ax.legend(fontsize=legend_fontsize, labelspacing=0.25)
     ax.set_xlim(0, 1)
     ax.set_ylim(0, 1)
     fig.tight_layout()
@@ -1524,12 +1530,6 @@ def make_pp_plot(results, filename=None, save=True, confidence_interval=0.9,
             filename = 'outdir/pp.png'
         fig.savefig(filename, dpi=500)
 
-    Pvals = namedtuple('pvals', ['combined_pvalue', 'pvalues', 'names'])
-    pvals = Pvals(combined_pvalue=scipy.stats.combine_pvalues(pvalues)[1],
-                  pvalues=pvalues,
-                  names=list(credible_levels.keys()))
-    logger.info(
-        "Combined p-value: {}".format(pvals.combined_pvalue))
     return fig, pvals
 
 
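As a rough sketch of the combined p-value now computed in make_pp_plot (the per-parameter values and names below are made up; scipy.stats.combine_pvalues uses Fisher's method by default and returns the combined p-value as its second element):

    from collections import namedtuple
    import scipy.stats

    pvalues = [0.31, 0.72, 0.05, 0.48]          # illustrative KS p-values
    names = ['mass_1', 'mass_2', 'ra', 'dec']   # illustrative parameter names

    Pvals = namedtuple('pvals', ['combined_pvalue', 'pvalues', 'names'])
    pvals = Pvals(combined_pvalue=scipy.stats.combine_pvalues(pvalues)[1],
                  pvalues=pvalues, names=names)
    print("p-value = {:2.4f}".format(pvals.combined_pvalue))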
diff --git a/bilby/core/sampler/__init__.py b/bilby/core/sampler/__init__.py
index b77831e64039d39ee4852365309ca13221f318f0..ade03719109e831ee517c1eb2d4be923f6e0ee67 100644
--- a/bilby/core/sampler/__init__.py
+++ b/bilby/core/sampler/__init__.py
@@ -169,10 +169,6 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
     else:
         result = sampler.run_sampler()
 
-    # Initial save of the sampler in case of failure in post-processing
-    if save:
-        result.save_to_file(extension=save, gzip=gzip)
-
     end_time = datetime.datetime.now()
     result.sampling_time = (end_time - start_time).total_seconds()
     logger.info('Sampling time: {}'.format(end_time - start_time))
@@ -187,10 +183,13 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
         result.log_bayes_factor = \
             result.log_evidence - result.log_noise_evidence
 
-    if result.injection_parameters is not None:
-        if conversion_function is not None:
-            result.injection_parameters = conversion_function(
-                result.injection_parameters)
+    # Initial save of the sampler in case of failure in post-processing
+    if save:
+        result.save_to_file(extension=save, gzip=gzip)
+
+    if None not in [result.injection_parameters, conversion_function]:
+        result.injection_parameters = conversion_function(
+            result.injection_parameters)
 
     result.samples_to_posterior(likelihood=likelihood, priors=result.priors,
                                 conversion_function=conversion_function)
diff --git a/bilby/core/sampler/base_sampler.py b/bilby/core/sampler/base_sampler.py
index c81a692eadd88cd8d93a03977d09c6b16d552304..4b5ffc673d42dc9bf6a6d629925155166b4ffe3b 100644
--- a/bilby/core/sampler/base_sampler.py
+++ b/bilby/core/sampler/base_sampler.py
@@ -388,12 +388,62 @@ class Sampler(object):
         self.check_draw(draw)
         return draw
 
-    def check_draw(self, draw):
-        """ Checks if the draw will generate an infinite prior or likelihood """
-        if np.isinf(self.log_likelihood(draw)):
-            logger.warning('Prior draw {} has inf likelihood'.format(draw))
-        if np.isinf(self.log_prior(draw)):
-            logger.warning('Prior draw {} has inf prior'.format(draw))
+    def get_initial_points_from_prior(self, npoints=1):
+        """ Method to draw a set of live points from the prior
+
+        This iterates over draws from the prior until all the samples have a
+        finite prior and likelihood (relevant for constrained priors).
+
+        Parameters
+        ----------
+        npoints: int
+            The number of values to return
+
+        Returns
+        -------
+        unit_cube, parameters, likelihood: tuple of array_like
+            unit_cube (nlive, ndim) is an array of the prior samples from the
+            unit cube, parameters (nlive, ndim) is the unit_cube array
+            transformed to the target space, while likelihood (nlive) are the
+            likelihood evaluations.
+
+        """
+        unit_cube = []
+        parameters = []
+        likelihood = []
+        while len(unit_cube) < npoints:
+            unit = np.random.rand(self.ndim)
+            theta = self.prior_transform(unit)
+            if self.check_draw(theta, warning=False):
+                unit_cube.append(unit)
+                parameters.append(theta)
+                likelihood.append(self.log_likelihood(theta))
+
+        return np.array(unit_cube), np.array(parameters), np.array(likelihood)
+
+    def check_draw(self, theta, warning=True):
+        """ Checks if the draw will generate an infinite prior or likelihood
+
+        Parameters
+        ----------
+        theta: array_like
+            Parameter values at which to evaluate likelihood
+        warning: bool, optional
+            Whether to log a warning if the draw has an infinite prior or likelihood
+
+        Returns
+        -------
+        bool
+            True if the likelihood and prior are finite, False otherwise
+
+        """
+        if np.isinf(self.log_prior(theta)):
+            if warning:
+                logger.warning('Prior draw {} has inf prior'.format(theta))
+            return False
+        if np.isinf(self.log_likelihood(theta)):
+            if warning:
+                logger.warning('Prior draw {} has inf likelihood'.format(theta))
+            return False
+        return True
 
     def run_sampler(self):
         """A template method to run in subclasses"""
diff --git a/bilby/core/sampler/dynesty.py b/bilby/core/sampler/dynesty.py
index 8e21ec5afe5e9b1231af0344cba001a8c45423bb..ddd59ff084a0a9438d2e3a8201c2b2b757da5e67 100644
--- a/bilby/core/sampler/dynesty.py
+++ b/bilby/core/sampler/dynesty.py
@@ -202,6 +202,10 @@ class Dynesty(NestedSampler):
 
     def run_sampler(self):
         import dynesty
+        if self.kwargs['live_points'] is None:
+            self.kwargs['live_points'] = (
+                self.get_initial_points_from_prior(
+                    self.kwargs['nlive']))
         self.sampler = dynesty.NestedSampler(
             loglikelihood=self.log_likelihood,
             prior_transform=self.prior_transform,
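For context, dynesty's NestedSampler accepts a live_points argument holding (unit-cube draws, transformed draws, log-likelihoods), which is the tuple returned by get_initial_points_from_prior. A toy sketch with stand-in functions (illustrative only):

    import numpy as np
    import dynesty

    ndim, nlive = 2, 250

    def loglike(theta):
        return -0.5 * np.sum(theta ** 2)

    def ptform(unit):
        return 10 * unit - 5

    # draw the initial live points up front, as bilby now does
    u = np.random.rand(nlive, ndim)
    v = np.array([ptform(ui) for ui in u])
    logl = np.array([loglike(vi) for vi in v])

    sampler = dynesty.NestedSampler(
        loglikelihood=loglike, prior_transform=ptform, ndim=ndim,
        nlive=nlive, live_points=[u, v, logl])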
diff --git a/bilby/core/sampler/polychord.py b/bilby/core/sampler/polychord.py
index 4378649335da69a5a4e852e9831b19b40c5d0c9a..7cd5c379f4fb409202441ea6798df059acee002e 100644
--- a/bilby/core/sampler/polychord.py
+++ b/bilby/core/sampler/polychord.py
@@ -35,22 +35,25 @@ class PyPolyChord(NestedSampler):
         import pypolychord
         from pypolychord.settings import PolyChordSettings
         if self.kwargs['use_polychord_defaults']:
-            settings = PolyChordSettings(nDims=self.ndim, nDerived=self.ndim, base_dir=self.outdir,
+            settings = PolyChordSettings(nDims=self.ndim, nDerived=self.ndim,
+                                         base_dir=self._sample_file_directory,
                                          file_root=self.label)
         else:
             self._setup_dynamic_defaults()
             pc_kwargs = self.kwargs.copy()
-            pc_kwargs['base_dir'] = self.outdir
+            pc_kwargs['base_dir'] = self._sample_file_directory
             pc_kwargs['file_root'] = self.label
             pc_kwargs.pop('use_polychord_defaults')
             settings = PolyChordSettings(nDims=self.ndim, nDerived=self.ndim, **pc_kwargs)
         self._verify_kwargs_against_default_kwargs()
 
-        pypolychord.run_polychord(loglikelihood=self.log_likelihood, nDims=self.ndim,
-                                  nDerived=self.ndim, settings=settings, prior=self.prior_transform)
-
-        self.result.log_evidence, self.result.log_evidence_err = self._read_out_stats_file()
-        self.result.samples = self._read_sample_file()
+        out = pypolychord.run_polychord(loglikelihood=self.log_likelihood, nDims=self.ndim,
+                                        nDerived=self.ndim, settings=settings, prior=self.prior_transform)
+        self.result.log_evidence = out.logZ
+        self.result.log_evidence_err = out.logZerr
+        log_likelihoods, physical_parameters = self._read_sample_file()
+        self.result.log_likelihood_evaluations = log_likelihoods
+        self.result.samples = physical_parameters
         return self.result
 
     def _setup_dynamic_defaults(self):
@@ -74,21 +77,23 @@ class PyPolyChord(NestedSampler):
         """ Overrides the log_likelihood so that PolyChord understands it """
         return super(PyPolyChord, self).log_likelihood(theta), theta
 
-    def _read_out_stats_file(self):
-        statsfile = self.outdir + '/' + self.label + '.stats'
-        with open(statsfile) as f:
-            for line in f:
-                if line.startswith('log(Z)'):
-                    line = line.replace('log(Z)', '')
-                    line = line.replace('=', '')
-                    line = line.replace(' ', '')
-                    print(line)
-                    z = line.split('+/-')
-                    log_z = float(z[0])
-                    log_z_err = float(z[1])
-                    return log_z, log_z_err
-
     def _read_sample_file(self):
-        sample_file = self.outdir + '/' + self.label + '_equal_weights.txt'
+        """
+        This method reads out the _equal_weights.txt file that polychord produces.
+        The first column is omitted since it is just composed of 1s, i.e. the equal weights.
+        The second column contains the log likelihoods; the remaining columns are the physical parameters.
+
+        Returns
+        -------
+        array_like, array_like: The log_likelihoods and the associated parameters
+
+        """
+        sample_file = self._sample_file_directory + '/' + self.label + '_equal_weights.txt'
         samples = np.loadtxt(sample_file)
-        return samples[:, -self.ndim:]  # extract last ndim columns
+        log_likelihoods = -0.5 * samples[:, 1]
+        physical_parameters = samples[:, -self.ndim:]
+        return log_likelihoods, physical_parameters
+
+    @property
+    def _sample_file_directory(self):
+        return self.outdir + '/chains'
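A small sketch of how the columns of the PolyChord _equal_weights.txt file are split, using a fabricated array in place of np.loadtxt (the -0.5 rescaling of the second column matches _read_sample_file above):

    import numpy as np

    ndim = 3
    # fabricated stand-in for np.loadtxt('<label>_equal_weights.txt')
    samples = np.column_stack([
        np.ones(5),                    # equal-weights column (all 1s)
        np.random.uniform(0, 10, 5),   # column rescaled to log-likelihoods
        np.random.randn(5, ndim)])     # physical parameters

    log_likelihoods = -0.5 * samples[:, 1]
    physical_parameters = samples[:, -ndim:]
    print(log_likelihoods.shape, physical_parameters.shape)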
diff --git a/bilby/gw/detector/geometry.py b/bilby/gw/detector/geometry.py
index 319645187435d9ebae71653a84bc3c84a73276d2..0725e4f3176b6b1ae12c2b95a9a5a21183a0aa51 100644
--- a/bilby/gw/detector/geometry.py
+++ b/bilby/gw/detector/geometry.py
@@ -6,6 +6,31 @@ from .. import utils as gwutils
 class InterferometerGeometry(object):
     def __init__(self, length, latitude, longitude, elevation, xarm_azimuth, yarm_azimuth,
                  xarm_tilt=0., yarm_tilt=0.):
+        """
+        Instantiate an InterferometerGeometry object.
+
+        Parameters
+        ----------
+
+        length: float
+            Length of the interferometer in km.
+        latitude: float
+            Latitude North in degrees (South is negative).
+        longitude: float
+            Longitude East in degrees (West is negative).
+        elevation: float
+            Height above surface in metres.
+        xarm_azimuth: float
+            Orientation of the x arm in degrees North of East.
+        yarm_azimuth: float
+            Orientation of the y arm in degrees North of East.
+        xarm_tilt: float, optional
+            Tilt of the x arm in radians above the horizontal defined by
+            ellipsoid earth model in LIGO-T980044-08.
+        yarm_tilt: float, optional
+            Tilt of the y arm in radians above the horizontal.
+        """
+
         self._x_updated = False
         self._y_updated = False
         self._vertex_updated = False
@@ -24,6 +49,20 @@ class InterferometerGeometry(object):
         self._y = None
         self._detector_tensor = None
 
+    def __eq__(self, other):
+        for attribute in ['length', 'latitude', 'longitude', 'elevation',
+                          'xarm_azimuth', 'yarm_azimuth', 'xarm_tilt', 'yarm_tilt']:
+            if not getattr(self, attribute) == getattr(other, attribute):
+                return False
+        return True
+
+    def __repr__(self):
+        return self.__class__.__name__ + '(length={}, latitude={}, longitude={}, elevation={}, ' \
+                                         'xarm_azimuth={}, yarm_azimuth={}, xarm_tilt={}, yarm_tilt={})' \
+            .format(float(self.length), float(self.latitude), float(self.longitude),
+                    float(self.elevation), float(self.xarm_azimuth), float(self.yarm_azimuth), float(self.xarm_tilt),
+                    float(self.yarm_tilt))
+
     @property
     def latitude(self):
         """ Saves latitude in rad internally. Updates related quantities if set to a different value.
diff --git a/bilby/gw/detector/interferometer.py b/bilby/gw/detector/interferometer.py
index d059d4df5dea622befd7c7f4422c0292736ae6e3..24afd74cb80c5c725e102bcd6cb02c0771a687e2 100644
--- a/bilby/gw/detector/interferometer.py
+++ b/bilby/gw/detector/interferometer.py
@@ -95,14 +95,7 @@ class Interferometer(object):
 
     def __eq__(self, other):
         if self.name == other.name and \
-                self.length == other.length and \
-                self.latitude == other.latitude and \
-                self.longitude == other.longitude and \
-                self.elevation == other.elevation and \
-                self.xarm_azimuth == other.xarm_azimuth and \
-                self.xarm_tilt == other.xarm_tilt and \
-                self.yarm_azimuth == other.yarm_azimuth and \
-                self.yarm_tilt == other.yarm_tilt and \
+                self.geometry == other.geometry and \
                 self.power_spectral_density.__eq__(other.power_spectral_density) and \
                 self.calibration_model == other.calibration_model and \
                 self.strain_data == other.strain_data:
@@ -113,10 +106,12 @@ class Interferometer(object):
         return self.__class__.__name__ + '(name=\'{}\', power_spectral_density={}, minimum_frequency={}, ' \
                                          'maximum_frequency={}, length={}, latitude={}, longitude={}, elevation={}, ' \
                                          'xarm_azimuth={}, yarm_azimuth={}, xarm_tilt={}, yarm_tilt={})' \
-            .format(self.name, self.power_spectral_density, float(self.minimum_frequency),
-                    float(self.maximum_frequency), float(self.length), float(self.latitude), float(self.longitude),
-                    float(self.elevation), float(self.xarm_azimuth), float(self.yarm_azimuth), float(self.xarm_tilt),
-                    float(self.yarm_tilt))
+            .format(self.name, self.power_spectral_density, float(self.strain_data.minimum_frequency),
+                    float(self.strain_data.maximum_frequency), float(self.geometry.length),
+                    float(self.geometry.latitude), float(self.geometry.longitude),
+                    float(self.geometry.elevation), float(self.geometry.xarm_azimuth),
+                    float(self.geometry.yarm_azimuth), float(self.geometry.xarm_tilt),
+                    float(self.geometry.yarm_tilt))
 
     def set_strain_data_from_frequency_domain_strain(
             self, frequency_domain_strain, sampling_frequency=None,
@@ -250,7 +245,7 @@ class Interferometer(object):
 
         """
         polarization_tensor = gwutils.get_polarization_tensor(ra, dec, time, psi, mode)
-        return np.einsum('ij,ij->', self.detector_tensor, polarization_tensor)
+        return np.einsum('ij,ij->', self.geometry.detector_tensor, polarization_tensor)
 
     def get_detector_response(self, waveform_polarizations, parameters):
         """ Get the detector response for a particular waveform
@@ -283,11 +278,11 @@ class Interferometer(object):
             parameters['ra'], parameters['dec'], parameters['geocent_time'])
         dt = parameters['geocent_time'] + time_shift - self.strain_data.start_time
 
-        signal_ifo[self.frequency_mask] = signal_ifo[self.frequency_mask] * np.exp(
-            -1j * 2 * np.pi * dt * self.frequency_array[self.frequency_mask])
+        signal_ifo[self.strain_data.frequency_mask] = signal_ifo[self.strain_data.frequency_mask] * np.exp(
+            -1j * 2 * np.pi * dt * self.strain_data.frequency_array[self.strain_data.frequency_mask])
 
-        signal_ifo[self.frequency_mask] *= self.calibration_model.get_calibration_factor(
-            self.frequency_array[self.frequency_mask],
+        signal_ifo[self.strain_data.frequency_mask] *= self.calibration_model.get_calibration_factor(
+            self.strain_data.frequency_array[self.strain_data.frequency_mask],
             prefix='recalib_{}_'.format(self.name), **parameters)
 
         return signal_ifo
@@ -342,7 +337,7 @@ class Interferometer(object):
                 .format(self.strain_data.start_time, parameters['geocent_time']))
 
         signal_ifo = self.get_detector_response(injection_polarizations, parameters)
-        if np.shape(self.frequency_domain_strain).__eq__(np.shape(signal_ifo)):
+        if np.shape(self.strain_data.frequency_domain_strain).__eq__(np.shape(signal_ifo)):
             self.strain_data.frequency_domain_strain = \
                 signal_ifo + self.strain_data.frequency_domain_strain
         else:
@@ -376,7 +371,10 @@ class Interferometer(object):
         array_like: An array representation of the ASD
 
         """
-        return self.power_spectral_density_array ** 0.5
+        return (
+            self.power_spectral_density.get_amplitude_spectral_density_array(
+                frequency_array=self.strain_data.frequency_array) *
+            self.strain_data.window_factor**0.5)
 
     @property
     def power_spectral_density_array(self):
@@ -389,8 +387,10 @@ class Interferometer(object):
         array_like: An array representation of the PSD
 
         """
-        return (self.power_spectral_density.power_spectral_density_interpolated(self.frequency_array) *
-                self.strain_data.window_factor)
+        return (
+            self.power_spectral_density.get_power_spectral_density_array(
+                frequency_array=self.strain_data.frequency_array) *
+            self.strain_data.window_factor)
 
     def unit_vector_along_arm(self, arm):
         logger.warning("This method has been moved and will be removed in the future."
@@ -416,7 +416,7 @@ class Interferometer(object):
         -------
         float: The time delay from geocenter in seconds
         """
-        return gwutils.time_delay_geocentric(self.vertex, np.array([0, 0, 0]), ra, dec, time)
+        return gwutils.time_delay_geocentric(self.geometry.vertex, np.array([0, 0, 0]), ra, dec, time)
 
     def vertex_position_geocentric(self):
         """
@@ -429,9 +429,9 @@ class Interferometer(object):
         -------
         array_like: A 3D array representation of the vertex
         """
-        return gwutils.get_vertex_position_geocentric(self.latitude_radians,
-                                                      self.longitude_radians,
-                                                      self.elevation)
+        return gwutils.get_vertex_position_geocentric(self.geometry.latitude_radians,
+                                                      self.geometry.longitude_radians,
+                                                      self.geometry.elevation)
 
     def optimal_snr_squared(self, signal):
         """
@@ -446,8 +446,8 @@ class Interferometer(object):
         float: The optimal signal to noise ratio possible squared
         """
         return gwutils.optimal_snr_squared(
-            signal=signal[self.frequency_mask],
-            power_spectral_density=self.power_spectral_density_array[self.frequency_mask],
+            signal=signal[self.strain_data.frequency_mask],
+            power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask],
             duration=self.strain_data.duration)
 
     def inner_product(self, signal):
@@ -463,9 +463,9 @@ class Interferometer(object):
         float: The optimal signal to noise ratio possible squared
         """
         return gwutils.noise_weighted_inner_product(
-            aa=signal[self.frequency_mask],
-            bb=self.frequency_domain_strain[self.frequency_mask],
-            power_spectral_density=self.power_spectral_density_array[self.frequency_mask],
+            aa=signal[self.strain_data.frequency_mask],
+            bb=self.strain_data.frequency_domain_strain[self.strain_data.frequency_mask],
+            power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask],
             duration=self.strain_data.duration)
 
     def matched_filter_snr(self, signal):
@@ -482,9 +482,9 @@ class Interferometer(object):
 
         """
         return gwutils.matched_filter_snr(
-            signal=signal[self.frequency_mask],
-            frequency_domain_strain=self.frequency_domain_strain[self.frequency_mask],
-            power_spectral_density=self.power_spectral_density_array[self.frequency_mask],
+            signal=signal[self.strain_data.frequency_mask],
+            frequency_domain_strain=self.strain_data.frequency_domain_strain[self.strain_data.frequency_mask],
+            power_spectral_density=self.power_spectral_density_array[self.strain_data.frequency_mask],
             duration=self.strain_data.duration)
 
     @property
@@ -516,13 +516,13 @@ class Interferometer(object):
             filename_data = '{}/{}_{}_frequency_domain_data.dat'.format(outdir, self.name, label)
         np.savetxt(filename_data,
                    np.array(
-                       [self.frequency_array,
-                        self.frequency_domain_strain.real,
-                        self.frequency_domain_strain.imag]).T,
+                       [self.strain_data.frequency_array,
+                        self.strain_data.frequency_domain_strain.real,
+                        self.strain_data.frequency_domain_strain.imag]).T,
                    header='f real_h(f) imag_h(f)')
         np.savetxt(filename_psd,
                    np.array(
-                       [self.frequency_array,
+                       [self.strain_data.frequency_array,
                         self.amplitude_spectral_density_array]).T,
                    header='f h(f)')
 
@@ -531,22 +531,22 @@ class Interferometer(object):
             return
 
         fig, ax = plt.subplots()
-        df = self.frequency_array[1] - self.frequency_array[0]
+        df = self.strain_data.frequency_array[1] - self.strain_data.frequency_array[0]
         asd = gwutils.asd_from_freq_series(
-            freq_data=self.frequency_domain_strain, df=df)
+            freq_data=self.strain_data.frequency_domain_strain, df=df)
 
-        ax.loglog(self.frequency_array[self.frequency_mask],
-                  asd[self.frequency_mask],
+        ax.loglog(self.strain_data.frequency_array[self.strain_data.frequency_mask],
+                  asd[self.strain_data.frequency_mask],
                   color='C0', label=self.name)
-        ax.loglog(self.frequency_array[self.frequency_mask],
-                  self.amplitude_spectral_density_array[self.frequency_mask],
+        ax.loglog(self.strain_data.frequency_array[self.strain_data.frequency_mask],
+                  self.amplitude_spectral_density_array[self.strain_data.frequency_mask],
                   color='C1', lw=1.0, label=self.name + ' ASD')
         if signal is not None:
             signal_asd = gwutils.asd_from_freq_series(
                 freq_data=signal, df=df)
 
-            ax.loglog(self.frequency_array[self.frequency_mask],
-                      signal_asd[self.frequency_mask],
+            ax.loglog(self.strain_data.frequency_array[self.strain_data.frequency_mask],
+                      signal_asd[self.strain_data.frequency_mask],
                       color='C2',
                       label='Signal')
         ax.grid(True)
@@ -589,7 +589,7 @@ class Interferometer(object):
         if notches is None:
             notches = list()
         timeseries = gwpy.timeseries.TimeSeries(
-            data=self.time_domain_strain, times=self.time_array)
+            data=self.strain_data.time_domain_strain, times=self.strain_data.time_array)
         zpks = []
         if bandpass_frequencies is not None:
             zpks.append(gwpy.signal.filter_design.bandpass(
@@ -608,10 +608,10 @@ class Interferometer(object):
         fig, ax = plt.subplots()
 
         if t0:
-            x = self.time_array - t0
+            x = self.strain_data.time_array - t0
             xlabel = 'GPS time [s] - {}'.format(t0)
         else:
-            x = self.time_array
+            x = self.strain_data.time_array
             xlabel = 'GPS time [s]'
 
         ax.plot(x, strain)
diff --git a/bilby/gw/detector/psd.py b/bilby/gw/detector/psd.py
index 0267dce30885d57860357d3f4c3abfefd4cb83a7..a306eb2a2db08156be833baded97d35a8af98869 100644
--- a/bilby/gw/detector/psd.py
+++ b/bilby/gw/detector/psd.py
@@ -40,6 +40,8 @@ class PowerSpectralDensity(object):
             Interpolated function of the PSD
 
         """
+        self._cache = dict(
+            frequency_array=np.array([]), psd_array=None, asd_array=None)
         self.frequency_array = np.array(frequency_array)
         if psd_array is not None:
             self.psd_array = psd_array
@@ -48,6 +50,12 @@ class PowerSpectralDensity(object):
         self.psd_file = psd_file
         self.asd_file = asd_file
 
+    def _update_cache(self, frequency_array):
+        psd_array = self.power_spectral_density_interpolated(frequency_array)
+        self._cache['psd_array'] = psd_array
+        self._cache['asd_array'] = psd_array**0.5
+        self._cache['frequency_array'] = frequency_array
+
     def __eq__(self, other):
         if self.psd_file == other.psd_file \
                 and self.asd_file == other.asd_file \
@@ -182,6 +190,17 @@ class PowerSpectralDensity(object):
                                                               self.psd_array,
                                                               bounds_error=False,
                                                               fill_value=np.inf)
+        self._update_cache(self.frequency_array)
+
+    def get_power_spectral_density_array(self, frequency_array):
+        if not np.array_equal(frequency_array, self._cache['frequency_array']):
+            self._update_cache(frequency_array=frequency_array)
+        return self._cache['psd_array']
+
+    def get_amplitude_spectral_density_array(self, frequency_array):
+        if not np.array_equal(frequency_array, self._cache['frequency_array']):
+            self._update_cache(frequency_array=frequency_array)
+        return self._cache['asd_array']
 
     @property
     def power_spectral_density_interpolated(self):
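A rough sketch of the caching added here: repeated calls with an identical frequency array reuse the stored PSD/ASD instead of re-interpolating (the arrays below are made up):

    import numpy as np
    from bilby.gw.detector.psd import PowerSpectralDensity

    frequencies = np.linspace(20, 1024, 1000)
    psd = PowerSpectralDensity(
        frequency_array=frequencies,
        psd_array=1e-46 * np.ones_like(frequencies))

    new_frequencies = np.linspace(20, 512, 500)
    asd_first = psd.get_amplitude_spectral_density_array(new_frequencies)   # interpolates, fills the cache
    asd_second = psd.get_amplitude_spectral_density_array(new_frequencies)  # served from the cache
    print(np.array_equal(asd_first, asd_second))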
diff --git a/bilby/gw/likelihood.py b/bilby/gw/likelihood.py
index 409ca05d1f125fdcdcd452f53d964d945c55bac5..2f3d7ab76e068bd9660840eec13f708af0326bbd 100644
--- a/bilby/gw/likelihood.py
+++ b/bilby/gw/likelihood.py
@@ -1,5 +1,6 @@
 from __future__ import division
 
+import gc
 import os
 import json
 import copy
@@ -17,7 +18,8 @@ from scipy.special import i0e
 from ..core import likelihood
 from ..core.utils import (
     logger, UnsortedInterp2d, BilbyJsonEncoder, decode_bilby_json,
-    create_time_series)
+    create_frequency_series, create_time_series, speed_of_light,
+    radius_of_earth)
 from ..core.prior import Interped, Prior, Uniform
 from .detector import InterferometerList
 from .prior import BBHPriorDict
@@ -189,17 +191,17 @@ class GravitationalWaveTransient(likelihood.Likelihood):
 
     @property
     def priors(self):
-        return self.__prior
+        return self._prior
 
     @priors.setter
     def priors(self, priors):
         if priors is not None:
-            self.__prior = priors.copy()
+            self._prior = priors.copy()
         elif any([self.time_marginalization, self.phase_marginalization,
                   self.distance_marginalization]):
             raise ValueError("You can't use a marginalized likelihood without specifying a priors")
         else:
-            self.__prior = None
+            self._prior = None
 
     def noise_log_likelihood(self):
         log_l = 0
@@ -210,7 +212,7 @@ class GravitationalWaveTransient(likelihood.Likelihood):
                 interferometer.frequency_domain_strain[mask],
                 interferometer.power_spectral_density_array[mask],
                 self.waveform_generator.duration) / 2
-        return log_l.real
+        return float(np.real(log_l))
 
     def log_likelihood_ratio(self):
         waveform_polarizations =\
@@ -222,57 +224,40 @@ class GravitationalWaveTransient(likelihood.Likelihood):
         d_inner_h = 0.
         optimal_snr_squared = 0.
         complex_matched_filter_snr = 0.
-        d_inner_h_squared_tc_array = np.zeros(
-            self.interferometers.frequency_array[0:-1].shape,
-            dtype=np.complex128)
+        if self.time_marginalization:
+            d_inner_h_tc_array = np.zeros(
+                self.interferometers.frequency_array[0:-1].shape,
+                dtype=np.complex128)
 
         for interferometer in self.interferometers:
             per_detector_snr = self.calculate_snrs(
-                waveform_polarizations, interferometer)
+                waveform_polarizations=waveform_polarizations,
+                interferometer=interferometer)
 
             d_inner_h += per_detector_snr.d_inner_h
-            optimal_snr_squared += per_detector_snr.optimal_snr_squared
+            optimal_snr_squared += np.real(per_detector_snr.optimal_snr_squared)
             complex_matched_filter_snr += per_detector_snr.complex_matched_filter_snr
 
             if self.time_marginalization:
-                d_inner_h_squared_tc_array += per_detector_snr.d_inner_h_squared_tc_array
+                d_inner_h_tc_array += per_detector_snr.d_inner_h_squared_tc_array
 
         if self.time_marginalization:
-
-            if self.distance_marginalization:
-                rho_mf_ref_tc_array, rho_opt_ref = self._setup_rho(
-                    d_inner_h_squared_tc_array, optimal_snr_squared)
-                if self.phase_marginalization:
-                    dist_marged_log_l_tc_array = self._interp_dist_margd_loglikelihood(
-                        abs(rho_mf_ref_tc_array), rho_opt_ref)
-                else:
-                    dist_marged_log_l_tc_array = self._interp_dist_margd_loglikelihood(
-                        rho_mf_ref_tc_array.real, rho_opt_ref)
-                log_l = logsumexp(dist_marged_log_l_tc_array,
-                                  b=self.time_prior_array)
-            elif self.phase_marginalization:
-                log_l = logsumexp(self._bessel_function_interped(abs(
-                    d_inner_h_squared_tc_array)),
-                    b=self.time_prior_array) - optimal_snr_squared / 2
-            else:
-                log_l = logsumexp(
-                    d_inner_h_squared_tc_array.real,
-                    b=self.time_prior_array) - optimal_snr_squared / 2
+            log_l = self.time_marginalized_likelihood(
+                d_inner_h_tc_array=d_inner_h_tc_array,
+                h_inner_h=optimal_snr_squared)
 
         elif self.distance_marginalization:
-            rho_mf_ref, rho_opt_ref = self._setup_rho(d_inner_h, optimal_snr_squared)
-            if self.phase_marginalization:
-                rho_mf_ref = abs(rho_mf_ref)
-            log_l = self._interp_dist_margd_loglikelihood(rho_mf_ref.real, rho_opt_ref)[0]
+            log_l = self.distance_marginalized_likelihood(
+                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
 
         elif self.phase_marginalization:
-            d_inner_h = self._bessel_function_interped(abs(d_inner_h))
-            log_l = d_inner_h - optimal_snr_squared / 2
+            log_l = self.phase_marginalized_likelihood(
+                d_inner_h=d_inner_h, h_inner_h=optimal_snr_squared)
 
         else:
-            log_l = d_inner_h.real - optimal_snr_squared / 2
+            log_l = np.real(d_inner_h) - optimal_snr_squared / 2
 
-        return log_l.real
+        return float(log_l.real)
 
     def generate_posterior_sample_from_marginalized_likelihood(self):
         """
@@ -352,14 +337,8 @@ class GravitationalWaveTransient(likelihood.Likelihood):
             h_inner_h += ifo.optimal_snr_squared(signal=signal).real
 
         if self.distance_marginalization:
-            rho_mf_ref_tc_array, rho_opt_ref = self._setup_rho(
+            time_log_like = self.distance_marginalized_likelihood(
                 d_inner_h, h_inner_h)
-            if self.phase_marginalization:
-                time_log_like = self._interp_dist_margd_loglikelihood(
-                    abs(rho_mf_ref_tc_array), rho_opt_ref)
-            else:
-                time_log_like = self._interp_dist_margd_loglikelihood(
-                    rho_mf_ref_tc_array.real, rho_opt_ref)
         elif self.phase_marginalization:
             time_log_like = (self._bessel_function_interped(abs(d_inner_h)) -
                              h_inner_h.real / 2)
@@ -479,13 +458,39 @@ class GravitationalWaveTransient(likelihood.Likelihood):
         new_phase = Interped(phases, phase_post).sample()
         return new_phase
 
+    def distance_marginalized_likelihood(self, d_inner_h, h_inner_h):
+        d_inner_h_ref, h_inner_h_ref = self._setup_rho(
+            d_inner_h, h_inner_h)
+        if self.phase_marginalization:
+            d_inner_h_ref = np.abs(d_inner_h_ref)
+        else:
+            d_inner_h_ref = np.real(d_inner_h_ref)
+        return self._interp_dist_margd_loglikelihood(
+            d_inner_h_ref, h_inner_h_ref)
+
+    def phase_marginalized_likelihood(self, d_inner_h, h_inner_h):
+        d_inner_h = self._bessel_function_interped(abs(d_inner_h))
+        return d_inner_h - h_inner_h / 2
+
+    def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h):
+        if self.distance_marginalization:
+            log_l_tc_array = self.distance_marginalized_likelihood(
+                d_inner_h=d_inner_h_tc_array, h_inner_h=h_inner_h)
+        elif self.phase_marginalization:
+            log_l_tc_array = self.phase_marginalized_likelihood(
+                d_inner_h=d_inner_h_tc_array,
+                h_inner_h=h_inner_h)
+        else:
+            log_l_tc_array = np.real(d_inner_h_tc_array) - h_inner_h / 2
+        return logsumexp(log_l_tc_array, b=self.time_prior_array)
+
     def _setup_rho(self, d_inner_h, optimal_snr_squared):
-        rho_opt_ref = (optimal_snr_squared.real *
-                       self.parameters['luminosity_distance'] ** 2 /
-                       self._ref_dist ** 2.)
-        rho_mf_ref = (d_inner_h * self.parameters['luminosity_distance'] /
-                      self._ref_dist)
-        return rho_mf_ref, rho_opt_ref
+        optimal_snr_squared_ref = (optimal_snr_squared.real *
+                                   self.parameters['luminosity_distance'] ** 2 /
+                                   self._ref_dist ** 2.)
+        d_inner_h_ref = (d_inner_h * self.parameters['luminosity_distance'] /
+                         self._ref_dist)
+        return d_inner_h_ref, optimal_snr_squared_ref
 
     def log_likelihood(self):
         return self.log_likelihood_ratio() + self.noise_log_likelihood()
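As a rough numerical sketch of the phase-marginalised term above: _bessel_function_interped tabulates ln I0(|<d, h>|), which can be evaluated stably with scipy.special.i0e since ln I0(x) = ln i0e(x) + |x| (the inner products below are made up):

    import numpy as np
    from scipy.special import i0e

    def ln_i0(x):
        # ln I0(x) via the exponentially scaled Bessel function, stable for large x
        return np.log(i0e(x)) + np.abs(x)

    d_inner_h = 30.0 + 5.0j   # made-up <d, h> for a single detector
    h_inner_h = 60.0          # made-up <h, h>

    log_l_phase_marginalised = ln_i0(np.abs(d_inner_h)) - h_inner_h / 2
    log_l_unmarginalised = np.real(d_inner_h) - h_inner_h / 2
    print(log_l_phase_marginalised, log_l_unmarginalised)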
@@ -500,12 +505,12 @@ class GravitationalWaveTransient(likelihood.Likelihood):
         return self._distance_array[0]
 
     @property
-    def _rho_opt_ref_array(self):
+    def _optimal_snr_squared_ref_array(self):
         """ Optimal filter snr at fiducial distance of ref_dist Mpc """
         return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[0])
 
     @property
-    def _rho_mf_ref_array(self):
+    def _d_inner_h_ref_array(self):
         """ Matched filter snr at fiducial distance of ref_dist Mpc """
         if self.phase_marginalization:
             return np.logspace(-5, 10, self._dist_margd_loglikelihood_array.shape[1])
@@ -527,7 +532,7 @@ class GravitationalWaveTransient(likelihood.Likelihood):
         else:
             self._create_lookup_table()
         self._interp_dist_margd_loglikelihood = UnsortedInterp2d(
-            self._rho_mf_ref_array, self._rho_opt_ref_array,
+            self._d_inner_h_ref_array, self._optimal_snr_squared_ref_array,
             self._dist_margd_loglikelihood_array)
 
     @property
@@ -551,13 +556,14 @@ class GravitationalWaveTransient(likelihood.Likelihood):
     def load_lookup_table(self, filename):
         if os.path.exists(filename):
             loaded_file = dict(np.load(filename))
-            if self._test_cached_lookup_table(loaded_file):
+            match, failure = self._test_cached_lookup_table(loaded_file)
+            if match:
                 logger.info('Loaded distance marginalisation lookup table from '
                             '{}.'.format(filename))
                 return loaded_file
             else:
                 logger.info('Loaded distance marginalisation lookup table does '
-                            'not match prior')
+                            'not match for {}.'.format(failure))
                 return None
         elif isinstance(filename, str):
             logger.info('Distance marginalisation file {} does not '
@@ -571,30 +577,42 @@ class GravitationalWaveTransient(likelihood.Likelihood):
                  distance_array=self._distance_array,
                  prior_array=self.distance_prior_array,
                  lookup_table=self._dist_margd_loglikelihood_array,
-                 reference_distance=self._ref_dist)
+                 reference_distance=self._ref_dist,
+                 phase_marginalization=self.phase_marginalization)
 
     def _test_cached_lookup_table(self, loaded_file):
-        cond_a = np.all(self._distance_array == loaded_file['distance_array'])
-        cond_b = np.all(self.distance_prior_array == loaded_file['prior_array'])
-        cond_c = self._ref_dist == loaded_file['reference_distance']
-        return all([cond_a, cond_b, cond_c])
+        pairs = dict(
+            distance_array=self._distance_array,
+            prior_array=self.distance_prior_array,
+            reference_distance=self._ref_dist,
+            phase_marginalization=self.phase_marginalization)
+        for key in pairs:
+            if key not in loaded_file:
+                return False, key
+            elif not np.array_equal(np.atleast_1d(loaded_file[key]),
+                                    np.atleast_1d(pairs[key])):
+                return False, key
+        return True, None
 
     def _create_lookup_table(self):
         """ Make the lookup table """
         logger.info('Building lookup table for distance marginalisation.')
 
         self._dist_margd_loglikelihood_array = np.zeros((400, 800))
-        for ii, rho_opt_ref in enumerate(self._rho_opt_ref_array):
-            for jj, rho_mf_ref in enumerate(self._rho_mf_ref_array):
-                optimal_snr_squared_array = rho_opt_ref * self._ref_dist ** 2. / self._distance_array ** 2
-                d_inner_h_array = rho_mf_ref * self._ref_dist / self._distance_array
+        for ii, optimal_snr_squared_ref in enumerate(self._optimal_snr_squared_ref_array):
+            for jj, d_inner_h_ref in enumerate(self._d_inner_h_ref_array):
+                optimal_snr_squared_array = (
+                    optimal_snr_squared_ref * self._ref_dist ** 2. /
+                    self._distance_array ** 2)
+                d_inner_h_array = (
+                    d_inner_h_ref * self._ref_dist / self._distance_array)
                 if self.phase_marginalization:
                     d_inner_h_array =\
                         self._bessel_function_interped(abs(d_inner_h_array))
                 self._dist_margd_loglikelihood_array[ii][jj] = \
                     logsumexp(d_inner_h_array - optimal_snr_squared_array / 2,
                               b=self.distance_prior_array * self._delta_distance)
-        log_norm = logsumexp(0. / self._distance_array - 0. / self._distance_array ** 2.,
+        log_norm = logsumexp(0. / self._distance_array,
                              b=self.distance_prior_array * self._delta_distance)
         self._dist_margd_loglikelihood_array -= log_norm
         self.cache_lookup_table()
@@ -806,13 +824,13 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
         self.frequency_nodes_quadratic = \
             waveform_generator.waveform_arguments['frequency_nodes_quadratic']
 
-    def calculate_snrs(self, signal, interferometer):
+    def calculate_snrs(self, waveform_polarizations, interferometer):
         """
         Compute the snrs for ROQ
 
         Parameters
         ----------
-        signal: waveform
+        waveform_polarizations: waveform
         interferometer: bilby.gw.detector.Interferometer
 
         """
@@ -826,7 +844,7 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
 
         dt = interferometer.time_delay_from_geocenter(
             self.parameters['ra'], self.parameters['dec'],
-            interferometer.strain_data.start_time)
+            self.parameters['geocent_time'])
         ifo_time = self.parameters['geocent_time'] + dt - \
             interferometer.strain_data.start_time
 
@@ -837,16 +855,17 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
             self.frequency_nodes_quadratic,
             prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
 
-        h_plus_linear = f_plus * signal['linear']['plus'] * calib_linear
-        h_cross_linear = f_cross * signal['linear']['cross'] * calib_linear
+        h_plus_linear = f_plus * waveform_polarizations['linear']['plus'] * calib_linear
+        h_cross_linear = f_cross * waveform_polarizations['linear']['cross'] * calib_linear
         h_plus_quadratic = (
-            f_plus * signal['quadratic']['plus'] * calib_quadratic)
+            f_plus * waveform_polarizations['quadratic']['plus'] * calib_quadratic)
         h_cross_quadratic = (
-            f_cross * signal['quadratic']['cross'] * calib_quadratic)
+            f_cross * waveform_polarizations['quadratic']['cross'] * calib_quadratic)
 
         indices, in_bounds = self._closest_time_indices(
             ifo_time, self.weights['time_samples'])
         if not in_bounds:
+            logger.debug("SNR calculation error: requested time at edge of ROQ time samples")
             return self._CalculatedSNRs(
                 d_inner_h=np.nan_to_num(-np.inf), optimal_snr_squared=0,
                 complex_matched_filter_snr=np.nan_to_num(-np.inf),
@@ -858,7 +877,7 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
 
         d_inner_h = interp1d(
             self.weights['time_samples'][indices],
-            d_inner_h_tc_array, kind='cubic')(ifo_time)
+            d_inner_h_tc_array, kind='cubic', assume_sorted=True)(ifo_time)
 
         optimal_snr_squared = \
             np.vdot(np.abs(h_plus_quadratic + h_cross_quadratic)**2,
@@ -897,53 +916,56 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
         return indices, in_bounds
 
     def _set_weights(self, linear_matrix, quadratic_matrix):
-        """
-        Setup the time-dependent ROQ weights.
-        This follows FIXME: Smith et al.
+        """ Setup the time-dependent ROQ weights.
+
+        Parameters
+        ----------
+        linear_matrix, quadratic_matrix: array_like
+            Arrays of the linear and quadratic basis
 
-        The times are chosen to allow all the merger times allows in the time
-        prior.
         """
 
-        self.weights['time_samples'] = np.arange(
-            self.priors['geocent_time'].minimum - 0.045,
-            self.priors['geocent_time'].maximum + 0.045,
-            self._get_time_resolution()) - self.interferometers.start_time
+        # Maximum delay time to geocentre plus 10%
+        earth_light_crossing_time = 1.1 * radius_of_earth / speed_of_light
+        time_space = self._get_time_resolution()
+        delta_times = np.arange(
+            self.priors['geocent_time'].minimum - earth_light_crossing_time,
+            self.priors['geocent_time'].maximum + earth_light_crossing_time,
+            time_space)
+        time_samples = delta_times - self.interferometers.start_time
+        self.weights['time_samples'] = time_samples
+        logger.info("Using {} ROQ time samples".format(len(time_samples)))
 
         for ifo in self.interferometers:
             if self.roq_params is not None:
-                frequencies = np.arange(
-                    self.roq_params['flow'],
-                    self.roq_params['fhigh'] + 1 / self.roq_params['seglen'],
-                    1 / self.roq_params['seglen'])
+                if ifo.maximum_frequency > self.roq_params['fhigh']:
+                    raise ValueError("Requested maximum frequency larger than ROQ basis fhigh")
+                # Generate frequencies for the ROQ
+                roq_frequencies = create_frequency_series(
+                    sampling_frequency=self.roq_params['fhigh'] * 2,
+                    duration=self.roq_params['seglen'])
+                roq_mask = roq_frequencies >= self.roq_params['flow']
+                roq_frequencies = roq_frequencies[roq_mask]
                 overlap_frequencies, ifo_idxs, roq_idxs = np.intersect1d(
-                    ifo.frequency_array[ifo.frequency_mask], frequencies,
+                    ifo.frequency_array[ifo.frequency_mask], roq_frequencies,
                     return_indices=True)
             else:
                 overlap_frequencies = ifo.frequency_array[ifo.frequency_mask]
                 roq_idxs = np.arange(linear_matrix.shape[0], dtype=int)
-                ifo_idxs = ifo.frequency_mask
-
-            # array of relative time shifts to be applied to the data
-            # 0.045s comes from time for GW to traverse the Earth
-            time_space = (self.weights['time_samples'][1] -
-                          self.weights['time_samples'][0])
-
-            # array to be filled with data, shifted by discrete time_samples
-            tc_shifted_data = np.zeros([
-                len(self.weights['time_samples']), len(overlap_frequencies)],
-                dtype=complex)
-
-            # shift data to beginning of the prior increment by the time step
-            shifted_data =\
-                ifo.frequency_domain_strain[ifo_idxs] * \
-                np.exp(2j * np.pi * overlap_frequencies *
-                       self.weights['time_samples'][0])
-            single_time_shift = np.exp(
-                2j * np.pi * overlap_frequencies * time_space)
-            for j in range(len(self.weights['time_samples'])):
-                tc_shifted_data[j] = shifted_data
-                shifted_data *= single_time_shift
+                ifo_idxs = [True] * sum(ifo.frequency_mask)
+                if sum(ifo_idxs) != len(roq_idxs):
+                    raise ValueError(
+                        "Mismatch between ROQ basis and frequency array for "
+                        "{}".format(ifo.name))
+            logger.info(
+                "Building ROQ weights for {} with {} frequencies between {} "
+                "and {}.".format(
+                    ifo.name, len(overlap_frequencies),
+                    min(overlap_frequencies), max(overlap_frequencies)))
+
+            data = ifo.frequency_domain_strain[ifo.frequency_mask][ifo_idxs]
+            tc_shifted_data = data * np.exp(
+                2j * np.pi * overlap_frequencies * time_samples[:, np.newaxis])
 
             # to not kill all computers this minimises the memory usage of the
             # required inner products
@@ -951,17 +973,22 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
             max_elements = int((max_block_gigabytes * 2 ** 30) / 8)
 
             self.weights[ifo.name + '_linear'] = blockwise_dot_product(
-                tc_shifted_data / ifo.power_spectral_density_array[ifo_idxs],
+                tc_shifted_data /
+                ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs],
                 linear_matrix[roq_idxs],
                 max_elements) * 4 / ifo.strain_data.duration
 
-            del tc_shifted_data
+            del tc_shifted_data, overlap_frequencies
+            gc.collect()
 
             self.weights[ifo.name + '_quadratic'] = build_roq_weights(
-                1 / ifo.power_spectral_density_array[ifo_idxs],
+                1 /
+                ifo.power_spectral_density_array[ifo.frequency_mask][ifo_idxs],
                 quadratic_matrix[roq_idxs].real,
                 1 / ifo.strain_data.duration)
 
+            logger.info("Finished building weights for {}".format(ifo.name))
+
     def save_weights(self, filename):
         with open(filename, 'w') as file:
             json.dump(self.weights, file, indent=2, cls=BilbyJsonEncoder)
@@ -1027,6 +1054,10 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
 
         delta_t = fhigh**-1
 
+        # Apply a safety factor to ensure the time step is short enough
+        delta_t = delta_t / 5
+
+        logger.info("ROQ time-step = {}".format(delta_t))
         return delta_t
 
     def _rescale_signal(self, signal, new_distance):
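The per-time-sample loop is replaced above by a single broadcast phase shift; a toy sketch of that step with made-up arrays:

    import numpy as np

    overlap_frequencies = np.linspace(20, 1024, 2000)           # made-up frequencies
    data = np.random.randn(2000) + 1j * np.random.randn(2000)   # made-up strain data
    time_samples = np.arange(-0.1, 0.1, 1e-3)                   # made-up ROQ time samples

    # one row per time sample: the data shifted by exp(2*pi*i*f*t)
    tc_shifted_data = data * np.exp(
        2j * np.pi * overlap_frequencies * time_samples[:, np.newaxis])
    print(tc_shifted_data.shape)  # (len(time_samples), len(overlap_frequencies))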
diff --git a/bilby/gw/utils.py b/bilby/gw/utils.py
index e0f3da9c5d65571ec705089b31fc4b6f04ccde7f..d7b56d453ed9106aac2d32e138f42a2954720a36 100644
--- a/bilby/gw/utils.py
+++ b/bilby/gw/utils.py
@@ -447,7 +447,7 @@ def read_frame_file(file_name, start_time, end_time, channel=None, buffer_time=0
 
 
 def get_gracedb(gracedb, outdir, duration, calibration, detectors, query_types=None, server=None):
-    candidate = gracedb_to_json(gracedb, outdir)
+    candidate = gracedb_to_json(gracedb, outdir=outdir)
     trigger_time = candidate['gpstime']
     gps_start_time = trigger_time - duration
     cache_files = []
@@ -461,7 +461,7 @@ def get_gracedb(gracedb, outdir, duration, calibration, detectors, query_types=N
     return candidate, cache_files
 
 
-def gracedb_to_json(gracedb, cred=None, outdir=None):
+def gracedb_to_json(gracedb, cred=None, service_url='https://gracedb.ligo.org/api/', outdir=None):
     """ Script to download a GraceDB candidate
 
     Parameters
@@ -470,6 +470,10 @@ def gracedb_to_json(gracedb, cred=None, outdir=None):
         The UID of the GraceDB candidate
     cred:
         Credentials for authentications, see ligo.gracedb.rest.GraceDb
+    service_url:
+        The service URL of the GraceDB server to query:
+        GraceDB 'https://gracedb.ligo.org/api/' (default)
+        GraceDB-playground 'https://gracedb-playground.ligo.org/api/'
     outdir: str, optional
         If given, a string identfying the location in which to store the json
     """
@@ -478,8 +482,9 @@ def gracedb_to_json(gracedb, cred=None, outdir=None):
     from ligo.gracedb.rest import GraceDb
 
     logger.info('Initialise client and attempt to download')
+    logger.info('Fetching from {}'.format(service_url))
     try:
-        client = GraceDb(cred=cred)
+        client = GraceDb(cred=cred, service_url=service_url)
     except IOError:
         raise ValueError(
             'Failed to authenticate with gracedb: check your X509 '
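A usage sketch of the new service_url argument (the event id is a placeholder and valid LIGO.ORG authentication is still required):

    from bilby.gw.utils import gracedb_to_json

    candidate = gracedb_to_json(
        gracedb='G184098',  # hypothetical GraceDB UID
        service_url='https://gracedb-playground.ligo.org/api/',
        outdir='outdir')
    print(candidate['gpstime'])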
diff --git a/examples/injection_examples/roq_example.py b/examples/injection_examples/roq_example.py
index 6d54aaa99459347cc5d791f8798d0f0450903ed7..b98f4f2bfb279ce08dff05dc63cfaa66358e8242 100644
--- a/examples/injection_examples/roq_example.py
+++ b/examples/injection_examples/roq_example.py
@@ -17,19 +17,32 @@ import bilby
 outdir = 'outdir'
 label = 'roq'
 
+# The ROQ bases can be given an overall scaling.
+# Note how this is applied to durations, frequencies and masses.
+scale_factor = 1.6
+
 # Load in the pieces for the linear part of the ROQ. Note you will need to
 # adjust the filenames here to the correct paths on your machine
 basis_matrix_linear = np.load("B_linear.npy").T
-freq_nodes_linear = np.load("fnodes_linear.npy")
+freq_nodes_linear = np.load("fnodes_linear.npy") * scale_factor
 
 # Load in the pieces for the quadratic part of the ROQ
 basis_matrix_quadratic = np.load("B_quadratic.npy").T
-freq_nodes_quadratic = np.load("fnodes_quadratic.npy")
+freq_nodes_quadratic = np.load("fnodes_quadratic.npy") * scale_factor
+
+# Load the parameters describing the domain of validity of the basis.
+params = np.genfromtxt("params.dat", names=True)
+params['flow'] *= scale_factor
+params['fhigh'] *= scale_factor
+params['seglen'] /= scale_factor
+params['chirpmassmin'] /= scale_factor
+params['chirpmassmax'] /= scale_factor
+params['compmin'] /= scale_factor
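+# These rescalings work because, for fixed mass ratio and spins, the waveform
+# depends on frequency and mass essentially through the product f * M (up to
+# an overall amplitude), so multiplying frequencies by scale_factor while
+# dividing durations and masses by scale_factor keeps the basis valid.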
 
 np.random.seed(170808)
 
-duration = 4
-sampling_frequency = 2048
+duration = 4 / scale_factor
+sampling_frequency = 2048 * scale_factor
 
 injection_parameters = dict(
     mass_1=36.0, mass_2=29.0, a_1=0.4, a_2=0.3, tilt_1=0.0, tilt_2=0.0,
@@ -37,7 +50,8 @@ injection_parameters = dict(
     phase=1.3, geocent_time=1126259642.413, ra=1.375, dec=-1.2108)
 
 waveform_arguments = dict(waveform_approximant='IMRPhenomPv2',
-                          reference_frequency=20., minimum_frequency=20.)
+                          reference_frequency=20. * scale_factor,
+                          minimum_frequency=20. * scale_factor)
 
 waveform_generator = bilby.gw.WaveformGenerator(
     duration=duration, sampling_frequency=sampling_frequency,
@@ -58,17 +72,22 @@ search_waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
     frequency_domain_source_model=bilby.gw.source.roq,
     waveform_arguments=dict(frequency_nodes_linear=freq_nodes_linear,
                             frequency_nodes_quadratic=freq_nodes_quadratic,
-                            reference_frequency=20., minimum_frequency=20.,
+                            reference_frequency=20. * scale_factor,
+                            minimum_frequency=20. * scale_factor,
                             approximant='IMRPhenomPv2'),
     parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters)
 
-# Here we add constraints on chirp mass and mass ratio to the prior
+# Here we add constraints on chirp mass and mass ratio to the prior; these are
+# determined by the domain of validity of the ROQ basis.
 priors = bilby.gw.prior.BBHPriorDict()
 for key in ['a_1', 'a_2', 'tilt_1', 'tilt_2', 'theta_jn', 'phase', 'psi', 'ra',
             'dec', 'phi_12', 'phi_jl', 'luminosity_distance']:
     priors[key] = injection_parameters[key]
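+# Restrict the component-mass priors to lie above the minimum component mass
+# covered by the basis.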
+for key in ['mass_1', 'mass_2']:
+    priors[key].minimum = max(priors[key].minimum, params['compmin'])
 priors['chirp_mass'] = bilby.core.prior.Constraint(
-    name='chirp_mass', minimum=12.5, maximum=45)
+    name='chirp_mass', minimum=float(params['chirpmassmin']),
+    maximum=float(params['chirpmassmax']))
 priors['mass_ratio'] = bilby.core.prior.Constraint(0.125, 1, name='mass_ratio')
 priors['geocent_time'] = bilby.core.prior.Uniform(
     injection_parameters['geocent_time'] - 0.1,
@@ -77,7 +96,7 @@ priors['geocent_time'] = bilby.core.prior.Uniform(
 likelihood = bilby.gw.likelihood.ROQGravitationalWaveTransient(
     interferometers=ifos, waveform_generator=search_waveform_generator,
     linear_matrix=basis_matrix_linear, quadratic_matrix=basis_matrix_quadratic,
-    priors=priors)
+    priors=priors, roq_params=params)
 
 # write the weights to file so they can be loaded multiple times
 likelihood.save_weights('weights.json')
diff --git a/examples/other_examples/gaussian_example.py b/examples/other_examples/gaussian_example.py
index 78d960e8128a3d7ddbac63f0329c45959dd1aadb..c30fadc09297673a315a5cb43e213afbb34aefe6 100644
--- a/examples/other_examples/gaussian_example.py
+++ b/examples/other_examples/gaussian_example.py
@@ -31,7 +31,7 @@ class SimpleGaussianLikelihood(bilby.Likelihood):
         data: array_like
             The data to analyse
         """
-        bilby.Likelihood.__init__(parameters={'mu': None, 'sigma': None})
+        bilby.Likelihood.__init__(self, parameters={'mu': None, 'sigma': None})
         self.data = data
         self.N = len(data)
 
diff --git a/setup.py b/setup.py
index f1b0dac5a287c7570892e2c1e132d60666556aa2..5dca2b737d065d32a736f38228906c46170547be 100644
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@ def readfile(filename):
     return filecontents
 
 
-VERSION = '0.4.5'
+VERSION = '0.5.0'
 version_file = write_version_file(VERSION)
 long_description = get_long_description()
 
diff --git a/test/detector_test.py b/test/detector_test.py
index 9bb46a2591e0aacf99f847a868d66fe5447d171b..f63ee5baa4cbdb9fe25998c016035f67a06da4cb 100644
--- a/test/detector_test.py
+++ b/test/detector_test.py
@@ -14,13 +14,9 @@ import logging
 import deepdish as dd
 
 
-class TestInterferometer(unittest.TestCase):
+class TestInterferometerGeometry(unittest.TestCase):
 
     def setUp(self):
-        self.name = 'name'
-        self.power_spectral_density = bilby.gw.detector.PowerSpectralDensity.from_aligo()
-        self.minimum_frequency = 10
-        self.maximum_frequency = 20
         self.length = 30
         self.latitude = 1
         self.longitude = 2
@@ -29,24 +25,16 @@ class TestInterferometer(unittest.TestCase):
         self.yarm_azimuth = 5
         self.xarm_tilt = 0.
         self.yarm_tilt = 0.
-        # noinspection PyTypeChecker
-        self.ifo = bilby.gw.detector.Interferometer(name=self.name, power_spectral_density=self.power_spectral_density,
-                                                    minimum_frequency=self.minimum_frequency,
-                                                    maximum_frequency=self.maximum_frequency, length=self.length,
-                                                    latitude=self.latitude, longitude=self.longitude,
-                                                    elevation=self.elevation,
-                                                    xarm_azimuth=self.xarm_azimuth, yarm_azimuth=self.yarm_azimuth,
-                                                    xarm_tilt=self.xarm_tilt, yarm_tilt=self.yarm_tilt)
-        self.ifo.strain_data.set_from_frequency_domain_strain(
-            np.linspace(0, 4096, 4097), sampling_frequency=4096, duration=2)
-        self.outdir = 'outdir'
-        bilby.core.utils.check_directory_exists_and_if_not_mkdir(self.outdir)
+        self.geometry = bilby.gw.detector.InterferometerGeometry(length=self.length,
+                                                                 latitude=self.latitude,
+                                                                 longitude=self.longitude,
+                                                                 elevation=self.elevation,
+                                                                 xarm_azimuth=self.xarm_azimuth,
+                                                                 yarm_azimuth=self.yarm_azimuth,
+                                                                 xarm_tilt=self.xarm_tilt,
+                                                                 yarm_tilt=self.yarm_tilt)
 
     def tearDown(self):
-        del self.name
-        del self.power_spectral_density
-        del self.minimum_frequency
-        del self.maximum_frequency
         del self.length
         del self.latitude
         del self.longitude
@@ -55,180 +43,253 @@ class TestInterferometer(unittest.TestCase):
         del self.yarm_azimuth
         del self.xarm_tilt
         del self.yarm_tilt
-        del self.ifo
-        rmtree(self.outdir)
-
-    def test_name_setting(self):
-        self.assertEqual(self.ifo.name, self.name)
-
-    def test_psd_setting(self):
-        self.assertEqual(self.ifo.power_spectral_density, self.power_spectral_density)
-
-    def test_min_freq_setting(self):
-        self.assertEqual(self.ifo.strain_data.minimum_frequency, self.minimum_frequency)
-
-    def test_max_freq_setting(self):
-        self.assertEqual(self.ifo.strain_data.maximum_frequency, self.maximum_frequency)
+        del self.geometry
 
     def test_length_setting(self):
-        self.assertEqual(self.ifo.length, self.length)
+        self.assertEqual(self.geometry.length, self.length)
 
     def test_latitude_setting(self):
-        self.assertEqual(self.ifo.latitude, self.latitude)
+        self.assertEqual(self.geometry.latitude, self.latitude)
 
     def test_longitude_setting(self):
-        self.assertEqual(self.ifo.longitude, self.longitude)
+        self.assertEqual(self.geometry.longitude, self.longitude)
 
     def test_elevation_setting(self):
-        self.assertEqual(self.ifo.elevation, self.elevation)
+        self.assertEqual(self.geometry.elevation, self.elevation)
 
     def test_xarm_azi_setting(self):
-        self.assertEqual(self.ifo.xarm_azimuth, self.xarm_azimuth)
+        self.assertEqual(self.geometry.xarm_azimuth, self.xarm_azimuth)
 
     def test_yarm_azi_setting(self):
-        self.assertEqual(self.ifo.yarm_azimuth, self.yarm_azimuth)
+        self.assertEqual(self.geometry.yarm_azimuth, self.yarm_azimuth)
 
     def test_xarm_tilt_setting(self):
-        self.assertEqual(self.ifo.xarm_tilt, self.xarm_tilt)
+        self.assertEqual(self.geometry.xarm_tilt, self.xarm_tilt)
 
     def test_yarm_tilt_setting(self):
-        self.assertEqual(self.ifo.yarm_tilt, self.yarm_tilt)
+        self.assertEqual(self.geometry.yarm_tilt, self.yarm_tilt)
 
     def test_vertex_without_update(self):
-        _ = self.ifo.vertex
+        _ = self.geometry.vertex
         with mock.patch('bilby.gw.utils.get_vertex_position_geocentric') as m:
             m.return_value = np.array([1])
-            self.assertFalse(np.array_equal(self.ifo.vertex, np.array([1])))
+            self.assertFalse(np.array_equal(self.geometry.vertex, np.array([1])))
 
     def test_vertex_with_latitude_update(self):
         with mock.patch('bilby.gw.utils.get_vertex_position_geocentric') as m:
             m.return_value = np.array([1])
-            self.ifo.latitude = 5
-            self.assertEqual(self.ifo.vertex, np.array([1]))
+            self.geometry.latitude = 5
+            self.assertEqual(self.geometry.vertex, np.array([1]))
 
     def test_vertex_with_longitude_update(self):
         with mock.patch('bilby.gw.utils.get_vertex_position_geocentric') as m:
             m.return_value = np.array([1])
-            self.ifo.longitude = 5
-            self.assertEqual(self.ifo.vertex, np.array([1]))
+            self.geometry.longitude = 5
+            self.assertEqual(self.geometry.vertex, np.array([1]))
 
     def test_vertex_with_elevation_update(self):
         with mock.patch('bilby.gw.utils.get_vertex_position_geocentric') as m:
             m.return_value = np.array([1])
-            self.ifo.elevation = 5
-            self.assertEqual(self.ifo.vertex, np.array([1]))
+            self.geometry.elevation = 5
+            self.assertEqual(self.geometry.vertex, np.array([1]))
 
     def test_x_without_update(self):
-        _ = self.ifo.x
-        self.ifo.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        _ = self.geometry.x
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
 
-        self.assertFalse(np.array_equal(self.ifo.x,
+        self.assertFalse(np.array_equal(self.geometry.x,
                                         np.array([1])))
 
     def test_x_with_xarm_tilt_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.xarm_tilt = 0
-        self.assertTrue(np.array_equal(self.ifo.x,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.xarm_tilt = 0
+        self.assertTrue(np.array_equal(self.geometry.x,
                                        np.array([1])))
 
     def test_x_with_xarm_azimuth_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.xarm_azimuth = 0
-        self.assertTrue(np.array_equal(self.ifo.x,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.xarm_azimuth = 0
+        self.assertTrue(np.array_equal(self.geometry.x,
                                        np.array([1])))
 
     def test_x_with_longitude_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.longitude = 0
-        self.assertTrue(np.array_equal(self.ifo.x,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.longitude = 0
+        self.assertTrue(np.array_equal(self.geometry.x,
                                        np.array([1])))
 
     def test_x_with_latitude_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.latitude = 0
-        self.assertTrue(np.array_equal(self.ifo.x,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.latitude = 0
+        self.assertTrue(np.array_equal(self.geometry.x,
                                        np.array([1])))
 
     def test_y_without_update(self):
-        _ = self.ifo.y
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        _ = self.geometry.y
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
 
-        self.assertFalse(np.array_equal(self.ifo.y,
+        self.assertFalse(np.array_equal(self.geometry.y,
                                         np.array([1])))
 
     def test_y_with_yarm_tilt_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.yarm_tilt = 0
-        self.assertTrue(np.array_equal(self.ifo.y,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.yarm_tilt = 0
+        self.assertTrue(np.array_equal(self.geometry.y,
                                        np.array([1])))
 
     def test_y_with_yarm_azimuth_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.yarm_azimuth = 0
-        self.assertTrue(np.array_equal(self.ifo.y,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.yarm_azimuth = 0
+        self.assertTrue(np.array_equal(self.geometry.y,
                                        np.array([1])))
 
     def test_y_with_longitude_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.longitude = 0
-        self.assertTrue(np.array_equal(self.ifo.y,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.longitude = 0
+        self.assertTrue(np.array_equal(self.geometry.y,
                                        np.array([1])))
 
     def test_y_with_latitude_update(self):
-        self.ifo.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
-        self.ifo.latitude = 0
-        self.assertTrue(np.array_equal(self.ifo.y,
+        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.latitude = 0
+        self.assertTrue(np.array_equal(self.geometry.y,
                                        np.array([1])))
 
     def test_detector_tensor_without_update(self):
-        _ = self.ifo.detector_tensor
+        _ = self.geometry.detector_tensor
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
             expected = np.array([[-9.24529394e-06, 1.02425803e-04, 3.24550668e-04],
                                  [1.02425803e-04, 1.37390844e-03, -8.61137566e-03],
                                  [3.24550668e-04, -8.61137566e-03, -1.36466315e-03]])
-            self.assertTrue(np.allclose(expected, self.ifo.detector_tensor))
+            self.assertTrue(np.allclose(expected, self.geometry.detector_tensor))
 
     def test_detector_tensor_with_x_azimuth_update(self):
-        _ = self.ifo.detector_tensor
+        _ = self.geometry.detector_tensor
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
-            self.ifo.xarm_azimuth = 1
-            self.assertEqual(0, self.ifo.detector_tensor)
+            self.geometry.xarm_azimuth = 1
+            self.assertEqual(0, self.geometry.detector_tensor)
 
     def test_detector_tensor_with_y_azimuth_update(self):
-        _ = self.ifo.detector_tensor
+        _ = self.geometry.detector_tensor
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
-            self.ifo.yarm_azimuth = 1
-            self.assertEqual(0, self.ifo.detector_tensor)
+            self.geometry.yarm_azimuth = 1
+            self.assertEqual(0, self.geometry.detector_tensor)
 
     def test_detector_tensor_with_x_tilt_update(self):
-        _ = self.ifo.detector_tensor
+        _ = self.geometry.detector_tensor
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
-            self.ifo.xarm_tilt = 1
-            self.assertEqual(0, self.ifo.detector_tensor)
+            self.geometry.xarm_tilt = 1
+            self.assertEqual(0, self.geometry.detector_tensor)
 
     def test_detector_tensor_with_y_tilt_update(self):
-        _ = self.ifo.detector_tensor
+        _ = self.geometry.detector_tensor
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
-            self.ifo.yarm_tilt = 1
-            self.assertEqual(0, self.ifo.detector_tensor)
+            self.geometry.yarm_tilt = 1
+            self.assertEqual(0, self.geometry.detector_tensor)
 
     def test_detector_tensor_with_longitude_update(self):
         with mock.patch('numpy.einsum') as m:
             m.return_value = 1
-            self.ifo.longitude = 1
-            self.assertEqual(0, self.ifo.detector_tensor)
+            self.geometry.longitude = 1
+            self.assertEqual(0, self.geometry.detector_tensor)
 
     def test_detector_tensor_with_latitude_update(self):
         with mock.patch('numpy.einsum') as m:
-            _ = self.ifo.detector_tensor
+            _ = self.geometry.detector_tensor
+            m.return_value = 1
+            self.geometry.latitude = 1
+            self.assertEqual(self.geometry.detector_tensor, 0)
+
+    def test_unit_vector_along_arm_default(self):
+        with self.assertRaises(ValueError):
+            self.geometry.unit_vector_along_arm('z')
+
+    def test_unit_vector_along_arm_x(self):
+        with mock.patch('numpy.array') as m:
+            m.return_value = 1
+            self.geometry.xarm_tilt = 0
+            self.geometry.xarm_azimuth = 0
+            self.geometry.yarm_tilt = 0
+            self.geometry.yarm_azimuth = 90
+            self.assertAlmostEqual(self.geometry.unit_vector_along_arm('x'), 1)
+
+    def test_unit_vector_along_arm_y(self):
+        with mock.patch('numpy.array') as m:
             m.return_value = 1
-            self.ifo.latitude = 1
-            self.assertEqual(self.ifo.detector_tensor, 0)
+            self.geometry.xarm_tilt = 0
+            self.geometry.xarm_azimuth = 90
+            self.geometry.yarm_tilt = 0
+            self.geometry.yarm_azimuth = 180
+            self.assertAlmostEqual(self.geometry.unit_vector_along_arm('y'), -1)
+
+    def test_repr(self):
+        expected = 'InterferometerGeometry(length={}, latitude={}, longitude={}, elevation={}, xarm_azimuth={}, ' \
+                   'yarm_azimuth={}, xarm_tilt={}, yarm_tilt={})' \
+            .format(float(self.length), float(self.latitude), float(self.longitude), float(self.elevation),
+                    float(self.xarm_azimuth), float(self.yarm_azimuth), float(self.xarm_tilt), float(self.yarm_tilt))
+        self.assertEqual(expected, repr(self.geometry))
+
+
+class TestInterferometer(unittest.TestCase):
+
+    def setUp(self):
+        self.name = 'name'
+        self.power_spectral_density = bilby.gw.detector.PowerSpectralDensity.from_aligo()
+        self.minimum_frequency = 10
+        self.maximum_frequency = 20
+        self.length = 30
+        self.latitude = 1
+        self.longitude = 2
+        self.elevation = 3
+        self.xarm_azimuth = 4
+        self.yarm_azimuth = 5
+        self.xarm_tilt = 0.
+        self.yarm_tilt = 0.
+        # noinspection PyTypeChecker
+        self.ifo = bilby.gw.detector.Interferometer(name=self.name, power_spectral_density=self.power_spectral_density,
+                                                    minimum_frequency=self.minimum_frequency,
+                                                    maximum_frequency=self.maximum_frequency, length=self.length,
+                                                    latitude=self.latitude, longitude=self.longitude,
+                                                    elevation=self.elevation,
+                                                    xarm_azimuth=self.xarm_azimuth, yarm_azimuth=self.yarm_azimuth,
+                                                    xarm_tilt=self.xarm_tilt, yarm_tilt=self.yarm_tilt)
+        self.ifo.strain_data.set_from_frequency_domain_strain(
+            np.linspace(0, 4096, 4097), sampling_frequency=4096, duration=2)
+        self.outdir = 'outdir'
+        bilby.core.utils.check_directory_exists_and_if_not_mkdir(self.outdir)
+
+    def tearDown(self):
+        del self.name
+        del self.power_spectral_density
+        del self.minimum_frequency
+        del self.maximum_frequency
+        del self.length
+        del self.latitude
+        del self.longitude
+        del self.elevation
+        del self.xarm_azimuth
+        del self.yarm_azimuth
+        del self.xarm_tilt
+        del self.yarm_tilt
+        del self.ifo
+        rmtree(self.outdir)
+
+    def test_name_setting(self):
+        self.assertEqual(self.ifo.name, self.name)
+
+    def test_psd_setting(self):
+        self.assertEqual(self.ifo.power_spectral_density, self.power_spectral_density)
+
+    def test_min_freq_setting(self):
+        self.assertEqual(self.ifo.strain_data.minimum_frequency, self.minimum_frequency)
+
+    def test_max_freq_setting(self):
+        self.assertEqual(self.ifo.strain_data.maximum_frequency, self.maximum_frequency)
 
     def test_antenna_response_default(self):
         with mock.patch('bilby.gw.utils.get_polarization_tensor') as m:
@@ -261,15 +322,12 @@ class TestInterferometer(unittest.TestCase):
         self.ifo.epoch = 1
         self.minimum_frequency = 10
         self.maximum_frequency = 20
-        # self.ifo.frequency_array = np.array([8, 12, 16, 20, 24])
         plus = np.linspace(0, 4096, 4097)
         response = self.ifo.get_detector_response(
             waveform_polarizations=dict(plus=plus),
             parameters=dict(ra=0, dec=0, geocent_time=0, psi=0))
         expected_response = plus * self.ifo.frequency_mask * np.exp(-1j * 2 * np.pi * self.ifo.frequency_array)
-        self.assertTrue(np.allclose(abs(response),
-                                    abs(plus * self.ifo.frequency_mask * np.exp(
-                                        -1j * 2 * np.pi * self.ifo.frequency_array))))
+        self.assertTrue(np.allclose(abs(expected_response), abs(response)))
 
     def test_get_detector_response_multiple_modes(self):
         self.ifo.antenna_response = MagicMock(return_value=1)
@@ -277,7 +335,6 @@ class TestInterferometer(unittest.TestCase):
         self.ifo.epoch = 0
         self.minimum_frequency = 10
         self.maximum_frequency = 20
-        # self.ifo.frequency_array = np.array([8, 12, 16, 20, 24])
         plus = np.linspace(0, 4096, 4097)
         cross = np.linspace(0, 4096, 4097)
         response = self.ifo.get_detector_response(
@@ -289,28 +346,6 @@ class TestInterferometer(unittest.TestCase):
         with self.assertRaises(ValueError):
             self.ifo.inject_signal(injection_polarizations=None, parameters=None)
 
-    def test_unit_vector_along_arm_default(self):
-        with self.assertRaises(ValueError):
-            self.ifo.unit_vector_along_arm('z')
-
-    def test_unit_vector_along_arm_x(self):
-        with mock.patch('numpy.array') as m:
-            m.return_value = 1
-            self.ifo.xarm_tilt = 0
-            self.ifo.xarm_azimuth = 0
-            self.ifo.yarm_tilt = 0
-            self.ifo.yarm_azimuth = 90
-            self.assertAlmostEqual(self.ifo.unit_vector_along_arm('x'), 1)
-
-    def test_unit_vector_along_arm_y(self):
-        with mock.patch('numpy.array') as m:
-            m.return_value = 1
-            self.ifo.xarm_tilt = 0
-            self.ifo.xarm_azimuth = 90
-            self.ifo.yarm_tilt = 0
-            self.ifo.yarm_azimuth = 180
-            self.assertAlmostEqual(self.ifo.unit_vector_along_arm('y'), -1)
-
     def test_time_delay_from_geocenter(self):
         with mock.patch('bilby.gw.utils.time_delay_geocentric') as m:
             m.return_value = 1
diff --git a/test/gw_likelihood_test.py b/test/gw_likelihood_test.py
index a1584bdc7c45a902b6573bb14403f4f8a25806b9..8447b8fd6b0dcd57ba0df4dd76c2f28e6e83facc 100644
--- a/test/gw_likelihood_test.py
+++ b/test/gw_likelihood_test.py
@@ -497,6 +497,9 @@ class TestROQLikelihood(unittest.TestCase):
         fnodes_linear = np.load(fnodes_linear_file).T
         fnodes_quadratic_file = "{}/fnodes_quadratic.npy".format(roq_dir)
         fnodes_quadratic = np.load(fnodes_quadratic_file).T
+        self.linear_matrix_file = "{}/B_linear.npy".format(roq_dir)
+        self.quadratic_matrix_file = "{}/B_quadratic.npy".format(roq_dir)
+        self.params_file = "{}/params.dat".format(roq_dir)
 
         self.test_parameters = dict(
             mass_1=36.0, mass_2=36.0, a_1=0.0, a_2=0.0, tilt_1=0.0, tilt_2=0.0,
@@ -520,6 +523,8 @@ class TestROQLikelihood(unittest.TestCase):
         ifos.inject_signal(
             parameters=self.test_parameters, waveform_generator=non_roq_wfg)
 
+        self.ifos = ifos
+
         roq_wfg = bilby.gw.waveform_generator.WaveformGenerator(
             duration=self.duration, sampling_frequency=self.sampling_frequency,
             frequency_domain_source_model=bilby.gw.source.roq,
@@ -529,6 +534,8 @@ class TestROQLikelihood(unittest.TestCase):
                 reference_frequency=20., minimum_frequency=20.,
                 approximant='IMRPhenomPv2'))
 
+        self.roq_wfg = roq_wfg
+
         self.non_roq = bilby.gw.likelihood.GravitationalWaveTransient(
             interferometers=ifos, waveform_generator=non_roq_wfg)
 
@@ -548,7 +555,8 @@ class TestROQLikelihood(unittest.TestCase):
             phase_marginalization=True, priors=self.priors.copy())
 
     def tearDown(self):
-        pass
+        del (self.roq, self.non_roq, self.non_roq_phase, self.roq_phase,
+             self.ifos, self.priors)
 
     def test_matches_non_roq(self):
         self.non_roq.parameters.update(self.test_parameters)
@@ -573,6 +581,31 @@ class TestROQLikelihood(unittest.TestCase):
                 self.roq_phase.log_likelihood_ratio()) /
             self.non_roq_phase.log_likelihood_ratio(), 1e-3)
 
+    def test_create_roq_weights_with_params(self):
+        roq = bilby.gw.likelihood.ROQGravitationalWaveTransient(
+            interferometers=self.ifos, waveform_generator=self.roq_wfg,
+            linear_matrix=self.linear_matrix_file, roq_params=self.params_file,
+            quadratic_matrix=self.quadratic_matrix_file, priors=self.priors)
+        roq.parameters.update(self.test_parameters)
+        self.roq.parameters.update(self.test_parameters)
+        self.assertEqual(
+            roq.log_likelihood_ratio(), self.roq.log_likelihood_ratio())
+
+    def test_create_roq_weights_frequency_mismatch_works_with_params(self):
+        self.ifos[0].maximum_frequency = self.ifos[0].maximum_frequency / 2
+        _ = bilby.gw.likelihood.ROQGravitationalWaveTransient(
+            interferometers=self.ifos, waveform_generator=self.roq_wfg,
+            linear_matrix=self.linear_matrix_file, roq_params=self.params_file,
+            quadratic_matrix=self.quadratic_matrix_file, priors=self.priors)
+
+    def test_create_roq_weights_frequency_mismatch_fails_without_params(self):
+        self.ifos[0].maximum_frequency = self.ifos[0].maximum_frequency / 2
+        with self.assertRaises(ValueError):
+            _ = bilby.gw.likelihood.ROQGravitationalWaveTransient(
+                interferometers=self.ifos, waveform_generator=self.roq_wfg,
+                linear_matrix=self.linear_matrix_file,
+                quadratic_matrix=self.quadratic_matrix_file, priors=self.priors)
+
 
 class TestBBHLikelihoodSetUp(unittest.TestCase):