diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d3b5a4ce731ae10b1b541867272a3fecd6b99e52..57415e7d6beede82dd272225360a995ce3d9459f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,14 +20,14 @@ stages:
 # Check author list is up to date
 authors:
   stage: initial
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
   script:
     - python test/check_author_list.py
 
 # Test containers scripts are up to date
 containers:
   stage: initial
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
   script:
     - cd containers
     - python write_dockerfiles.py #HACK
@@ -63,143 +63,115 @@ containers:
           ${script} --help;
       done
 
-# Test basic setup on python 3.7
-basic-3.7:
-  <<: *test-python
-  image: python:3.7
-
-# Test basic setup on python 3.8
 basic-3.8:
   <<: *test-python
   image: python:3.8
 
-# Test basic setup on python 3.9
 basic-3.9:
   <<: *test-python
   image: python:3.9
 
+basic-3.10:
+  <<: *test-python
+  image: python:3.10
 
-precommits-py3.7:
+.precommits: &precommits
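+  # Shared template: jobs extending this anchor set PYVERSION (the conda env to
+  # activate) and CACHE_DIR (the pip cache directory) via `variables`.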
   stage: initial
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
   script:
-    - source activate python37
-    - mkdir -p .pip37
+    - source activate $PYVERSION
+    - mkdir -p $CACHE_DIR
     - pip install --upgrade pip
-    - pip --cache-dir=.pip37 install --upgrade bilby
-    - pip --cache-dir=.pip37 install .
-    - pip --cache-dir=.pip37 install pre-commit
+    - pip --cache-dir=$CACHE_DIR install --upgrade bilby
+    - pip --cache-dir=$CACHE_DIR install .
+    - pip --cache-dir=$CACHE_DIR install pre-commit
     # Run precommits (flake8, spellcheck, isort, no merge conflicts, etc)
     - pre-commit run --all-files --verbose --show-diff-on-failure
 
 precommits-py3.8:
-  stage: initial
+  <<: *precommits
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
-  script:
-    - source activate python38
-    - mkdir -p .pip38
-    - pip install --upgrade pip
-    - pip --cache-dir=.pip38 install --upgrade bilby
-    - pip --cache-dir=.pip38 install .
-    - pip --cache-dir=.pip38 install pre-commit
-    # Run precommits (flake8, spellcheck, isort, no merge conflicts, etc)
-    - pre-commit run --all-files --verbose --show-diff-on-failure
+  variables:
+    CACHE_DIR: ".pip38"
+    PYVERSION: "python38"
 
 precommits-py3.9:
-  stage: initial
+  <<: *precommits
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
-  script:
-    - source activate python39
-    - mkdir -p .pip38
-    - pip install --upgrade pip
-    - pip --cache-dir=.pip39 install --upgrade bilby
-    - pip --cache-dir=.pip39 install .
-    - pip --cache-dir=.pip39 install pre-commit
-    # Run precommits (flake8, spellcheck, isort, no merge conflicts, etc)
-    - pre-commit run --all-files --verbose --show-diff-on-failure
+  variables:
+    CACHE_DIR: ".pip39"
+    PYVERSION: "python39"
+
+# FIXME: uncomment once the python 3.10 image builds successfully.
+#precommits-py3.10:
+#  <<: *precommits
+#  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
+#  variables:
+#    CACHE_DIR: ".pip310"
+#    PYVERSION: "python310"
 
 # ------------------- Test stage -------------------------------------------
 
-# test example on python 3.7 and build coverage
-python-3.7:
+.unit-tests: &unit-test
   stage: test
-  needs: ["basic-3.7", "precommits-py3.7"]
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
   script:
     - python -m pip install .
     - python -m pip list installed
 
-    # Run pyflakes
-    - flake8 .
-
-    # Run tests and collect coverage data
     - pytest --cov=bilby --durations 10
-    - coverage html
-    - coverage-badge -o coverage_badge.svg -f
 
-  artifacts:
-    paths:
-      - coverage_badge.svg
-      - htmlcov/
-
-# test example on python 3.8
 python-3.8:
-  stage: test
+  <<: *unit-test
   needs: ["basic-3.8", "precommits-py3.8"]
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
-  script:
-    - python -m pip install .
-    - python -m pip list installed
-    - pytest
 
-# test example on python 3.9
 python-3.9:
-  stage: test
+  <<: *unit-test
   needs: ["basic-3.9", "precommits-py3.9"]
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
-  script:
-    - python -m pip install .
-    - python -m pip list installed
-    - pytest
+  after_script:
+    - coverage html
+    - coverage xml
+  artifacts:
+    reports:
+      cobertura: coverage.xml
+    paths:
+      - htmlcov/
+    expire_in: 30 days
 
+# add back when 3.10 image is available
+#python-3.10:
+#  <<: *unit-test
+#  needs: ["basic-3.10", "precommits-py3.10"]
+#  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
 
-# test samplers on python 3.7
-python-3.7-samplers:
+.test-sampler: &test-sampler
   stage: test
-  needs: ["basic-3.7", "precommits-py3.7"]
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
   script:
     - python -m pip install .
     - python -m pip list installed
 
     - pytest test/integration/sampler_run_test.py --durations 10
 
-# test samplers on python 3.8
 python-3.8-samplers:
-  stage: test
+  <<: *test-sampler
   needs: ["basic-3.8", "precommits-py3.8"]
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
-  script:
-    - python -m pip install .
-    - python -m pip list installed
-
-    - pytest test/integration/sampler_run_test.py --durations 10
 
-# test samplers on python 3.9
 python-3.9-samplers:
-  stage: test
+  <<: *test-sampler
   needs: ["basic-3.9", "precommits-py3.9"]
   image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
-  script:
-    - python -m pip install .
-    - python -m pip list installed
 
-    - pytest test/integration/sampler_run_test.py --durations 10
+# add back when 3.10 image is available
+#python-3.10-samplers:
+#  <<: *test-sampler
+#  needs: ["basic-3.10", "precommits-py3.10"]
+#  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
 
-integration-tests-python-3.7:
+integration-tests-python-3.9:
   stage: test
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
-  needs: ["basic-3.7", "precommits-py3.7"]
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
+  needs: ["basic-3.9", "precommits-py3.9"]
   only:
     - schedules
   script:
@@ -208,38 +180,37 @@ integration-tests-python-3.7:
     # Run tests which are only done on schedule
     - pytest test/integration/example_test.py
 
-plotting-python-3.7:
+.plotting: &plotting
   stage: test
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
-  needs: ["basic-3.7", "precommits-py3.7"]
   only:
     - schedules
   script:
     - python -m pip install .
-    - python -m pip install "ligo.skymap>=0.5.3"
     - python -m pip list installed
     - pytest test/gw/plot_test.py
 
 
+plotting-python-3.8:
+  <<: *plotting
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python38
+  needs: ["basic-3.8", "precommits-py3.8"]
+
 plotting-python-3.9:
-  stage: test
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
+  <<: *plotting
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
   needs: ["basic-3.9", "precommits-py3.9"]
-  only:
-    - schedules
-  script:
-    - python -m pip install .
-    - python -m pip install "ligo.skymap>=0.5.3"
-    - python -m pip list installed
-    - pytest test/gw/plot_test.py
 
+# add back when 3.10 image is available
+#plotting-python-3.10:
+#  <<: *plotting
+#  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python310
+#  needs: ["basic-3.10", "precommits-py3.10"]
 
 # ------------------- Docs stage -------------------------------------------
 
 docs:
   stage: docs
-  needs: ["python-3.7"]
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
   script:
     # Make the documentation
     - apt-get -yqq install pandoc
@@ -262,11 +233,10 @@ docs:
 
 deploy-docs:
   stage: deploy
-  needs: ["docs", "python-3.7"]
+  needs: ["docs", "python-3.9"]
   script:
     - mkdir public/
     - mv htmlcov/ public/
-    - mv coverage_badge.svg public/
     - mv docs/_build/html/* public/
   artifacts:
     paths:
@@ -275,49 +245,38 @@ deploy-docs:
   only:
     - master
 
-# Build the containers
-build-python37-container:
+.build-container: &build-container
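+  # Shared template: jobs set PYVERSION via `variables` to select the dockerfile
+  # and the image tag that is built and pushed.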
   stage: deploy
-  image: docker:19.03.12
+  image: docker:20.10.12
   needs: ["containers"]
   only:
     - schedules
   script:
     - cd containers
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - docker build --tag v3-bilby-python37 - < v3-dockerfile-test-suite-python37
-    - docker image tag v3-bilby-python37 containers.ligo.org/lscsoft/bilby/v2-bilby-python37:latest
-    - docker image push containers.ligo.org/lscsoft/bilby/v2-bilby-python37:latest
+    - docker build --tag v3-bilby-$PYVERSION - < v3-dockerfile-test-suite-$PYVERSION
+    - docker image tag v3-bilby-$PYVERSION containers.ligo.org/lscsoft/bilby/v2-bilby-$PYVERSION:latest
+    - docker image push containers.ligo.org/lscsoft/bilby/v2-bilby-$PYVERSION:latest
 
 build-python38-container:
-  stage: deploy
-  image: docker:19.03.12
-  needs: ["containers"]
-  only:
-    - schedules
-  script:
-    - cd containers
-    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - docker build --tag v3-bilby-python38 - < v3-dockerfile-test-suite-python38
-    - docker image tag v3-bilby-python38 containers.ligo.org/lscsoft/bilby/v2-bilby-python38:latest
-    - docker image push containers.ligo.org/lscsoft/bilby/v2-bilby-python38:latest
+  <<: *build-container
+  variables:
+    PYVERSION: "python38"
 
 build-python39-container:
-  stage: deploy
-  image: docker:19.03.12
-  needs: ["containers"]
-  only:
-    - schedules
-  script:
-    - cd containers
-    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-    - docker build --tag v3-bilby-python39 - < v3-dockerfile-test-suite-python39
-    - docker image tag v3-bilby-python39 containers.ligo.org/lscsoft/bilby/v2-bilby-python39:latest
-    - docker image push containers.ligo.org/lscsoft/bilby/v2-bilby-python39:latest
+  <<: *build-container
+  variables:
+    PYVERSION: "python39"
+
+# add back when 3.10 image is available
+#build-python310-container:
+#  <<: *build-container
+#  variables:
+#    PYVERSION: "python310"
 
 pypi-release:
   stage: deploy
-  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python37
+  image: containers.ligo.org/lscsoft/bilby/v2-bilby-python39
   variables:
     TWINE_USERNAME: $PYPI_USERNAME
     TWINE_PASSWORD: $PYPI_PASSWORD
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 959bc977f86c1bd11f9831084017e60d50e6def3..3672d0fc055fadb660847992dbfa519a1b8f9d37 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,13 +5,13 @@ repos:
     -   id: check-merge-conflict # prevent committing files with merge conflicts
     -   id: flake8 # checks for flake8 errors
 -   repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: 21.12b0
     hooks:
       - id: black
         language_version: python3
-        files: '(^bilby/bilby_mcmc/|^examples/core_examples/)'
+        files: '(^bilby/bilby_mcmc/|^examples/core_examples/|examples/gw_examples/data_examples)'
 -   repo: https://github.com/codespell-project/codespell
-    rev: v1.16.0
+    rev: v2.1.0
     hooks:
     -   id: codespell
         args: [--ignore-words=.dictionary.txt]
@@ -20,4 +20,4 @@ repos:
     hooks:
     -   id: isort # sort imports alphabetically and separates import into sections
         args: [-w=88, -m=3, -tc, -sp=setup.cfg ]
-        files: '(^bilby/bilby_mcmc/|^examples/core_examples/)'
+        files: '(^bilby/bilby_mcmc/|^examples/core_examples/|examples/gw_examples/data_examples)'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 16ccafc5e7bd46d33b7223b16df5a663db1ce852..73cdbefb1a01c4f877ee657fa6f3c4fc668162e4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
 # All notable changes will be documented in this file
 
+## [1.1.5] 2022-01-14
+Version 1.1.5 release of bilby
+
+### Added
+- Option to enforce that a GW signal fits into the segment duration (!1041)
+- Remove saving the `.dat` samples file with `dynesty` (!1028)
+- Catch corrupted `json` result files being passed (!1034)
+
+### Changes
+- Fixes to conversion function for `nessai` and `cpnest` (!1055)
+- Workaround for `astropy` v5 (!1054)
+- Various fixes to the testing system (!1038, !1044, !1045, !1048)
+- Updated defaults for `nessai` (!1042)
+- Small bug fixes (!1032, !1036, !1039, !1046, !1052)
+- Bug fix in the definition of some standard interferometers (!1037)
+- Improvements to the multi-banded GWT likelihood (!1026)
+- Improve meta data comparison (!1035)
+
 ## [1.1.4] 2021-10-08
 Version 1.1.4 release of bilby
 
diff --git a/README.rst b/README.rst
index bddce5f89f78ff19960a31d6ca9c07b89943b6b9..37b98613e0fb68572f378cfb9340b53856b3ac93 100644
--- a/README.rst
+++ b/README.rst
@@ -73,7 +73,7 @@ as requested in their associated documentation.
 
 .. |pipeline status| image:: https://git.ligo.org/lscsoft/bilby/badges/master/pipeline.svg
    :target: https://git.ligo.org/lscsoft/bilby/commits/master
-.. |coverage report| image:: https://lscsoft.docs.ligo.org/bilby/coverage_badge.svg
+.. |coverage report| image:: https://git.ligo.org/lscsoft/bilby/badges/master/coverage.svg
    :target: https://lscsoft.docs.ligo.org/bilby/htmlcov/
 .. |pypi| image:: https://badge.fury.io/py/bilby.svg
    :target: https://pypi.org/project/bilby/
diff --git a/bilby/bilby_mcmc/chain.py b/bilby/bilby_mcmc/chain.py
index 358265e2cf26f798f996ba3b966fb2ef5371ae59..8969d353623bd9570c30ca0829570dbc16f793e9 100644
--- a/bilby/bilby_mcmc/chain.py
+++ b/bilby/bilby_mcmc/chain.py
@@ -253,7 +253,7 @@ class Chain(object):
 
     @property
     def tau(self):
-        """ The maximum ACT over all parameters """
+        """The maximum ACT over all parameters"""
 
         if self.position in self.max_tau_dict:
             # If we have the ACT at the current position, return it
@@ -272,7 +272,7 @@ class Chain(object):
 
     @property
     def tau_nocache(self):
-        """ Calculate tau forcing a recalculation (no cached tau) """
+        """Calculate tau forcing a recalculation (no cached tau)"""
         tau = max(self.tau_dict.values())
         self.max_tau_dict[self.position] = tau
         self.cached_tau_count = 0
@@ -280,7 +280,7 @@ class Chain(object):
 
     @property
     def tau_last(self):
-        """ Return the last-calculated tau if it exists, else inf """
+        """Return the last-calculated tau if it exists, else inf"""
         if len(self.max_tau_dict) > 0:
             return list(self.max_tau_dict.values())[-1]
         else:
@@ -288,7 +288,7 @@ class Chain(object):
 
     @property
     def _tau_for_full_chain(self):
-        """ The maximum ACT over all parameters """
+        """The maximum ACT over all parameters"""
         return max(self._tau_dict_for_full_chain.values())
 
     @property
@@ -297,11 +297,11 @@ class Chain(object):
 
     @property
     def tau_dict(self):
-        """ Calculate a dictionary of tau (ACT) for every parameter """
+        """Calculate a dictionary of tau (ACT) for every parameter"""
         return self._calculate_tau_dict(self.minimum_index)
 
     def _calculate_tau_dict(self, minimum_index):
-        """ Calculate a dictionary of tau (ACT) for every parameter """
+        """Calculate a dictionary of tau (ACT) for every parameter"""
         logger.debug(f"Calculating tau_dict {self}")
 
         # If there are too few samples to calculate tau
diff --git a/bilby/bilby_mcmc/proposals.py b/bilby/bilby_mcmc/proposals.py
index 99daa824fb8390e26446e73c8d4ff667bc65a564..2c4ffe4ae55ff27ce01baea2d3a14b59b5f4a9bf 100644
--- a/bilby/bilby_mcmc/proposals.py
+++ b/bilby/bilby_mcmc/proposals.py
@@ -930,7 +930,7 @@ def _stretch_move(sample, complement, scale, ndim, parameters):
 
 
 class EnsembleProposal(BaseProposal):
-    """ Base EnsembleProposal class for ensemble-based swap proposals """
+    """Base EnsembleProposal class for ensemble-based swap proposals"""
 
     def __init__(self, priors, weight=1):
         super(EnsembleProposal, self).__init__(priors, weight)
diff --git a/bilby/bilby_mcmc/sampler.py b/bilby/bilby_mcmc/sampler.py
index 767e3a65085fb40235e9eebfe0002a9d83ad39a3..a1446e4bed4c3f2cb78c233e1273f54e8bc6fa10 100644
--- a/bilby/bilby_mcmc/sampler.py
+++ b/bilby/bilby_mcmc/sampler.py
@@ -653,7 +653,7 @@ class BilbyPTMCMCSampler(object):
 
     @property
     def sampler_list(self):
-        """ A list of all individual samplers """
+        """A list of all individual samplers"""
         return [s for item in self.sampler_dictionary.values() for s in item]
 
     @sampler_list.setter
diff --git a/bilby/gw/conversion.py b/bilby/gw/conversion.py
index 9c516c737d2e23db26b0d30c5507d87c95d17655..2c236e2a40aeeb122373a8f63746433a99f5e4f0 100644
--- a/bilby/gw/conversion.py
+++ b/bilby/gw/conversion.py
@@ -174,7 +174,7 @@ def convert_to_lal_binary_black_hole_parameters(parameters):
                 chirp_mass_and_total_mass_to_symmetric_mass_ratio(
                     converted_parameters['chirp_mass'],
                     converted_parameters['total_mass'])
-        if 'symmetric_mass_ratio' in converted_parameters.keys():
+        if 'symmetric_mass_ratio' in converted_parameters.keys() and "mass_ratio" not in converted_parameters:
             converted_parameters['mass_ratio'] =\
                 symmetric_mass_ratio_to_mass_ratio(
                     converted_parameters['symmetric_mass_ratio'])
diff --git a/bilby/gw/detector/interferometer.py b/bilby/gw/detector/interferometer.py
index 874d213dfcbefc87abe1a22e4341a5b2d4838989..8447c16eff482fa3a1abb00d4e5bfd0ecd52a354 100644
--- a/bilby/gw/detector/interferometer.py
+++ b/bilby/gw/detector/interferometer.py
@@ -1,5 +1,4 @@
 import os
-import sys
 
 import numpy as np
 
@@ -778,9 +777,6 @@ class Interferometer(object):
     ))
     def to_hdf5(self, outdir='outdir', label=None):
         import deepdish
-        if sys.version_info[0] < 3:
-            raise NotImplementedError('Pickling of Interferometer is not supported in Python 2.'
-                                      'Use Python 3 instead.')
         if label is None:
             label = self.name
         utils.check_directory_exists_and_if_not_mkdir('outdir')
@@ -795,9 +791,6 @@ class Interferometer(object):
     @docstring(_load_docstring.format(format="hdf5"))
     def from_hdf5(cls, filename=None):
         import deepdish
-        if sys.version_info[0] < 3:
-            raise NotImplementedError('Pickling of Interferometer is not supported in Python 2.'
-                                      'Use Python 3 instead.')
 
         res = deepdish.io.load(filename)
         if res.__class__ != cls:
diff --git a/bilby/gw/detector/networks.py b/bilby/gw/detector/networks.py
index bc1067ff94b12ff0a15bccc6c7773f3fa6c6cd37..db0d4743b148338b49590a59c067e9f3f2cc97d9 100644
--- a/bilby/gw/detector/networks.py
+++ b/bilby/gw/detector/networks.py
@@ -1,5 +1,4 @@
 import os
-import sys
 
 import numpy as np
 import math
@@ -276,11 +275,6 @@ class InterferometerList(list):
     def to_hdf5(self, outdir="outdir", label="ifo_list"):
         import deepdish
 
-        if sys.version_info[0] < 3:
-            raise NotImplementedError(
-                "Pickling of InterferometerList is not supported in Python 2."
-                "Use Python 3 instead."
-            )
         label = label + "_" + "".join(ifo.name for ifo in self)
         utils.check_directory_exists_and_if_not_mkdir(outdir)
         try:
@@ -296,11 +290,6 @@ class InterferometerList(list):
     def from_hdf5(cls, filename=None):
         import deepdish
 
-        if sys.version_info[0] < 3:
-            raise NotImplementedError(
-                "Pickling of InterferometerList is not supported in Python 2."
-                "Use Python 3 instead."
-            )
         res = deepdish.io.load(filename)
         if res.__class__ == list:
             res = cls(res)
diff --git a/bilby/gw/likelihood/base.py b/bilby/gw/likelihood/base.py
index b468ff67ed793a9644288b08d5b0b0758d37ca49..55845c09372222b48f83e15d862dbd95c9f13ae8 100644
--- a/bilby/gw/likelihood/base.py
+++ b/bilby/gw/likelihood/base.py
@@ -378,9 +378,7 @@ class GravitationalWaveTransient(Likelihood):
         elif self.time_marginalization:
             if self.jitter_time:
                 self.parameters['geocent_time'] += self.parameters['time_jitter']
-            d_inner_h_array = np.zeros(
-                len(self.interferometers.frequency_array[0:-1]),
-                dtype=np.complex128)
+            d_inner_h_array = np.zeros(len(self._times), dtype=np.complex128)
 
         elif self.calibration_marginalization:
             d_inner_h_array = np.zeros(self.number_of_response_curves, dtype=np.complex128)
diff --git a/bilby/gw/likelihood/roq.py b/bilby/gw/likelihood/roq.py
index 5d459082a1bee60066cdd56a289c9d6e51eb2d47..ce3e679f528c3ea07254d1a75286a74c01198fb9 100644
--- a/bilby/gw/likelihood/roq.py
+++ b/bilby/gw/likelihood/roq.py
@@ -44,6 +44,20 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
         A dictionary of priors containing at least the geocent_time prior
         Warning: when using marginalisation the dict is overwritten which will change the
         dict you are passing in.
+    time_marginalization: bool, optional
+        If true, marginalize over time in the likelihood.
+        The spacing of time samples can be specified through delta_tc.
+        If using time marginalisation and jitter_time is True, a "jitter"
+        parameter is added to the prior which modifies the position of the
+        grid of times.
+    jitter_time: bool, optional
+        Whether to introduce a `time_jitter` parameter. This avoids either
+        missing the likelihood peak, or introducing biases in the
+        reconstructed time posterior due to an insufficient sampling frequency.
+        Default is True; using this parameter is strongly encouraged.
+    delta_tc: float, optional
+        The spacing of time samples for time marginalization. If not specified,
+        it is determined based on the signal-to-noise ratio of the signal.
     distance_marginalization_lookup_table: (dict, str), optional
         If a dict, dictionary containing the lookup_table, distance_array,
         (distance) prior_array, and reference_distance used to construct
@@ -71,18 +85,20 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
             weights=None, linear_matrix=None, quadratic_matrix=None,
             roq_params=None, roq_params_check=True, roq_scale_factor=1,
             distance_marginalization=False, phase_marginalization=False,
+            time_marginalization=False, jitter_time=True, delta_tc=None,
             distance_marginalization_lookup_table=None,
             reference_frame="sky", time_reference="geocenter"
 
     ):
+        self._delta_tc = delta_tc
         super(ROQGravitationalWaveTransient, self).__init__(
             interferometers=interferometers,
             waveform_generator=waveform_generator, priors=priors,
             distance_marginalization=distance_marginalization,
             phase_marginalization=phase_marginalization,
-            time_marginalization=False,
+            time_marginalization=time_marginalization,
             distance_marginalization_lookup_table=distance_marginalization_lookup_table,
-            jitter_time=False,
+            jitter_time=jitter_time,
             reference_frame=reference_frame,
             time_reference=time_reference
         )
@@ -117,6 +133,19 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
         self.frequency_nodes_quadratic = \
             waveform_generator.waveform_arguments['frequency_nodes_quadratic']
 
+    def _setup_time_marginalization(self):
+        if self._delta_tc is None:
+            self._delta_tc = self._get_time_resolution()
+        tcmin = self.priors['geocent_time'].minimum
+        tcmax = self.priors['geocent_time'].maximum
+        number_of_time_samples = int(np.ceil((tcmax - tcmin) / self._delta_tc))
+        # adjust delta tc so that the last time sample has an equal weight
+        self._delta_tc = (tcmax - tcmin) / number_of_time_samples
+        logger.info(
+            "delta tc for time marginalization = {} seconds.".format(self._delta_tc))
+        self._times = tcmin + self._delta_tc / 2. + np.arange(number_of_time_samples) * self._delta_tc
+        self._beam_pattern_reference_time = (tcmin + tcmax) / 2.
+
     def calculate_snrs(self, waveform_polarizations, interferometer):
         """
         Compute the snrs for ROQ
@@ -128,18 +157,21 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
 
         """
 
-        f_plus = interferometer.antenna_response(
-            self.parameters['ra'], self.parameters['dec'],
-            self.parameters['geocent_time'], self.parameters['psi'], 'plus')
-        f_cross = interferometer.antenna_response(
-            self.parameters['ra'], self.parameters['dec'],
-            self.parameters['geocent_time'], self.parameters['psi'], 'cross')
-
-        dt = interferometer.time_delay_from_geocenter(
-            self.parameters['ra'], self.parameters['dec'],
-            self.parameters['geocent_time'])
-        dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
-        ifo_time = dt_geocent + dt
+        if self.time_marginalization:
+            time_ref = self._beam_pattern_reference_time
+        else:
+            time_ref = self.parameters['geocent_time']
+
+        h_linear = np.zeros(len(self.frequency_nodes_linear), dtype=complex)
+        h_quadratic = np.zeros(len(self.frequency_nodes_quadratic), dtype=complex)
+        for mode in waveform_polarizations['linear']:
+            response = interferometer.antenna_response(
+                self.parameters['ra'], self.parameters['dec'],
+                self.parameters['geocent_time'], self.parameters['psi'],
+                mode
+            )
+            h_linear += waveform_polarizations['linear'][mode] * response
+            h_quadratic += waveform_polarizations['quadratic'][mode] * response
 
         calib_linear = interferometer.calibration_model.get_calibration_factor(
             self.frequency_nodes_linear,
@@ -148,46 +180,56 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
             self.frequency_nodes_quadratic,
             prefix='recalib_{}_'.format(interferometer.name), **self.parameters)
 
-        h_plus_linear = f_plus * waveform_polarizations['linear']['plus'] * calib_linear
-        h_cross_linear = f_cross * waveform_polarizations['linear']['cross'] * calib_linear
-        h_plus_quadratic = (
-            f_plus * waveform_polarizations['quadratic']['plus'] * calib_quadratic
-        )
-        h_cross_quadratic = (
-            f_cross * waveform_polarizations['quadratic']['cross'] * calib_quadratic
-        )
+        h_linear *= calib_linear
+        h_quadratic *= calib_quadratic
 
-        indices, in_bounds = self._closest_time_indices(
-            ifo_time, self.weights['time_samples'])
-        if not in_bounds:
-            logger.debug("SNR calculation error: requested time at edge of ROQ time samples")
-            return self._CalculatedSNRs(
-                d_inner_h=np.nan_to_num(-np.inf), optimal_snr_squared=0,
-                complex_matched_filter_snr=np.nan_to_num(-np.inf),
-                d_inner_h_squared_tc_array=None,
-                d_inner_h_array=None,
-                optimal_snr_squared_array=None)
+        optimal_snr_squared = \
+            np.vdot(np.abs(h_quadratic)**2, self.weights[interferometer.name + '_quadratic'])
+
+        dt = interferometer.time_delay_from_geocenter(
+            self.parameters['ra'], self.parameters['dec'], time_ref)
 
-        d_inner_h_tc_array = np.einsum(
-            'i,ji->j', np.conjugate(h_plus_linear + h_cross_linear),
-            self.weights[interferometer.name + '_linear'][indices])
+        if not self.time_marginalization:
+            dt_geocent = self.parameters['geocent_time'] - interferometer.strain_data.start_time
+            ifo_time = dt_geocent + dt
 
-        d_inner_h = self._interp_five_samples(
-            self.weights['time_samples'][indices], d_inner_h_tc_array, ifo_time)
+            indices, in_bounds = self._closest_time_indices(
+                ifo_time, self.weights['time_samples'])
+            if not in_bounds:
+                logger.debug("SNR calculation error: requested time at edge of ROQ time samples")
+                return self._CalculatedSNRs(
+                    d_inner_h=np.nan_to_num(-np.inf), optimal_snr_squared=0,
+                    complex_matched_filter_snr=np.nan_to_num(-np.inf),
+                    d_inner_h_squared_tc_array=None,
+                    d_inner_h_array=None,
+                    optimal_snr_squared_array=None)
 
-        optimal_snr_squared = \
-            np.vdot(np.abs(h_plus_quadratic + h_cross_quadratic)**2,
-                    self.weights[interferometer.name + '_quadratic'])
+            d_inner_h_tc_array = np.einsum(
+                'i,ji->j', np.conjugate(h_linear),
+                self.weights[interferometer.name + '_linear'][indices])
+
+            d_inner_h = self._interp_five_samples(
+                self.weights['time_samples'][indices], d_inner_h_tc_array, ifo_time)
+
+            with np.errstate(invalid="ignore"):
+                complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
+
+            d_inner_h_array = None
 
-        with np.errstate(invalid="ignore"):
-            complex_matched_filter_snr = d_inner_h / (optimal_snr_squared**0.5)
-        d_inner_h_squared_tc_array = None
+        else:
+            ifo_times = self._times - interferometer.strain_data.start_time + dt
+            if self.jitter_time:
+                ifo_times += self.parameters['time_jitter']
+            d_inner_h_array = self._calculate_d_inner_h_array(ifo_times, h_linear, interferometer.name)
+
+            d_inner_h = 0.
+            complex_matched_filter_snr = 0.
 
         return self._CalculatedSNRs(
             d_inner_h=d_inner_h, optimal_snr_squared=optimal_snr_squared,
             complex_matched_filter_snr=complex_matched_filter_snr,
-            d_inner_h_squared_tc_array=d_inner_h_squared_tc_array,
-            d_inner_h_array=None,
+            d_inner_h_squared_tc_array=None,
+            d_inner_h_array=d_inner_h_array,
             optimal_snr_squared_array=None)
 
     @staticmethod
@@ -242,6 +284,53 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient):
         d = (b**3. - b) / 6.
         return a * values[2] + b * values[3] + c * r1 + d * r2
 
+    def _calculate_d_inner_h_array(self, times, h_linear, ifo_name):
+        """
+        Calculate d_inner_h at regularly-spaced time samples. Each value is
+        interpolated from the nearest 5 samples with the algorithm explained in
+        https://dcc.ligo.org/T2100224.
+
+        Parameters
+        ==========
+        times: array-like
+            Regularly-spaced time samples at which d_inner_h are calculated.
+        h_linear: array-like
+            Waveforms at linear frequency nodes
+        ifo_name: str
+
+        Returns
+        =======
+        d_inner_h_array: array-like
+        """
+        roq_time_space = self.weights['time_samples'][1] - self.weights['time_samples'][0]
+        times_per_roq_time_space = (times - self.weights['time_samples'][0]) / roq_time_space
+        closest_idxs = np.floor(times_per_roq_time_space).astype(int)
+        # Get the nearest 5 samples of d_inner_h. Calculate only the required d_inner_h values if the time
+        # spacing is larger than 5 times the ROQ time spacing.
+        weights_linear = self.weights[ifo_name + '_linear']
+        h_linear_conj = np.conjugate(h_linear)
+        if (times[1] - times[0]) / roq_time_space > 5:
+            d_inner_h_m2 = np.dot(weights_linear[closest_idxs - 2], h_linear_conj)
+            d_inner_h_m1 = np.dot(weights_linear[closest_idxs - 1], h_linear_conj)
+            d_inner_h_0 = np.dot(weights_linear[closest_idxs], h_linear_conj)
+            d_inner_h_p1 = np.dot(weights_linear[closest_idxs + 1], h_linear_conj)
+            d_inner_h_p2 = np.dot(weights_linear[closest_idxs + 2], h_linear_conj)
+        else:
+            d_inner_h_at_roq_time_samples = np.dot(weights_linear, h_linear_conj)
+            d_inner_h_m2 = d_inner_h_at_roq_time_samples[closest_idxs - 2]
+            d_inner_h_m1 = d_inner_h_at_roq_time_samples[closest_idxs - 1]
+            d_inner_h_0 = d_inner_h_at_roq_time_samples[closest_idxs]
+            d_inner_h_p1 = d_inner_h_at_roq_time_samples[closest_idxs + 1]
+            d_inner_h_p2 = d_inner_h_at_roq_time_samples[closest_idxs + 2]
+        # quantities required for spline interpolation
+        b = times_per_roq_time_space - closest_idxs
+        a = 1. - b
+        c = (a**3. - a) / 6.
+        d = (b**3. - b) / 6.
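+        # combine the five neighbouring samples into the two correction terms
+        # (r1, r2) entering the cubic interpolation below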
+        r1 = (-d_inner_h_m2 + 8. * d_inner_h_m1 - 14. * d_inner_h_0 + 8. * d_inner_h_p1 - d_inner_h_p2) / 4.
+        r2 = d_inner_h_0 - 2. * d_inner_h_p1 + d_inner_h_p2
+        return a * d_inner_h_0 + b * d_inner_h_p1 + c * r1 + d * r2
+
     def perform_roq_params_check(self, ifo=None):
         """ Perform checking that the prior and data are valid for the ROQ
 
diff --git a/containers/dockerfile-template b/containers/dockerfile-template
index fc74e7a08d623cbf6d82957f5ce3466c5b4f1e6a..939ba29ca01b0f263f4c4c2b4e25dceb7533201f 100644
--- a/containers/dockerfile-template
+++ b/containers/dockerfile-template
@@ -14,7 +14,7 @@ RUN conda info
 RUN python --version
 
 # Install conda-installable programs
-RUN conda install -n ${{conda_env}} -y matplotlib numpy scipy pandas astropy flake8 mock
+RUN conda install -n ${{conda_env}} -y matplotlib numpy scipy pandas astropy flake8
 RUN conda install -n ${{conda_env}} -c anaconda coverage configargparse future
 RUN conda install -n ${{conda_env}} -c conda-forge black pytest-cov deepdish arviz
 
diff --git a/containers/v3-dockerfile-test-suite-python37 b/containers/v3-dockerfile-test-suite-python37
deleted file mode 100644
index bb4d1996639deb30d1975f6aa9d958b4068fe959..0000000000000000000000000000000000000000
--- a/containers/v3-dockerfile-test-suite-python37
+++ /dev/null
@@ -1,66 +0,0 @@
-# This dockerfile is written automatically and should not be modified by hand.
-
-FROM containers.ligo.org/docker/base:conda
-LABEL name="bilby CI testing" \
-maintainer="Gregory Ashton <gregory.ashton@ligo.org>"
-
-RUN conda update -n base -c defaults conda
-
-ENV conda_env python37
-
-RUN conda create -n ${conda_env} python=3.7
-RUN echo "source activate ${conda_env}" > ~/.bashrc
-ENV PATH /opt/conda/envs/${conda_env}/bin:$PATH
-RUN /bin/bash -c "source activate ${conda_env}"
-RUN conda info
-RUN python --version
-
-# Install conda-installable programs
-RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8 mock
-RUN conda install -n ${conda_env} -c anaconda coverage configargparse future
-RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
-
-# Install pip-requirements
-RUN pip install --upgrade pip
-RUN pip install --upgrade setuptools coverage-badge parameterized
-
-# Install documentation requirements
-RUN pip install sphinx numpydoc nbsphinx sphinx_rtd_theme sphinx-tabs autodoc
-
-# Install dependencies and samplers
-RUN pip install corner healpy cython tables
-RUN conda install -n ${conda_env} -c conda-forge dynesty emcee nestle ptemcee
-RUN conda install -n ${conda_env} -c conda-forge pymultinest ultranest
-RUN conda install -n ${conda_env} -c conda-forge cpnest kombine dnest4 zeus-mcmc
-RUN conda install -n ${conda_env} -c conda-forge pytorch
-RUN conda install -n ${conda_env} -c conda-forge theano-pymc
-RUN conda install -n ${conda_env} -c conda-forge pymc3
-RUN pip install nessai
-
-# Install Polychord
-RUN apt-get update --allow-releaseinfo-change
-RUN apt-get install -y build-essential
-RUN apt-get install -y libblas3 libblas-dev
-RUN apt-get install -y liblapack3 liblapack-dev
-RUN apt-get install -y libatlas3-base libatlas-base-dev
-RUN apt-get install -y gfortran
-
-RUN git clone https://github.com/PolyChord/PolyChordLite.git \
-&& (cd PolyChordLite && python setup.py --no-mpi install)
-
-# Install PTMCMCSampler
-RUN git clone https://github.com/jellis18/PTMCMCSampler.git \
-&& (cd PTMCMCSampler && python setup.py install)
-
-# Install GW packages
-RUN conda install -n ${conda_env} -c conda-forge python-lalsimulation
-RUN pip install ligo-gracedb gwpy ligo.skymap
-
-# Add the ROQ data to the image
-RUN mkdir roq_basis \
-    && cd roq_basis \
-    && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_linear.npy \
-    && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/B_quadratic.npy \
-    && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_linear.npy \
-    && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/fnodes_quadratic.npy \
-    && wget https://git.ligo.org/lscsoft/ROQ_data/raw/master/IMRPhenomPv2/4s/params.dat
diff --git a/containers/v3-dockerfile-test-suite-python38 b/containers/v3-dockerfile-test-suite-python38
index e5724e0c884428442c68aff473dab1438d80ee01..080f1fd2274b8fab1e500f47a20a050abd47adcb 100644
--- a/containers/v3-dockerfile-test-suite-python38
+++ b/containers/v3-dockerfile-test-suite-python38
@@ -16,7 +16,7 @@ RUN conda info
 RUN python --version
 
 # Install conda-installable programs
-RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8 mock
+RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8
 RUN conda install -n ${conda_env} -c anaconda coverage configargparse future
 RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
 
diff --git a/containers/v3-dockerfile-test-suite-python39 b/containers/v3-dockerfile-test-suite-python39
index 106cd4573d5d57c832bf54b686a92d0a0fe94064..91b906ef83badb352d0e26edf70b271ab2cda2a1 100644
--- a/containers/v3-dockerfile-test-suite-python39
+++ b/containers/v3-dockerfile-test-suite-python39
@@ -16,7 +16,7 @@ RUN conda info
 RUN python --version
 
 # Install conda-installable programs
-RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8 mock
+RUN conda install -n ${conda_env} -y matplotlib numpy scipy pandas astropy flake8
 RUN conda install -n ${conda_env} -c anaconda coverage configargparse future
 RUN conda install -n ${conda_env} -c conda-forge black pytest-cov deepdish arviz
 
diff --git a/containers/write_dockerfiles.py b/containers/write_dockerfiles.py
index b7c154be5de892cddb715dfb941c9e8d7392b71c..d4394c0e44b23e7791ce19778e9f9734c6d1dada 100644
--- a/containers/write_dockerfiles.py
+++ b/containers/write_dockerfiles.py
@@ -3,7 +3,7 @@ from datetime import date
 with open("dockerfile-template", "r") as ff:
     template = ff.read()
 
-python_versions = [(3, 7), (3, 8), (3, 9)]
+python_versions = [(3, 8), (3, 9)]
 today = date.today().strftime("%Y%m%d")
 
 for python_major_version, python_minor_version in python_versions:
diff --git a/examples/core_examples/radioactive_decay.py b/examples/core_examples/radioactive_decay.py
index cf1f7a54c14a3f619035af5bee7da0320ae3e087..9e2070d5d8c7b699848aac202f014afd09fba384 100644
--- a/examples/core_examples/radioactive_decay.py
+++ b/examples/core_examples/radioactive_decay.py
@@ -35,7 +35,7 @@ def decay_rate(delta_t, halflife, n_init):
     halflife: float
         Halflife of atom in minutes
     n_init: int, float
-        Initial nummber of atoms
+        Initial number of atoms
     """
 
     times = np.cumsum(delta_t)
diff --git a/examples/gw_examples/data_examples/GW150914.prior b/examples/gw_examples/data_examples/GW150914.prior
index 41b0badd083da8c0d68f792e104fa10a729cfa2f..9ee67a3f649e6f60e82274edb85f807f78a9b78d 100644
--- a/examples/gw_examples/data_examples/GW150914.prior
+++ b/examples/gw_examples/data_examples/GW150914.prior
@@ -1,17 +1,16 @@
-mass_ratio = Uniform(name='mass_ratio', minimum=0.125, maximum=1, boundary='reflective')
-chirp_mass = Uniform(name='chirp_mass', minimum=25, maximum=35, unit='$M_{\odot}$', boundary='reflective')
+chirp_mass = bilby.gw.prior.UniformInComponentsChirpMass(name='chirp_mass', minimum=25, maximum=35, unit='$M_{\odot}$')
+mass_ratio = bilby.gw.prior.UniformInComponentsMassRatio(name='mass_ratio', minimum=0.125, maximum=1)
 mass_1 = Constraint(name='mass_1', minimum=10, maximum=80)
 mass_2 = Constraint(name='mass_2', minimum=10, maximum=80)
-a_1 = Uniform(name='a_1', minimum=0, maximum=0.99, boundary='reflective')
-a_2 = Uniform(name='a_2', minimum=0, maximum=0.99, boundary='reflective')
-tilt_1 = Sine(name='tilt_1', boundary='reflective')
-tilt_2 = Sine(name='tilt_2', boundary='reflective')
+a_1 = Uniform(name='a_1', minimum=0, maximum=0.99)
+a_2 = Uniform(name='a_2', minimum=0, maximum=0.99)
+tilt_1 = Sine(name='tilt_1')
+tilt_2 = Sine(name='tilt_2')
 phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic')
 phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic')
 luminosity_distance = PowerLaw(alpha=2, name='luminosity_distance', minimum=50, maximum=2000, unit='Mpc', latex_label='$d_L$')
-dec =  Cosine(name='dec', boundary='reflective')
+dec =  Cosine(name='dec')
 ra =  Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
-theta_jn =  Sine(name='theta_jn', boundary='reflective')
+theta_jn =  Sine(name='theta_jn')
 psi =  Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
 phase =  Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
-geocent_time = Uniform(name='geocent_time', minimum =1126259460.4, maximum=1126259464.4)
diff --git a/examples/gw_examples/data_examples/GW150914.py b/examples/gw_examples/data_examples/GW150914.py
index 34356e2a3d8db0a09f3d425325840fbb42ab21b1..8f3651e00792bd3dd2b33aecfcb54b7772dee0e5 100755
--- a/examples/gw_examples/data_examples/GW150914.py
+++ b/examples/gw_examples/data_examples/GW150914.py
@@ -13,12 +13,16 @@ import bilby
 from gwpy.timeseries import TimeSeries
 
 logger = bilby.core.utils.logger
-outdir = 'outdir'
-label = 'GW150914'
-
-# Data set up
-trigger_time = 1126259462
+outdir = "outdir"
+label = "GW150914"
 
+# Note you can get trigger times using the gwosc package, e.g.:
+# > from gwosc import datasets
+# > datasets.event_gps("GW150914")
+trigger_time = 1126259462.4
+detectors = ["H1", "L1"]
+maximum_frequency = 512
+minimum_frequency = 20
 roll_off = 0.4  # Roll off duration of tukey window in seconds, default is 0.4s
 duration = 4  # Analysis segment duration
 post_trigger_duration = 2  # Time between trigger time and end of segment
@@ -31,7 +35,7 @@ psd_end_time = start_time
 
 # We now use gwpy to obtain analysis and psd data and create the ifo_list
 ifo_list = bilby.gw.detector.InterferometerList([])
-for det in ["H1", "L1"]:
+for det in detectors:
     logger.info("Downloading analysis data for ifo {}".format(det))
     ifo = bilby.gw.detector.get_empty_interferometer(det)
     data = TimeSeries.fetch_open_data(det, start_time, end_time)
@@ -41,13 +45,13 @@ for det in ["H1", "L1"]:
     psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
     psd_alpha = 2 * roll_off / duration
     psd = psd_data.psd(
-        fftlength=duration,
-        overlap=0,
-        window=("tukey", psd_alpha),
-        method="median"
+        fftlength=duration, overlap=0, window=("tukey", psd_alpha), method="median"
     )
     ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
-        frequency_array=psd.frequencies.value, psd_array=psd.value)
+        frequency_array=psd.frequencies.value, psd_array=psd.value
+    )
+    ifo.maximum_frequency = maximum_frequency
+    ifo.minimum_frequency = minimum_frequency
     ifo_list.append(ifo)
 
 logger.info("Saving data plots to {}".format(outdir))
@@ -59,7 +63,12 @@ ifo_list.plot_data(outdir=outdir, label=label)
 # The prior is printed to the terminal at run-time.
 # You can overwrite this using the syntax below in the file,
 # or choose a fixed value by just providing a float value as the prior.
-priors = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
+priors = bilby.gw.prior.BBHPriorDict(filename="GW150914.prior")
+
+# Add the geocent time prior
+priors["geocent_time"] = bilby.core.prior.Uniform(
+    trigger_time - 0.1, trigger_time + 0.1, name="geocent_time"
+)
 
 # In this step we define a `waveform_generator`. This is the object which
 # creates the frequency-domain strain. In this instance, we are using the
@@ -69,19 +78,36 @@ priors = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
 waveform_generator = bilby.gw.WaveformGenerator(
     frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
     parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters,
-    waveform_arguments={'waveform_approximant': 'IMRPhenomPv2',
-                        'reference_frequency': 50})
+    waveform_arguments={
+        "waveform_approximant": "IMRPhenomPv2",
+        "reference_frequency": 50,
+    },
+)
 
 # In this step, we define the likelihood. Here we use the standard likelihood
 # function, passing it the data and the waveform generator.
+# Note, phase_marginalization is formally invalid with a precessing waveform such as IMRPhenomPv2
 likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
-    ifo_list, waveform_generator, priors=priors, time_marginalization=True,
-    phase_marginalization=True, distance_marginalization=True)
+    ifo_list,
+    waveform_generator,
+    priors=priors,
+    time_marginalization=True,
+    phase_marginalization=False,
+    distance_marginalization=True,
+)
 
 # Finally, we run the sampler. This function takes the likelihood and prior
 # along with some options for how to do the sampling and how to save the data
 result = bilby.run_sampler(
-    likelihood, priors, sampler='dynesty', outdir=outdir, label=label,
-    nlive=1000, walks=100, n_check_point=10000, check_point_plot=True,
-    conversion_function=bilby.gw.conversion.generate_all_bbh_parameters)
+    likelihood,
+    priors,
+    sampler="dynesty",
+    outdir=outdir,
+    label=label,
+    nlive=1000,
+    check_point_delta_t=600,
+    check_point_plot=True,
+    npool=1,
+    conversion_function=bilby.gw.conversion.generate_all_bbh_parameters,
+)
 result.plot_corner()
diff --git a/examples/gw_examples/data_examples/GW150914_advanced.py b/examples/gw_examples/data_examples/GW150914_advanced.py
deleted file mode 100755
index c22568a93b196386194f5cce6151465df3abc2d4..0000000000000000000000000000000000000000
--- a/examples/gw_examples/data_examples/GW150914_advanced.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-"""
-This tutorial includes advanced specifications for analysing known events.
-Here GW150914 is used as an example.
-
-LIST OF AVAILABLE EVENTS:
->> from gwosc import datasets
->> for event in datasets.find_datasets(type="event"):
-...     print(event, datasets.event_gps(event))
-
-List of events in BILBY dict: run -> help(bilby.gw.utils.get_event_time(event))
-"""
-import bilby
-from gwpy.timeseries import TimeSeries
-
-outdir = 'outdir'
-label = 'GW150914'
-logger = bilby.core.utils.logger
-time_of_event = bilby.gw.utils.get_event_time(label)
-
-bilby.core.utils.setup_logger(outdir=outdir, label=label)
-
-# GET DATA FROM INTERFEROMETER
-interferometer_names = ['H1', 'L1']  # include 'V1' for appropriate O2 events
-duration = 4  # length of data segment containing the signal
-post_trigger_duration = 2  # time between trigger time and end of segment
-end_time = time_of_event + post_trigger_duration
-start_time = end_time - duration
-
-roll_off = 0.4  # smoothness in a tukey window, default is 0.4s
-# This determines the time window used to fetch open data
-psd_duration = 32 * duration
-psd_start_time = start_time - psd_duration
-psd_end_time = start_time
-
-filter_freq = None  # low pass filter frequency to cut signal content above
-# Nyquist frequency. The condition is 2 * filter_freq >= sampling_frequency
-
-
-ifo_list = bilby.gw.detector.InterferometerList([])
-for det in interferometer_names:
-    logger.info("Downloading analysis data for ifo {}".format(det))
-    ifo = bilby.gw.detector.get_empty_interferometer(det)
-    data = TimeSeries.fetch_open_data(det, start_time, end_time)
-    ifo.set_strain_data_from_gwpy_timeseries(data)
-# Additional arguments you might need to pass to TimeSeries.fetch_open_data:
-# - sample_rate = 4096, most data are stored by LOSC at this frequency
-# there may be event-related data releases with a 16384Hz rate.
-# - tag = 'CLN' for clean data; C00/C01 for raw data (different releases)
-# note that for O2 events a "tag" is required to download the data.
-# - channel =  {'H1': 'H1:DCS-CALIB_STRAIN_C02',
-#               'L1': 'L1:DCS-CALIB_STRAIN_C02',
-#               'V1': 'V1:FAKE_h_16384Hz_4R'}}
-# for some events can specify channels: source data stream for LOSC data.
-    logger.info("Downloading psd data for ifo {}".format(det))
-    psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
-    psd_alpha = 2 * roll_off / duration  # shape parameter of tukey window
-    psd = psd_data.psd(
-        fftlength=duration,
-        overlap=0,
-        window=("tukey", psd_alpha),
-        method="median")
-    ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
-        frequency_array=psd.frequencies.value, psd_array=psd.value)
-    ifo_list.append(ifo)
-
-logger.info("Saving data plots to {}".format(outdir))
-bilby.core.utils.check_directory_exists_and_if_not_mkdir(outdir)
-ifo_list.plot_data(outdir=outdir, label=label)
-
-# CHOOSE PRIOR FILE
-# DEFAULT PRIOR FILES: GW150914.prior, binary_black_holes.prior,
-# binary_neutron_stars.prior (if bns, use BNSPriorDict)
-# Needs to specify path if you want to use any other prior file.
-prior = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
-
-# GENERATE WAVEFORM
-sampling_frequency = 4096.  # same at which the data is stored
-conversion = bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters
-
-# OVERVIEW OF APPROXIMANTS:
-# https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/Waveforms/Overview
-waveform_arguments = {
-    'waveform_approximant': 'IMRPhenomPv2',
-    'reference_frequency': 50  # most sensitive frequency
-}
-
-waveform_generator = bilby.gw.WaveformGenerator(
-    parameter_conversion=conversion,
-    frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
-    waveform_arguments=waveform_arguments)
-
-# CHOOSE LIKELIHOOD FUNCTION
-# Time marginalisation uses FFT and can use a prior file.
-# Distance marginalisation uses a look up table calculated at run time.
-# Phase marginalisation is done analytically using a Bessel function.
-# If prior given, used in the distance and phase marginalization.
-likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
-    interferometers=ifo_list, waveform_generator=waveform_generator,
-    priors=prior, time_marginalization=False, distance_marginalization=False,
-    phase_marginalization=False)
-
-# RUN SAMPLER
-# Can use log_likelihood_ratio, rather than just the log_likelihood.
-# A function can be specified in 'conversion_function' and applied
-# to posterior to generate additional parameters e.g. source-frame masses.
-
-# Implemented Samplers:
-# LIST OF AVAILABLE SAMPLERS: Run -> bilby.sampler.implemented_samplers
-npoints = 512  # number of live points for the nested sampler
-n_steps = 100  # min number of steps before proposing a new live point,
-# defaults `ndim * 10`
-sampler = 'dynesty'
-# Different samplers can have different additional kwargs,
-# visit https://lscsoft.docs.ligo.org/bilby/samplers.html for details.
-
-result = bilby.run_sampler(
-    likelihood, prior, outdir=outdir, label=label,
-    sampler=sampler, nlive=npoints, use_ratio=False,
-    walks=n_steps, n_check_point=10000, check_point_plot=True,
-    conversion_function=bilby.gw.conversion.generate_all_bbh_parameters)
-result.plot_corner()
diff --git a/examples/gw_examples/data_examples/GW150914_minimal.py b/examples/gw_examples/data_examples/GW150914_minimal.py
deleted file mode 100644
index e37cb0003d170be2b4d832035fe26dabce7b24a8..0000000000000000000000000000000000000000
--- a/examples/gw_examples/data_examples/GW150914_minimal.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-"""
-Tutorial to demonstrate the minimum number of steps required to run parameter
-stimation on GW150914 using open data.
-
-"""
-import bilby
-
-prior = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
-interferometers = bilby.gw.detector.get_event_data("GW150914")
-likelihood = bilby.gw.likelihood.get_binary_black_hole_likelihood(interferometers)
-result = bilby.run_sampler(likelihood, prior, label='GW150914')
-result.plot_corner()
diff --git a/examples/gw_examples/data_examples/GW170817.py b/examples/gw_examples/data_examples/GW170817.py
deleted file mode 100644
index 4bcd2e7e5e88e50c729edc02a39b85b9978e2fa0..0000000000000000000000000000000000000000
--- a/examples/gw_examples/data_examples/GW170817.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/usr/bin/env python
-"""
-This tutorial includes advanced specifications
-for analysing binary neutron star event data.
-Here GW170817 is used as an example.
-"""
-import bilby
-
-outdir = 'outdir'
-label = 'GW170817'
-time_of_event = bilby.gw.utils.get_event_time(label)
-bilby.core.utils.setup_logger(outdir=outdir, label=label)
-# GET DATA FROM INTERFEROMETER
-# include 'V1' for appropriate O2 events
-interferometer_names = ['H1', 'L1', 'V1']
-duration = 32
-roll_off = 0.2  # how smooth is the transition from no signal
-# to max signal in a Tukey Window.
-psd_offset = -512  # PSD is estimated using data from
-# `center_time+psd_offset` to `center_time+psd_offset + psd_duration`
-# This determines the time window used to fetch open data.
-psd_duration = 1024
-coherence_test = False  # coherence between detectors
-filter_freq = None  # low pass filter frequency to cut signal content above
-# Nyquist frequency. The condition is 2 * filter_freq >= sampling_frequency
-
-
-# All keyword arguments are passed to
-# `gwpy.timeseries.TimeSeries.fetch_open_data()'
-kwargs = {}
-# Data are stored by LOSC at 4096 Hz, however
-# there may be event-related data releases with a 16384 Hz rate.
-kwargs['sample_rate'] = 4096
-# For O2 events a "tag" is required to download the data.
-# CLN = clean data; C02 = raw data
-kwargs['tag'] = 'C02'
-interferometers = bilby.gw.detector.get_event_data(
-    label,
-    interferometer_names=interferometer_names,
-    duration=duration,
-    roll_off=roll_off,
-    psd_offset=psd_offset,
-    psd_duration=psd_duration,
-    cache=True,
-    filter_freq=filter_freq,
-    **kwargs)
-# CHOOSE PRIOR FILE
-prior = bilby.gw.prior.BNSPriorDict(filename='GW170817.prior')
-deltaT = 0.1
-prior['geocent_time'] = bilby.core.prior.Uniform(
-    minimum=time_of_event - deltaT / 2,
-    maximum=time_of_event + deltaT / 2,
-    name='geocent_time',
-    latex_label='$t_c$',
-    unit='$s$')
-# GENERATE WAVEFORM
-# OVERVIEW OF APPROXIMANTS:
-# https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/Waveforms/Overview
-duration = None  # duration and sampling frequency will be overwritten
-# to match the ones in interferometers.
-sampling_frequency = kwargs['sample_rate']
-start_time = 0  # set the starting time of the time array
-waveform_arguments = {
-    'waveform_approximant': 'IMRPhenomPv2_NRTidal', 'reference_frequency': 20}
-
-source_model = bilby.gw.source.lal_binary_neutron_star
-convert_bns = bilby.gw.conversion.convert_to_lal_binary_neutron_star_parameters
-waveform_generator = bilby.gw.WaveformGenerator(
-    duration=duration,
-    sampling_frequency=sampling_frequency,
-    start_time=start_time,
-    frequency_domain_source_model=source_model,
-    parameter_conversion=convert_bns,
-    waveform_arguments=waveform_arguments,)
-
-# CHOOSE LIKELIHOOD FUNCTION
-# Time marginalisation uses FFT.
-# Distance marginalisation uses a look up table calculated at run time.
-# Phase marginalisation is done analytically using a Bessel function.
-likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
-    interferometers,
-    waveform_generator,
-    time_marginalization=False,
-    distance_marginalization=False,
-    phase_marginalization=False,)
-
-# RUN SAMPLER
-# Implemented Samplers:
-# LIST OF AVAILABLE SAMPLERS: Run -> bilby.sampler.implemented_samplers
-# conversion function = bilby.gw.conversion.generate_all_bns_parameters
-npoints = 512
-sampler = 'dynesty'
-result = bilby.run_sampler(
-    likelihood,
-    prior,
-    outdir=outdir,
-    label=label,
-    sampler=sampler,
-    npoints=npoints,
-    use_ratio=False,
-    conversion_function=bilby.gw.conversion.generate_all_bns_parameters)
-
-result.plot_corner()
diff --git a/examples/gw_examples/data_examples/GW190425.prior b/examples/gw_examples/data_examples/GW190425.prior
new file mode 100644
index 0000000000000000000000000000000000000000..b7aef888ab7a37bd31b21f9426aea4300774b696
--- /dev/null
+++ b/examples/gw_examples/data_examples/GW190425.prior
@@ -0,0 +1,18 @@
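+# Priors for GW190425: note the narrow chirp-mass range and the low-spin
+# (a < 0.05) priors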
+chirp_mass = bilby.gw.prior.UniformInComponentsChirpMass(name='chirp_mass', minimum=1.485, maximum=1.49, unit='$M_{\odot}$')
+mass_ratio = bilby.gw.prior.UniformInComponentsMassRatio(name='mass_ratio', minimum=0.125, maximum=1)
+mass_1 = Constraint(name='mass_1', minimum=1, maximum=2.5)
+mass_2 = Constraint(name='mass_2', minimum=1, maximum=2.5)
+a_1 = Uniform(name='a_1', minimum=0, maximum=0.05)
+a_2 = Uniform(name='a_2', minimum=0, maximum=0.05)
+tilt_1 = Sine(name='tilt_1')
+tilt_2 = Sine(name='tilt_2')
+phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic')
+phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic')
+luminosity_distance = PowerLaw(alpha=2, name='luminosity_distance', minimum=10, maximum=1000, unit='Mpc', latex_label='$d_L$')
+dec = Cosine(name='dec')
+ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
+theta_jn = Sine(name='theta_jn')
+psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
+phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
+lambda_1 = Uniform(name='lambda_1', minimum=0, maximum=5000)
+lambda_2 = Uniform(name='lambda_2', minimum=0, maximum=5000)
diff --git a/examples/gw_examples/data_examples/GW190425.py b/examples/gw_examples/data_examples/GW190425.py
new file mode 100644
index 0000000000000000000000000000000000000000..b575a4ed88842cae9f0b4683878e310e8197e654
--- /dev/null
+++ b/examples/gw_examples/data_examples/GW190425.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+"""
+Tutorial to demonstrate running parameter estimation on GW190425
+
+This example estimates all 17 parameters of the binary neutron star system using
+commonly used prior distributions. This will take several hours to run. The
+data is obtained using gwpy; see [1] for information on how to access data on
+the LIGO Data Grid instead.
+
+[1] https://gwpy.github.io/docs/stable/timeseries/remote-access.html
+"""
+import bilby
+from gwpy.timeseries import TimeSeries
+
+logger = bilby.core.utils.logger
+outdir = "outdir"
+label = "GW190425"
+
+# Note you can get trigger times using the gwosc package, e.g.:
+# > from gwosc import datasets
+# > datasets.event_gps("GW190425")
+trigger_time = 1240215503.0
+detectors = ["L1", "V1"]
+maximum_frequency = 512
+minimum_frequency = 20
+roll_off = 0.4  # Roll-off duration of the Tukey window in seconds, default is 0.4 s
+duration = 128  # Analysis segment duration
+post_trigger_duration = 2  # Time between trigger time and end of segment
+end_time = trigger_time + post_trigger_duration
+start_time = end_time - duration
+
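+# The PSD is estimated from 1024 s of data immediately preceding the analysis segment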
+psd_duration = 1024
+psd_start_time = start_time - psd_duration
+psd_end_time = start_time
+
+# We now use gwpy to obtain analysis and PSD data and create the ifo_list
+ifo_list = bilby.gw.detector.InterferometerList([])
+for det in detectors:
+    logger.info("Downloading analysis data for ifo {}".format(det))
+    ifo = bilby.gw.detector.get_empty_interferometer(det)
+    data = TimeSeries.fetch_open_data(det, start_time, end_time)
+    ifo.strain_data.set_from_gwpy_timeseries(data)
+
+    logger.info("Downloading psd data for ifo {}".format(det))
+    psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
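+    # Use a median average to estimate the PSD, with a Tukey window whose
+    # roll-off (in seconds) matches that of the analysis segment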
+    psd_alpha = 2 * roll_off / duration
+    psd = psd_data.psd(
+        fftlength=duration, overlap=0, window=("tukey", psd_alpha), method="median"
+    )
+    ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
+        frequency_array=psd.frequencies.value, psd_array=psd.value
+    )
+    ifo.maximum_frequency = maximum_frequency
+    ifo.minimum_frequency = minimum_frequency
+    ifo_list.append(ifo)
+
+logger.info("Saving data plots to {}".format(outdir))
+bilby.core.utils.check_directory_exists_and_if_not_mkdir(outdir)
+ifo_list.plot_data(outdir=outdir, label=label)
+
+# We now define the prior.
+# The prior distribution is defined in a local file, GW190425.prior, and is
+# printed to the terminal at run-time.
+# Any prior in the file can be overwritten using the syntax below, or a
+# parameter can be fixed by providing a float value as its prior.
+priors = bilby.gw.prior.BBHPriorDict(filename="GW190425.prior")
+
+# Add the geocent time prior
+priors["geocent_time"] = bilby.core.prior.Uniform(
+    trigger_time - 0.1, trigger_time + 0.1, name="geocent_time"
+)
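+# A parameter can also be fixed by assigning a float, e.g. (illustrative only,
+# not used in this analysis):
+# priors["psi"] = 1.57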
+
+# In this step we define a `waveform_generator`. This is the object which
+# creates the frequency-domain strain. In this instance, we are using the
+# `lal_binary_neutron_star` source model. We also pass other parameters: the
+# waveform approximant and reference frequency, and a parameter conversion which
+# allows us to sample in chirp mass and mass ratio rather than component masses.
+waveform_generator = bilby.gw.WaveformGenerator(
+    frequency_domain_source_model=bilby.gw.source.lal_binary_neutron_star,
+    parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_neutron_star_parameters,
+    waveform_arguments={
+        "waveform_approximant": "IMRPhenomPv2_NRTidalv2",
+        "reference_frequency": 20,
+    },
+)
+
+# In this step, we define the likelihood. Here we use the standard likelihood
+# function, passing it the data and the waveform generator.
+# Note that phase_marginalization is formally invalid with a precessing
+# waveform such as IMRPhenomPv2_NRTidalv2, so it is disabled here.
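+# Samples for the marginalised time and distance are reconstructed during
+# post-processing of the result.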
+likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
+    ifo_list,
+    waveform_generator,
+    priors=priors,
+    time_marginalization=True,
+    phase_marginalization=False,
+    distance_marginalization=True,
+)
+
+# Finally, we run the sampler. This function takes the likelihood and prior
+# along with some options for how to do the sampling and how to save the data
+result = bilby.run_sampler(
+    likelihood,
+    priors,
+    sampler="dynesty",
+    outdir=outdir,
+    label=label,
+    nlive=1000,
+    check_point_delta_t=600,
+    check_point_plot=True,
+    npool=1,
+    conversion_function=bilby.gw.conversion.generate_all_bbh_parameters,
+)
+result.plot_corner()
diff --git a/examples/gw_examples/data_examples/get_LOSC_event_data.py b/examples/gw_examples/data_examples/get_LOSC_event_data.py
deleted file mode 100644
index 5ab16bc69abe9e7e342cd0a1087a7377afd6ca9d..0000000000000000000000000000000000000000
--- a/examples/gw_examples/data_examples/get_LOSC_event_data.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/usr/bin/env python
-""" Helper script to facilitate downloading data from LOSC
-
-Usage: To download the GW150914 data from https://losc.ligo.org/events/
-
-$ python get_LOSC_event_data -e GW150914
-
-"""
-
-import numpy as np
-import os
-import argparse
-
-parser = argparse.ArgumentParser(description='Script to download LOSC data.')
-parser.add_argument('-e', '--event', metavar='event', type=str)
-parser.add_argument('-o', '--outdir', metavar='outdir',
-                    default='tutorial_data')
-
-args = parser.parse_args()
-
-url_dictionary = dict(
-    GW150914="https://losc.ligo.org/s/events/GW150914/{}-{}1_LOSC_4_V2-1126259446-32.txt.gz",
-    LVT151012="https://losc.ligo.org/s/events/LVT151012/{}-{}1_LOSC_4_V2-1128678884-32.txt.gz",
-    GW151226="https://losc.ligo.org/s/events/GW151226/{}-{}1_LOSC_4_V2-1135136334-32.txt.gz",
-    GW170104="https://losc.ligo.org/s/events/GW170104/{}-{}1_LOSC_4_V1-1167559920-32.txt.gz",
-    GW170608="https://losc.ligo.org/s/events/GW170608/{}-{}1_LOSC_CLN_4_V1-1180922478-32.txt.gz",
-    GW170814="https://dcc.ligo.org/public/0146/P1700341/001/{}-{}1_LOSC_CLN_4_V1-1186741845-32.txt.gz",
-    GW170817="https://dcc.ligo.org/public/0146/P1700349/001/{}-{}1_LOSC_CLN_4_V1-1187007040-2048.txt.gz")
-
-outdir = 'tutorial_data'
-
-data = []
-for det, in ['H', 'L']:
-    url = url_dictionary[args.event].format(det, det)
-    filename = os.path.basename(url)
-    if os.path.isfile(filename.rstrip('.gz')) is False:
-        print("Downloading data from {}".format(url))
-        os.system("wget {} ".format(url))
-        os.system("gunzip {}".format(filename))
-        filename = filename.rstrip('.gz')
-    data.append(np.loadtxt(filename))
-    with open(filename, 'r') as f:
-        header = f.readlines()[:3]
-        event = header[0].split(' ')[5]
-        detector = header[0].split(' ')[7]
-        sampling_frequency = header[1].split(' ')[4]
-        starttime = header[2].split(' ')[3]
-        duration = header[2].split(' ')[5]
-        print('Loaded data for event={}, detector={}, sampling_frequency={}'
-              ', starttime={}, duration={}'.format(
-                  event, detector, sampling_frequency, starttime, duration))
-    os.remove(filename)
-
-time = np.arange(0, int(duration), 1 / int(sampling_frequency)) + int(starttime)
-arr = [time] + data
-
-outfile = '{}/{}_strain_data.npy'.format(args.outdir, args.event)
-np.save(outfile, arr)
-print("Saved data to {}".format(outfile))
diff --git a/examples/gw_examples/data_examples/read_data_from_channel_name.py b/examples/gw_examples/data_examples/read_data_from_channel_name.py
index 2e25a323ed5a7a2a9b9299109110c2e2102ce6d6..577bc74a83865b37ffc19e69de7321b729b2109c 100644
--- a/examples/gw_examples/data_examples/read_data_from_channel_name.py
+++ b/examples/gw_examples/data_examples/read_data_from_channel_name.py
@@ -8,28 +8,28 @@ This tutorial must be run on a LIGO cluster.
 
 import bilby
 
-label = 'GW170608'
-outdir = label + '_out'
+label = "GW170608"
+outdir = label + "_out"
 # List of detectors involved in the event. GW170608 is pre-August 2017 so only
 # H1 and L1 were involved
-detectors = ['H1', 'L1']
+detectors = ["H1", "L1"]
 # Names of the channels and query types to use, and the event GraceDB ID - ref.
 # https://ldas-jobs.ligo.caltech.edu/~eve.chase/monitor/online_pe/C02_clean/
 # C02_HL_Pv2/lalinferencenest/IMRPhenomPv2pseudoFourPN/config.ini
-channel_names = ['H1:DCH-CLEAN_STRAIN_C02', 'L1:DCH-CLEAN_STRAIN_C02']
-gracedb = 'G288686'
+channel_names = ["H1:DCH-CLEAN_STRAIN_C02", "L1:DCH-CLEAN_STRAIN_C02"]
+gracedb = "G288686"
 # Duration of data around the event to use
 duration = 16
 # Duration of PSD data
 psd_duration = 1024
 # Minimum frequency and reference frequency
 minimum_frequency = 10  # Hz
-sampling_frequency = 2048.
+sampling_frequency = 2048.0
 
 # Get trigger time
 candidate = bilby.gw.utils.gracedb_to_json(gracedb, outdir=outdir)
-trigger_time = candidate['gpstime']
-gps_start_time = trigger_time + 2. - duration
+trigger_time = candidate["gpstime"]
+gps_start_time = trigger_time + 2.0 - duration
 
 # Load the PSD data starting after the segment you want to analyze
 psd_start_time = gps_start_time + duration
@@ -39,9 +39,14 @@ interferometers = bilby.gw.detector.InterferometerList([])
 
 for channel_name in channel_names:
     interferometer = bilby.gw.detector.load_data_by_channel_name(
-        start_time=gps_start_time, segment_duration=duration,
-        psd_duration=psd_duration, psd_start_time=psd_start_time,
-        channel_name=channel_name, sampling_frequency=sampling_frequency, outdir=outdir)
+        start_time=gps_start_time,
+        segment_duration=duration,
+        psd_duration=psd_duration,
+        psd_start_time=psd_start_time,
+        channel_name=channel_name,
+        sampling_frequency=sampling_frequency,
+        outdir=outdir,
+    )
     interferometer.minimum_frequency = minimum_frequency
     interferometers.append(interferometer)
 
diff --git a/examples/gw_examples/data_examples/read_gracedb_data.py b/examples/gw_examples/data_examples/read_gracedb_data.py
index ee1f53521df02c70387c115eddec63c385bbae5e..e92ef8cd8f1b98ba1a22223a6ce25d764b4cb5d0 100644
--- a/examples/gw_examples/data_examples/read_gracedb_data.py
+++ b/examples/gw_examples/data_examples/read_gracedb_data.py
@@ -8,17 +8,17 @@ This tutorial must be run on a LIGO cluster.
 
 import bilby
 
-label = 'GW170608'
-outdir = label + '_out'
+label = "GW170608"
+outdir = label + "_out"
 # List of detectors involved in the event. GW170608 is pre-August 2017 so only
 # H1 and L1 were involved
-detectors = ['H1', 'L1']
+detectors = ["H1", "L1"]
 # Names of the channels and query types to use, and the event GraceDB ID - ref.
 # https://ldas-jobs.ligo.caltech.edu/~eve.chase/monitor/online_pe/C02_clean/
 # C02_HL_Pv2/lalinferencenest/IMRPhenomPv2pseudoFourPN/config.ini
-channel_names = ['H1:DCH-CLEAN_STRAIN_C02', 'L1:DCH-CLEAN_STRAIN_C02']
-query_types = ['H1_CLEANED_HOFT_C02', 'L1_CLEANED_HOFT_C02']
-gracedb = 'G288686'
+channel_names = ["H1:DCH-CLEAN_STRAIN_C02", "L1:DCH-CLEAN_STRAIN_C02"]
+query_types = ["H1_CLEANED_HOFT_C02", "L1_CLEANED_HOFT_C02"]
+gracedb = "G288686"
 # Calibration number to inform bilby's guess of query type if those provided are
 # not recognised
 calibration = 2
@@ -33,14 +33,16 @@ minimum_frequency = 10  # Hz
 
 # Get frame caches
 candidate, frame_caches = bilby.gw.utils.get_gracedb(
-    gracedb, outdir, duration, calibration, detectors, query_types)
+    gracedb, outdir, duration, calibration, detectors, query_types
+)
 
 # Set up interferometer objects from the cache files
 interferometers = bilby.gw.detector.InterferometerList([])
 
 for cache_file, channel_name in zip(frame_caches, channel_names):
     interferometer = bilby.gw.detector.load_data_from_cache_file(
-        cache_file, trigger_time, duration, psd_duration, channel_name)
+        cache_file, trigger_time, duration, psd_duration, channel_name
+    )
     interferometer.minimum_frequency = minimum_frequency
     interferometers.append(interferometer)
 
diff --git a/examples/gw_examples/injection_examples/roq_example.py b/examples/gw_examples/injection_examples/roq_example.py
index 9b8b9d97a6f011f962ff083c9f894e71d6dde2f6..82f87d3e418620c5b86f136b2a81b0da7bfec3b7 100644
--- a/examples/gw_examples/injection_examples/roq_example.py
+++ b/examples/gw_examples/injection_examples/roq_example.py
@@ -59,7 +59,7 @@ waveform_generator = bilby.gw.WaveformGenerator(
 ifos = bilby.gw.detector.InterferometerList(['H1', 'L1', 'V1'])
 ifos.set_strain_data_from_power_spectral_densities(
     sampling_frequency=sampling_frequency, duration=duration,
-    start_time=injection_parameters['geocent_time'] - 3)
+    start_time=injection_parameters['geocent_time'] - 3 / scale_factor)
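+# the 3 s of padding before the trigger is divided by scale_factor so that it
+# scales consistently with the rescaled signal duration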
 ifos.inject_signal(waveform_generator=waveform_generator,
                    parameters=injection_parameters)
 for ifo in ifos:
diff --git a/requirements.txt b/requirements.txt
index 99b3eedbbf9e86b09300ce89e8b466e824c62536..e23d0a0cb1c11eaedf40c4390456148c84a7673e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,6 @@ numpy
 matplotlib>=2.1
 scipy>=1.5
 pandas
-mock
 dill
 tqdm
 h5py
diff --git a/setup.cfg b/setup.cfg
index 7f999dcf7d5ee889b4a528f229d1d17801451a55..707f38591b65ef0ff2cc035583e48e290c94c131 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -21,3 +21,6 @@ gw =
 	astropy
 	gwpy
 	lalsuite
+
+[isort]
+known_third_party = astropy,attr,bilby,deepdish,gwin,gwinc,gwpy,lal,lalsimulation,matplotlib,mock,nflows,numpy,packaging,pandas,past,pycbc,pymc3,pytest,scipy,setuptools,skimage,torch
diff --git a/setup.py b/setup.py
index 0c3cc35f40e164780339509526bf959a98e9e0ad..d195385b11ba6da20807d5acd55e7bc77952ef04 100644
--- a/setup.py
+++ b/setup.py
@@ -5,10 +5,9 @@ import subprocess
 import sys
 import os
 
-# check that python version is 3.7 or above
 python_version = sys.version_info
-if python_version < (3, 7):
-    sys.exit("Python < 3.7 is not supported, aborting setup")
+if python_version < (3, 8):
+    sys.exit("Python < 3.8 is not supported, aborting setup")
 
 
 def write_version_file(version):
@@ -70,7 +69,7 @@ def readfile(filename):
     return filecontents
 
 
-VERSION = '1.1.4'
+VERSION = '1.1.5'
 version_file = write_version_file(VERSION)
 long_description = get_long_description()
 
@@ -106,7 +105,7 @@ setup(
         "bilby.gw.eos": ["eos_tables/*.dat"],
         "bilby": [version_file],
     },
-    python_requires=">=3.7",
+    python_requires=">=3.8",
     install_requires=get_requirements(),
     entry_points={
         "console_scripts": [
@@ -115,7 +114,6 @@ setup(
         ]
     },
     classifiers=[
-        "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "License :: OSI Approved :: MIT License",
diff --git a/test/bilby_mcmc/__init__.py b/test/bilby_mcmc/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/core/likelihood_test.py b/test/core/likelihood_test.py
index a3adfa926490ebcc6a8965932aa7d2fd28a4ac5f..d5a0e296865bc66fc3b052bd8019d05f8a76f154 100644
--- a/test/core/likelihood_test.py
+++ b/test/core/likelihood_test.py
@@ -1,6 +1,5 @@
 import unittest
-from mock import MagicMock
-import mock
+from unittest import mock
 import numpy as np
 from bilby.core.likelihood import (
     Likelihood,
@@ -657,9 +656,9 @@ class TestJointLikelihood(unittest.TestCase):
         )
 
     def test_log_noise_likelihood(self):
-        self.first_likelihood.noise_log_likelihood = MagicMock(return_value=1)
-        self.second_likelihood.noise_log_likelihood = MagicMock(return_value=2)
-        self.third_likelihood.noise_log_likelihood = MagicMock(return_value=3)
+        self.first_likelihood.noise_log_likelihood = mock.MagicMock(return_value=1)
+        self.second_likelihood.noise_log_likelihood = mock.MagicMock(return_value=2)
+        self.third_likelihood.noise_log_likelihood = mock.MagicMock(return_value=3)
         self.joint_likelihood = JointLikelihood(
             self.first_likelihood, self.second_likelihood, self.third_likelihood
         )
diff --git a/test/core/prior/base_test.py b/test/core/prior/base_test.py
index 49887de5032580096b1acdf321f5a83534ab5d3c..c9b7887320d76a3042d96693f1d1503e5ebae796 100644
--- a/test/core/prior/base_test.py
+++ b/test/core/prior/base_test.py
@@ -1,7 +1,7 @@
 import unittest
+from unittest.mock import Mock
 
 import numpy as np
-from mock import Mock
 
 import bilby
 
diff --git a/test/core/prior/conditional_test.py b/test/core/prior/conditional_test.py
index 3e2c5d051cedfe93ee401424020ee5958e98d7a9..3f7e8873169572ab0d6a096c6ddc8ad4f67078d2 100644
--- a/test/core/prior/conditional_test.py
+++ b/test/core/prior/conditional_test.py
@@ -1,6 +1,6 @@
 import unittest
+from unittest import mock
 
-import mock
 import numpy as np
 
 import bilby
diff --git a/test/core/prior/dict_test.py b/test/core/prior/dict_test.py
index 03881612a61404db58ebc43d2944fc833480643b..0a421336be0d4d90bbf694f02aba07f4a636a741 100644
--- a/test/core/prior/dict_test.py
+++ b/test/core/prior/dict_test.py
@@ -1,8 +1,8 @@
 import os
 import unittest
+from unittest.mock import Mock
 
 import numpy as np
-from mock import Mock
 
 import bilby
 
diff --git a/test/core/sampler/base_sampler_test.py b/test/core/sampler/base_sampler_test.py
index d14eeaa4998b4f36b318029edb9f0b3d5c49c597..30be5e2ba542205d4cdbef10c6f8e9d681bcbbf1 100644
--- a/test/core/sampler/base_sampler_test.py
+++ b/test/core/sampler/base_sampler_test.py
@@ -1,9 +1,9 @@
 import copy
 import os
 import unittest
+from unittest.mock import MagicMock
 
 import numpy as np
-from mock import MagicMock
 
 import bilby
 from bilby.core import prior
diff --git a/test/core/sampler/cpnest_test.py b/test/core/sampler/cpnest_test.py
index 569e00c0365153795d515e360284e15fa573858a..08a23b0a85276818423f83c583765ac5f6721547 100644
--- a/test/core/sampler/cpnest_test.py
+++ b/test/core/sampler/cpnest_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/dnest4_test.py b/test/core/sampler/dnest4_test.py
index fc67a0f8f3dbe1c37e6301bce95ef69e9fae6570..dac5289f68ec775e1a00189a85244200b85891f8 100644
--- a/test/core/sampler/dnest4_test.py
+++ b/test/core/sampler/dnest4_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/dynesty_test.py b/test/core/sampler/dynesty_test.py
index 2b5ae1f0771c2bf7b8c1d4532af5d0d3f07b8830..d1c42b099a86e4fa69b7d47319a99af43808d241 100644
--- a/test/core/sampler/dynesty_test.py
+++ b/test/core/sampler/dynesty_test.py
@@ -1,7 +1,7 @@
 import unittest
+from unittest.mock import MagicMock
 
 import numpy as np
-from mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/emcee_test.py b/test/core/sampler/emcee_test.py
index 9a81d073f8c0e1086e43e0e50e55e4423e6fce49..66265e51e44797123a90b77193137c8e1fba4c2b 100644
--- a/test/core/sampler/emcee_test.py
+++ b/test/core/sampler/emcee_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/kombine_test.py b/test/core/sampler/kombine_test.py
index f4bdc80303f474c223ffae513fb5f4b05ff46dfe..d16eb8c90c7f11f6cf4280e00f8ca0c8e5ebb1a3 100644
--- a/test/core/sampler/kombine_test.py
+++ b/test/core/sampler/kombine_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/nessai_test.py b/test/core/sampler/nessai_test.py
index 6f902f71d8f8e43a53c824345356bdd389eaa922..86b03fb38e74afb5ce75c06b5fbd91add3d7f49e 100644
--- a/test/core/sampler/nessai_test.py
+++ b/test/core/sampler/nessai_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock, patch, mock_open
+from unittest.mock import MagicMock, patch, mock_open
 
 import bilby
 
diff --git a/test/core/sampler/nestle_test.py b/test/core/sampler/nestle_test.py
index 5be3ac8d8076d9e1704c358123ca44183155fa92..e5623ef336552a0b3ba8936c49d8704fad22ee0d 100644
--- a/test/core/sampler/nestle_test.py
+++ b/test/core/sampler/nestle_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/polychord_test.py b/test/core/sampler/polychord_test.py
index 8427315ac49c3faeed40996463b678f7a530e885..88193a83a0cf3b0c644f63f728a1356f79ccfb46 100644
--- a/test/core/sampler/polychord_test.py
+++ b/test/core/sampler/polychord_test.py
@@ -1,7 +1,7 @@
 import unittest
+from unittest.mock import MagicMock
 
 import numpy as np
-from mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/proposal_test.py b/test/core/sampler/proposal_test.py
index 6b1f5b7747cb6d2df0cce4b03dbf8f0d94afde63..ce678d3fe1d1fdaa5a2750c664a3e6d7a284377f 100644
--- a/test/core/sampler/proposal_test.py
+++ b/test/core/sampler/proposal_test.py
@@ -1,5 +1,5 @@
 import unittest
-import mock
+from unittest import mock
 import random
 
 import numpy as np
diff --git a/test/core/sampler/ptemcee_test.py b/test/core/sampler/ptemcee_test.py
index 08439c814878878583d191b2170a5ac36a5e751b..2bc4d6580f889285d736031bd09c143d238086a0 100644
--- a/test/core/sampler/ptemcee_test.py
+++ b/test/core/sampler/ptemcee_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/pymc3_test.py b/test/core/sampler/pymc3_test.py
index 9fb710c344159f96a1b3707011ff1aa817f706bf..b3bb758d3270602debdda62f12e6279f9c48d534 100644
--- a/test/core/sampler/pymc3_test.py
+++ b/test/core/sampler/pymc3_test.py
@@ -1,13 +1,11 @@
 import unittest
-import pytest
-import sys
+from unittest.mock import MagicMock
 
-from mock import MagicMock
+import pytest
 
 import bilby
 
 
-@pytest.mark.skipif(sys.version_info[1] <= 6, reason="pymc3 is broken in py36")
 @pytest.mark.xfail(
     raises=AttributeError,
     reason="Dependency issue with pymc3 causes attribute error on import",
diff --git a/test/core/sampler/pymultinest_test.py b/test/core/sampler/pymultinest_test.py
index 41c3de4077e578e33bb6e65f9a19efade1cff424..8ffcef6745b89ed350dd1bc00a784b5e9999585d 100644
--- a/test/core/sampler/pymultinest_test.py
+++ b/test/core/sampler/pymultinest_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/ultranest_test.py b/test/core/sampler/ultranest_test.py
index e70a70fdfb7785bd2457c01ee6399d022e88a91e..dc578cd71932c877f0de8414361781cc86837789 100644
--- a/test/core/sampler/ultranest_test.py
+++ b/test/core/sampler/ultranest_test.py
@@ -1,7 +1,6 @@
 import shutil
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/core/sampler/zeus_test.py b/test/core/sampler/zeus_test.py
index e7bd9289cf5e3115a017c8eeae50e21b99c12ed8..2b3e2b5dea14dfe19c6b8930bb353d6871d73409 100644
--- a/test/core/sampler/zeus_test.py
+++ b/test/core/sampler/zeus_test.py
@@ -1,6 +1,5 @@
 import unittest
-
-from mock import MagicMock
+from unittest.mock import MagicMock
 
 import bilby
 
diff --git a/test/gw/detector/geometry_test.py b/test/gw/detector/geometry_test.py
index b05b67459fc65abeb46ed32d6e0dea64d2e18bf8..3960c196471dfe4fd6856f0ecebd2aafc401f187 100644
--- a/test/gw/detector/geometry_test.py
+++ b/test/gw/detector/geometry_test.py
@@ -1,8 +1,7 @@
 import unittest
+from unittest import mock
 
-import mock
 import numpy as np
-from mock import MagicMock
 
 import bilby
 
@@ -89,53 +88,53 @@ class TestInterferometerGeometry(unittest.TestCase):
 
     def test_x_without_update(self):
         _ = self.geometry.x
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
 
         self.assertFalse(np.array_equal(self.geometry.x, np.array([1])))
 
     def test_x_with_xarm_tilt_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.xarm_tilt = 0
         self.assertTrue(np.array_equal(self.geometry.x, np.array([1])))
 
     def test_x_with_xarm_azimuth_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.xarm_azimuth = 0
         self.assertTrue(np.array_equal(self.geometry.x, np.array([1])))
 
     def test_x_with_longitude_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.longitude = 0
         self.assertTrue(np.array_equal(self.geometry.x, np.array([1])))
 
     def test_x_with_latitude_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.latitude = 0
         self.assertTrue(np.array_equal(self.geometry.x, np.array([1])))
 
     def test_y_without_update(self):
         _ = self.geometry.y
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
 
         self.assertFalse(np.array_equal(self.geometry.y, np.array([1])))
 
     def test_y_with_yarm_tilt_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.yarm_tilt = 0
         self.assertTrue(np.array_equal(self.geometry.y, np.array([1])))
 
     def test_y_with_yarm_azimuth_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.yarm_azimuth = 0
         self.assertTrue(np.array_equal(self.geometry.y, np.array([1])))
 
     def test_y_with_longitude_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.longitude = 0
         self.assertTrue(np.array_equal(self.geometry.y, np.array([1])))
 
     def test_y_with_latitude_update(self):
-        self.geometry.unit_vector_along_arm = MagicMock(return_value=np.array([1]))
+        self.geometry.unit_vector_along_arm = mock.MagicMock(return_value=np.array([1]))
         self.geometry.latitude = 0
         self.assertTrue(np.array_equal(self.geometry.y, np.array([1])))
 
diff --git a/test/gw/detector/interferometer_test.py b/test/gw/detector/interferometer_test.py
index 3c879d22a766955bb5f37a764f075fd8f88a7281..2a37830fcfec33bd1dab8e8df1cca41e4b9c4ca1 100644
--- a/test/gw/detector/interferometer_test.py
+++ b/test/gw/detector/interferometer_test.py
@@ -1,5 +1,5 @@
-import sys
 import unittest
+from unittest import mock
 
 import lal
 import lalsimulation
@@ -8,9 +8,7 @@ from packaging import version
 from shutil import rmtree
 
 import deepdish as dd
-import mock
 import numpy as np
-from mock import MagicMock, patch
 import pandas
 
 import bilby
@@ -57,7 +55,7 @@ class TestInterferometer(unittest.TestCase):
         self.injection_polarizations["plus"] = np.random.random(4097)
         self.injection_polarizations["cross"] = np.random.random(4097)
 
-        self.waveform_generator = MagicMock()
+        self.waveform_generator = mock.MagicMock()
         self.wg_polarizations = dict(
             plus=np.random.random(4097), cross=np.random.random(4097)
         )
@@ -119,8 +117,8 @@ class TestInterferometer(unittest.TestCase):
             )
 
     def test_get_detector_response_default_behaviour(self):
-        self.ifo.antenna_response = MagicMock(return_value=1)
-        self.ifo.time_delay_from_geocenter = MagicMock(return_value=0)
+        self.ifo.antenna_response = mock.MagicMock(return_value=1)
+        self.ifo.time_delay_from_geocenter = mock.MagicMock(return_value=0)
         self.ifo.epoch = 0
         self.minimum_frequency = 10
         self.maximum_frequency = 20
@@ -135,8 +133,8 @@ class TestInterferometer(unittest.TestCase):
         )
 
     def test_get_detector_response_with_dt(self):
-        self.ifo.antenna_response = MagicMock(return_value=1)
-        self.ifo.time_delay_from_geocenter = MagicMock(return_value=0)
+        self.ifo.antenna_response = mock.MagicMock(return_value=1)
+        self.ifo.time_delay_from_geocenter = mock.MagicMock(return_value=0)
         self.ifo.epoch = 1
         self.minimum_frequency = 10
         self.maximum_frequency = 20
@@ -153,8 +151,8 @@ class TestInterferometer(unittest.TestCase):
         self.assertTrue(np.allclose(abs(expected_response), abs(response)))
 
     def test_get_detector_response_multiple_modes(self):
-        self.ifo.antenna_response = MagicMock(return_value=1)
-        self.ifo.time_delay_from_geocenter = MagicMock(return_value=0)
+        self.ifo.antenna_response = mock.MagicMock(return_value=1)
+        self.ifo.time_delay_from_geocenter = mock.MagicMock(return_value=0)
         self.ifo.epoch = 0
         self.minimum_frequency = 10
         self.maximum_frequency = 20
@@ -228,7 +226,7 @@ class TestInterferometer(unittest.TestCase):
                 injection_polarizations=self.injection_polarizations,
             )
 
-    @patch.object(bilby.core.utils.logger, "warning")
+    @mock.patch.object(bilby.core.utils.logger, "warning")
     def test_inject_signal_outside_segment_logs_warning(self, m):
         self.parameters["geocent_time"] = 24345.0
         self.ifo.get_detector_response = lambda x, params: x["plus"] + x["cross"]
@@ -254,7 +252,7 @@ class TestInterferometer(unittest.TestCase):
             )
         )
 
-    @patch.object(
+    @mock.patch.object(
         bilby.gw.detector.Interferometer, "inject_signal_from_waveform_generator"
     )
     def test_inject_signal_with_waveform_generator_correct_call(self, m):
@@ -297,7 +295,7 @@ class TestInterferometer(unittest.TestCase):
             np.array_equal(expected, self.ifo.strain_data._frequency_domain_strain)
         )
 
-    @patch.object(
+    @mock.patch.object(
         bilby.gw.detector.Interferometer, "inject_signal_from_waveform_polarizations"
     )
     def test_inject_signal_with_injection_polarizations_and_waveform_generator(self, m):
@@ -378,18 +376,14 @@ class TestInterferometer(unittest.TestCase):
 
     @pytest.mark.skipif(pandas_version_test, reason=skip_reason)
     def test_to_and_from_hdf5_loading(self):
-        if sys.version_info[0] < 3:
-            with self.assertRaises(NotImplementedError):
-                self.ifo.to_hdf5(outdir="outdir", label="test")
-        else:
-            self.ifo.to_hdf5(outdir="outdir", label="test")
-            filename = self.ifo._filename_from_outdir_label_extension(
-                outdir="outdir", label="test", extension="h5"
-            )
-            recovered_ifo = bilby.gw.detector.Interferometer.from_hdf5(filename)
-            self.assertEqual(self.ifo, recovered_ifo)
+        self.ifo.to_hdf5(outdir="outdir", label="test")
+        filename = self.ifo._filename_from_outdir_label_extension(
+            outdir="outdir", label="test", extension="h5"
+        )
+        recovered_ifo = bilby.gw.detector.Interferometer.from_hdf5(filename)
+        self.assertEqual(self.ifo, recovered_ifo)
 
-    @pytest.mark.skipif(pandas_version_test or sys.version_info[0] < 3, reason=skip_reason)
+    @pytest.mark.skipif(pandas_version_test, reason=skip_reason)
     def test_to_and_from_hdf5_wrong_class(self):
         bilby.core.utils.check_directory_exists_and_if_not_mkdir("outdir")
         dd.io.save("./outdir/psd.h5", self.power_spectral_density)
diff --git a/test/gw/detector/networks_test.py b/test/gw/detector/networks_test.py
index b0d02e4eae2e58f7ad43aee61be3365c86137603..971254b24f425a79e333fda6a57c464d75dfbbf2 100644
--- a/test/gw/detector/networks_test.py
+++ b/test/gw/detector/networks_test.py
@@ -1,14 +1,12 @@
-import sys
 import unittest
+from unittest import mock
 import pytest
 from shutil import rmtree
 from packaging import version
 from itertools import combinations
 
 import deepdish as dd
-import mock
 import numpy as np
-from mock import patch, MagicMock
 import pandas
 
 import bilby
@@ -184,7 +182,7 @@ class TestInterferometerList(unittest.TestCase):
         with self.assertRaises(ValueError):
             bilby.gw.detector.InterferometerList([self.ifo1, self.ifo2])
 
-    @patch.object(bilby.gw.detector.networks.logger, "warning")
+    @mock.patch.object(bilby.gw.detector.networks.logger, "warning")
     def test_check_interferometers_relative_tolerance(self, mock_warning):
         # Value larger than relative tolerance -- not tolerated
         self.ifo2.strain_data.start_time = self.ifo1.strain_data.start_time + 1e-4
@@ -202,7 +200,7 @@ class TestInterferometerList(unittest.TestCase):
             "The start_time of all interferometers are not the same:" in warning_log_str
         )
 
-    @patch.object(
+    @mock.patch.object(
         bilby.gw.detector.Interferometer, "set_strain_data_from_power_spectral_density"
     )
     def test_set_strain_data_from_power_spectral_density(self, m):
@@ -225,21 +223,21 @@ class TestInterferometerList(unittest.TestCase):
         meta_data = {ifo.name: ifo.meta_data for ifo in ifos_list}
         self.assertEqual(ifos.meta_data, meta_data)
 
-    @patch.object(
+    @mock.patch.object(
         bilby.gw.waveform_generator.WaveformGenerator, "frequency_domain_strain"
     )
     def test_inject_signal_pol_none_calls_frequency_domain_strain(self, m):
         waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
             frequency_domain_source_model=lambda x, y, z: x
         )
-        self.ifo1.inject_signal = MagicMock(return_value=None)
-        self.ifo2.inject_signal = MagicMock(return_value=None)
+        self.ifo1.inject_signal = mock.MagicMock(return_value=None)
+        self.ifo2.inject_signal = mock.MagicMock(return_value=None)
         self.ifo_list.inject_signal(
             parameters=None, waveform_generator=waveform_generator
         )
         self.assertTrue(m.called)
 
-    @patch.object(bilby.gw.detector.Interferometer, "inject_signal")
+    @mock.patch.object(bilby.gw.detector.Interferometer, "inject_signal")
     def test_inject_signal_with_inj_pol(self, m):
         self.ifo_list.inject_signal(
             injection_polarizations=dict(plus=1), raise_error=False
@@ -249,7 +247,7 @@ class TestInterferometerList(unittest.TestCase):
         )
         self.assertEqual(len(self.ifo_list), m.call_count)
 
-    @patch.object(bilby.gw.detector.Interferometer, "inject_signal")
+    @mock.patch.object(bilby.gw.detector.Interferometer, "inject_signal")
     def test_inject_signal_returns_expected_polarisations(self, m):
         m.return_value = dict(plus=1, cross=2)
         injection_polarizations = dict(plus=1, cross=2)
@@ -265,7 +263,7 @@ class TestInterferometerList(unittest.TestCase):
             ifos_pol[1],
         )
 
-    @patch.object(bilby.gw.detector.Interferometer, "save_data")
+    @mock.patch.object(bilby.gw.detector.Interferometer, "save_data")
     def test_save_data(self, m):
         self.ifo_list.save_data(outdir="test_outdir", label="test_outdir")
         m.assert_called_with(outdir="test_outdir", label="test_outdir")
@@ -333,18 +331,12 @@ class TestInterferometerList(unittest.TestCase):
 
     @pytest.mark.skipif(pandas_version_test, reason=skip_reason)
     def test_to_and_from_hdf5_loading(self):
-        if sys.version_info[0] < 3:
-            with self.assertRaises(NotImplementedError):
-                self.ifo_list.to_hdf5(outdir="outdir", label="test")
-        else:
-            self.ifo_list.to_hdf5(outdir="outdir", label="test")
-            filename = "outdir/test_name1name2.h5"
-            recovered_ifo = bilby.gw.detector.InterferometerList.from_hdf5(filename)
-            self.assertListEqual(self.ifo_list, recovered_ifo)
-
-    @pytest.mark.skipif(
-        pandas_version_test or sys.version_info[0] < 3, reason=skip_reason
-    )
+        self.ifo_list.to_hdf5(outdir="outdir", label="test")
+        filename = "outdir/test_name1name2.h5"
+        recovered_ifo = bilby.gw.detector.InterferometerList.from_hdf5(filename)
+        self.assertListEqual(self.ifo_list, recovered_ifo)
+
+    @pytest.mark.skipif(pandas_version_test, reason=skip_reason)
     def test_to_and_from_hdf5_wrong_class(self):
         dd.io.save("./outdir/psd.h5", self.ifo_list[0].power_spectral_density)
         filename = self.ifo_list._filename_from_outdir_label_extension(
diff --git a/test/gw/detector/psd_test.py b/test/gw/detector/psd_test.py
index 2d9f9acb6ee4ee62b678def08160f7f7b700a59a..238c3cfa0e464881231b44b40fb16fb22fcab96a 100644
--- a/test/gw/detector/psd_test.py
+++ b/test/gw/detector/psd_test.py
@@ -1,10 +1,9 @@
 import logging
 import os
 import unittest
+from unittest import mock
 
-import mock
 import numpy as np
-from mock import MagicMock
 
 import bilby
 
@@ -199,28 +198,28 @@ class TestPowerSpectralDensityWithFiles(unittest.TestCase):
 
     def test_check_file_psd_file_set_to_asd_file(self):
         logger = logging.getLogger("bilby")
-        m = MagicMock()
+        m = mock.MagicMock()
         logger.warning = m
         _ = bilby.gw.detector.PowerSpectralDensity(psd_file=self.asd_file)
         self.assertEqual(4, m.call_count)
 
     def test_check_file_not_called_psd_file_set_to_psd_file(self):
         logger = logging.getLogger("bilby")
-        m = MagicMock()
+        m = mock.MagicMock()
         logger.warning = m
         _ = bilby.gw.detector.PowerSpectralDensity(psd_file=self.psd_file)
         self.assertEqual(0, m.call_count)
 
     def test_check_file_asd_file_set_to_psd_file(self):
         logger = logging.getLogger("bilby")
-        m = MagicMock()
+        m = mock.MagicMock()
         logger.warning = m
         _ = bilby.gw.detector.PowerSpectralDensity(asd_file=self.psd_file)
         self.assertEqual(4, m.call_count)
 
     def test_check_file_not_called_asd_file_set_to_asd_file(self):
         logger = logging.getLogger("bilby")
-        m = MagicMock()
+        m = mock.MagicMock()
         logger.warning = m
         _ = bilby.gw.detector.PowerSpectralDensity(asd_file=self.asd_file)
         self.assertEqual(0, m.call_count)
diff --git a/test/gw/detector/strain_data_test.py b/test/gw/detector/strain_data_test.py
index 9cba741042a714e9875e2995d55bc628d7b78cfd..0f82a40a26e1d058c0005e7a546360a07cd8c502 100644
--- a/test/gw/detector/strain_data_test.py
+++ b/test/gw/detector/strain_data_test.py
@@ -1,9 +1,8 @@
 import unittest
+from unittest import mock
 
-import mock
 import numpy as np
 import scipy.signal
-from mock import patch
 
 import bilby
 
@@ -198,7 +197,7 @@ class TestInterferometerStrainData(unittest.TestCase):
         self.ifosd._time_domain_strain = expected_strain
         self.assertEqual(expected_strain, self.ifosd.time_domain_strain)
 
-    @patch("bilby.core.utils.infft")
+    @mock.patch("bilby.core.utils.infft")
     def test_time_domain_strain_from_frequency_domain_strain(self, m):
         m.return_value = 5
         self.ifosd.sampling_frequency = 200
@@ -222,7 +221,7 @@ class TestInterferometerStrainData(unittest.TestCase):
             np.array_equal(expected_strain, self.ifosd.frequency_domain_strain)
         )
 
-    @patch("bilby.core.utils.nfft")
+    @mock.patch("bilby.core.utils.nfft")
     def test_frequency_domain_strain_from_frequency_domain_strain(self, m):
         self.ifosd.start_time = 0
         self.ifosd.duration = 4
diff --git a/test/gw/likelihood_test.py b/test/gw/likelihood_test.py
index f2c6e7ec52782a882fc3de13e732310cb5d107b5..6e34f5872917fb055ecd25ae7a893b5977e8b33e 100644
--- a/test/gw/likelihood_test.py
+++ b/test/gw/likelihood_test.py
@@ -428,6 +428,9 @@ class TestMarginalizations(unittest.TestCase):
     The `time_jitter` parameter makes this a weaker dependence during sampling.
     """
 
+    lookup_phase = "distance_lookup_phase.npz"
+    lookup_no_phase = "distance_lookup_no_phase.npz"
+
     def setUp(self):
         np.random.seed(500)
         self.duration = 4
@@ -482,6 +485,13 @@ class TestMarginalizations(unittest.TestCase):
         del self.waveform_generator
         del self.priors
 
+    @classmethod
+    def tearDownClass(cls):
+        # remove lookup tables so that they are not used accidentally in subsequent tests
+        for filename in [cls.lookup_phase, cls.lookup_no_phase]:
+            if os.path.exists(filename):
+                os.remove(filename)
+
     def get_likelihood(
         self,
         time_marginalization=False,
@@ -492,9 +502,9 @@ class TestMarginalizations(unittest.TestCase):
         if priors is None:
             priors = self.priors.copy()
         if distance_marginalization and phase_marginalization:
-            lookup = "distance_lookup_phase.npz"
+            lookup = TestMarginalizations.lookup_phase
         elif distance_marginalization:
-            lookup = "distance_lookup_no_phase.npz"
+            lookup = TestMarginalizations.lookup_no_phase
         else:
             lookup = None
         like = bilby.gw.likelihood.GravitationalWaveTransient(
@@ -644,6 +654,174 @@ class TestMarginalizations(unittest.TestCase):
         )
 
 
+class TestMarginalizationsROQ(TestMarginalizations):
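+    """
+    Re-run the marginalisation tests from TestMarginalizations using the ROQ
+    likelihood in place of the standard GravitationalWaveTransient likelihood.
+    """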
+
+    lookup_phase = "distance_lookup_phase.npz"
+    lookup_no_phase = "distance_lookup_no_phase.npz"
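+    # ROQ weights are expensive to build, so get_likelihood caches them on disk
+    # and reuses them across tests; tearDownClass removes the file afterwards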
+    path_to_roq_weights = "weights.npz"
+
+    def setUp(self):
+        np.random.seed(500)
+        self.duration = 4
+        self.sampling_frequency = 2048
+        self.parameters = dict(
+            mass_1=31.0,
+            mass_2=29.0,
+            a_1=0.4,
+            a_2=0.3,
+            tilt_1=0.0,
+            tilt_2=0.0,
+            phi_12=1.7,
+            phi_jl=0.3,
+            luminosity_distance=4000.0,
+            theta_jn=0.4,
+            psi=2.659,
+            phase=1.3,
+            geocent_time=1126259642.413,
+            ra=1.375,
+            dec=-1.2108,
+            time_jitter=0,
+        )
+
+        self.interferometers = bilby.gw.detector.InterferometerList(["H1"])
+        self.interferometers.set_strain_data_from_power_spectral_densities(
+            sampling_frequency=self.sampling_frequency,
+            duration=self.duration,
+            start_time=1126259640,
+        )
+
+        waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
+            duration=self.duration,
+            sampling_frequency=self.sampling_frequency,
+            frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
+            start_time=1126259640,
+            waveform_arguments=dict(
+                reference_frequency=20.0,
+                minimum_frequency=20.0,
+                approximant="IMRPhenomPv2"
+            )
+        )
+        self.interferometers.inject_signal(
+            parameters=self.parameters, waveform_generator=waveform_generator
+        )
+
+        self.priors = bilby.gw.prior.BBHPriorDict()
+        # The prior range should lie well within the analysis segment, since the
+        # ROQ likelihood cannot be evaluated at times close to the segment edges
+        self.priors["geocent_time"] = bilby.prior.Uniform(
+            minimum=self.parameters["geocent_time"] - 0.1,
+            maximum=self.parameters["geocent_time"] + 0.1
+        )
+
+        # Possible locations for the ROQ: in the docker image, local, or on CIT
+        trial_roq_paths = [
+            "/roq_basis",
+            os.path.join(os.path.expanduser("~"), "ROQ_data/IMRPhenomPv2/4s"),
+            "/home/cbc/ROQ_data/IMRPhenomPv2/4s",
+        ]
+        roq_dir = None
+        for path in trial_roq_paths:
+            if os.path.isdir(path):
+                roq_dir = path
+                break
+        if roq_dir is None:
+            raise Exception("Unable to load ROQ basis: cannot proceed with tests")
+
+        self.waveform_generator = bilby.gw.waveform_generator.WaveformGenerator(
+            duration=self.duration,
+            sampling_frequency=self.sampling_frequency,
+            frequency_domain_source_model=bilby.gw.source.binary_black_hole_roq,
+            start_time=1126259640,
+            waveform_arguments=dict(
+                reference_frequency=20.0,
+                minimum_frequency=20.0,
+                approximant="IMRPhenomPv2",
+                frequency_nodes_linear=np.load("{}/fnodes_linear.npy".format(roq_dir)),
+                frequency_nodes_quadratic=np.load("{}/fnodes_quadratic.npy".format(roq_dir)),
+            )
+        )
+        self.roq_linear_matrix_file = "{}/B_linear.npy".format(roq_dir)
+        self.roq_quadratic_matrix_file = "{}/B_quadratic.npy".format(roq_dir)
+
+    @classmethod
+    def tearDownClass(cls):
+        for filename in [cls.lookup_phase, cls.lookup_no_phase, cls.path_to_roq_weights]:
+            if os.path.exists(filename):
+                os.remove(filename)
+
+    def get_likelihood(
+        self,
+        time_marginalization=False,
+        phase_marginalization=False,
+        distance_marginalization=False,
+        priors=None
+    ):
+        if priors is None:
+            priors = self.priors.copy()
+        if distance_marginalization and phase_marginalization:
+            lookup = TestMarginalizationsROQ.lookup_phase
+        elif distance_marginalization:
+            lookup = TestMarginalizationsROQ.lookup_no_phase
+        else:
+            lookup = None
+        kwargs = dict(
+            interferometers=self.interferometers,
+            waveform_generator=self.waveform_generator,
+            distance_marginalization=distance_marginalization,
+            phase_marginalization=phase_marginalization,
+            time_marginalization=time_marginalization,
+            distance_marginalization_lookup_table=lookup,
+            priors=priors
+        )
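+        # Reuse cached ROQ weights if a previous test has already built them;
+        # otherwise construct them from the basis matrices and save them to disk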
+        if os.path.exists(TestMarginalizationsROQ.path_to_roq_weights):
+            kwargs.update(dict(weights=TestMarginalizationsROQ.path_to_roq_weights))
+            like = bilby.gw.likelihood.ROQGravitationalWaveTransient(**kwargs)
+        else:
+            kwargs.update(
+                dict(
+                    linear_matrix=self.roq_linear_matrix_file,
+                    quadratic_matrix=self.roq_quadratic_matrix_file
+                )
+            )
+            like = bilby.gw.likelihood.ROQGravitationalWaveTransient(**kwargs)
+            like.save_weights(TestMarginalizationsROQ.path_to_roq_weights)
+        like.parameters = self.parameters.copy()
+        if time_marginalization:
+            like.parameters["geocent_time"] = self.interferometers.start_time
+        return like
+
+    def test_time_marginalisation(self):
+        self._template(
+            self.get_likelihood(time_marginalization=True),
+            self.get_likelihood(),
+            key="geocent_time",
+        )
+
+    def test_time_distance_marginalisation(self):
+        self._template(
+            self.get_likelihood(time_marginalization=True, distance_marginalization=True),
+            self.get_likelihood(distance_marginalization=True),
+            key="geocent_time",
+        )
+
+    def test_time_phase_marginalisation(self):
+        self._template(
+            self.get_likelihood(time_marginalization=True, phase_marginalization=True),
+            self.get_likelihood(phase_marginalization=True),
+            key="geocent_time",
+        )
+
+    def test_time_distance_phase_marginalisation(self):
+        self._template(
+            self.get_likelihood(time_marginalization=True, phase_marginalization=True, distance_marginalization=True),
+            self.get_likelihood(phase_marginalization=True, distance_marginalization=True),
+            key="geocent_time",
+        )
+
+    def test_time_marginalisation_partial_segment(self):
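+        # Overridden as a no-op so that the corresponding base-class test is
+        # skipped for the ROQ likelihood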
+        pass
+
+
 class TestROQLikelihood(unittest.TestCase):
     def setUp(self):
         self.duration = 4
diff --git a/test/gw/sampler/proposal_test.py b/test/gw/sampler/proposal_test.py
index 282ea61f6cc0c653e523bf242c9b446a8ae53212..3ad800084f7d25dbe0d95b14c8e9c2c850d633b9 100644
--- a/test/gw/sampler/proposal_test.py
+++ b/test/gw/sampler/proposal_test.py
@@ -1,5 +1,5 @@
 import unittest
-import mock
+from unittest import mock
 
 import numpy as np
 
diff --git a/test/gw/waveform_generator_test.py b/test/gw/waveform_generator_test.py
index f11bb04f9b3b27eeee0f2693b14d71369956ebbc..f2564c6d539fcc8d0dd0f100d87ac47f345dd29b 100644
--- a/test/gw/waveform_generator_test.py
+++ b/test/gw/waveform_generator_test.py
@@ -1,8 +1,7 @@
 import unittest
+from unittest import mock
 import bilby
 import numpy as np
-import mock
-from mock import MagicMock
 
 
 def dummy_func_array_return_value(
@@ -259,7 +258,7 @@ class TestFrequencyDomainStrainMethod(unittest.TestCase):
         del self.simulation_parameters
 
     def test_parameter_conversion_is_called(self):
-        self.waveform_generator.parameter_conversion = MagicMock(
+        self.waveform_generator.parameter_conversion = mock.MagicMock(
             side_effect=KeyError("test")
         )
         with self.assertRaises(KeyError):
@@ -328,7 +327,7 @@ class TestFrequencyDomainStrainMethod(unittest.TestCase):
             )
 
     def test_key_popping(self):
-        self.waveform_generator.parameter_conversion = MagicMock(
+        self.waveform_generator.parameter_conversion = mock.MagicMock(
             return_value=(
                 dict(
                     amplitude=1e-21,
@@ -465,7 +464,7 @@ class TestTimeDomainStrainMethod(unittest.TestCase):
         del self.simulation_parameters
 
     def test_parameter_conversion_is_called(self):
-        self.waveform_generator.parameter_conversion = MagicMock(
+        self.waveform_generator.parameter_conversion = mock.MagicMock(
             side_effect=KeyError("test")
         )
         with self.assertRaises(KeyError):
@@ -538,7 +537,7 @@ class TestTimeDomainStrainMethod(unittest.TestCase):
             )
 
     def test_key_popping(self):
-        self.waveform_generator.parameter_conversion = MagicMock(
+        self.waveform_generator.parameter_conversion = mock.MagicMock(
             return_value=(
                 dict(
                     amplitude=1e-2,
diff --git a/test/integration/sampler_run_test.py b/test/integration/sampler_run_test.py
index 0a0b1b19e23def083e6dd58f69ebfda780e4c3b2..0c40107201c8b2f3572b54fec8bb4f8eab8d595f 100644
--- a/test/integration/sampler_run_test.py
+++ b/test/integration/sampler_run_test.py
@@ -1,7 +1,6 @@
 import unittest
 import pytest
 import shutil
-import sys
 
 import bilby
 import numpy as np
@@ -184,7 +183,6 @@ class TestRunningSamplers(unittest.TestCase):
         assert "derived" in res.posterior
         assert res.log_likelihood_evaluations is not None
 
-    @pytest.mark.skipif(sys.version_info[1] <= 6, reason="pymc3 is broken in py36")
     @pytest.mark.xfail(
         raises=AttributeError,
         reason="Dependency issue with pymc3 causes attribute error on import",