From 36131cd74abff17c39c64d6c9f63a9ce2fa6728f Mon Sep 17 00:00:00 2001
From: Greg Ashton <gregory.ashton@ligo.org>
Date: Thu, 23 Sep 2021 20:33:34 +0000
Subject: [PATCH] Add pre-commit checks to the core examples

---
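Patch note: the first hunk widens the files pattern of the black, seed-isort-config,
and isort pre-commit hooks from ^bilby/bilby_mcmc/ to also cover
examples/core_examples/; the remaining hunks are the resulting mechanical
reformatting of the example scripts (double-quoted strings, sorted imports,
one-argument-per-line call layout), with no behavioural changes. Assuming
pre-commit is installed, the same checks can be reproduced locally with, for
example,

    pre-commit run --all-files

or, for a single example script,

    pre-commit run --files examples/core_examples/gaussian_example.py
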
 .pre-commit-config.yaml                       |   6 +-
 examples/core_examples/15d_gaussian.py        | 383 ++++++++++++++----
 .../linear_regression_bilby_mcmc.py           |  31 +-
 .../linear_regression_pymc3.py                |  32 +-
 ...near_regression_pymc3_custom_likelihood.py |  54 +--
 examples/core_examples/conditional_prior.py   |  35 +-
 examples/core_examples/dirichlet.py           |  17 +-
 examples/core_examples/gaussian_example.py    |  30 +-
 examples/core_examples/grid_example.py        |  20 +-
 .../core_examples/hyper_parameter_example.py  |  85 ++--
 examples/core_examples/linear_regression.py   |  31 +-
 .../core_examples/linear_regression_grid.py   |  72 ++--
 .../linear_regression_unknown_noise.py        |  33 +-
 examples/core_examples/logo/sample_logo.py    |  18 +-
 .../multivariate_gaussian_prior.py            |  62 +--
 .../core_examples/occam_factor_example.py     |  60 +--
 examples/core_examples/radioactive_decay.py   |  46 ++-
 examples/core_examples/slabspike_example.py   | 141 +++++--
 18 files changed, 796 insertions(+), 360 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d7f48675d..8b4324cb2 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -9,7 +9,7 @@ repos:
     hooks:
       - id: black
         language_version: python3
-        files: ^bilby/bilby_mcmc/
+        files: '(^bilby/bilby_mcmc/|^examples/core_examples/)'
 -   repo: https://github.com/codespell-project/codespell
     rev: v1.16.0
     hooks:
@@ -20,10 +20,10 @@ repos:
     hooks:
     -   id: seed-isort-config
         args: [--application-directories, 'bilby/']
-        files: ^bilby/bilby_mcmc/
+        files: '(^bilby/bilby_mcmc/|^examples/core_examples/)'
 -   repo: https://github.com/pre-commit/mirrors-isort
     rev: v4.3.21
     hooks:
     -   id: isort # sort imports alphabetically and separates import into sections
         args: [-w=88, -m=3, -tc, -sp=setup.cfg ]
-        files: ^bilby/bilby_mcmc/
+        files: '(^bilby/bilby_mcmc/|^examples/core_examples/)'
diff --git a/examples/core_examples/15d_gaussian.py b/examples/core_examples/15d_gaussian.py
index 036c66fdb..e52bc9e6b 100644
--- a/examples/core_examples/15d_gaussian.py
+++ b/examples/core_examples/15d_gaussian.py
@@ -1,60 +1,269 @@
 import bilby
 import numpy as np
-
-from bilby.core.likelihood import AnalyticalMultidimensionalCovariantGaussian, \
-    AnalyticalMultidimensionalBimodalCovariantGaussian
+from bilby.core.likelihood import (
+    AnalyticalMultidimensionalBimodalCovariantGaussian,
+    AnalyticalMultidimensionalCovariantGaussian,
+)
 
 logger = bilby.core.utils.logger
 
 cov = [
-    [0.045991865933182365, -0.005489748382557155, -0.01025067223674548, 0.0020087713726603213, -0.0032648855847982987,
-     -0.0034218261781145264, -0.0037173401838545774, -0.007694897715679858, 0.005260905282822458, 0.0013607957548231718,
-     0.001970785895702776, 0.006708452591621081, -0.005107684668720825, 0.004402554308030673, -0.00334987648531921],
-    [-0.005489748382557152, 0.05478640427684032, -0.004786202916836846, -0.007930397407501268, -0.0005945107515129139,
-     0.004858466255616657, -0.011667819871670204, 0.003169780190169035, 0.006761345004654851, -0.0037599761532668475,
-     0.005571796842520162, -0.0071098291510566895, -0.004477773540640284, -0.011250694688474672, 0.007465228985669282],
-    [-0.01025067223674548, -0.004786202916836844, 0.044324704403674524, -0.0010572820723801645, -0.009885693540838514,
-     -0.0048321205972943464, -0.004576186966267275, 0.0025107211483955676, -0.010126911913571181, 0.01153595152487264,
-     0.005773054728678472, 0.005286558230422045, -0.0055438798694137734, 0.0044772210361854765, -0.00620416958073918],
-    [0.0020087713726603213, -0.007930397407501268, -0.0010572820723801636, 0.029861342087731065, -0.007803477134405363,
-     -0.0011466944120756021, 0.009925736654597632, -0.0007664415942207051, -0.0057593957402320385,
-     -0.00027248233573270216, 0.003885350572544307, 0.00022362281488693097, 0.006609741178005571, -0.003292722856107429,
-     -0.005873218251875897],
-    [-0.0032648855847982987, -0.0005945107515129156, -0.009885693540838514, -0.007803477134405362, 0.0538403407841302,
-     -0.007446654755103316, -0.0025216534232170153, 0.004499568241334517, 0.009591034277125177, 0.00008612746932654223,
-     0.003386296829505543, -0.002600737873367083, 0.000621148057330571, -0.006603857049454523, -0.009241221870695395],
-    [-0.0034218261781145264, 0.004858466255616657, -0.004832120597294347, -0.0011466944120756015, -0.007446654755103318,
-     0.043746559133865104, 0.008962713024625965, -0.011099652042761613, -0.0006620240117921668, -0.0012591530037708058,
-     -0.006899982952117269, 0.0019732354732442878, -0.002445676747004324, -0.006454778807421816, 0.0033303577606412765],
-    [-0.00371734018385458, -0.011667819871670206, -0.004576186966267273, 0.009925736654597632, -0.0025216534232170153,
-     0.008962713024625965, 0.03664582756831382, -0.009470328827284009, -0.006213741694945105, 0.007118775954484294,
-     -0.0006741237990418526, -0.006003374957986355, 0.005718636997353189, -0.0005191095254772077,
-     -0.008466566781233205],
-    [-0.007694897715679857, 0.0031697801901690347, 0.002510721148395566, -0.0007664415942207059, 0.004499568241334515,
-     -0.011099652042761617, -0.009470328827284016, 0.057734267068088, 0.005521731225009532, -0.017008048805405164,
-     0.006749693090695894, -0.006348460110898, -0.007879244727681924, -0.005321753837620446, 0.011126783289057604],
-    [0.005260905282822458, 0.0067613450046548505, -0.010126911913571181, -0.00575939574023204, 0.009591034277125177,
-     -0.0006620240117921668, -0.006213741694945106, 0.005521731225009532, 0.04610670018969681, -0.010427010812879566,
-     -0.0009861561285861987, -0.008896020395949732, -0.0037627528719902485, 0.00033704453138913093,
-     -0.003173552163182467],
-    [0.0013607957548231744, -0.0037599761532668475, 0.01153595152487264, -0.0002724823357326985, 0.0000861274693265406,
-     -0.0012591530037708062, 0.007118775954484294, -0.01700804880540517, -0.010427010812879568, 0.05909125052583998,
-     0.002192545816395299, -0.002057672237277737, -0.004801518314458135, -0.014065326026662672, -0.005619012077114913],
-    [0.0019707858957027763, 0.005571796842520162, 0.005773054728678472, 0.003885350572544309, 0.003386296829505542,
-     -0.006899982952117272, -0.0006741237990418522, 0.006749693090695893, -0.0009861561285862005, 0.0021925458163952988,
-     0.024417715762416557, -0.003037163447600162, -0.011173674374382736, -0.0008193127407211239, -0.007137012700864866],
-    [0.006708452591621083, -0.0071098291510566895, 0.005286558230422046, 0.00022362281488693216, -0.0026007378733670806,
-     0.0019732354732442886, -0.006003374957986352, -0.006348460110897999, -0.008896020395949732, -0.002057672237277737,
-     -0.003037163447600163, 0.04762367868805726, 0.0008818947598625008, -0.0007262691465810616, -0.006482422704208912],
-    [-0.005107684668720825, -0.0044777735406402895, -0.005543879869413772, 0.006609741178005571, 0.0006211480573305693,
-     -0.002445676747004324, 0.0057186369973531905, -0.00787924472768192, -0.003762752871990247, -0.004801518314458137,
-     -0.011173674374382736, 0.0008818947598624995, 0.042639958466440225, 0.0010194948614718209, 0.0033872675386130637],
-    [0.004402554308030674, -0.011250694688474675, 0.004477221036185477, -0.003292722856107429, -0.006603857049454523,
-     -0.006454778807421815, -0.0005191095254772072, -0.005321753837620446, 0.0003370445313891318, -0.014065326026662679,
-     -0.0008193127407211239, -0.0007262691465810616, 0.0010194948614718226, 0.05244900188599414, -0.000256550861960499],
-    [-0.00334987648531921, 0.007465228985669282, -0.006204169580739178, -0.005873218251875899, -0.009241221870695395,
-     0.003330357760641278, -0.008466566781233205, 0.011126783289057604, -0.0031735521631824654, -0.005619012077114915,
-     -0.007137012700864866, -0.006482422704208912, 0.0033872675386130632, -0.000256550861960499, 0.05380987317762257]]
+    [
+        0.045991865933182365,
+        -0.005489748382557155,
+        -0.01025067223674548,
+        0.0020087713726603213,
+        -0.0032648855847982987,
+        -0.0034218261781145264,
+        -0.0037173401838545774,
+        -0.007694897715679858,
+        0.005260905282822458,
+        0.0013607957548231718,
+        0.001970785895702776,
+        0.006708452591621081,
+        -0.005107684668720825,
+        0.004402554308030673,
+        -0.00334987648531921,
+    ],
+    [
+        -0.005489748382557152,
+        0.05478640427684032,
+        -0.004786202916836846,
+        -0.007930397407501268,
+        -0.0005945107515129139,
+        0.004858466255616657,
+        -0.011667819871670204,
+        0.003169780190169035,
+        0.006761345004654851,
+        -0.0037599761532668475,
+        0.005571796842520162,
+        -0.0071098291510566895,
+        -0.004477773540640284,
+        -0.011250694688474672,
+        0.007465228985669282,
+    ],
+    [
+        -0.01025067223674548,
+        -0.004786202916836844,
+        0.044324704403674524,
+        -0.0010572820723801645,
+        -0.009885693540838514,
+        -0.0048321205972943464,
+        -0.004576186966267275,
+        0.0025107211483955676,
+        -0.010126911913571181,
+        0.01153595152487264,
+        0.005773054728678472,
+        0.005286558230422045,
+        -0.0055438798694137734,
+        0.0044772210361854765,
+        -0.00620416958073918,
+    ],
+    [
+        0.0020087713726603213,
+        -0.007930397407501268,
+        -0.0010572820723801636,
+        0.029861342087731065,
+        -0.007803477134405363,
+        -0.0011466944120756021,
+        0.009925736654597632,
+        -0.0007664415942207051,
+        -0.0057593957402320385,
+        -0.00027248233573270216,
+        0.003885350572544307,
+        0.00022362281488693097,
+        0.006609741178005571,
+        -0.003292722856107429,
+        -0.005873218251875897,
+    ],
+    [
+        -0.0032648855847982987,
+        -0.0005945107515129156,
+        -0.009885693540838514,
+        -0.007803477134405362,
+        0.0538403407841302,
+        -0.007446654755103316,
+        -0.0025216534232170153,
+        0.004499568241334517,
+        0.009591034277125177,
+        0.00008612746932654223,
+        0.003386296829505543,
+        -0.002600737873367083,
+        0.000621148057330571,
+        -0.006603857049454523,
+        -0.009241221870695395,
+    ],
+    [
+        -0.0034218261781145264,
+        0.004858466255616657,
+        -0.004832120597294347,
+        -0.0011466944120756015,
+        -0.007446654755103318,
+        0.043746559133865104,
+        0.008962713024625965,
+        -0.011099652042761613,
+        -0.0006620240117921668,
+        -0.0012591530037708058,
+        -0.006899982952117269,
+        0.0019732354732442878,
+        -0.002445676747004324,
+        -0.006454778807421816,
+        0.0033303577606412765,
+    ],
+    [
+        -0.00371734018385458,
+        -0.011667819871670206,
+        -0.004576186966267273,
+        0.009925736654597632,
+        -0.0025216534232170153,
+        0.008962713024625965,
+        0.03664582756831382,
+        -0.009470328827284009,
+        -0.006213741694945105,
+        0.007118775954484294,
+        -0.0006741237990418526,
+        -0.006003374957986355,
+        0.005718636997353189,
+        -0.0005191095254772077,
+        -0.008466566781233205,
+    ],
+    [
+        -0.007694897715679857,
+        0.0031697801901690347,
+        0.002510721148395566,
+        -0.0007664415942207059,
+        0.004499568241334515,
+        -0.011099652042761617,
+        -0.009470328827284016,
+        0.057734267068088,
+        0.005521731225009532,
+        -0.017008048805405164,
+        0.006749693090695894,
+        -0.006348460110898,
+        -0.007879244727681924,
+        -0.005321753837620446,
+        0.011126783289057604,
+    ],
+    [
+        0.005260905282822458,
+        0.0067613450046548505,
+        -0.010126911913571181,
+        -0.00575939574023204,
+        0.009591034277125177,
+        -0.0006620240117921668,
+        -0.006213741694945106,
+        0.005521731225009532,
+        0.04610670018969681,
+        -0.010427010812879566,
+        -0.0009861561285861987,
+        -0.008896020395949732,
+        -0.0037627528719902485,
+        0.00033704453138913093,
+        -0.003173552163182467,
+    ],
+    [
+        0.0013607957548231744,
+        -0.0037599761532668475,
+        0.01153595152487264,
+        -0.0002724823357326985,
+        0.0000861274693265406,
+        -0.0012591530037708062,
+        0.007118775954484294,
+        -0.01700804880540517,
+        -0.010427010812879568,
+        0.05909125052583998,
+        0.002192545816395299,
+        -0.002057672237277737,
+        -0.004801518314458135,
+        -0.014065326026662672,
+        -0.005619012077114913,
+    ],
+    [
+        0.0019707858957027763,
+        0.005571796842520162,
+        0.005773054728678472,
+        0.003885350572544309,
+        0.003386296829505542,
+        -0.006899982952117272,
+        -0.0006741237990418522,
+        0.006749693090695893,
+        -0.0009861561285862005,
+        0.0021925458163952988,
+        0.024417715762416557,
+        -0.003037163447600162,
+        -0.011173674374382736,
+        -0.0008193127407211239,
+        -0.007137012700864866,
+    ],
+    [
+        0.006708452591621083,
+        -0.0071098291510566895,
+        0.005286558230422046,
+        0.00022362281488693216,
+        -0.0026007378733670806,
+        0.0019732354732442886,
+        -0.006003374957986352,
+        -0.006348460110897999,
+        -0.008896020395949732,
+        -0.002057672237277737,
+        -0.003037163447600163,
+        0.04762367868805726,
+        0.0008818947598625008,
+        -0.0007262691465810616,
+        -0.006482422704208912,
+    ],
+    [
+        -0.005107684668720825,
+        -0.0044777735406402895,
+        -0.005543879869413772,
+        0.006609741178005571,
+        0.0006211480573305693,
+        -0.002445676747004324,
+        0.0057186369973531905,
+        -0.00787924472768192,
+        -0.003762752871990247,
+        -0.004801518314458137,
+        -0.011173674374382736,
+        0.0008818947598624995,
+        0.042639958466440225,
+        0.0010194948614718209,
+        0.0033872675386130637,
+    ],
+    [
+        0.004402554308030674,
+        -0.011250694688474675,
+        0.004477221036185477,
+        -0.003292722856107429,
+        -0.006603857049454523,
+        -0.006454778807421815,
+        -0.0005191095254772072,
+        -0.005321753837620446,
+        0.0003370445313891318,
+        -0.014065326026662679,
+        -0.0008193127407211239,
+        -0.0007262691465810616,
+        0.0010194948614718226,
+        0.05244900188599414,
+        -0.000256550861960499,
+    ],
+    [
+        -0.00334987648531921,
+        0.007465228985669282,
+        -0.006204169580739178,
+        -0.005873218251875899,
+        -0.009241221870695395,
+        0.003330357760641278,
+        -0.008466566781233205,
+        0.011126783289057604,
+        -0.0031735521631824654,
+        -0.005619012077114915,
+        -0.007137012700864866,
+        -0.006482422704208912,
+        0.0033872675386130632,
+        -0.000256550861960499,
+        0.05380987317762257,
+    ],
+]
 
 dim = 15
 mean = np.zeros(dim)
@@ -64,7 +273,12 @@ outdir = "outdir"
 
 likelihood = AnalyticalMultidimensionalCovariantGaussian(mean, cov)
 priors = bilby.core.prior.PriorDict()
-priors.update({"x{0}".format(i): bilby.core.prior.Uniform(-5, 5, "x{0}".format(i)) for i in range(dim)})
+priors.update(
+    {
+        "x{0}".format(i): bilby.core.prior.Uniform(-5, 5, "x{0}".format(i))
+        for i in range(dim)
+    }
+)
 
 result = bilby.run_sampler(
     likelihood=likelihood,
@@ -73,7 +287,7 @@ result = bilby.run_sampler(
     outdir=outdir,
     label=label,
     check_point_plot=True,
-    resume=True
+    resume=True,
 )
 
 result.plot_corner(parameters={"x{0}".format(i): mean[i] for i in range(dim)})
@@ -81,18 +295,27 @@ result.plot_corner(parameters={"x{0}".format(i): mean[i] for i in range(dim)})
 # The prior is constant and flat, and the likelihood is normalised such that the area under it is one.
 # The analytical evidence is then given as 1/(prior volume)
 
-log_prior_vol = np.sum(np.log([prior.maximum - prior.minimum for key, prior in priors.items()]))
+log_prior_vol = np.sum(
+    np.log([prior.maximum - prior.minimum for key, prior in priors.items()])
+)
 log_evidence = -log_prior_vol
 
-sampled_std = [np.std(result.posterior[param]) for param in result.search_parameter_keys]
+sampled_std = [
+    np.std(result.posterior[param]) for param in result.search_parameter_keys
+]
 
 logger.info("Analytic log evidence: " + str(log_evidence))
-logger.info("Sampled log evidence:  " + str(result.log_evidence) + ' +/- ' + str(result.log_evidence_err))
+logger.info(
+    "Sampled log evidence:  "
+    + str(result.log_evidence)
+    + " +/- "
+    + str(result.log_evidence_err)
+)
 
 for i, search_parameter_key in enumerate(result.search_parameter_keys):
     logger.info(search_parameter_key)
-    logger.info('Expected posterior standard deviation: ' + str(likelihood.sigma[i]))
-    logger.info('Sampled posterior standard deviation:  ' + str(sampled_std[i]))
+    logger.info("Expected posterior standard deviation: " + str(likelihood.sigma[i]))
+    logger.info("Sampled posterior standard deviation:  " + str(sampled_std[i]))
 
 # BIMODAL distribution
 
@@ -104,7 +327,12 @@ mean_2 = -4 * np.sqrt(np.diag(cov))
 
 likelihood = AnalyticalMultidimensionalBimodalCovariantGaussian(mean_1, mean_2, cov)
 priors = bilby.core.prior.PriorDict()
-priors.update({"x{0}".format(i): bilby.core.prior.Uniform(-5, 5, "x{0}".format(i)) for i in range(dim)})
+priors.update(
+    {
+        "x{0}".format(i): bilby.core.prior.Uniform(-5, 5, "x{0}".format(i))
+        for i in range(dim)
+    }
+)
 
 result = bilby.run_sampler(
     likelihood=likelihood,
@@ -113,14 +341,20 @@ result = bilby.run_sampler(
     outdir=outdir,
     label=label,
     check_point_plot=True,
-    resume=True
+    resume=True,
+)
+result.plot_corner(
+    parameters={"x{0}".format(i): mean_1[i] for i in range(dim)},
+    filename=outdir + "/multidim_gaussian_bimodal_mode_1",
+)
+result.plot_corner(
+    parameters={"x{0}".format(i): mean_2[i] for i in range(dim)},
+    filename=outdir + "/multidim_gaussian_bimodal_mode_2",
 )
-result.plot_corner(parameters={"x{0}".format(i): mean_1[i] for i in range(dim)},
-                   filename=outdir + '/multidim_gaussian_bimodal_mode_1')
-result.plot_corner(parameters={"x{0}".format(i): mean_2[i] for i in range(dim)},
-                   filename=outdir + '/multidim_gaussian_bimodal_mode_2')
 
-log_prior_vol = np.sum(np.log([prior.maximum - prior.minimum for key, prior in priors.items()]))
+log_prior_vol = np.sum(
+    np.log([prior.maximum - prior.minimum for key, prior in priors.items()])
+)
 log_evidence = -log_prior_vol
 sampled_std_1 = []
 sampled_std_2 = []
@@ -132,10 +366,21 @@ for param in result.search_parameter_keys:
     sampled_std_2.append(np.std(samples_2))
 
 logger.info("Analytic log evidence: " + str(log_evidence))
-logger.info("Sampled log evidence:  " + str(result.log_evidence) + ' +/- ' + str(result.log_evidence_err))
+logger.info(
+    "Sampled log evidence:  "
+    + str(result.log_evidence)
+    + " +/- "
+    + str(result.log_evidence_err)
+)
 
 for i, search_parameter_key in enumerate(result.search_parameter_keys):
     logger.info(search_parameter_key)
-    logger.info('Expected posterior standard deviation both modes: ' + str(likelihood.sigma[i]))
-    logger.info('Sampled posterior standard deviation first mode:  ' + str(sampled_std_1[i]))
-    logger.info('Sampled posterior standard deviation second mode:  ' + str(sampled_std_2[i]))
+    logger.info(
+        "Expected posterior standard deviation both modes: " + str(likelihood.sigma[i])
+    )
+    logger.info(
+        "Sampled posterior standard deviation first mode:  " + str(sampled_std_1[i])
+    )
+    logger.info(
+        "Sampled posterior standard deviation second mode:  " + str(sampled_std_2[i])
+    )
diff --git a/examples/core_examples/alternative_samplers/linear_regression_bilby_mcmc.py b/examples/core_examples/alternative_samplers/linear_regression_bilby_mcmc.py
index abad1bb72..2b2d42ec1 100644
--- a/examples/core_examples/alternative_samplers/linear_regression_bilby_mcmc.py
+++ b/examples/core_examples/alternative_samplers/linear_regression_bilby_mcmc.py
@@ -6,12 +6,12 @@ data with background Gaussian noise
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'linear_regression'
-outdir = 'outdir'
+label = "linear_regression"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -36,12 +36,12 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
 # the time, data and signal model
@@ -50,14 +50,19 @@ likelihood = bilby.likelihood.GaussianLikelihood(time, data, model, sigma)
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', nlive=250,
-    injection_parameters=injection_parameters, outdir=outdir,
-    label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=250,
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 
 # Finally plot a corner plot: all outputs are stored in outdir
 result.plot_corner()
diff --git a/examples/core_examples/alternative_samplers/linear_regression_pymc3.py b/examples/core_examples/alternative_samplers/linear_regression_pymc3.py
index 2c6b02f49..75cbf16ae 100644
--- a/examples/core_examples/alternative_samplers/linear_regression_pymc3.py
+++ b/examples/core_examples/alternative_samplers/linear_regression_pymc3.py
@@ -6,14 +6,13 @@ data with background Gaussian noise
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
-
+import numpy as np
 from bilby.core.likelihood import GaussianLikelihood
 
 # A few simple setup steps
-label = 'linear_regression_pymc3'
-outdir = 'outdir'
+label = "linear_regression_pymc3"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -38,12 +37,12 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
 # the time, data and signal model
@@ -52,12 +51,17 @@ likelihood = GaussianLikelihood(time, data, model, sigma=sigma)
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='pymc3',
-    injection_parameters=injection_parameters, outdir=outdir,
-    draws=2000, label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="pymc3",
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    draws=2000,
+    label=label,
+)
 result.plot_corner()
diff --git a/examples/core_examples/alternative_samplers/linear_regression_pymc3_custom_likelihood.py b/examples/core_examples/alternative_samplers/linear_regression_pymc3_custom_likelihood.py
index fbea649f5..864c9afe3 100644
--- a/examples/core_examples/alternative_samplers/linear_regression_pymc3_custom_likelihood.py
+++ b/examples/core_examples/alternative_samplers/linear_regression_pymc3_custom_likelihood.py
@@ -8,15 +8,16 @@ would give equivalent results as using the pre-defined 'Gaussian Likelihood'
 
 """
 
+import inspect
+
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
-import inspect
+import numpy as np
 import pymc3 as pm
 
 # A few simple setup steps
-label = 'linear_regression_pymc3_custom_likelihood'
-outdir = 'outdir'
+label = "linear_regression_pymc3_custom_likelihood"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -41,18 +42,17 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 
 # Parameter estimation: we now define a Gaussian Likelihood class relevant for
 # our model.
 class GaussianLikelihoodPyMC3(bilby.Likelihood):
-
     def __init__(self, x, y, sigma, function):
         """
         A general Gaussian likelihood - the parameters are inferred from the
@@ -95,17 +95,17 @@ class GaussianLikelihoodPyMC3(bilby.Likelihood):
         if not isinstance(sampler, Pymc3):
             raise ValueError("Sampler is not a bilby Pymc3 sampler object")
 
-        if not hasattr(sampler, 'pymc3_model'):
+        if not hasattr(sampler, "pymc3_model"):
             raise AttributeError("Sampler has not PyMC3 model attribute")
 
         with sampler.pymc3_model:
-            mdist = sampler.pymc3_priors['m']
-            cdist = sampler.pymc3_priors['c']
+            mdist = sampler.pymc3_priors["m"]
+            cdist = sampler.pymc3_priors["c"]
 
             mu = model(time, mdist, cdist)
 
             # set the likelihood distribution
-            pm.Normal('likelihood', mu=mu, sd=self.sigma, observed=self.y)
+            pm.Normal("likelihood", mu=mu, sd=self.sigma, observed=self.y)
 
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
@@ -120,9 +120,9 @@ class PriorPyMC3(bilby.core.prior.Prior):
         Uniform prior with bounds (should be equivalent to bilby.prior.Uniform)
         """
 
-        bilby.core.prior.Prior.__init__(self, name, latex_label,
-                                        minimum=minimum,
-                                        maximum=maximum)
+        bilby.core.prior.Prior.__init__(
+            self, name, latex_label, minimum=minimum, maximum=maximum
+        )
 
     def ln_prob(self, sampler=None):
         """
@@ -135,19 +135,25 @@ class PriorPyMC3(bilby.core.prior.Prior):
         if not isinstance(sampler, Pymc3):
             raise ValueError("Sampler is not a bilby Pymc3 sampler object")
 
-        return pm.Uniform(self.name, lower=self.minimum,
-                          upper=self.maximum)
+        return pm.Uniform(self.name, lower=self.minimum, upper=self.maximum)
 
 
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = PriorPyMC3(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = PriorPyMC3(-2, 2, "c")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='pymc3', draws=1000,
-    tune=1000, discard_tuned_samples=True,
-    injection_parameters=injection_parameters, outdir=outdir, label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="pymc3",
+    draws=1000,
+    tune=1000,
+    discard_tuned_samples=True,
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 result.plot_corner()
diff --git a/examples/core_examples/conditional_prior.py b/examples/core_examples/conditional_prior.py
index abc12d300..0c6f4efd0 100644
--- a/examples/core_examples/conditional_prior.py
+++ b/examples/core_examples/conditional_prior.py
@@ -9,36 +9,47 @@ import numpy as np
 
 
 class ZeroLikelihood(bilby.core.likelihood.Likelihood):
-    """ Flat likelihood. This always returns 0.
+    """Flat likelihood. This always returns 0.
     This way our posterior distribution is exactly the prior distribution."""
+
     def log_likelihood(self):
         return 0
 
 
 def condition_func_y(reference_params, x):
     """ Condition function for our p(y|x) prior."""
-    radius = 0.5 * (reference_params['maximum'] - reference_params['minimum'])
-    y_max = np.sqrt(radius**2 - x**2)
+    radius = 0.5 * (reference_params["maximum"] - reference_params["minimum"])
+    y_max = np.sqrt(radius ** 2 - x ** 2)
     return dict(minimum=-y_max, maximum=y_max)
 
 
 def condition_func_z(reference_params, x, y):
     """ Condition function for our p(z|x, y) prior."""
-    radius = 0.5 * (reference_params['maximum'] - reference_params['minimum'])
-    z_max = np.sqrt(radius**2 - x**2 - y**2)
+    radius = 0.5 * (reference_params["maximum"] - reference_params["minimum"])
+    z_max = np.sqrt(radius ** 2 - x ** 2 - y ** 2)
     return dict(minimum=-z_max, maximum=z_max)
 
 
 # Set up the conditional priors and the flat likelihood
 priors = bilby.core.prior.ConditionalPriorDict()
-priors['x'] = bilby.core.prior.Uniform(minimum=-1, maximum=1, latex_label="$x$")
-priors['y'] = bilby.core.prior.ConditionalUniform(condition_func=condition_func_y, minimum=-1,
-                                                  maximum=1, latex_label="$y$")
-priors['z'] = bilby.core.prior.ConditionalUniform(condition_func=condition_func_z, minimum=-1,
-                                                  maximum=1, latex_label="$z$")
+priors["x"] = bilby.core.prior.Uniform(minimum=-1, maximum=1, latex_label="$x$")
+priors["y"] = bilby.core.prior.ConditionalUniform(
+    condition_func=condition_func_y, minimum=-1, maximum=1, latex_label="$y$"
+)
+priors["z"] = bilby.core.prior.ConditionalUniform(
+    condition_func=condition_func_z, minimum=-1, maximum=1, latex_label="$z$"
+)
 likelihood = ZeroLikelihood(parameters=dict(x=0, y=0, z=0))
 
 # Sample the prior distribution
-res = bilby.run_sampler(likelihood=likelihood, priors=priors, sampler='dynesty', nlive=5000,
-                        label='conditional_prior', outdir='outdir', resume=False, clean=True)
+res = bilby.run_sampler(
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=5000,
+    label="conditional_prior",
+    outdir="outdir",
+    resume=False,
+    clean=True,
+)
 res.plot_corner()
diff --git a/examples/core_examples/dirichlet.py b/examples/core_examples/dirichlet.py
index 08bc6a415..71d29ee5e 100644
--- a/examples/core_examples/dirichlet.py
+++ b/examples/core_examples/dirichlet.py
@@ -1,11 +1,9 @@
 import numpy as np
 import pandas as pd
-
 from bilby.core.likelihood import Multinomial
 from bilby.core.prior import DirichletPriorDict
 from bilby.core.sampler import run_sampler
 
-
 n_dim = 3
 label = "dirichlet_"
 priors = DirichletPriorDict(n_dim=n_dim, label=label)
@@ -20,14 +18,21 @@ data = [injection_parameters[label + str(ii)] * 1000 for ii in range(n_dim)]
 likelihood = Multinomial(data=data, n_dimensions=n_dim, label=label)
 
 result = run_sampler(
-    likelihood=likelihood, priors=priors, nlive=100,
-    label="multinomial", injection_parameters=injection_parameters
+    likelihood=likelihood,
+    priors=priors,
+    nlive=100,
+    label="multinomial",
+    injection_parameters=injection_parameters,
 )
 
-result.posterior[label + str(n_dim - 1)] = 1 - np.sum([result.posterior[key] for key in priors], axis=0)
+result.posterior[label + str(n_dim - 1)] = 1 - np.sum(
+    [result.posterior[key] for key in priors], axis=0
+)
 result.plot_corner(parameters=injection_parameters)
 
 samples = priors.sample(10000)
 samples[label + str(n_dim - 1)] = 1 - np.sum([samples[key] for key in samples], axis=0)
 result.posterior = pd.DataFrame(samples)
-result.plot_corner(parameters=[key for key in samples], filename="outdir/dirichlet_prior_corner.png")
+result.plot_corner(
+    parameters=[key for key in samples], filename="outdir/dirichlet_prior_corner.png"
+)
diff --git a/examples/core_examples/gaussian_example.py b/examples/core_examples/gaussian_example.py
index a99423e67..da7f252ab 100644
--- a/examples/core_examples/gaussian_example.py
+++ b/examples/core_examples/gaussian_example.py
@@ -7,8 +7,8 @@ import bilby
 import numpy as np
 
 # A few simple setup steps
-label = 'gaussian_example'
-outdir = 'outdir'
+label = "gaussian_example"
+outdir = "outdir"
 
 # Here is minimum requirement for a Likelihood class to run with bilby. In this
 # case, we setup a GaussianLikelihood, which needs to have a log_likelihood
@@ -30,24 +30,32 @@ class SimpleGaussianLikelihood(bilby.Likelihood):
         data: array_like
             The data to analyse
         """
-        super().__init__(parameters={'mu': None, 'sigma': None})
+        super().__init__(parameters={"mu": None, "sigma": None})
         self.data = data
         self.N = len(data)
 
     def log_likelihood(self):
-        mu = self.parameters['mu']
-        sigma = self.parameters['sigma']
+        mu = self.parameters["mu"]
+        sigma = self.parameters["sigma"]
         res = self.data - mu
-        return -0.5 * (np.sum((res / sigma)**2) +
-                       self.N * np.log(2 * np.pi * sigma**2))
+        return -0.5 * (
+            np.sum((res / sigma) ** 2) + self.N * np.log(2 * np.pi * sigma ** 2)
+        )
 
 
 likelihood = SimpleGaussianLikelihood(data)
-priors = dict(mu=bilby.core.prior.Uniform(0, 5, 'mu'),
-              sigma=bilby.core.prior.Uniform(0, 10, 'sigma'))
+priors = dict(
+    mu=bilby.core.prior.Uniform(0, 5, "mu"),
+    sigma=bilby.core.prior.Uniform(0, 10, "sigma"),
+)
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', nlive=1000,
-    outdir=outdir, label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=1000,
+    outdir=outdir,
+    label=label,
+)
 result.plot_corner()
diff --git a/examples/core_examples/grid_example.py b/examples/core_examples/grid_example.py
index a81e1ce6e..2388ed3d8 100644
--- a/examples/core_examples/grid_example.py
+++ b/examples/core_examples/grid_example.py
@@ -6,12 +6,12 @@ data with background Gaussian noise
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'linear_regression_grid'
-outdir = 'outdir'
+label = "linear_regression_grid"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -36,12 +36,12 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 plt.close()
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
@@ -51,7 +51,7 @@ likelihood = bilby.likelihood.GaussianLikelihood(time, data, model, sigma)
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
 
 grid = bilby.core.grid.Grid(likelihood=likelihood, priors=priors)
diff --git a/examples/core_examples/hyper_parameter_example.py b/examples/core_examples/hyper_parameter_example.py
index bbccc97e5..5c920989c 100644
--- a/examples/core_examples/hyper_parameter_example.py
+++ b/examples/core_examples/hyper_parameter_example.py
@@ -2,16 +2,16 @@
 """
 An example of how to use bilby to perform parameter estimation for hyper params
 """
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 from bilby.core.likelihood import GaussianLikelihood
 from bilby.core.prior import Uniform
-from bilby.core.sampler import run_sampler
 from bilby.core.result import make_pp_plot
-from bilby.hyper.likelihood import HyperparameterLikelihood
+from bilby.core.sampler import run_sampler
 from bilby.core.utils import check_directory_exists_and_if_not_mkdir
+from bilby.hyper.likelihood import HyperparameterLikelihood
 
-outdir = 'outdir'
+outdir = "outdir"
 check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -24,7 +24,7 @@ N = 10
 x = np.linspace(0, 10, N)
 sigma = 1
 Nevents = 4
-labels = ['a', 'b', 'c', 'd']
+labels = ["a", "b", "c", "d"]
 
 true_mu_c0 = 5
 true_sigma_c0 = 1
@@ -40,32 +40,46 @@ for i in range(Nevents):
     injection_parameters = dict(c0=c0, c1=c1)
 
     data = model(x, **injection_parameters) + np.random.normal(0, sigma, N)
-    line = ax1.plot(x, data, '-x', label=labels[i])
+    line = ax1.plot(x, data, "-x", label=labels[i])
 
     likelihood = GaussianLikelihood(x, data, model, sigma)
-    priors = dict(c0=Uniform(-10, 10, 'c0'), c1=Uniform(-10, 10, 'c1'))
+    priors = dict(c0=Uniform(-10, 10, "c0"), c1=Uniform(-10, 10, "c1"))
 
     result = run_sampler(
-        likelihood=likelihood, priors=priors, sampler='nestle', nlive=1000,
-        outdir=outdir, verbose=False, label='individual_{}'.format(i),
-        save=False, injection_parameters=injection_parameters)
-    ax2.hist(result.posterior.c0, color=line[0].get_color(), density=True,
-             alpha=0.5, label=labels[i])
+        likelihood=likelihood,
+        priors=priors,
+        sampler="nestle",
+        nlive=1000,
+        outdir=outdir,
+        verbose=False,
+        label="individual_{}".format(i),
+        save=False,
+        injection_parameters=injection_parameters,
+    )
+    ax2.hist(
+        result.posterior.c0,
+        color=line[0].get_color(),
+        density=True,
+        alpha=0.5,
+        label=labels[i],
+    )
     results.append(result)
 
-ax1.set_xlabel('x')
-ax1.set_ylabel('y(x)')
+ax1.set_xlabel("x")
+ax1.set_ylabel("y(x)")
 ax1.legend()
-fig1.savefig('outdir/hyper_parameter_data.png')
-ax2.set_xlabel('c0')
-ax2.set_ylabel('density')
+fig1.savefig("outdir/hyper_parameter_data.png")
+ax2.set_xlabel("c0")
+ax2.set_ylabel("density")
 ax2.legend()
-fig2.savefig('outdir/hyper_parameter_combined_posteriors.png')
+fig2.savefig("outdir/hyper_parameter_combined_posteriors.png")
 
 
 def hyper_prior(dataset, mu, sigma):
-    return np.exp(- (dataset['c0'] - mu)**2 / (2 * sigma**2)) /\
-        (2 * np.pi * sigma**2)**0.5
+    return (
+        np.exp(-((dataset["c0"] - mu) ** 2) / (2 * sigma ** 2))
+        / (2 * np.pi * sigma ** 2) ** 0.5
+    )
 
 
 def run_prior(dataset):
@@ -75,16 +89,29 @@ def run_prior(dataset):
 samples = [result.posterior for result in results]
 evidences = [result.log_evidence for result in results]
 hp_likelihood = HyperparameterLikelihood(
-    posteriors=samples, hyper_prior=hyper_prior,
-    sampling_prior=run_prior, log_evidences=evidences, max_samples=500)
-
-hp_priors = dict(mu=Uniform(-10, 10, 'mu', '$\mu_{c0}$'),
-                 sigma=Uniform(0, 10, 'sigma', '$\sigma_{c0}$'))
+    posteriors=samples,
+    hyper_prior=hyper_prior,
+    sampling_prior=run_prior,
+    log_evidences=evidences,
+    max_samples=500,
+)
+
+hp_priors = dict(
+    mu=Uniform(-10, 10, "mu", "$\mu_{c0}$"),
+    sigma=Uniform(0, 10, "sigma", "$\sigma_{c0}$"),
+)
 
 # And run sampler
 result = run_sampler(
-    likelihood=hp_likelihood, priors=hp_priors, sampler='dynesty', nlive=1000,
-    use_ratio=False, outdir=outdir, label='hyper_parameter',
-    verbose=True, clean=True)
+    likelihood=hp_likelihood,
+    priors=hp_priors,
+    sampler="dynesty",
+    nlive=1000,
+    use_ratio=False,
+    outdir=outdir,
+    label="hyper_parameter",
+    verbose=True,
+    clean=True,
+)
 result.plot_corner(truth=dict(mu=true_mu_c0, sigma=true_sigma_c0))
-make_pp_plot(results, filename=outdir + '/hyper_parameter_pp.png')
+make_pp_plot(results, filename=outdir + "/hyper_parameter_pp.png")
diff --git a/examples/core_examples/linear_regression.py b/examples/core_examples/linear_regression.py
index abad1bb72..2b2d42ec1 100644
--- a/examples/core_examples/linear_regression.py
+++ b/examples/core_examples/linear_regression.py
@@ -6,12 +6,12 @@ data with background Gaussian noise
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'linear_regression'
-outdir = 'outdir'
+label = "linear_regression"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -36,12 +36,12 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
 # the time, data and signal model
@@ -50,14 +50,19 @@ likelihood = bilby.likelihood.GaussianLikelihood(time, data, model, sigma)
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', nlive=250,
-    injection_parameters=injection_parameters, outdir=outdir,
-    label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=250,
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 
 # Finally plot a corner plot: all outputs are stored in outdir
 result.plot_corner()
diff --git a/examples/core_examples/linear_regression_grid.py b/examples/core_examples/linear_regression_grid.py
index c517ec65e..76fd49630 100644
--- a/examples/core_examples/linear_regression_grid.py
+++ b/examples/core_examples/linear_regression_grid.py
@@ -5,14 +5,13 @@ fitting a linear function to data with background Gaussian noise.
 This will compare the output of using a stochastic sampling method
 to evaluating the posterior on a grid.
 """
-import numpy as np
-import matplotlib.pyplot as plt
-
 import bilby
+import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'linear_regression_grid'
-outdir = 'outdir'
+label = "linear_regression_grid"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -23,8 +22,8 @@ def model(time, m, c):
 
 # Now we define the injection parameters which we make simulated data with
 injection_parameters = dict()
-injection_parameters['c'] = 0.2
-injection_parameters['m'] = 0.5
+injection_parameters["c"] = 0.2
+injection_parameters["m"] = 0.5
 
 # For this example, we'll use standard Gaussian noise
 
@@ -34,17 +33,17 @@ sampling_frequency = 10
 time_duration = 10
 time = np.arange(0, time_duration, 1 / sampling_frequency)
 N = len(time)
-sigma = 3.
+sigma = 3.0
 data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 # Now lets instantiate a version of our GaussianLikelihood, giving it
 # the time, data and signal model
@@ -53,30 +52,45 @@ likelihood = bilby.likelihood.GaussianLikelihood(time, data, model, sigma)
 # From hereon, the syntax is exactly equivalent to other bilby examples
 # We make a prior
 priors = bilby.core.prior.PriorDict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', nlive=500,
-    sample='unif', injection_parameters=injection_parameters, outdir=outdir,
-    label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=500,
+    sample="unif",
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 fig = result.plot_corner(parameters=injection_parameters, save=False)
 
-grid = bilby.core.grid.Grid(likelihood, priors, grid_size={'m': 200, 'c': 100})
+grid = bilby.core.grid.Grid(likelihood, priors, grid_size={"m": 200, "c": 100})
 
 # overplot the grid estimates
 grid_evidence = grid.log_evidence
 axes = fig.get_axes()
-axes[0].plot(grid.sample_points['c'], np.exp(grid.marginalize_ln_posterior(
-    not_parameter='c') - grid_evidence), 'k--')
-axes[3].plot(grid.sample_points['m'], np.exp(grid.marginalize_ln_posterior(
-    not_parameter='m') - grid_evidence), 'k--')
-axes[2].contour(grid.mesh_grid[1], grid.mesh_grid[0],
-                np.exp(grid.ln_posterior - np.max(grid.ln_posterior)))
-
-fig.savefig('{}/{}_corner.png'.format(outdir, label), dpi=300)
+axes[0].plot(
+    grid.sample_points["c"],
+    np.exp(grid.marginalize_ln_posterior(not_parameter="c") - grid_evidence),
+    "k--",
+)
+axes[3].plot(
+    grid.sample_points["m"],
+    np.exp(grid.marginalize_ln_posterior(not_parameter="m") - grid_evidence),
+    "k--",
+)
+axes[2].contour(
+    grid.mesh_grid[1],
+    grid.mesh_grid[0],
+    np.exp(grid.ln_posterior - np.max(grid.ln_posterior)),
+)
+
+fig.savefig("{}/{}_corner.png".format(outdir, label), dpi=300)
 
 # compare evidences
-print('Dynesty log(evidence): {}'.format(result.log_evidence))
-print('Grid log(evidence): {}'.format(grid_evidence))
+print("Dynesty log(evidence): {}".format(result.log_evidence))
+print("Grid log(evidence): {}".format(grid_evidence))
diff --git a/examples/core_examples/linear_regression_unknown_noise.py b/examples/core_examples/linear_regression_unknown_noise.py
index d9bd5755c..7604f7674 100644
--- a/examples/core_examples/linear_regression_unknown_noise.py
+++ b/examples/core_examples/linear_regression_unknown_noise.py
@@ -6,12 +6,12 @@ data with background Gaussian noise with unknown variance.
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'linear_regression_unknown_noise'
-outdir = 'outdir'
+label = "linear_regression_unknown_noise"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -36,12 +36,12 @@ data = model(time, **injection_parameters) + np.random.normal(0, sigma, N)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data')
-ax.plot(time, model(time, **injection_parameters), '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data")
+ax.plot(time, model(time, **injection_parameters), "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 injection_parameters.update(dict(sigma=1))
 
@@ -51,15 +51,20 @@ injection_parameters.update(dict(sigma=1))
 likelihood = bilby.core.likelihood.GaussianLikelihood(time, data, model)
 
 priors = dict()
-priors['m'] = bilby.core.prior.Uniform(0, 5, 'm')
-priors['c'] = bilby.core.prior.Uniform(-2, 2, 'c')
-priors['sigma'] = bilby.core.prior.Uniform(0, 10, 'sigma')
+priors["m"] = bilby.core.prior.Uniform(0, 5, "m")
+priors["c"] = bilby.core.prior.Uniform(-2, 2, "c")
+priors["sigma"] = bilby.core.prior.Uniform(0, 10, "sigma")
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', npoints=250,
-    injection_parameters=injection_parameters, outdir=outdir,
-    label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    npoints=250,
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 
 # Finally plot a corner plot: all outputs are stored in outdir
 result.plot_corner()
diff --git a/examples/core_examples/logo/sample_logo.py b/examples/core_examples/logo/sample_logo.py
index 20b591a39..6b5b7225a 100644
--- a/examples/core_examples/logo/sample_logo.py
+++ b/examples/core_examples/logo/sample_logo.py
@@ -11,11 +11,11 @@ class Likelihood(bilby.Likelihood):
         super().__init__(parameters=dict(x=None, y=None))
 
     def log_likelihood(self):
-        return -1 / (self.interp(self.parameters['x'], self.parameters['y'])[0])
+        return -1 / (self.interp(self.parameters["x"], self.parameters["y"])[0])
 
 
-for letter in ['B', 'I', 'L', 'Y']:
-    img = 1 - io.imread('{}.png'.format(letter), as_gray=True)[::-1, :]
+for letter in ["B", "I", "L", "Y"]:
+    img = 1 - io.imread("{}.png".format(letter), as_gray=True)[::-1, :]
     x = np.arange(img.shape[0])
     y = np.arange(img.shape[1])
     interp = si.interpolate.interp2d(x, y, img.T)
@@ -23,10 +23,14 @@ for letter in ['B', 'I', 'L', 'Y']:
     likelihood = Likelihood(interp)
 
     priors = {}
-    priors['x'] = bilby.prior.Uniform(0, max(x), 'x')
-    priors['y'] = bilby.prior.Uniform(0, max(y), 'y')
+    priors["x"] = bilby.prior.Uniform(0, max(x), "x")
+    priors["y"] = bilby.prior.Uniform(0, max(y), "y")
 
     result = bilby.run_sampler(
-        likelihood=likelihood, priors=priors, sampler='nestle', npoints=5000,
-        label=letter)
+        likelihood=likelihood,
+        priors=priors,
+        sampler="nestle",
+        npoints=5000,
+        label=letter,
+    )
     fig = result.plot_corner(quantiles=None)
diff --git a/examples/core_examples/multivariate_gaussian_prior.py b/examples/core_examples/multivariate_gaussian_prior.py
index 93cfbd24c..e60c7c42e 100644
--- a/examples/core_examples/multivariate_gaussian_prior.py
+++ b/examples/core_examples/multivariate_gaussian_prior.py
@@ -5,15 +5,14 @@ Gaussian prior distribution.
 """
 
 import bilby
-import numpy as np
-from scipy import linalg, stats
 import matplotlib as mpl
-
+import numpy as np
 from bilby.core.likelihood import GaussianLikelihood
+from scipy import linalg, stats
 
 # A few simple setup steps
-label = 'multivariate_gaussian_prior'
-outdir = 'outdir'
+label = "multivariate_gaussian_prior"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
@@ -25,36 +24,44 @@ def model(time, m, c):
 # For this example assume a very broad Gaussian likelihood, so that the
 # posterior is completely dominated by the prior (and we can check the
 # prior sampling looks correct!)
-sigma = 300.
+sigma = 300.0
 
 # These lines of code generate the fake (noise-free) data
 sampling_frequency = 1
 time_duration = 10
 time = np.arange(0, time_duration, 1 / sampling_frequency)
 N = len(time)
-data = model(time, 0., 0.)  # noiseless data
+data = model(time, 0.0, 0.0)  # noiseless data
 
 
 # instantiate the GaussianLikelihood
 likelihood = GaussianLikelihood(time, data, model, sigma=sigma)
 
 # Create a Multivariate Gaussian prior distribution with two modes
-names = ['m', 'c']
-mus = [[-5., -5.], [5., 5.]]  # means of the two modes
-corrcoefs = [[[1., -0.7], [-0.7, 1.]], [[1., 0.7], [0.7, 1.]]]  # correlation coefficients of the two modes
+names = ["m", "c"]
+mus = [[-5.0, -5.0], [5.0, 5.0]]  # means of the two modes
+corrcoefs = [
+    [[1.0, -0.7], [-0.7, 1.0]],
+    [[1.0, 0.7], [0.7, 1.0]],
+]  # correlation coefficients of the two modes
 sigmas = [[1.5, 1.5], [2.1, 2.1]]  # standard deviations of the two modes
-weights = [1., 3.]  # relative weights of each mode
+weights = [1.0, 3.0]  # relative weights of each mode
 nmodes = 2
-mvg = bilby.core.prior.MultivariateGaussianDist(names, nmodes=2, mus=mus,
-                                                corrcoefs=corrcoefs,
-                                                sigmas=sigmas, weights=weights)
+mvg = bilby.core.prior.MultivariateGaussianDist(
+    names, nmodes=2, mus=mus, corrcoefs=corrcoefs, sigmas=sigmas, weights=weights
+)
 priors = dict()
-priors['m'] = bilby.core.prior.MultivariateGaussian(mvg, 'm')
-priors['c'] = bilby.core.prior.MultivariateGaussian(mvg, 'c')
+priors["m"] = bilby.core.prior.MultivariateGaussian(mvg, "m")
+priors["c"] = bilby.core.prior.MultivariateGaussian(mvg, "c")
 
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty', nlive=4000,
-    outdir=outdir, label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=4000,
+    outdir=outdir,
+    label=label,
+)
 
 fig = result.plot_corner(save=False)
 
@@ -70,16 +77,25 @@ for j in range(2):  # loop over parameters
         gp += weights[i] * stats.norm.pdf(x, loc=mus[i][j], scale=mvg.sigmas[i][j])
     gp = gp / np.trapz(gp, x)  # renormalise
 
-    axs[aidx[j]].plot(x, gp, 'k--', lw=2)
+    axs[aidx[j]].plot(x, gp, "k--", lw=2)
 
 # plot the 2d distribution
 for i in range(nmodes):
     v, w = linalg.eigh(mvg.covs[i])
-    v = 2. * np.sqrt(2.) * np.sqrt(v)
+    v = 2.0 * np.sqrt(2.0) * np.sqrt(v)
     u = w[0] / linalg.norm(w[0])
     angle = np.arctan(u[1] / u[0])
-    angle = 180. * angle / np.pi  # convert to degrees
-    ell = mpl.patches.Ellipse(mus[i], v[0], v[1], 180. + angle, edgecolor='black', facecolor='none', lw=2, ls='--')
+    angle = 180.0 * angle / np.pi  # convert to degrees
+    ell = mpl.patches.Ellipse(
+        mus[i],
+        v[0],
+        v[1],
+        180.0 + angle,
+        edgecolor="black",
+        facecolor="none",
+        lw=2,
+        ls="--",
+    )
     axs[2].add_artist(ell)
 
-fig.savefig('{}/{}_corner.png'.format(outdir, label), dpi=300)
+fig.savefig("{}/{}_corner.png".format(outdir, label), dpi=300)
diff --git a/examples/core_examples/occam_factor_example.py b/examples/core_examples/occam_factor_example.py
index 1565d8b5c..5ff7d4745 100644
--- a/examples/core_examples/occam_factor_example.py
+++ b/examples/core_examples/occam_factor_example.py
@@ -31,12 +31,12 @@ improved by increasing this to say 500 or 1000.
 
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
 # A few simple setup steps
-label = 'occam_factor'
-outdir = 'outdir'
+label = "occam_factor"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 sigma = 1
@@ -47,12 +47,12 @@ coeffs = [1, 2, 3]
 data = np.polyval(coeffs, time) + np.random.normal(0, sigma, N)
 
 fig, ax = plt.subplots()
-ax.plot(time, data, 'o', label='data', color='C0')
-ax.plot(time, np.polyval(coeffs, time), label='true signal', color='C1')
-ax.set_xlabel('time')
-ax.set_ylabel('y')
+ax.plot(time, data, "o", label="data", color="C0")
+ax.plot(time, np.polyval(coeffs, time), label="true signal", color="C1")
+ax.set_xlabel("time")
+ax.set_ylabel("y")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 
 class Polynomial(bilby.Likelihood):
@@ -69,7 +69,7 @@ class Polynomial(bilby.Likelihood):
         n: int
             The degree of the polynomial to fit.
         """
-        self.keys = ['c{}'.format(k) for k in range(n)]
+        self.keys = ["c{}".format(k) for k in range(n)]
         super().__init__(parameters={k: None for k in self.keys})
         self.x = x
         self.y = y
@@ -83,22 +83,32 @@ class Polynomial(bilby.Likelihood):
 
     def log_likelihood(self):
         res = self.y - self.polynomial(self.x, self.parameters)
-        return -0.5 * (np.sum((res / self.sigma)**2) +
-                       self.N * np.log(2 * np.pi * self.sigma**2))
+        return -0.5 * (
+            np.sum((res / self.sigma) ** 2)
+            + self.N * np.log(2 * np.pi * self.sigma ** 2)
+        )
 
 
 def fit(n):
     likelihood = Polynomial(time, data, sigma, n)
     priors = {}
     for i in range(n):
-        k = 'c{}'.format(i)
+        k = "c{}".format(i)
         priors[k] = bilby.core.prior.Uniform(0, 10, k)
 
     result = bilby.run_sampler(
-        likelihood=likelihood, priors=priors, nlive=1000, outdir=outdir,
-        label=label, sampler="nestle")
-    return (result.log_evidence, result.log_evidence_err,
-            np.log(result.occam_factor(priors)))
+        likelihood=likelihood,
+        priors=priors,
+        nlive=1000,
+        outdir=outdir,
+        label=label,
+        sampler="nestle",
+    )
+    return (
+        result.log_evidence,
+        result.log_evidence_err,
+        np.log(result.occam_factor(priors)),
+    )
 
 
 fig, ax1 = plt.subplots()
@@ -114,15 +124,13 @@ for ll in ns:
     log_evidences_err.append(e_err)
     log_occam_factors.append(o)
 
-ax1.errorbar(ns, log_evidences, yerr=log_evidences_err,
-             fmt='-o', color='C0')
-ax1.set_ylabel("Unnormalized log evidence", color='C0')
-ax1.tick_params('y', colors='C0')
+ax1.errorbar(ns, log_evidences, yerr=log_evidences_err, fmt="-o", color="C0")
+ax1.set_ylabel("Unnormalized log evidence", color="C0")
+ax1.tick_params("y", colors="C0")
 
-ax2.plot(ns, log_occam_factors,
-         '-o', color='C1', alpha=0.5)
-ax2.tick_params('y', colors='C1')
-ax2.set_ylabel('Occam factor', color='C1')
-ax1.set_xlabel('Degree of polynomial')
+ax2.plot(ns, log_occam_factors, "-o", color="C1", alpha=0.5)
+ax2.tick_params("y", colors="C1")
+ax2.set_ylabel("Occam factor", color="C1")
+ax1.set_xlabel("Degree of polynomial")
 
-fig.savefig('{}/{}_test'.format(outdir, label))
+fig.savefig("{}/{}_test".format(outdir, label))
diff --git a/examples/core_examples/radioactive_decay.py b/examples/core_examples/radioactive_decay.py
index e23afb35e..cf1f7a54c 100644
--- a/examples/core_examples/radioactive_decay.py
+++ b/examples/core_examples/radioactive_decay.py
@@ -5,15 +5,14 @@ non-gravitational wave data. In this case, fitting the half-life and
 initial radionuclide number for Polonium 214.
 """
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
-
+import numpy as np
 from bilby.core.likelihood import PoissonLikelihood
 from bilby.core.prior import LogUniform
 
 # A few simple setup steps
-label = 'radioactive_decay'
-outdir = 'outdir'
+label = "radioactive_decay"
+outdir = "outdir"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 # generate a set of counts per minute for n_init atoms of
@@ -44,8 +43,14 @@ def decay_rate(delta_t, halflife, n_init):
 
     n_atoms = n_init * atto * n_avogadro
 
-    rates = (np.exp(-np.log(2) * (times[:-1] / halflife)) -
-             np.exp(- np.log(2) * (times[1:] / halflife))) * n_atoms / delta_t
+    rates = (
+        (
+            np.exp(-np.log(2) * (times[:-1] / halflife))
+            - np.exp(-np.log(2) * (times[1:] / halflife))
+        )
+        * n_atoms
+        / delta_t
+    )
 
     return rates
 
@@ -67,12 +72,12 @@ theoretical = decay_rate(delta_t, **injection_parameters)
 
 # We quickly plot the data to check it looks sensible
 fig, ax = plt.subplots()
-ax.semilogy(time[:-1], counts, 'o', label='data')
-ax.semilogy(time[:-1], theoretical, '--r', label='signal')
-ax.set_xlabel('time')
-ax.set_ylabel('counts')
+ax.semilogy(time[:-1], counts, "o", label="data")
+ax.semilogy(time[:-1], theoretical, "--r", label="signal")
+ax.set_xlabel("time")
+ax.set_ylabel("counts")
 ax.legend()
-fig.savefig('{}/{}_data.png'.format(outdir, label))
+fig.savefig("{}/{}_data.png".format(outdir, label))
 
 # Now let's instantiate a version of the Poisson Likelihood, giving it
 # the time intervals, counts and rate model
@@ -80,14 +85,19 @@ likelihood = PoissonLikelihood(delta_t, counts, decay_rate)
 
 # Make the prior
 priors = dict()
-priors['halflife'] = LogUniform(
-    1e-5, 1e5, latex_label='$t_{1/2}$', unit='min')
-priors['n_init'] = LogUniform(
-    1e-25 / atto, 1e-10 / atto, latex_label='$N_0$', unit='attomole')
+priors["halflife"] = LogUniform(1e-5, 1e5, latex_label="$t_{1/2}$", unit="min")
+priors["n_init"] = LogUniform(
+    1e-25 / atto, 1e-10 / atto, latex_label="$N_0$", unit="attomole"
+)
 
 # And run sampler
 result = bilby.run_sampler(
-    likelihood=likelihood, priors=priors, sampler='dynesty',
-    nlive=1000, injection_parameters=injection_parameters,
-    outdir=outdir, label=label)
+    likelihood=likelihood,
+    priors=priors,
+    sampler="dynesty",
+    nlive=1000,
+    injection_parameters=injection_parameters,
+    outdir=outdir,
+    label=label,
+)
 result.plot_corner()
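
For readers unfamiliar with the model, decay_rate is just the per-bin drop of an exponential survival curve divided by the bin width. A small standalone check of that arithmetic, with illustrative numbers rather than the Po-214 values used in the example, is:

import numpy as np

# The expected number of decays in a bin [t, t + delta_t] is the drop in the
# survival curve N(t) = n_atoms * exp(-ln(2) * t / t_half) across the bin,
# and the rate is that drop divided by delta_t, the same arithmetic as
# decay_rate above.  The numbers here are illustrative, not the Po-214 values.
n_atoms = 1.0e6
t_half = 3.0  # minutes (illustrative)
delta_t = 1.0
times = np.arange(0, 10 + delta_t, delta_t)

survival = n_atoms * np.exp(-np.log(2) * times / t_half)
rates = (survival[:-1] - survival[1:]) / delta_t
print(rates[:3])
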
diff --git a/examples/core_examples/slabspike_example.py b/examples/core_examples/slabspike_example.py
index d79851235..945583421 100644
--- a/examples/core_examples/slabspike_example.py
+++ b/examples/core_examples/slabspike_example.py
@@ -8,24 +8,41 @@ up to three Gaussians.
 """
 
 import bilby
-import numpy as np
 import matplotlib.pyplot as plt
+import numpy as np
 
-outdir = 'outdir'
-label = 'slabspike'
+outdir = "outdir"
+label = "slabspike"
 bilby.utils.check_directory_exists_and_if_not_mkdir(outdir)
 
 
 # Here we define our model. We want to inject two Gaussians and recover with up to three.
 def gaussian(xs, amplitude, mu, sigma):
-    return amplitude / np.sqrt(2 * np.pi * sigma**2) * np.exp(-0.5 * (xs - mu)**2 / sigma**2)
-
-
-def triple_gaussian(xs, amplitude_0, amplitude_1, amplitude_2, mu_0, mu_1, mu_2, sigma_0, sigma_1, sigma_2, **kwargs):
-    return \
-        gaussian(xs, amplitude_0, mu_0, sigma_0) + \
-        gaussian(xs, amplitude_1, mu_1, sigma_1) + \
-        gaussian(xs, amplitude_2, mu_2, sigma_2)
+    return (
+        amplitude
+        / np.sqrt(2 * np.pi * sigma ** 2)
+        * np.exp(-0.5 * (xs - mu) ** 2 / sigma ** 2)
+    )
+
+
+def triple_gaussian(
+    xs,
+    amplitude_0,
+    amplitude_1,
+    amplitude_2,
+    mu_0,
+    mu_1,
+    mu_2,
+    sigma_0,
+    sigma_1,
+    sigma_2,
+    **kwargs,
+):
+    return (
+        gaussian(xs, amplitude_0, mu_0, sigma_0)
+        + gaussian(xs, amplitude_1, mu_1, sigma_1)
+        + gaussian(xs, amplitude_2, mu_2, sigma_2)
+    )
 
 
 # Let's create our data set. We create 200 points on a grid.
@@ -34,64 +51,110 @@ xs = np.linspace(-5, 5, 200)
 dx = xs[1] - xs[0]
 
 # Note that for our injection parameters we set the amplitude of the second component to 0.
-injection_params = dict(amplitude_0=-3, mu_0=-4, sigma_0=4,
-                        amplitude_1=0, mu_1=0, sigma_1=1,
-                        amplitude_2=4, mu_2=3, sigma_2=3)
+injection_params = dict(
+    amplitude_0=-3,
+    mu_0=-4,
+    sigma_0=4,
+    amplitude_1=0,
+    mu_1=0,
+    sigma_1=1,
+    amplitude_2=4,
+    mu_2=3,
+    sigma_2=3,
+)
 
 # We calculate the injected curve and add some Gaussian noise to the data points
 sigma = 0.02
 p = bilby.core.prior.Gaussian(mu=0, sigma=sigma)
 ys = triple_gaussian(xs=xs, **injection_params) + p.sample(len(xs))
 
-plt.errorbar(xs, ys, yerr=sigma, fmt=".k", capsize=0, label='Injected data')
-plt.plot(xs, triple_gaussian(xs=xs, **injection_params), label='True signal')
+plt.errorbar(xs, ys, yerr=sigma, fmt=".k", capsize=0, label="Injected data")
+plt.plot(xs, triple_gaussian(xs=xs, **injection_params), label="True signal")
 plt.legend()
-plt.savefig(f'{outdir}/{label}_injected_data')
+plt.savefig(f"{outdir}/{label}_injected_data")
 plt.clf()
 
 
 # Now we want to set up our priors.
 priors = bilby.core.prior.PriorDict()
 # For the slab-and-spike prior, we first need to define the 'slab' part, which is just a regular bilby prior.
-amplitude_slab_0 = bilby.core.prior.Uniform(minimum=-10, maximum=10, name='amplitude_0', latex_label='$A_0$')
-amplitude_slab_1 = bilby.core.prior.Uniform(minimum=-10, maximum=10, name='amplitude_1', latex_label='$A_1$')
-amplitude_slab_2 = bilby.core.prior.Uniform(minimum=-10, maximum=10, name='amplitude_2', latex_label='$A_2$')
+amplitude_slab_0 = bilby.core.prior.Uniform(
+    minimum=-10, maximum=10, name="amplitude_0", latex_label="$A_0$"
+)
+amplitude_slab_1 = bilby.core.prior.Uniform(
+    minimum=-10, maximum=10, name="amplitude_1", latex_label="$A_1$"
+)
+amplitude_slab_2 = bilby.core.prior.Uniform(
+    minimum=-10, maximum=10, name="amplitude_2", latex_label="$A_2$"
+)
 # We do the following to create the slab-and-spike prior. The spike height is somewhat arbitrary and can
 # be corrected in post-processing.
-priors['amplitude_0'] = bilby.core.prior.SlabSpikePrior(slab=amplitude_slab_0, spike_location=0, spike_height=0.1)
-priors['amplitude_1'] = bilby.core.prior.SlabSpikePrior(slab=amplitude_slab_1, spike_location=0, spike_height=0.1)
-priors['amplitude_2'] = bilby.core.prior.SlabSpikePrior(slab=amplitude_slab_2, spike_location=0, spike_height=0.1)
+priors["amplitude_0"] = bilby.core.prior.SlabSpikePrior(
+    slab=amplitude_slab_0, spike_location=0, spike_height=0.1
+)
+priors["amplitude_1"] = bilby.core.prior.SlabSpikePrior(
+    slab=amplitude_slab_1, spike_location=0, spike_height=0.1
+)
+priors["amplitude_2"] = bilby.core.prior.SlabSpikePrior(
+    slab=amplitude_slab_2, spike_location=0, spike_height=0.1
+)
 # Our problem has a degeneracy in the ordering. In general, this problem is somewhat difficult to resolve properly.
 # See e.g. https://github.com/GregoryAshton/kookaburra/blob/master/src/priors.py#L72 for an implementation.
 # We resolve this by not letting the priors overlap in this case.
-priors['mu_0'] = bilby.core.prior.Uniform(minimum=-5, maximum=-2, name='mu_0', latex_label='$\mu_0$')
-priors['mu_1'] = bilby.core.prior.Uniform(minimum=-2, maximum=2, name='mu_1', latex_label='$\mu_1$')
-priors['mu_2'] = bilby.core.prior.Uniform(minimum=2, maximum=5, name='mu_2', latex_label='$\mu_2$')
-priors['sigma_0'] = bilby.core.prior.LogUniform(minimum=0.01, maximum=10, name='sigma_0', latex_label='$\sigma_0$')
-priors['sigma_1'] = bilby.core.prior.LogUniform(minimum=0.01, maximum=10, name='sigma_1', latex_label='$\sigma_1$')
-priors['sigma_2'] = bilby.core.prior.LogUniform(minimum=0.01, maximum=10, name='sigma_2', latex_label='$\sigma_2$')
+priors["mu_0"] = bilby.core.prior.Uniform(
+    minimum=-5, maximum=-2, name="mu_0", latex_label="$\mu_0$"
+)
+priors["mu_1"] = bilby.core.prior.Uniform(
+    minimum=-2, maximum=2, name="mu_1", latex_label="$\mu_1$"
+)
+priors["mu_2"] = bilby.core.prior.Uniform(
+    minimum=2, maximum=5, name="mu_2", latex_label="$\mu_2$"
+)
+priors["sigma_0"] = bilby.core.prior.LogUniform(
+    minimum=0.01, maximum=10, name="sigma_0", latex_label="$\sigma_0$"
+)
+priors["sigma_1"] = bilby.core.prior.LogUniform(
+    minimum=0.01, maximum=10, name="sigma_1", latex_label="$\sigma_1$"
+)
+priors["sigma_2"] = bilby.core.prior.LogUniform(
+    minimum=0.01, maximum=10, name="sigma_2", latex_label="$\sigma_2$"
+)
 
 # Setting up the likelihood and running the samplers works the same as elsewhere.
-likelihood = bilby.core.likelihood.GaussianLikelihood(x=xs, y=ys, func=triple_gaussian, sigma=sigma)
-result = bilby.run_sampler(likelihood=likelihood, priors=priors, outdir=outdir, label=label,
-                           sampler='dynesty', nlive=400)
+likelihood = bilby.core.likelihood.GaussianLikelihood(
+    x=xs, y=ys, func=triple_gaussian, sigma=sigma
+)
+result = bilby.run_sampler(
+    likelihood=likelihood,
+    priors=priors,
+    outdir=outdir,
+    label=label,
+    sampler="dynesty",
+    nlive=400,
+)
 
 result.plot_corner(truths=injection_params)
 
 
 # Let's also plot the maximum likelihood fit along with the data.
 max_like_params = result.posterior.iloc[-1]
-plt.errorbar(xs, ys, yerr=sigma, fmt=".k", capsize=0, label='Injected data')
-plt.plot(xs, triple_gaussian(xs=xs, **injection_params), label='True signal')
-plt.plot(xs, triple_gaussian(xs=xs, **max_like_params), label='Max likelihood fit')
+plt.errorbar(xs, ys, yerr=sigma, fmt=".k", capsize=0, label="Injected data")
+plt.plot(xs, triple_gaussian(xs=xs, **injection_params), label="True signal")
+plt.plot(xs, triple_gaussian(xs=xs, **max_like_params), label="Max likelihood fit")
 plt.legend()
-plt.savefig(f'{outdir}/{label}_max_likelihood_recovery')
+plt.savefig(f"{outdir}/{label}_max_likelihood_recovery")
 plt.clf()
 
 # Finally, we can check what fraction of amplitude samples are exactly on the spike.
-spike_samples_0 = len(np.where(result.posterior['amplitude_0'] == 0.0)[0]) / len(result.posterior)
-spike_samples_1 = len(np.where(result.posterior['amplitude_1'] == 0.0)[0]) / len(result.posterior)
-spike_samples_2 = len(np.where(result.posterior['amplitude_2'] == 0.0)[0]) / len(result.posterior)
+spike_samples_0 = len(np.where(result.posterior["amplitude_0"] == 0.0)[0]) / len(
+    result.posterior
+)
+spike_samples_1 = len(np.where(result.posterior["amplitude_1"] == 0.0)[0]) / len(
+    result.posterior
+)
+spike_samples_2 = len(np.where(result.posterior["amplitude_2"] == 0.0)[0]) / len(
+    result.posterior
+)
 print(f"{spike_samples_0 * 100:.2f}% of amplitude_0 samples are exactly 0.0")
 print(f"{spike_samples_1 * 100:.2f}% of amplitude_1 samples are exactly 0.0")
 print(f"{spike_samples_2 * 100:.2f}% of amplitude_2 samples are exactly 0.0")
-- 
GitLab