diff --git a/gstlal-inspiral/bin/Makefile.am b/gstlal-inspiral/bin/Makefile.am
index 907866f9952d0c8556d5ef7cf6c9e924c18f4854..5cbb855908503179a41327ba02f1c9e98db5a6bd 100644
--- a/gstlal-inspiral/bin/Makefile.am
+++ b/gstlal-inspiral/bin/Makefile.am
@@ -16,8 +16,6 @@ dist_bin_SCRIPTS = \
 	gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs \
 	gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag \
 	gstlal_inspiral_create_p_of_ifos_given_horizon \
-	gstlal_inspiral_create_p_of_ifos_given_horizon_dag \
-	gstlal_inspiral_add_p_of_ifos_given_horizon \
 	gstlal_inspiral_create_prior_diststats \
 	gstlal_inspiral_destagger_injections \
 	gstlal_inspiral_dlrs_diag \
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs b/gstlal-inspiral/bin/gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs
index 3c64bf1ad2df7f469cde7b66b2c296c256e67caf..11141ecfac62066dffcec074e36d18bb39167b40 100755
--- a/gstlal-inspiral/bin/gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs
+++ b/gstlal-inspiral/bin/gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs
@@ -18,24 +18,20 @@
 
 import sys
 import numpy
-import h5py
 from gstlal.stats.inspiral_extrinsics import TimePhaseSNR
 
 # Read in and combine all of the input files
-h5_covmat = h5py.File(sys.argv[1])
-kwargs = {"SNR":h5_covmat["SNR"], "psd_fname":h5_covmat["psd"]}
-h5_covmat.close()
-files = sys.argv[2:]
-TPS = TimePhaseSNR.from_hdf5(files[0], files[1:], **kwargs)
+files = sys.argv[1:]
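+# e.g.: gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs inspiral_dtdphi_pdf_0_1000.h5 inspiral_dtdphi_pdf_1000_2000.h5 ...
+# the first file seeds the TimePhaseSNR object; the remaining shards are folded into it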
+TPS = TimePhaseSNR.from_hdf5(files[0], files[1:])
 
 # compute the normalization
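+# tile() samples arrival time, phase and effective distance over a sky grid
+# (NSIDE) and a grid of source orientation angles (NANGLE)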
 time, phase, deff = TimePhaseSNR.tile(NSIDE = 8, NANGLE = 17)
 # This actually doesn't matter; it is only needed to map from effective
 # distance to SNR, and the TimePhaseSNR code undoes it anyway...
-h = {"H1":1., "L1":1., "V1":1., "K1":1.}
+h = {"H1":1., "L1":1., "V1":1.}
 
 # set the norm to 1
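+# TPS.combos holds the multi-detector combinations; the single-detector
+# tuples are appended by hand so every instrument set gets a norm entry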
-combos = TPS.combos + (("H1",),("L1",),("V1",),("K1",))
+combos = TPS.combos + (("H1",),("L1",),("V1",))
 norm = dict((frozenset(k), 1.) for k in combos)
 TPS.norm = norm
 
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_add_p_of_ifos_given_horizon b/gstlal-inspiral/bin/gstlal_inspiral_add_p_of_ifos_given_horizon
deleted file mode 100644
index f413ad5c3b79c16e04d950014b58164f04d4e22c..0000000000000000000000000000000000000000
--- a/gstlal-inspiral/bin/gstlal_inspiral_add_p_of_ifos_given_horizon
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2019 Leo Tsukada
-#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the
-# Free Software Foundation; either version 2 of the License, or (at your
-# option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
-# Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
-
-import sys
-from gstlal.stats import inspiral_extrinsics
-
-# Read in and combine all of the input files
-instruments = "".join(sorted(sys.argv[1].split(",")))
-files = sys.argv[2:]
-PofI = inspiral_extrinsics.p_of_instruments_given_horizons.from_hdf5(files[0], files[1:])
-
-PofI.to_hdf5("%s_p_of_instruments_given_H_d.h5" % instruments)
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_compute_dtdphideff_cov_matrix b/gstlal-inspiral/bin/gstlal_inspiral_compute_dtdphideff_cov_matrix
index 39e879041a30c5a5a363414e5adf004d8e5a3af6..57f0cbcb9e5a5d17603c990e71ab705f060a87d6 100755
--- a/gstlal-inspiral/bin/gstlal_inspiral_compute_dtdphideff_cov_matrix
+++ b/gstlal-inspiral/bin/gstlal_inspiral_compute_dtdphideff_cov_matrix
@@ -19,8 +19,10 @@ import argparse
 import itertools
 import numpy, scipy.interpolate
 from lal import series
+from gstlal.stats.inspiral_extrinsics import TimePhaseSNR
 from gstlal import templates
 from ligo.lw import utils as ligolw_utils
+import sys
 import h5py
 
 parser = argparse.ArgumentParser(description = 'generate a dt dphi covariance matrix')
@@ -28,7 +30,6 @@ parser.add_argument('--psd-xml', help = 'XML containing HLV psd')
 parser.add_argument('--H-snr', type = float, help = 'H characteristic SNR')
 parser.add_argument('--L-snr', type = float, help = 'L characteristic SNR')
 parser.add_argument('--V-snr', type = float, help = 'V characteristic SNR')
-parser.add_argument('--K-snr', type = float, help = 'K characteristic SNR')
 parser.add_argument('--m1', type = float, default = 1.4, help = 'primary component mass')
 parser.add_argument('--m2', type = float, default = 1.4, help = 'secondary component mass')
 parser.add_argument('--s1', type = float, default = 0., help = 'primary (z) spin')
@@ -42,14 +43,14 @@ if args.flow >= args.fhigh:
 	raise ValueError("flow cannot be greater than fhigh")
 
 refpsd = args.psd_xml
-rho = {"L1": args.L_snr, "H1": args.H_snr, "V1": args.V_snr, "K1": args.K_snr}
+rho = {"L1": args.L_snr, "H1": args.H_snr, "V1": args.V_snr}
 
-instruments = ("H1", "L1", "V1", "K1")
-snr = series.read_psd_xmldoc(ligolw_utils.load_filename(refpsd, verbose = True, contenthandler = series.PSDContentHandler))
+
+psd = series.read_psd_xmldoc(ligolw_utils.load_filename(refpsd, verbose = True, contenthandler = series.PSDContentHandler))
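+# interpolate each detector's PSD so it can be evaluated at arbitrary frequencies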
 psddict = {}
-for ifo in rho:
-	f = numpy.arange(len(snr[ifo].data.data)) * snr[ifo].deltaF
-	psddict[ifo] = scipy.interpolate.interp1d(f, snr[ifo].data.data)
+for ifo in psd:
+	f = numpy.arange(len(psd[ifo].data.data)) * psd[ifo].deltaF
+	psddict[ifo] = scipy.interpolate.interp1d(f, psd[ifo].data.data)
 
 def moment(fmin, fmax, n, m1, m2, s1, s2, ifo, psddict = psddict, delta_f = 0.25):
 	farr = numpy.linspace(0, fmax, fmax / delta_f + delta_f)
@@ -72,31 +73,23 @@ transtt = {}
 transpp = {}
 transtp = {}
 transpt = {}
-transdd = {}
 # FIXME do an actual calculation
-# transdd = {frozenset(("H1", "L1")): 1. / numpy.sqrt(sigsqdd["H1"] + sigsqdd["L1"])  , frozenset(("H1", "V1")): 1. / numpy.sqrt(sigsqdd["H1"] + sigsqdd["V1"]), frozenset(("L1", "V1")): 1. / numpy.sqrt(sigsqdd["L1"] + sigsqdd["V1"])}
-# transdd = {frozenset(instrument_pair) : 1. / numpy.sqrt(sigsqdd[instrument_pair[0]] + sigsqdd[instrument_pair[1]]) for instrument_pair in TimePhaseSNR.instument_pairs(("H1", "L1", "V1", "K1"))}
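+# transdd whitens the log effective-distance-ratio term for each detector
+# pair: 1 / sqrt of the summed per-detector variances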
+transdd = {frozenset(("H1", "L1")): 1. / numpy.sqrt(sigsqdd["H1"] + sigsqdd["L1"]), frozenset(("H1", "V1")): 1. / numpy.sqrt(sigsqdd["H1"] + sigsqdd["V1"]), frozenset(("L1", "V1")): 1. / numpy.sqrt(sigsqdd["L1"] + sigsqdd["V1"])}
 
-pairs = [tuple(sorted(pair)) for pair in itertools.combinations(instruments, 2)]
-for pair in pairs:
-	a,b = pair
+for combo in itertools.combinations(("H1", "L1", "V1"), 2):
+	a,b = combo
 	m11 = sigsqtt[a] + sigsqtt[b]
 	m22 = sigsqpp[a] + sigsqpp[b]
 	m12 = m21 = sigsqtp[a] + sigsqtp[b]
 	mat = numpy.array([[m11, m12], [m21, m22]])
 	matinv = numpy.linalg.inv(mat)
 	cholesky_transpose = numpy.linalg.cholesky(matinv).T
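+	# matinv is the inverse covariance of (dt, dphi) for this pair; its
+	# transposed Cholesky factor maps raw offsets to unit-variance,
+	# uncorrelated coordinates, e.g. dt_w, dphi_w = numpy.dot(cholesky_transpose, (dt, dphi))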
-	transtt[frozenset(pair)] = cholesky_transpose[0,0]
-	transtp[frozenset(pair)] = cholesky_transpose[0,1]
-	transpt[frozenset(pair)] = cholesky_transpose[1,0]
-	transpp[frozenset(pair)] = cholesky_transpose[1,1]
-	transdd[frozenset(pair)] = 1. / numpy.sqrt(sigsqdd[a] + sigsqdd[b])
+	transtt[frozenset(combo)] = cholesky_transpose[0,0]
+	transtp[frozenset(combo)] = cholesky_transpose[0,1]
+	transpt[frozenset(combo)] = cholesky_transpose[1,0]
+	transpp[frozenset(combo)] = cholesky_transpose[1,1]
 
 f = h5py.File(args.output, "w")
-f.create_dataset("psd", data=args.psd_xml)
-h5_snr = f.create_group("SNR")
-for ifo, snr in rho.items():
-    h5_snr.create_dataset(ifo, data=snr)
 h5_transtt = f.create_group("transtt")
 h5_transtp = f.create_group("transtp")
 h5_transpt = f.create_group("transpt")
@@ -106,17 +99,13 @@ for group, mat in zip((h5_transtt, h5_transtp, h5_transpt, h5_transpp, h5_transd
 	for k,v in mat.items():
 		group.create_dataset(",".join(sorted(k)), data = float(v))
 
-combos = set()
-for i in range(1, len(instruments) + 1):
-	for choice in itertools.combinations(instruments, i):
-		combos.add(tuple(sorted(choice)))
-combos = tuple(sorted(list(combos)))
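+# instrument_combos yields the multi-detector combinations; the
+# single-detector tuples are appended explicitly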
+combos = TimePhaseSNR.instrument_combos(("H1","L1","V1")) + (("H1",),("L1",),("V1",))
 norm = dict((frozenset(k), 0.) for k in combos)
 norm = {}
 h5_norm = f.create_group("norm")
-for combo in combos:
-	h5_norm.create_dataset(",".join(sorted(combo)), data = 1.0)
-	norm[combo] = 1.0
+for k in combos:
+	h5_norm.create_dataset(",".join(sorted(k)), data = 1.0)
+	norm[k] = 1.0
 f.close()
 
 print "transtt =", transtt
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag b/gstlal-inspiral/bin/gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag
index 46bc211117133e6ca5e037436a3b4ad15e931996..e8e83a2bf43328374c391ce5cf439a7d2b407115 100755
--- a/gstlal-inspiral/bin/gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag
+++ b/gstlal-inspiral/bin/gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag
@@ -21,11 +21,10 @@ from gstlal import dagparts
 import argparse
 
 parser = argparse.ArgumentParser(description = 'generate a dt dphi covariance matrix and tree data to replace share/inspiral_dtdphi_pdf.h5')
-parser.add_argument('--psd-xml', help = 'XML containing HLVK psd')
+parser.add_argument('--psd-xml', help = 'XML containing HLV psd')
 parser.add_argument('--H-snr', type = float, help = 'H characteristic SNR')
 parser.add_argument('--L-snr', type = float, help = 'L characteristic SNR')
 parser.add_argument('--V-snr', type = float, help = 'V characteristic SNR')
-parser.add_argument('--K-snr', type = float, help = 'K characteristic SNR')
 parser.add_argument('--m1', type = float, default = 1.4, help = 'primary component mass')
 parser.add_argument('--m2', type = float, default = 1.4, help = 'secondary component mass')
 parser.add_argument('--s1', type = float, default = 0., help = 'primary (z) spin')
@@ -47,7 +46,7 @@ covJob = dagparts.DAGJob("gstlal_inspiral_compute_dtdphideff_cov_matrix", condor
 margJob = dagparts.DAGJob("gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"7GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o3.cbc.uber.gstlaloffline"})
 addJob = dagparts.DAGJob("gstlal_inspiral_add_dt_dphi_snr_ratio_pdfs", condor_commands = {"request_memory":"4GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o3.cbc.uber.gstlaloffline"})
 
-covnode =  dagparts.DAGNode(covJob, dag, parent_nodes = [], opts = {"H-snr": args.H_snr, "L-snr": args.L_snr, "V-snr": args.V_snr, "K-snr": args.K_snr, "flow": args.flow, "fhigh": args.fhigh, "m1": args.m1, "m2": args.m2, "s1": args.s1, "s2": args.s2}, input_files = {"psd-xml": args.psd_xml}, output_files = {"output":"covmat.h5"})
+covnode =  dagparts.DAGNode(covJob, dag, parent_nodes = [], opts = {"H-snr": args.H_snr, "L-snr": args.L_snr, "V-snr": args.V_snr, "flow": args.flow, "fhigh": args.fhigh, "m1": args.m1, "m2": args.m2, "s1": args.s1, "s2": args.s2}, input_files = {"psd-xml": args.psd_xml}, output_files = {"output":"covmat.h5"})
 
 num = 1000
 margnodes = []
@@ -56,9 +55,7 @@ for start in range(0, 3345408, num):
 	stop = start + num
 	margnodes.append(dagparts.DAGNode(margJob, dag, parent_nodes = [covnode], opts = {"start":str(start), "stop":str(stop)}, input_files = {"cov-mat": "covmat.h5"}, output_files = {"output":"%s/inspiral_dtdphi_pdf_%d_%d.h5" % (margJob.output_path, start, stop)}))
 
-addJob_input = ["covmat.h5"]
-addJob_input.extend([n.output_files["output"] for n in margnodes])
-addnode = dagparts.DAGNode(addJob, dag, parent_nodes = margnodes, input_files = {"": addJob_input})
+addnode = dagparts.DAGNode(addJob, dag, parent_nodes = margnodes, input_files = {"": [n.output_files["output"] for n in margnodes]})
 
 dag.write_sub_files()
 dag.write_dag()
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon b/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon
index f94b8c6bf890dc332e152803743dcefd4a394ec6..2e3a1fa4b8f57864cce42d49857cad06a4e185d0 100755
--- a/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon
+++ b/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon
@@ -23,11 +23,8 @@ from gstlal.stats import inspiral_extrinsics, inspiral_lr
 parser = argparse.ArgumentParser(description='Generate PDFs of extrinsic parameters')
 parser.add_argument('--snr-thresh', type=float, default = inspiral_lr.LnLRDensity.snr_min, help = 'set the snr minimum to define found')
 parser.add_argument('--output-file', default = 'extparams.h5', help = 'set the output hdf5 file. Default extparams.h5')
-parser.add_argument('--instruments', type=str, help='add instruments. Separate them with commas; the order does not have to be alphabetical.', required = True)
-parser.add_argument('--start', type=int, default = 0, help = 'The first bin index to count')
-parser.add_argument('--stop', type=int, default = None, help = 'The last bin index to count')
+parser.add_argument('--instrument', action = "append", help='add an instrument; can be given multiple times', required = True)
 args = parser.parse_args()
 
-instruments = sorted(args.instruments.split(","))
-PofI = inspiral_extrinsics.p_of_instruments_given_horizons(instruments = instruments, snr_thresh = args.snr_thresh, bin_start = args.start, bin_stop = args.stop)
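+# e.g.: gstlal_inspiral_create_p_of_ifos_given_horizon --instrument H1 --instrument L1 --instrument V1 --output-file H1L1V1_p_of_instruments_given_H_d.h5
+# constructing the object runs the full Monte Carlo over horizon-distance-ratio bins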
+PofI = inspiral_extrinsics.p_of_instruments_given_horizons(instruments = args.instrument, snr_thresh = args.snr_thresh)
 PofI.to_hdf5(args.output_file)
diff --git a/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon_dag b/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon_dag
deleted file mode 100644
index e68e77161e332fdf8078fc99965fb66213cfbf2e..0000000000000000000000000000000000000000
--- a/gstlal-inspiral/bin/gstlal_inspiral_create_p_of_ifos_given_horizon_dag
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2019 Leo Tsukada
-#
-# This program is free software; you can redistribute it and/or modify it
-# under the terms of the GNU General Public License as published by the
-# Free Software Foundation; either version 2 of the License, or (at your
-# option) any later version.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
-# Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
-
-import os
-from gstlal import dagparts
-from gstlal.stats import inspiral_extrinsics, inspiral_lr
-import argparse
-
-parser = argparse.ArgumentParser(description = 'generate a dt dphi covariance matrix and tree data to replace share/inspiral_dtdphi_pdf.h5')
-parser.add_argument('--snr-thresh', type=float, default = inspiral_lr.LnLRDensity.snr_min, help = 'set the snr minimum to define found')
-# parser.add_argument('--output-file', default = 'p_of_instruments_given_H_d.h5', help = 'set the output hdf5 file. Default extparams.h5')
-parser.add_argument('--instruments', type=str, help='add instruments. Separate them with commas; the order does not have to be alphabetical.', required = True)
-args = parser.parse_args()
-
-instruments = "".join(sorted(args.instruments.split(",")))
-
-try:
-	os.mkdir("logs")
-except:
-	pass
-dag = dagparts.DAG("p_of_I_%s" % instruments)
-
-margJob = dagparts.DAGJob("gstlal_inspiral_create_p_of_ifos_given_horizon", condor_commands = {"request_memory":"4GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o3.cbc.uber.gstlaloffline"})
-addJob = dagparts.DAGJob("gstlal_inspiral_add_p_of_ifos_given_horizon", condor_commands = {"request_memory":"4GB", "want_graceful_removal":"True", "kill_sig":"15", "accounting_group":"ligo.prod.o3.cbc.uber.gstlaloffline"})
-
-
-num = 41
-margnodes = []
-for start in range(0, 41**(len(instruments) / 2 - 1), num):
-	stop = start + num
-	margnodes.append(dagparts.DAGNode(margJob, dag, parent_nodes = [], opts = {"start":str(start), "stop":str(stop), "instruments": args.instruments}, output_files = {"output-file":"%s/%s_p_of_instruments_given_H_d_%d_%d.h5" % (margJob.output_path, "".join(instruments), start, stop)}))
-
-addJob_input = []
-addJob_input.extend([n.output_files["output-file"] for n in margnodes])
-addnode = dagparts.DAGNode(addJob, dag, parent_nodes = margnodes, opts = {"": args.instruments}, input_files = {"": addJob_input})
-
-dag.write_sub_files()
-dag.write_dag()
-dag.write_script()
-dag.write_cache()
-
diff --git a/gstlal-inspiral/python/stats/inspiral_extrinsics.py b/gstlal-inspiral/python/stats/inspiral_extrinsics.py
index 017205cab4c2caed0770ff1f1d6d495939d95849..9376396bc346e651419d29c0d1035c54866d3489 100644
--- a/gstlal-inspiral/python/stats/inspiral_extrinsics.py
+++ b/gstlal-inspiral/python/stats/inspiral_extrinsics.py
@@ -865,7 +865,7 @@ class SNRPDF(object):
 	def load(cls, fileobj = None, verbose = False):
 		if fileobj is None:
 			fileobj = open(cls.DEFAULT_FILENAME)
-		return cls.from_xml(ligolw_utils.load_fileobj(fileobj, gz = True, contenthandler = cls.LIGOLWContentHandler))
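+		# load_fileobj() returns a (document, digest) tuple; keep only the document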
+		return cls.from_xml(ligolw_utils.load_fileobj(fileobj, gz = True, contenthandler = cls.LIGOLWContentHandler)[0])
 
 
 #
@@ -1183,11 +1183,11 @@ class TimePhaseSNR(object):
 	"""
 	# NOTE to save a couple hundred megs of ram we do not
 	# include kagra for now...
-	responses = {"H1": lal.CachedDetectors[lal.LHO_4K_DETECTOR].response, "L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].response, "V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].response, "K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].response}
-	locations = {"H1":lal.CachedDetectors[lal.LHO_4K_DETECTOR].location, "L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].location, "V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].location, "K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].location}
+	responses = {"H1": lal.CachedDetectors[lal.LHO_4K_DETECTOR].response, "L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].response, "V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].response}#, "K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].response}
+	locations = {"H1":lal.CachedDetectors[lal.LHO_4K_DETECTOR].location, "L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].location, "V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].location}#, "K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].location}
 	numchunks = 20
 
-	def __init__(self, transtt = None, transtp = None, transpt = None, transpp = None, transdd = None, norm = None, tree_data = None, margsky = None, verbose = False, margstart = 0, margstop = None, SNR=None, psd_fname=None):
+	def __init__(self, transtt = None, transtp = None, transpt = None, transpp = None, transdd = None, norm = None, tree_data = None, margsky = None, verbose = False, margstart = 0, margstop = None):
 		"""
 		Initialize a new class from scratch via explicit computation
 		of the tree data and marginalized probability distributions or by providing
@@ -1270,9 +1270,6 @@ class TimePhaseSNR(object):
 		self.tree_data = tree_data
 		self.margsky = margsky
 
-		self.snr = SNR
-		self.psd_fname = psd_fname
-
 		if self.tree_data is None:
 			time, phase, deff = TimePhaseSNR.tile(verbose = verbose)
 			self.tree_data = self.dtdphideffpoints(time, phase, deff, self.slices)
@@ -1328,11 +1325,6 @@ class TimePhaseSNR(object):
 			for k,v in mat.items():
 				group.create_dataset(",".join(sorted(k)), data = float(v))
 
-		f.create_dataset("psd", data=self.psd_fname)
-		h5_snr = f.create_group("SNR")
-		for ifo in self.snr:
-			h5_snr.create_dataset(ifo, data=self.snr[ifo].value)
-
 		f.close()
 
 	@staticmethod
@@ -1648,7 +1640,7 @@ class p_of_instruments_given_horizons(object):
 
 	Note, linear interpolation is used over the bins
 	"""
-	def __init__(self, instruments = ("H1", "L1", "V1"), snr_thresh = 4., nbins = 41, hmin = 0.05, hmax = 20.0, histograms = None, bin_start = 0, bin_stop = None):
+	def __init__(self, instruments = ("H1", "L1", "V1"), snr_thresh = 4., nbins = 41, hmin = 0.05, hmax = 20.0, histograms = None):
 		"""
 		for each sub-combination of the "on" instruments, e.g.,
 		"H1","L1" out of "H1","L1","V1", the probability of getting a trigger above the
@@ -1675,12 +1667,6 @@ class p_of_instruments_given_horizons(object):
 		# The number of bins in the histogram of horizon distance ratios.
 		self.nbins = nbins
 
-		# Set the stop bin number and make sure that start/stop bin numbers are multiples of nbins
-		if bin_stop is not None and bin_stop % self.nbins:
-			raise ValueError("stop bin index must be a multiple of nbins=%d" % self.nbins)
-		elif bin_start % self.nbins:
-			raise ValueError("start bin index must be a multiple of nbins=%d" % self.nbins)
-
 		# The minimum and maximum horizon distance ratio to consider
 		# for the binning.  Anything outside this range will be
 		# clipped.
@@ -1732,8 +1718,8 @@ class p_of_instruments_given_horizons(object):
 			# NOTE we end up clipping any value outside of our
 			# histogram to just be the value in the last(first)
 			# bin, so we track those center values here.
-			self.first_center = self.histograms.values()[0].centres()[0][0]
-			self.last_center = self.histograms.values()[0].centres()[0][-1]
+			self.first_center = histograms.values()[0].centres()[0][0]
+			self.last_center = histograms.values()[0].centres()[0][-1]
 
 			# Now we start the monte carlo simulation of a bunch of
 			# signals distributed uniformly in the volume of space
@@ -1756,7 +1742,7 @@ class p_of_instruments_given_horizons(object):
 			# Iterate over the (# of instruments - 1) horizon
 			# distance ratios for all of the instruments that could
 			# have produced coincs
-			for horizontuple in list(itertools.product(*[b.centres() for b in bins]))[bin_start:bin_stop]:
+			for horizontuple in itertools.product(*[b.centres() for b in bins]):
 				horizondict = {}
 				# Calculate horizon distances for each of the
 				# instruments based on these ratios NOTE by
@@ -1857,10 +1843,9 @@ class p_of_instruments_given_horizons(object):
 					# record this probability in the histograms
 					self.histograms[combo][horizontuple] += count
 					total += count
-				if bin_stop is None:
-					# normalize the result so that the sum at this horizon ratio is one over all the combinations
-					for I in self.histograms:
-						self.histograms[I][horizontuple] /= total
+				# normalize the result so that the sum at this horizon ratio is one over all the combinations
+				for I in self.histograms:
+					self.histograms[I][horizontuple] /= total
 		self.mkinterp()
 
 	def mkinterp(self):
@@ -1898,7 +1883,7 @@ class p_of_instruments_given_horizons(object):
 		f.close()
 
 	@staticmethod
-	def from_hdf5(fname, other_fnames=[], **kwargs):
+	def from_hdf5(fname):
 		"""
 		Read data from a file so that you don't have to remake it from scratch
 		"""
@@ -1911,26 +1896,13 @@ class p_of_instruments_given_horizons(object):
 		instruments = tuple(sorted(grp.attrs["instruments"].split(",")))
 		histograms = {}
 		bins = []
-		combos = TimePhaseSNR.instrument_combos(instruments, min_instruments = 1)
 		for i in range(len(instruments) - 1):
 			bins.append(rate.LogarithmicBins(hmin, hmax, nbins))
-		for combo in combos:
+		for combo in TimePhaseSNR.instrument_combos(instruments, min_instruments = 1):
 			histograms[combo] = rate.BinnedArray(rate.NDBins(bins))
 			histograms[combo].array[:] = numpy.array(grp[",".join(combo)])[:]
 		f.close()
-
-		if len(other_fnames) > 0:
-			for fn in other_fnames:
-				f = h5py.File(fn, "r")
-				grp = f['gstlal_p_of_instruments']
-				for combo in combos:
-					histograms[combo].array[:] += numpy.array(grp[",".join(combo)])[:]
-				f.close()
-		norm = numpy.sum([BinnedArray.array[:] for BinnedArray in histograms.values()], axis=0)
-		for combo in combos:
-			histograms[combo].array[:][norm!=0] /= norm[norm!=0]
-
-		return p_of_instruments_given_horizons(instruments = instruments, snr_thresh = snr_thresh, nbins = nbins, hmin = hmin, hmax = hmax, histograms = histograms, **kwargs)
+		return p_of_instruments_given_horizons(instruments = instruments, snr_thresh = snr_thresh, nbins = nbins, hmin = hmin, hmax = hmax, histograms = histograms)
 
 
 #
@@ -1982,17 +1954,10 @@ class InspiralExtrinsics(object):
 	"""
 	p_of_ifos = {}
 	# FIXME add Kagra
-	p_of_ifos[("H1", "K1", "L1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1K1L1V1_p_of_instruments_given_H_d.h5"))
 	p_of_ifos[("H1", "L1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1L1V1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("H1", "K1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1K1V1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("H1", "K1", "L1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1K1L1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("K1", "L1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "K1L1V1_p_of_instruments_given_H_d.h5"))
 	p_of_ifos[("H1", "L1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1L1_p_of_instruments_given_H_d.h5"))
 	p_of_ifos[("H1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1V1_p_of_instruments_given_H_d.h5"))
 	p_of_ifos[("L1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "L1V1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("H1", "K1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "H1K1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("K1", "L1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "K1L1_p_of_instruments_given_H_d.h5"))
-	p_of_ifos[("K1", "V1",)] = p_of_instruments_given_horizons.from_hdf5(os.path.join(gstlal_config_paths["pkgdatadir"], "K1V1_p_of_instruments_given_H_d.h5"))
 
 	def __init__(self, min_instruments = 1, filename = None):
 		#
diff --git a/gstlal-inspiral/share/H1K1L1V1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/H1K1L1V1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index ae32abf202b7e13d3d84655be5e1486c33a6d5b7..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/H1K1L1V1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/H1K1L1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/H1K1L1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index ad33be8653412d2af9028f150b6954d28f804ce8..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/H1K1L1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/H1K1V1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/H1K1V1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index e274b61f3a8e1d2c38537b8e143c0ab3f757ea7f..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/H1K1V1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/H1K1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/H1K1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index a9f23c2363c912869a21053868f8cd4ad3b866b4..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/H1K1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/K1L1V1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/K1L1V1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index 24c6f15d54eda1102cd486c6f86ace1114ec6f88..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/K1L1V1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/K1L1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/K1L1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index ceee634992a74d3dcdce9da2f5ff847d8ba01309..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/K1L1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/K1V1_p_of_instruments_given_H_d.h5 b/gstlal-inspiral/share/K1V1_p_of_instruments_given_H_d.h5
deleted file mode 100644
index a62c8572b0c8a161eb1906c97917c8ee33d514bc..0000000000000000000000000000000000000000
Binary files a/gstlal-inspiral/share/K1V1_p_of_instruments_given_H_d.h5 and /dev/null differ
diff --git a/gstlal-inspiral/share/Makefile.am b/gstlal-inspiral/share/Makefile.am
index b24b84d2cfe34fbace34751a5e6fc7cf71b12fdf..da85b6946542bb22c726e44631f1511693437bce 100644
--- a/gstlal-inspiral/share/Makefile.am
+++ b/gstlal-inspiral/share/Makefile.am
@@ -6,10 +6,6 @@ endif
 
 dist_pkgdata_DATA = \
 	de_calc_likelihood.sql \
-	H1K1_p_of_instruments_given_H_d.h5 \
-	H1K1L1_p_of_instruments_given_H_d.h5 \
-	H1K1V1_p_of_instruments_given_H_d.h5 \
-	H1K1L1V1_p_of_instruments_given_H_d.h5 \
 	H1L1_p_of_instruments_given_H_d.h5 \
 	H1L1V1_p_of_instruments_given_H_d.h5 \
 	H1V1_p_of_instruments_given_H_d.h5 \
@@ -18,9 +14,6 @@ dist_pkgdata_DATA = \
 	inspiral_datalesslndensity.xml.gz \
 	inspiral_dtdphi_pdf.h5 \
 	inspiral_snr_pdf.xml.gz \
-	K1L1_p_of_instruments_given_H_d.h5 \
-	K1L1V1_p_of_instruments_given_H_d.h5 \
-	K1V1_p_of_instruments_given_H_d.h5 \
 	L1V1_p_of_instruments_given_H_d.h5 \
 	ll_simplify.sql \
 	ll_simplify_and_cluster.sql \
diff --git a/gstlal-inspiral/share/inspiral_dtdphi_pdf.h5 b/gstlal-inspiral/share/inspiral_dtdphi_pdf.h5
index 57a0d6ca809a926f2ccac4c46d6fd29f33e84cc7..176b73f3ca78d3e17949ae463323385d53c196a3 100644
--- a/gstlal-inspiral/share/inspiral_dtdphi_pdf.h5
+++ b/gstlal-inspiral/share/inspiral_dtdphi_pdf.h5
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a1c452351bde420112dee829d324efc3dc49722221bc835bba09973bc5ad5a28
-size 123608667
+oid sha256:d175062c21457d53197d9d46df05112773d5db12288ba5eaed86c287abbb8d29
+size 54102205
diff --git a/gstlal-inspiral/tests/dtdphitest.py b/gstlal-inspiral/tests/dtdphitest.py
index 273f04b47d08c9f3dde643204912fd4b8309c408..22b22024b915ae78b5654e6ba1e14e139f7a0048 100644
--- a/gstlal-inspiral/tests/dtdphitest.py
+++ b/gstlal-inspiral/tests/dtdphitest.py
@@ -1,14 +1,8 @@
 import sys
 import numpy
 import numpy.random
-import scipy
-from scipy import interpolate as interp
-from scipy import stats
 import lal
 from gstlal.stats import inspiral_extrinsics
-import h5py
-
-import pdb
 
 import matplotlib
 matplotlib.rcParams['text.usetex'] = True
@@ -21,11 +15,10 @@ matplotlib.rcParams['legend.fontsize'] = 10
 matplotlib.use('Agg')
 from matplotlib import pyplot
 
-
 TPDPDF = inspiral_extrinsics.InspiralExtrinsics()
-R = {"H1":lal.CachedDetectors[lal.LHO_4K_DETECTOR].response,"L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].response,"V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].response,"K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].response}
-X = {"H1":lal.CachedDetectors[lal.LHO_4K_DETECTOR].location,"L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].location,"V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].location,"K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].location}
-refhorizon = {"H1": 110., "L1": 140., "V1": 45., "K1": 45.}
+
+R = {"H":lal.CachedDetectors[lal.LHO_4K_DETECTOR].response,"L":lal.CachedDetectors[lal.LLO_4K_DETECTOR].response,"V":lal.CachedDetectors[lal.VIRGO_DETECTOR].response}
+X = {"H":lal.CachedDetectors[lal.LHO_4K_DETECTOR].location,"L":lal.CachedDetectors[lal.LLO_4K_DETECTOR].location,"V":lal.CachedDetectors[lal.VIRGO_DETECTOR].location}
 
 def random_source(R = R, X = X, epoch = lal.LIGOTimeGPS(0), gmst = lal.GreenwichMeanSiderealTime(lal.LIGOTimeGPS(0))):
 
@@ -49,59 +42,31 @@ def random_source(R = R, X = X, epoch = lal.LIGOTimeGPS(0), gmst = lal.Greenwich
 
 	return T, Deff, phi
 
-def dtdphi2prob(dt, dphi, ifo1, ifo2, refsnr, refhorizon=refhorizon):
-	"""function that takes dt,dphi samples and return the probability (density) values derived from the new pdf
-
-	:dt: the time delay between detector pair
-	:dphi: the difference of the coalescence phase between detector pair
-	:returns: dtdpipdf(dt, dphi) * snr^4 (assuming snr = {"H1": 5., "V1": 2.25} and horizon = {"H1": 110., "V1": 45.})
-
-	"""
-
-	# ifo1 += "1"
-	# ifo2 += "1"
-	t = {ifo1: 0, ifo2: dt}
-	phi = {ifo1: 0, ifo2: dphi}
-	snr = {ifo1: refsnr[ifo1], ifo2: refsnr[ifo2]} # our choice of characteristic SNR
-	horizon = {ifo1: refhorizon[ifo1], ifo2: refhorizon[ifo2]} # typical horizon distance taken from the summary page on 2019 May 09.
-	# signature is (time, phase, snr, horizon)
-	p = TPDPDF.time_phase_snr(t, phi, snr, horizon) * (sum(s**2 for s in snr.values())**.5)**4
-	return float(p)
-
-pdf_fname = sys.argv[1]
-ifo1, ifo2 = sorted(sys.argv[2].split(","))
-combo = ifo1 + "," + ifo2
-
-ndraws = 100000#100000 maybe take 10min to iterate
-delta_phi_list = []
-delta_t_list = []
-prob_simulation = []
+
+ndraws = 100000  # 100000 draws take roughly 10 minutes to iterate
+delta_phi_HV = []
+delta_t_HV = []
 i = 0
 
-# the following cov matrix elements were derived from running `gstlal_inspiral_compute_dtdphideff_cov_matrix --psd-xml share/O3/2019-05-09-H1L1V1psd_new.xml.gz --H-snr 5 --L-snr 7.0 --V-snr 4.0 --K-snr 4.0`
-data = h5py.File(pdf_fname)
-# pdb.set_trace()
-refsnr = dict((ifo, data["SNR"][ifo].value) for ifo in data["SNR"])
-transmat_dtdphi = numpy.array([[data["transtt"][combo].value, data["transtp"][combo].value], [data["transpt"][combo].value, data["transpp"][combo].value]])
-covmat_dtdphi = numpy.linalg.inv(numpy.dot(transmat_dtdphi.T, transmat_dtdphi))
-sigmadd = 1. / (data["transdd"][combo].value)**2
-rd_slice = (refhorizon[ifo1] / refsnr[ifo1]) / (refhorizon[ifo2] / refsnr[ifo2])
+# the following cov matrix elements were derived from running `gstlal_inspiral_compute_dtdphideff_cov_matrix --psd-xml share/O3/2019-05-09-H1L1V1psd_new.xml.gz --H-snr 5 --L-snr 7.0 --V-snr 2.25`
+covmat = [[0.000001, 0.000610], [0.000610, 0.648023]]
+sigmadd = 0.487371
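+# covmat is the assumed (dt, dphi) covariance; sigmadd is applied below as
+# the standard deviation of the log effective-distance ratio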
 while i < ndraws:
 	t,d,p = random_source()
 	# Use EXACTLY the same covariance matrix assumptions as the gstlal
 	# code.  It could be a bad assumption, but we are testing the method
 	# not the assumptions by doing this.
-	mean = [t[ifo1] - t[ifo2], p[ifo1] - p[ifo2]]
-	dt, dp = numpy.random.multivariate_normal(mean, covmat_dtdphi)
-	rd = numpy.log(d[ifo1] / d[ifo2]) + numpy.random.randn() * numpy.sqrt(sigmadd)
+	mean = [t["H"] - t["V"], p["H"] - p["V"]]
+	dtHV, dpHV = numpy.random.multivariate_normal(mean, covmat)
+	rdHV = numpy.log(d["H"] / d["V"]) + numpy.random.randn() * sigmadd
 	# only choose things with almost the same effective distance ratio for
 	# this test to agree with setting the same horizon and snr below
-	if numpy.log(rd_slice + 0.05) >= rd >= numpy.log(rd_slice - 0.05):
-		delta_phi_list.append(dp)
-		delta_t_list.append(dt)
-		prob_simulation.append(dtdphi2prob(dt, dp, ifo1, ifo2, refsnr, refhorizon))
+	if numpy.log(1.1 + 0.1) >= rdHV >= numpy.log(1.1 - 0.1): # 1.1 here comes from Deff_H1 / Deff_V1 = (110/5) / (45/2.25)
+		delta_phi_HV.append(dpHV)
+		delta_t_HV.append(dtHV)
+		print i
 		i += 1
-prob_simulation.sort()
+
 
 num1 = 100
 num2 = 101
@@ -113,60 +78,28 @@ Prob = numpy.zeros((num1,num2))
 
 for j, dt in enumerate(dtvec):
 	for i, dp in enumerate(dphivec):
-		p = dtdphi2prob(dt, dp, ifo1, ifo2, refsnr, refhorizon)
+		t = {"H1": 0, "V1": dt}
+		phi = {"H1": 0, "V1": dp}
+		snr = {"H1": 5., "V1": 2.25} # our choice of characteristic SNR
+		horizon = {"H1": 110., "V1": 45.} # typical horizon distance taken from the summary page on 2019 May 09.
+		# signature is (time, phase, snr, horizon)
+		p = TPDPDF.time_phase_snr(t, phi, snr, horizon)
 		Prob[j,i] = p
 		DPProb[i] += p
 		DTProb[j] += p
 
 pyplot.figure(figsize=(7.5,7.5))
 pyplot.subplot(221)
-pyplot.hist(delta_phi_list, dphivec, normed = True, label = "Simulation")
+pyplot.hist(delta_phi_HV, dphivec, normed = True, label = "Simulation")
 pyplot.plot(dphivec, DPProb / numpy.sum(DPProb) / (dphivec[1] - dphivec[0]), label ="Direct Calculation")
-pyplot.ylabel(r"""$P(\Delta\phi | s, \{D_{%s}, D_{%s}\}, \{\rho_{%s}, \rho_{%s}\})$""" % (ifo1, ifo2, ifo1, ifo2))
+pyplot.ylabel(r"""$P(\Delta\phi | s, D_H = D_V, \rho_H = \rho_V)$""")
 pyplot.legend(bbox_to_anchor=(1, 1), loc='upper left', ncol=1)
 pyplot.subplot(223)
 pyplot.pcolor(dphivec, dtvec, Prob)
-pyplot.xlabel(r"""$\phi_{%s} - \phi_{%s}$""" % (ifo1, ifo2))
-pyplot.ylabel(r"""$t_{%s} - t_{%s}$""" % (ifo1, ifo2))
+pyplot.xlabel(r"""$\phi_H - \phi_V$""")
+pyplot.ylabel(r"""$t_H - t_V$""")
 pyplot.subplot(224)
-pyplot.hist(delta_t_list, dtvec, normed = True, orientation = "horizontal")
+pyplot.hist(delta_t_HV, dtvec, normed = True, orientation = "horizontal")
 pyplot.plot(DTProb / numpy.sum(DTProb) / (dtvec[1] - dtvec[0]), dtvec)
-pyplot.xlabel(r"""$P(\Delta t | s, \{D_{%s}, D_{%s}\}, \{\rho_{%s}, \rho_{%s}\})$""" % (ifo1, ifo2, ifo1, ifo2))
-pyplot.savefig("%s%sPDF.pdf" % (ifo1, ifo2))
-
-if False:
-	prob_grid = numpy.sort(Prob.flatten())
-	percentiles_grid = numpy.cumsum(prob_grid)
-	percentiles_grid /= percentiles_grid[-1]
-	p_interp = interp.interp1d(prob_grid, percentiles_grid)
-	# plot of the interpolate function of percentile to see if it is a good approximation
-	fig = pyplot.figure()
-	ax1 = fig.add_subplot(111)
-	ax1.loglog(prob_grid, percentiles_grid, linestyle="None", color="r", marker=".")
-	ax1.loglog(prob_grid, p_interp(prob_grid))
-	pyplot.savefig("./percentile_interp_check.png")
-	# make pp-plot from the interpolate function of percentiles
-	try:
-		percentiles_exp = sorted(p_interp(prob_simulation))
-	except ValueError:
-		prob_simulation = numpy.array(prob_simulation)
-		percentiles_exp = numpy.empty(len(prob_simulation))
-		mask_ok = [(prob_simulation < max(prob_grid)) & (prob_simulation > min(prob_grid))]
-		mask_above = [prob_simulation > max(prob_grid)]
-		mask_below = [prob_simulation < min(prob_grid)]
-		percentiles_exp[mask_ok] = sorted(p_interp(prob_simulation[mask_ok]))
-		percentiles_exp[mask_above] = 1.
-		percentiles_exp[mask_below] = 0
-	percentiles_inj = numpy.cumsum(numpy.ones(len(percentiles_exp))) / len(percentiles_exp)
-	fig = pyplot.figure()
-	ax2 = fig.add_subplot(111)
-	ax2.plot([0,1], [0, 1], linestyle="--")
-	ax2.plot(percentiles_exp, percentiles_inj, linestyle="-", color="r", label="simulation")
-	CI = 0.9
-	quant = stats.norm.ppf(CI * 0.5 + 0.5)
-	err = quant * numpy.sqrt(percentiles_inj * (1 - percentiles_inj) / len(percentiles_inj))
-	ax2.fill_between(percentiles_exp, percentiles_inj - err, percentiles_inj + err, facecolor='r', label="90$\%$ measurement uncertainty", alpha=.3)
-	pyplot.legend(loc = "lower right")
-	ax2.set_xlabel("Percentile computed from the pdf")
-	ax2.set_ylabel("Percentile in the injection set")
-	pyplot.savefig("./%s%s_pp_plot.pdf" % (ifo1, ifo2))
+pyplot.xlabel(r"""$P(\Delta t | s, D_H = D_V, \rho_H = \rho_V)$""")
+pyplot.savefig("HVPDF.pdf")