diff --git a/gstlal-inspiral/python/stats/inspiral_extrinsics.py b/gstlal-inspiral/python/stats/inspiral_extrinsics.py
index cb0f66ed75819b93c1b6170bbe4ea93bad02bf71..4648a422fd71b296739ba51b7e1af5d0f06b7c7a 100644
--- a/gstlal-inspiral/python/stats/inspiral_extrinsics.py
+++ b/gstlal-inspiral/python/stats/inspiral_extrinsics.py
@@ -1144,23 +1144,39 @@ class TimePhaseSNR(object):
 	locations = {"H1":lal.CachedDetectors[lal.LHO_4K_DETECTOR].location, "L1":lal.CachedDetectors[lal.LLO_4K_DETECTOR].location, "V1":lal.CachedDetectors[lal.VIRGO_DETECTOR].location}#, "K1":lal.CachedDetectors[lal.KAGRA_DETECTOR].location}
 	numchunks = 20
 
-	# NOTE NOTE NOTE: You cannot just change these without fully rebuilding
-	# the trees and the marginalized distributions.
-	# These are generated by running
-	# gstlal_inspiral_compute_dtdphideff_cov_matrix --psd-xml share/O3/2019-05-09-H1L1V1psd_new.xml.gz --H-snr 5 --L-snr 7.0 --V-snr 2.25
-	transtt = {frozenset(['V1', 'H1']): 1721.2939671945821, frozenset(['H1', 'L1']): 4264.931381497161, frozenset(['V1', 'L1']): 1768.5971596859304}
-	transtp = {frozenset(['V1', 'H1']): -1.620591401760186, frozenset(['H1', 'L1']): -3.07346071253937, frozenset(['V1', 'L1']): -1.7020167980031902}
-	transpt = {frozenset(['V1', 'H1']): 0.0, frozenset(['H1', 'L1']): 0.0, frozenset(['V1', 'L1']): 0.0}
-	transpp = {frozenset(['V1', 'H1']): 1.2422379329336048, frozenset(['H1', 'L1']): 2.6540061786001834, frozenset(['V1', 'L1']): 1.2984050700516363}
-	transdd = {frozenset(['V1', 'H1']): 2.0518233866439894, frozenset(['H1', 'L1']): 4.068667356033675, frozenset(['V1', 'L1']): 2.1420642628918447}
-
-	def __init__(self, tree_data = None, margsky = None, verbose = False, margstart = 0, margstop = None):
+	def __init__(self, transtt = None, transtp = None, transpt = None, transpp = None, transdd = None, norm = None, tree_data = None, margsky = None, verbose = False, margstart = 0, margstop = None):
 		"""
 		Initialize a new class from scratch via explicit computation
 		of the tree data and marginalized probability distributions or by providing
 		these.  **NOTE** generally speaking a user will not initialize
 		one of these from scratch, but instead will read the data from disk using the
 		from_hdf() method below.
+
+		transtt, transtp, transpt, transpp and transdd are required.  They
+		can be produced by running gstlal_inspiral_compute_dtdphideff_cov_matrix.
+		For example:
+
+		gstlal_inspiral_compute_dtdphideff_cov_matrix --psd-xml share/O3/2019-05-09-H1L1V1psd_new.xml.gz --H-snr 5 --L-snr 7.0 --V-snr 2.25
+
+		transtt = {frozenset(['V1', 'H1']): 1721.2939671945821, frozenset(['H1', 'L1']): 4264.931381497161, frozenset(['V1', 'L1']): 1768.5971596859304}
+		transtp = {frozenset(['V1', 'H1']): -1.620591401760186, frozenset(['H1', 'L1']): -3.07346071253937, frozenset(['V1', 'L1']): -1.7020167980031902}
+		transpt = {frozenset(['V1', 'H1']): 0.0, frozenset(['H1', 'L1']): 0.0, frozenset(['V1', 'L1']): 0.0}
+		transpp = {frozenset(['V1', 'H1']): 1.2422379329336048, frozenset(['H1', 'L1']): 2.6540061786001834, frozenset(['V1', 'L1']): 1.2984050700516363}
+		transdd = {frozenset(['V1', 'H1']): 2.0518233866439894, frozenset(['H1', 'L1']): 4.068667356033675, frozenset(['V1', 'L1']): 2.1420642628918447}
+
+		norm is required. An example is:
+
+		norm = {frozenset(['H1', 'V1']): 332.96168414700816, frozenset(['H1', 'L1', 'V1']): 7.729877009864116, frozenset(['L1', 'V1']): 313.34679951193306, frozenset(['L1']): 0.0358423687136922, frozenset(['H1', 'L1']): 409.06489455239137, frozenset(['H1']): 0.0358423687136922, frozenset(['V1']): 0.0358423687136922}
+
+		Typically, a user would make a new inspiral_dt_dphi_pdf.h5 file
+		containing all of these by running:
+
+		gstlal_inspiral_create_dt_dphi_snr_ratio_pdfs_dag
+
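+		The resulting file can then be loaded with, for example:
+
+		TimePhaseSNR.from_hdf5("inspiral_dt_dphi_pdf.h5")
+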
 		"""
 
 		# This is such that the integral over the sky and over all
@@ -1202,7 +1218,15 @@ class TimePhaseSNR(object):
 		# print result
 		# >>> {('H1', 'V1'): array([ 1.00000003]), ('H1', 'L1', 'V1'): array([ 1.00000004]), ('L1', 'V1'): array([ 0.99999999]), ('L1',): 0.99999999999646638, ('H1', 'L1'): array([ 0.99999997]), ('H1',): 0.99999999999646638, ('V1',): 0.99999999999646638}
 
-		self.norm = {('H1', 'V1'):332.96168414700816, ('H1', 'L1', 'V1'):7.729877009864116, ('L1', 'V1'):313.34679951193306, ('L1',):0.0358423687136922, ('H1', 'L1'):409.06489455239137, ('H1',):0.0358423687136922, ('V1',):0.0358423687136922}
+		if any(x is None for x in (norm, transtt, transtp, transpt, transpp, transdd)):
+			raise ValueError("transtt, transtp, transpt, transpp, transdd and norm are required and cannot be None")
+
+		self.norm = norm
+		self.transtt = transtt
+		self.transtp = transtp
+		self.transpt = transpt
+		self.transpp = transpp
+		self.transdd = transdd
 
 		self.tree_data = tree_data
 		self.margsky = margsky
@@ -1251,21 +1275,55 @@ class TimePhaseSNR(object):
 		mgrp = dgrp.create_group("marg")
 		for combo in self.combos:
 			mgrp.create_dataset(",".join(combo), data = self.margsky[combo], compression="gzip")
+
+		h5_transtt = f.create_group("transtt")
+		h5_transtp = f.create_group("transtp")
+		h5_transpt = f.create_group("transpt")
+		h5_transpp = f.create_group("transpp")
+		h5_transdd = f.create_group("transdd")
+		h5_norm = f.create_group("norm")
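+		# each matrix element and norm factor is stored as a scalar dataset
+		# named by the sorted, comma-joined instrument names, e.g. "H1,L1"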
+		for group, mat in zip((h5_transtt, h5_transtp, h5_transpt, h5_transpp, h5_transdd, h5_norm), (self.transtt, self.transtp, self.transpt, self.transpp, self.transdd, self.norm)):
+			for k,v in mat.items():
+				group.create_dataset(",".join(sorted(k)), data = float(v))
+
 		f.close()
 
 	@staticmethod
-	def from_hdf5(fname, other_fnames = []):
+	def from_hdf5(fname, other_fnames = [], **kwargs):
 		"""
 		Initialize one of these from a file instead of computing it from scratch
 		"""
 		f = h5py.File(fname, "r")
-		dgrp = f["gstlal_extparams"]
-		tree_data = numpy.array(dgrp["treedata"])
-		margsky = {}
-		for combo in dgrp["marg"]:
-			key = tuple(combo.split(","))
-			margsky[key] = numpy.array(dgrp["marg"][combo])
+		# These *have* to be here
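+		# (the keys are rebuilt as frozensets of instrument names so that
+		# lookups such as self.norm[frozenset(combo)] work)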
+		transtt = dict((frozenset(k.split(",")), numpy.array(f["transtt"][k])) for k in f["transtt"])
+		transtp = dict((frozenset(k.split(",")), numpy.array(f["transtp"][k])) for k in f["transtp"])
+		transpt = dict((frozenset(k.split(",")), numpy.array(f["transpt"][k])) for k in f["transpt"])
+		transpp = dict((frozenset(k.split(",")), numpy.array(f["transpp"][k])) for k in f["transpp"])
+		transdd = dict((frozenset(k.split(",")), numpy.array(f["transdd"][k])) for k in f["transdd"])
+		norm = dict((frozenset(k.split(",")), numpy.array(f["norm"][k])) for k in f["norm"])
+
+		try:
+			dgrp = f["gstlal_extparams"]
+			tree_data = numpy.array(dgrp["treedata"])
+			margsky = {}
+			for combo in dgrp["marg"]:
+				key = tuple(combo.split(","))
+				margsky[key] = numpy.array(dgrp["marg"][combo])
+		except KeyError:
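+			# files that do not carry the precomputed tree data and
+			# marginalized sky distributions fall back to None so that
+			# __init__() can compute them from scratch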
+			tree_data = None
+			margsky = None
+
 		f.close()
+
+		# FIXME add sanity checking on this - also make it a += ? In
+		# general this is kinda crappy.
 		for fn in other_fnames:
 			f = h5py.File(fn, "r")
 			dgrp = f["gstlal_extparams"]
@@ -1273,7 +1331,8 @@ class TimePhaseSNR(object):
 				key = tuple(combo.split(","))
 				margsky[key] += numpy.array(dgrp["marg"][combo])
 			f.close()
-		return TimePhaseSNR(tree_data = tree_data, margsky = margsky)
+
+		return TimePhaseSNR(transtt = transtt, transtp = transtp, transpt = transpt, transpp = transpp, transdd = transdd, norm = norm, tree_data = tree_data, margsky = margsky, **kwargs)
 
 	@property
 	def combos(self):
@@ -1459,7 +1518,9 @@ class TimePhaseSNR(object):
 		# NOTE shortcut for single IFO
 		#
 		if len(snr) == 1:
-			return 1. /  self.norm[combo] * 5.66 / (sum(s**2 for s in snr.values())**.5)**4
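+			# self.norm is keyed by frozensets of instrument names (see
+			# from_hdf5()), so convert the combo tuple before the lookup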
+			return 1. /  self.norm[frozenset(combo)] * 5.66 / (sum(s**2 for s in snr.values())**.5)**4
 
 		deff = dict((k, horizon[k] / snr[k] * 8.0) for k in snr)
 		# FIXME can this be a function call??
@@ -1474,7 +1535,7 @@ class TimePhaseSNR(object):
 		# that goes like rho^-4 with a somewhat arbitrary normilization
 		# which comes form 5.66 ~ (4**2 + 4**2)**.5, so that the factor
 		# is 1 for a double right at threshold.
-		return numpy.exp(-D2 / 2.) * self.margsky[combo][nearestix] / self.norm[combo] * 5.66 / (sum(s**2 for s in snr.values())**.5)**4
+		return numpy.exp(-D2 / 2.) * self.margsky[combo][nearestix] / self.norm[frozenset(combo)] * 5.66 / (sum(s**2 for s in snr.values())**.5)**4
 
 
 #