Skip to content
Snippets Groups Projects
Commit d53e5c4e authored by Colm Talbot's avatar Colm Talbot
Browse files

Merge branch 'standard-multivariate-normal' into 'master'

MultivariateGaussianDist(): use "standard" multivariate normal distribution internally

See merge request !1142
parents 272a670b b0339064
No related branches found
No related tags found
1 merge request!1142MultivariateGaussianDist(): use "standard" multivariate normal distribution internally
Pipeline #449330 passed
......@@ -24,6 +24,7 @@ Eric Thrane
Ethan Payne
Francisco Javier Hernandez
Gregory Ashton
Hank Hua
Hector Estelles
Ignacio Magaña Hernandez
Isobel Marguarethe Romero-Shaw
......@@ -34,6 +35,7 @@ Jeremy G Baier
John Veitch
Joshua Brandt
Josh Willis
Karl Wette
Katerina Chatziioannou
Kaylee de Soto
Khun Sang Phukon
......
......@@ -382,6 +382,7 @@ class MultivariateGaussianDist(BaseJointPriorDist):
self.covs = []
self.corrcoefs = []
self.sigmas = []
self.logprodsigmas = [] # log of product of sigmas, needed for "standard" multivariate normal
self.weights = []
self.eigvalues = []
self.eigvectors = []
......@@ -528,6 +529,9 @@ class MultivariateGaussianDist(BaseJointPriorDist):
self.covs.append(np.eye(self.num_vars))
self.sigmas.append(np.ones(self.num_vars))
# compute log of product of sigmas, needed for "standard" multivariate normal
self.logprodsigmas.append(np.log(np.prod(self.sigmas[-1])))
# get eigen values and vectors
try:
evals, evecs = np.linalg.eig(self.corrcoefs[-1])
......@@ -557,9 +561,16 @@ class MultivariateGaussianDist(BaseJointPriorDist):
# add the mode
self.nmodes += 1
# add multivariate Gaussian
# add "standard" multivariate normal distribution
# - when the typical scales of the parameters are very different,
# multivariate_normal() may complain that the covariance matrix is singular
# - instead, pass zero means and the correlation matrix rather than the covariance matrix
# to get the equivalent of a standard normal distribution in higher dimensions
# - this modifies the multivariate normal PDF as follows:
# multivariate_normal(mean=mus, cov=cov).logpdf(x)
# = multivariate_normal(mean=0, cov=corrcoefs).logpdf((x - mus)/sigmas) - logprodsigmas
self.mvn.append(
scipy.stats.multivariate_normal(mean=self.mus[-1], cov=self.covs[-1])
scipy.stats.multivariate_normal(mean=np.zeros(self.num_vars), cov=self.corrcoefs[-1])
)
def _rescale(self, samp, **kwargs):
......@@ -630,7 +641,9 @@ class MultivariateGaussianDist(BaseJointPriorDist):
for j in range(samp.shape[0]):
# loop over the modes and sum the probabilities
for i in range(self.nmodes):
lnprob[j] = np.logaddexp(lnprob[j], self.mvn[i].logpdf(samp[j]))
# self.mvn[i] is a "standard" multivariate normal distribution; see add_mode()
z = (samp[j] - self.mus[i]) / self.sigmas[i]
lnprob[j] = np.logaddexp(lnprob[j], self.mvn[i].logpdf(z) - self.logprodsigmas[i])
# set out-of-bounds values to -inf
lnprob[outbounds] = -np.inf
......
......@@ -39,5 +39,61 @@ MultivariateGaussianDist(
)
class TestMultivariateGaussianDistParameterScales(unittest.TestCase):
    """Tests that MultivariateGaussianDist.ln_prob() behaves correctly when the
    parameters of the distribution have very different typical scales."""

    def _test_mvg_ln_prob_diff_expected(self, mvg, mus, sigmas, corrcoefs):
        # The columns of the Cholesky decomposition of the correlation matrix
        # give directions along which the multivariate Gaussian log-PDF falls
        # by an exactly known amount per unit sigma; check those drops.
        chol = np.linalg.cholesky(corrcoefs)
        ln_prob_at_peak = mvg.ln_prob(mus)
        for idx in np.ndindex(4, 4, 4):
            offset = sigmas * (chol @ idx)
            observed_drop = ln_prob_at_peak - mvg.ln_prob(mus + offset)
            # log-PDF drop for a standardized displacement idx is |idx|^2 / 2
            expected_drop = 0.5 * np.sum(np.array(idx) ** 2)
            self.assertTrue(
                np.allclose(observed_drop, expected_drop)
            )

    def test_mvg_unit_scales(self):
        # Baseline check: order-unity standard deviations, no correlations.
        sigmas = np.full(3, 0.3)
        corrcoefs = np.eye(3)
        mus = np.array([3, 1, 2])
        mvg = bilby.core.prior.MultivariateGaussianDist(
            names=['a', 'b', 'c'],
            mus=mus,
            sigmas=sigmas,
            corrcoefs=corrcoefs,
        )
        self._test_mvg_ln_prob_diff_expected(mvg, mus, sigmas, corrcoefs)

    def test_mvg_cw_scales(self):
        # Standard deviations and correlations taken from the inverse Fisher
        # information matrix for the frequency/spindown parameters of a
        # continuous wave signal.
        T = 365.25 * 86400
        rho = 10
        sigma_f = 5 * np.sqrt(3) / (2 * np.pi * T * rho)
        sigma_fd = 6 * np.sqrt(5) / (np.pi * T**2 * rho)
        sigma_fdd = 60 * np.sqrt(7) / (np.pi * T**3 * rho)
        sigmas = np.array([sigma_f, sigma_fd, sigma_fdd])
        corrcoefs = np.eye(3)
        corrcoefs[0, 2] = corrcoefs[2, 0] = -np.sqrt(21) / 5
        # Exercise MultivariateGaussianDist() with parameters of wildly
        # different magnitudes: f ~ 100, fd ~ 1/T, fdd ~ 1/T^2.
        mus = [123.4, -5.6e-8, 9e-18]
        mvg = bilby.core.prior.MultivariateGaussianDist(
            names=["f", "fd", "fdd"],
            mus=mus,
            sigmas=sigmas,
            corrcoefs=corrcoefs,
        )
        self._test_mvg_ln_prob_diff_expected(mvg, mus, sigmas, corrcoefs)
# Allow the test module to be run directly as a script.
if __name__ == "__main__":
    unittest.main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment