diff --git a/AUTHORS.md b/AUTHORS.md
index a1a3176a7742c09b17b8a238f0098f7aa0c71a14..c21985dc36766b72c4c2ae0cb02f4c274282f7bc 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -24,6 +24,7 @@ Eric Thrane
 Ethan Payne
 Francisco Javier Hernandez
 Gregory Ashton
+Hank Hua
 Hector Estelles
 Ignacio Magaña Hernandez
 Isobel Marguarethe Romero-Shaw
@@ -34,6 +35,7 @@ Jeremy G Baier
 John Veitch
 Joshua Brandt
 Josh Willis
+Karl Wette
 Katerina Chatziioannou
 Kaylee de Soto
 Khun Sang Phukon
diff --git a/bilby/core/prior/joint.py b/bilby/core/prior/joint.py
index 9dc35f40dd57b15a4b14c77f3395b8d1839ccac0..7f35b8716526e8811571409c2aba6d476a9958cc 100644
--- a/bilby/core/prior/joint.py
+++ b/bilby/core/prior/joint.py
@@ -382,6 +382,7 @@ class MultivariateGaussianDist(BaseJointPriorDist):
         self.covs = []
         self.corrcoefs = []
         self.sigmas = []
+        self.logprodsigmas = []  # log of product of sigmas, needed for "standard" multivariate normal
         self.weights = []
         self.eigvalues = []
         self.eigvectors = []
@@ -528,6 +529,9 @@ class MultivariateGaussianDist(BaseJointPriorDist):
                 self.covs.append(np.eye(self.num_vars))
                 self.sigmas.append(np.ones(self.num_vars))
 
+        # compute log of product of sigmas, needed for "standard" multivariate normal
+        self.logprodsigmas.append(np.log(np.prod(self.sigmas[-1])))
+
         # get eigen values and vectors
         try:
             evals, evecs = np.linalg.eig(self.corrcoefs[-1])
@@ -557,9 +561,16 @@ class MultivariateGaussianDist(BaseJointPriorDist):
         # add the mode
         self.nmodes += 1
 
-        # add multivariate Gaussian
+        # add "standard" multivariate normal distribution
+        # - when the typical scales of the parameters are very different,
+        #   multivariate_normal() may complain that the covariance matrix is singular
+        # - instead, pass zero means and the correlation matrix (rather than the covariance matrix)
+        #   to get the equivalent of a standard normal distribution in higher dimensions
+        # - this modifies the multivariate normal PDF as follows:
+        #     multivariate_normal(mean=mus, cov=cov).logpdf(x)
+        #       = multivariate_normal(mean=0, cov=corrcoefs).logpdf((x - mus)/sigmas) - logprodsigmas
         self.mvn.append(
-            scipy.stats.multivariate_normal(mean=self.mus[-1], cov=self.covs[-1])
+            scipy.stats.multivariate_normal(mean=np.zeros(self.num_vars), cov=self.corrcoefs[-1])
         )
 
     def _rescale(self, samp, **kwargs):
@@ -630,7 +641,9 @@ class MultivariateGaussianDist(BaseJointPriorDist):
         for j in range(samp.shape[0]):
             # loop over the modes and sum the probabilities
             for i in range(self.nmodes):
-                lnprob[j] = np.logaddexp(lnprob[j], self.mvn[i].logpdf(samp[j]))
+                # self.mvn[i] is a "standard" multivariate normal distribution; see add_mode()
+                z = (samp[j] - self.mus[i]) / self.sigmas[i]
+                lnprob[j] = np.logaddexp(lnprob[j], self.mvn[i].logpdf(z) - self.logprodsigmas[i])
 
         # set out-of-bounds values to -inf
         lnprob[outbounds] = -np.inf
diff --git a/test/core/prior/joint_test.py b/test/core/prior/joint_test.py
index 20b88a69e6e749f6ba8ec9091132103dd2a7ed48..ebadfcfaeb96ed1cd017a212097c8ff65f5c477c 100644
--- a/test/core/prior/joint_test.py
+++ b/test/core/prior/joint_test.py
@@ -39,5 +39,61 @@ MultivariateGaussianDist(
 )
 
 
+class TestMultivariateGaussianDistParameterScales(unittest.TestCase):
+    def _test_mvg_ln_prob_diff_expected(self, mvg, mus, sigmas, corrcoefs):
+        # the columns of the Cholesky decomposition give the directions along which
+        # the multivariate Gaussian PDF will decrease by exact differences per unit
+        # sigma; test that these are as expected
+        ln_prob_mus = mvg.ln_prob(mus)
+        d = np.linalg.cholesky(corrcoefs)
+        for i in np.ndindex(4, 4, 4):
+            ln_prob_mus_sigmas_d_i = mvg.ln_prob(mus + sigmas * (d @ i))
+            diff_ln_prob = ln_prob_mus - ln_prob_mus_sigmas_d_i
+            diff_ln_prob_expected = 0.5 * np.sum(np.array(i)**2)
+            self.assertTrue(
+                np.allclose(diff_ln_prob, diff_ln_prob_expected)
+            )
+
+    def test_mvg_unit_scales(self):
+        # test using order-unity standard deviations and correlations
+        sigmas = 0.3 * np.ones(3)
+        corrcoefs = np.identity(3)
+        mus = np.array([3, 1, 2])
+        mvg = bilby.core.prior.MultivariateGaussianDist(
+            names=['a', 'b', 'c'],
+            mus=mus,
+            sigmas=sigmas,
+            corrcoefs=corrcoefs,
+        )
+
+        self._test_mvg_ln_prob_diff_expected(mvg, mus, sigmas, corrcoefs)
+
+    def test_mvg_cw_scales(self):
+        # test using standard deviations and correlations from the
+        # inverse Fisher information matrix for the frequency/spindown
+        # parameters of a continuous wave signal
+        T = 365.25 * 86400
+        rho = 10
+        sigmas = np.array([
+            5 * np.sqrt(3) / (2 * np.pi * T * rho),
+            6 * np.sqrt(5) / (np.pi * T**2 * rho),
+            60 * np.sqrt(7) / (np.pi * T**3 * rho)
+        ])
+        corrcoefs = np.identity(3)
+        corrcoefs[0, 2] = corrcoefs[2, 0] = -np.sqrt(21) / 5
+
+        # test MultivariateGaussianDist() can handle parameters with very different scales:
+        # - f ~ 100, fd ~ 1/T, fdd ~ 1/T^2
+        mus = [123.4, -5.6e-8, 9e-18]
+        mvg = bilby.core.prior.MultivariateGaussianDist(
+            names=["f", "fd", "fdd"],
+            mus=mus,
+            sigmas=sigmas,
+            corrcoefs=corrcoefs,
+        )
+
+        self._test_mvg_ln_prob_diff_expected(mvg, mus, sigmas, corrcoefs)
+
+
 if __name__ == "__main__":
     unittest.main()
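
As a quick sanity check of the standardisation identity quoted in the add_mode() comment above, the short standalone Python sketch below (not part of the patch; the sigmas, correlation coefficient, means and seed are arbitrary illustrative values, kept moderate enough that the plain covariance form can also be evaluated directly) confirms that the two evaluations of the log-PDF agree:

# Sanity check (not part of the patch) of the identity used in add_mode():
#   multivariate_normal(mean=mus, cov=cov).logpdf(x)
#     = multivariate_normal(mean=0, cov=corrcoefs).logpdf((x - mus) / sigmas) - logprodsigmas
# The sigmas, correlation and seed below are arbitrary illustrative values.
import numpy as np
from scipy.stats import multivariate_normal

sigmas = np.array([3.0, 0.5, 0.01])
corrcoefs = np.identity(3)
corrcoefs[0, 2] = corrcoefs[2, 0] = -0.5
cov = np.outer(sigmas, sigmas) * corrcoefs   # cov_ij = sigma_i * sigma_j * corrcoef_ij
mus = np.array([3.0, 1.0, 2.0])
logprodsigmas = np.log(np.prod(sigmas))

x = mus + sigmas * np.random.default_rng(0).standard_normal(3)

lhs = multivariate_normal(mean=mus, cov=cov).logpdf(x)
rhs = multivariate_normal(mean=np.zeros(3), cov=corrcoefs).logpdf((x - mus) / sigmas) - logprodsigmas

assert np.isclose(lhs, rhs)   # both evaluations agree up to floating-point error

With scales as disparate as those exercised by test_mvg_cw_scales, constructing multivariate_normal directly from the covariance matrix may fail with a singular-matrix error, which is what the change in add_mode() avoids.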