
Speed up core.prior classes

Merged Liting Xiao requested to merge (removed):speed-up-prior into master
Files changed: 2 (+31 additions, −72 deletions)
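The change replaces the frozen scipy.stats.beta object (self._dist, rebuilt by _set_dist() whenever a bound or shape parameter changed) with direct calls to the scipy.special routines betaln, btdtr, btdtri and xlogy inside the Beta prior's rescale, prob, ln_prob and cdf methods. A rough standalone timing sketch of that idea, not part of the MR (parameter values and sample size are illustrative):

import timeit

import numpy as np
import scipy.stats
from scipy.special import btdtri

alpha, beta, minimum, maximum = 2.5, 3.5, 0.0, 1.0   # illustrative shape/bound values
val = np.random.uniform(0, 1, 1000)

# old path: frozen scipy.stats distribution, ppf() for the inverse CDF
dist = scipy.stats.beta(a=alpha, b=beta, loc=minimum, scale=maximum - minimum)
t_stats = timeit.timeit(lambda: dist.ppf(val), number=1000)

# new path: call the inverse regularized incomplete beta function directly
t_special = timeit.timeit(
    lambda: btdtri(alpha, beta, val) * (maximum - minimum) + minimum, number=1000)

print("scipy.stats ppf: {:.3f} s, scipy.special btdtri: {:.3f} s".format(t_stats, t_special))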
@@ -13,7 +13,7 @@ import scipy.stats
 from scipy.integrate import cumtrapz
 from scipy.interpolate import interp1d
 from scipy.special import erf, erfinv, xlogy, log1p,\
-    gammaln, gammainc, gammaincinv, stdtr, stdtrit
+    gammaln, gammainc, gammaincinv, stdtr, stdtrit, betaln, btdtr, btdtri
 from matplotlib.cbook import flatten
 # Keep import bilby statement, it is necessary for some eval() statements
@@ -1864,16 +1864,14 @@ class Beta(Prior):
         boundary: str
             See superclass
         """
+        super(Beta, self).__init__(minimum=minimum, maximum=maximum, name=name,
+                                   latex_label=latex_label, unit=unit, boundary=boundary)
         if alpha <= 0. or beta <= 0.:
             raise ValueError("alpha and beta must both be positive values")
-        self._alpha = alpha
-        self._beta = beta
-        self._minimum = minimum
-        self._maximum = maximum
-        super(Beta, self).__init__(minimum=minimum, maximum=maximum, name=name,
-                                   latex_label=latex_label, unit=unit, boundary=boundary)
-        self._set_dist()
+        self.alpha = alpha
+        self.beta = beta

     def rescale(self, val):
         """
@@ -1882,9 +1880,7 @@ class Beta(Prior):
         This maps to the inverse CDF. This has been analytically solved for this case.
         """
         self.test_valid_for_rescaling(val)
-        # use scipy distribution percentage point function (ppf)
-        return self._dist.ppf(val)
+        return btdtri(self.alpha, self.beta, val) * (self.maximum - self.minimum) + self.minimum

     def prob(self, val):
         """Return the prior probability of val.
@@ -1897,18 +1893,7 @@ class Beta(Prior):
         -------
         Union[float, array_like]: Prior probability of val
         """
-        spdf = self._dist.pdf(val)
-        if np.all(np.isfinite(spdf)):
-            return spdf
-        # deal with the fact that if alpha or beta are < 1 you get infinities at 0 and 1
-        if isinstance(val, np.ndarray):
-            pdf = np.zeros(len(val))
-            pdf[np.isfinite(spdf)] = spdf[np.isfinite]
-            return spdf
-        else:
-            return 0.
+        return np.exp(self.ln_prob(val))

     def ln_prob(self, val):
         """Returns the log prior probability of val.
@@ -1921,61 +1906,35 @@ class Beta(Prior):
         -------
         Union[float, array_like]: Prior probability of val
         """
+        _ln_prob = xlogy(self.alpha - 1, val - self.minimum) + xlogy(self.beta - 1, self.maximum - val)\
+            - betaln(self.alpha, self.beta) - xlogy(self.alpha + self.beta - 1, self.maximum - self.minimum)
-        spdf = self._dist.logpdf(val)
-        if np.all(np.isfinite(spdf)):
-            return spdf
         # deal with the fact that if alpha or beta are < 1 you get infinities at 0 and 1
         if isinstance(val, np.ndarray):
-            pdf = -np.inf * np.ones(len(val))
-            pdf[np.isfinite(spdf)] = spdf[np.isfinite]
-            return spdf
+            _ln_prob_sub = -np.inf * np.ones(len(val))
+            idx = np.isfinite(_ln_prob) & (val >= self.minimum) & (val <= self.maximum)
+            _ln_prob_sub[idx] = _ln_prob[idx]
+            return _ln_prob_sub
         else:
+            if np.isfinite(_ln_prob) and val >= self.minimum and val <= self.maximum:
+                return _ln_prob
             return -np.inf

     def cdf(self, val):
-        return self._dist.cdf(val)
-
-    def _set_dist(self):
-        self._dist = scipy.stats.beta(
-            a=self.alpha, b=self.beta, loc=self.minimum,
-            scale=(self.maximum - self.minimum))
-
-    @property
-    def maximum(self):
-        return self._maximum
-
-    @maximum.setter
-    def maximum(self, maximum):
-        self._maximum = maximum
-        self._set_dist()
-
-    @property
-    def minimum(self):
-        return self._minimum
-
-    @minimum.setter
-    def minimum(self, minimum):
-        self._minimum = minimum
-        self._set_dist()
-
-    @property
-    def alpha(self):
-        return self._alpha
-
-    @alpha.setter
-    def alpha(self, alpha):
-        self._alpha = alpha
-        self._set_dist()
-
-    @property
-    def beta(self):
-        return self._beta
-
-    @beta.setter
-    def beta(self, beta):
-        self._beta = beta
-        self._set_dist()
+        if isinstance(val, (float, int)):
+            if val > self.maximum:
+                return 1.
+            elif val < self.minimum:
+                return 0.
+            else:
+                return btdtr(self.alpha, self.beta,
+                             (val - self.minimum) / (self.maximum - self.minimum))
+        else:
+            _cdf = np.nan_to_num(btdtr(self.alpha, self.beta,
+                                       (val - self.minimum) / (self.maximum - self.minimum)))
+            _cdf[val < self.minimum] = 0.
+            _cdf[val > self.maximum] = 1.
+            return _cdf
class Logistic(Prior):
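To summarise the last hunk: prob now simply exponentiates ln_prob; ln_prob evaluates the closed-form log-density of a Beta distribution rescaled to [minimum, maximum], ln p(x) = (alpha - 1) ln(x - minimum) + (beta - 1) ln(maximum - x) - ln B(alpha, beta) - (alpha + beta - 1) ln(maximum - minimum), via xlogy and betaln; and cdf evaluates the regularized incomplete beta function with btdtr, clamping values outside the support to 0 and 1 (np.nan_to_num guards array inputs). A small consistency check against the old scipy.stats path, with illustrative values (not part of the diff):

import numpy as np
import scipy.stats
from scipy.special import betaln, btdtr, xlogy

alpha, beta, minimum, maximum = 2.0, 5.0, 0.0, 2.0   # illustrative values
x = np.linspace(minimum, maximum, 101)

# closed-form log-density used by the new ln_prob
ln_p = (xlogy(alpha - 1, x - minimum) + xlogy(beta - 1, maximum - x)
        - betaln(alpha, beta) - xlogy(alpha + beta - 1, maximum - minimum))

dist = scipy.stats.beta(a=alpha, b=beta, loc=minimum, scale=maximum - minimum)
assert np.allclose(np.exp(ln_p), dist.pdf(x))

# CDF via the regularized incomplete beta function, as in the new cdf
cdf = btdtr(alpha, beta, (x - minimum) / (maximum - minimum))
assert np.allclose(cdf, dist.cdf(x))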