Commit 04902daf authored by Colm Talbot's avatar Colm Talbot

add uniform in log prior, reformat uniform prior

parent a2b43c6f
Pipeline #19568 passed with stages
in 5 minutes and 48 seconds
......@@ -123,23 +123,6 @@ class Prior(object):
return label
class Uniform(Prior):
    """Flat prior with constant density between minimum and maximum."""

    def __init__(self, minimum, maximum, name=None, latex_label=None):
        """Set up the bounds and cache the width of the allowed interval."""
        Prior.__init__(self, name, latex_label, minimum, maximum)
        # Width of the interval; the density is 1/support inside it.
        self.support = maximum - minimum

    def rescale(self, val):
        """Map a unit-interval sample onto [minimum, maximum]."""
        Prior.test_valid_for_rescaling(val)
        return self.minimum + val * self.support

    def prob(self, val):
        """Return the prior probability density at val (0 outside the bounds)."""
        # Boolean mask (works element-wise for array input) zeroing the
        # density outside [minimum, maximum].
        inside = (val >= self.minimum) & (val <= self.maximum)
        return 1 / self.support * inside
class DeltaFunction(Prior):
"""Dirac delta function prior, this always returns peak."""
......@@ -191,6 +174,24 @@ class PowerLaw(Prior):
- self.minimum ** (1 + self.alpha))) * in_prior
class Uniform(PowerLaw):
    """Uniform prior, expressed as the alpha = 0 special case of PowerLaw."""

    def __init__(self, minimum, maximum, name=None, latex_label=None):
        """Initialise the bounds and fix the power-law index to zero.

        NOTE(review): this deliberately calls Prior.__init__ directly
        (bypassing PowerLaw.__init__) and then sets alpha — presumably
        because PowerLaw.__init__ takes alpha explicitly; confirm against
        the PowerLaw definition.
        """
        Prior.__init__(self, name, latex_label, minimum, maximum)
        # A power law x**alpha with alpha = 0 is constant, i.e. uniform.
        self.alpha = 0
class LogUniform(PowerLaw):
    """Log-uniform prior: flat in log(x), i.e. a power law with alpha = -1.

    Fix: the original docstring said "Uniform prior", a copy-paste error
    from the Uniform class above — this distribution is uniform in the
    logarithm of the parameter, not in the parameter itself.
    """

    def __init__(self, minimum, maximum, name=None, latex_label=None):
        """Initialise the bounds and fix the power-law index to -1.

        Parameters
        ----------
        minimum, maximum : float
            Bounds of the prior. minimum must be strictly positive for a
            1/x density to be normalisable; a non-positive minimum only
            triggers a warning here, it is not rejected.
        name : str, optional
            Parameter name passed through to Prior.
        latex_label : str, optional
            LaTeX label passed through to Prior.
        """
        Prior.__init__(self, name, latex_label, minimum, maximum)
        # 1/x density <=> uniform in log(x).
        self.alpha = -1
        if self.minimum <= 0:
            logging.warning('You specified a uniform-in-log prior with minimum={}'.format(self.minimum))
class Cosine(Prior):
def __init__(self, name=None, latex_label=None, minimum=-np.pi / 2, maximum=np.pi / 2):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment