Resolve #430 (Add normalisation flag to constrained prior)

Merged: Bruce Edelman requested to merge bruce.edelman/bilby:constraint into master
All threads resolved!
1 file changed: +2 −16
@@ -410,14 +410,7 @@ class PriorDict(dict):
         prob = np.product([self[key].prob(sample[key])
                            for key in sample], **kwargs)
-        ratio = 1
-        outsample = self.conversion_function(sample)
-        # Check if there is a constraint in sample/outsample
-        if (np.any(isinstance([self[key] for key in sample.keys()], Constraint)) or
-                np.any(isinstance([self[key] for key in outsample.keys()], Constraint))):
-            # If constraint exists in keys, caclulate the cached normalization constant
-            ratio = self.normalize_constraint_factor(sample.keys())
+        ratio = self.normalize_constraint_factor(sample.keys())
         if np.all(prob == 0.):
             return prob
         else:
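Worth noting for reviewers: the removed guard could never fire, because `isinstance` is applied to the list comprehension itself rather than to its elements, and a list is never a `Constraint`. A minimal standalone demonstration (the `Constraint` stub here is illustrative, not bilby's class):

```python
import numpy as np

class Constraint:
    # Illustrative stub standing in for bilby.core.prior.Constraint.
    pass

priors = {"a": Constraint(), "b": Constraint()}

# isinstance() sees the *list*, not its elements, so this is always
# False, and np.any() of a scalar False stays False: the old branch
# that computed the normalization constant was dead code.
print(np.any(isinstance([priors[k] for k in priors], Constraint)))  # False

# An element-wise test would need any() over a generator instead:
print(any(isinstance(priors[k], Constraint) for k in priors))       # True
```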
@@ -451,14 +444,7 @@ class PriorDict(dict):
         ln_prob = np.sum([self[key].ln_prob(sample[key])
                           for key in sample], axis=axis)
-        ratio = 1
-        outsample = self.conversion_function(sample)
-        # Check if there is a constraint in sample/outsample
-        if (np.any(isinstance([self[key] for key in sample.keys()], Constraint)) or
-                np.any(isinstance([self[key] for key in outsample.keys()], Constraint))):
-            # If constraint exists in keys, caclulate the cached normalization constant
-            ratio = self.normalize_constraint_factor(sample.keys())
+        ratio = self.normalize_constraint_factor(sample.keys())
         if np.all(np.isinf(ln_prob)):
             return ln_prob
         else:
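With the guard gone, both `prob` and `ln_prob` call `normalize_constraint_factor` unconditionally, so that method needs to be cheap on repeat calls and return 1 when the keys contain no `Constraint`. A rough sketch of how such a cached factor can be estimated (an assumed shape for illustration, not bilby's implementation; the real method and its cache live on `PriorDict`):

```python
import numpy as np

_factor_cache = {}

def normalize_constraint_factor(keys, sample_fn, constraint_fn, n=100000):
    """Estimate 1 / P(constraints satisfied) under the prior, with caching.

    Hypothetical sketch: sample_fn(n) draws n prior samples and
    constraint_fn(samples) returns a boolean array marking which
    samples satisfy every Constraint. With no constraints the
    acceptance fraction is 1 and the factor is exactly 1.
    """
    key = tuple(keys)
    if key not in _factor_cache:
        accepted = np.mean(constraint_fn(sample_fn(n)))
        _factor_cache[key] = 1.0 / accepted
    return _factor_cache[key]

# Example: a unit-square prior with the constraint x + y < 1, which
# keeps half the prior mass, so the factor should come out near 2.
rng = np.random.default_rng(0)
factor = normalize_constraint_factor(
    ["x", "y"],
    sample_fn=lambda n: rng.uniform(size=(n, 2)),
    constraint_fn=lambda s: s.sum(axis=1) < 1,
)
print(factor)  # ~2.0
```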