Skip to content
Snippets Groups Projects
Commit 3f71d89d authored by Bruce Edelman's avatar Bruce Edelman
Browse files

remove unnecessary checks for constraints

parent 83ff1129
No related branches found
No related tags found
1 merge request: !704 — Resolve #430 (Add normalisation flag to constrained prior)
......@@ -410,14 +410,7 @@ class PriorDict(dict):
prob = np.product([self[key].prob(sample[key])
for key in sample], **kwargs)
ratio = 1
outsample = self.conversion_function(sample)
# Check if there is a constraint in sample/outsample
if (np.any(isinstance([self[key] for key in sample.keys()], Constraint)) or
np.any(isinstance([self[key] for key in outsample.keys()], Constraint))):
            # If constraint exists in keys, calculate the cached normalization constant
ratio = self.normalize_constraint_factor(sample.keys())
ratio = self.normalize_constraint_factor(sample.keys())
if np.all(prob == 0.):
return prob
else:
......@@ -451,14 +444,7 @@ class PriorDict(dict):
ln_prob = np.sum([self[key].ln_prob(sample[key])
for key in sample], axis=axis)
ratio = 1
outsample = self.conversion_function(sample)
# Check if there is a constraint in sample/outsample
if (np.any(isinstance([self[key] for key in sample.keys()], Constraint)) or
np.any(isinstance([self[key] for key in outsample.keys()], Constraint))):
            # If constraint exists in keys, calculate the cached normalization constant
ratio = self.normalize_constraint_factor(sample.keys())
ratio = self.normalize_constraint_factor(sample.keys())
if np.all(np.isinf(ln_prob)):
return ln_prob
else:
......
0% loaded — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment