Resolve #430 (Add normalisation flag to constrained prior)

Merged: Bruce Edelman requested to merge bruce.edelman/bilby:constraint into master
All threads resolved!
1 file changed: +32 −2
@@ -3,6 +3,7 @@ from io import open as ioopen
 import json
 import numpy as np
 import os
+from functools import lru_cache
 from future.utils import iteritems
 from matplotlib.cbook import flatten
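
The new import backs the memoization added below. Worth noting: functools.lru_cache memoizes on its positional arguments, which must all be hashable; for a bound method that includes `self`, and PriorDict subclasses dict (unhashable), so in practice a per-instance cache dict may be needed instead. The call sites below therefore pass tuple(sample.keys()) rather than a raw dict_keys view. A minimal sketch of the caching behaviour (the function name acceptance_factor is illustrative, not bilby API):

from functools import lru_cache

@lru_cache()
def acceptance_factor(keys):
    # Expensive work, memoized per distinct key tuple.
    print("computing for", keys)
    return len(keys)

acceptance_factor(("mass_1", "mass_2"))  # computed
acceptance_factor(("mass_1", "mass_2"))  # served from the cache
# acceptance_factor({"mass_1": 1.0}.keys())  # TypeError: dict views are unhashable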
@@ -378,6 +379,19 @@ class PriorDict(dict):
                        if not isinstance(self[key], Constraint)}
         return all_samples

+    @lru_cache()
+    def normalize_constraint_factor(self, keys):
+        min_accept = 50
+        sampling_chunk = 250
+        samples = self.sample_subset(keys=keys, size=sampling_chunk)
+        keep = np.array(self.evaluate_constraints(samples))
+        # Draw further chunks until enough samples satisfy the constraints
+        while np.count_nonzero(keep) < min_accept:
+            new_samples = self.sample_subset(keys=keys, size=sampling_chunk)
+            for key in samples:
+                samples[key] = np.concatenate([samples[key], new_samples[key]])
+            keep = np.array(self.evaluate_constraints(samples))
+        return len(keep) / np.count_nonzero(keep)
+
     def prob(self, sample, **kwargs):
         """
@@ -396,6 +410,14 @@ class PriorDict(dict):
         prob = np.product([self[key].prob(sample[key])
                            for key in sample], **kwargs)
+        ratio = 1
+        outsample = self.conversion_function(sample)
+        # Check if there is a constraint prior on any sample/outsample key
+        if (any(isinstance(self[key], Constraint) for key in sample) or
+                any(key in self and isinstance(self[key], Constraint)
+                    for key in outsample)):
+            # A constraint exists, so fetch the cached normalization constant
+            ratio = self.normalize_constraint_factor(tuple(sample.keys()))
+
         if np.all(prob == 0.):
             return prob
         else:
@@ -407,7 +429,7 @@ class PriorDict(dict):
             else:
                 constrained_prob = np.zeros_like(prob)
                 keep = np.array(self.evaluate_constraints(sample), dtype=bool)
-                constrained_prob[keep] = prob[keep]
+                constrained_prob[keep] = prob[keep] * ratio
                 return constrained_prob

     def ln_prob(self, sample, axis=None):
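
With both prob hunks applied, in-bounds samples are multiplied by the cached factor so the constrained prior still integrates to one. A usage sketch, assuming the PriorDict/Uniform/Constraint API exercised by this diff, hypothetical parameters x and y with a constraint on their sum, and that the memoized lookup succeeds (see the caching note under the first hunk):

import numpy as np
from bilby.core.prior import PriorDict, Uniform, Constraint

def convert(parameters):
    # Derived parameter the Constraint prior applies to.
    parameters['z'] = parameters['x'] + parameters['y']
    return parameters

priors = PriorDict(conversion_function=convert)
priors['x'] = Uniform(0, 1, name='x')
priors['y'] = Uniform(0, 1, name='y')
priors['z'] = Constraint(minimum=0, maximum=1, name='z')

samples = priors.sample_subset(keys=['x', 'y'], size=1000)
p = priors.prob(samples, axis=0)
# Roughly half the unit square satisfies x + y < 1, so the factor is ~2:
# accepted samples get prob ~2, rejected ones exactly 0.
print(p.max())  # ~2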
@@ -429,6 +451,14 @@ class PriorDict(dict):
         ln_prob = np.sum([self[key].ln_prob(sample[key])
                           for key in sample], axis=axis)
+        ratio = 1
+        outsample = self.conversion_function(sample)
+        # Check if there is a constraint prior on any sample/outsample key
+        if (any(isinstance(self[key], Constraint) for key in sample) or
+                any(key in self and isinstance(self[key], Constraint)
+                    for key in outsample)):
+            # A constraint exists, so fetch the cached normalization constant
+            ratio = self.normalize_constraint_factor(tuple(sample.keys()))
+
         if np.all(np.isinf(ln_prob)):
             return ln_prob
         else:
@@ -440,7 +470,7 @@ class PriorDict(dict):
             else:
                 constrained_ln_prob = -np.inf * np.ones_like(ln_prob)
                 keep = np.array(self.evaluate_constraints(sample), dtype=bool)
-                constrained_ln_prob[keep] = ln_prob[keep]
+                constrained_ln_prob[keep] = ln_prob[keep] + np.log(ratio)
                 return constrained_ln_prob

     def rescale(self, keys, theta):
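
The ln_prob change mirrors prob in log space, adding log(ratio) where prob multiplies by ratio, so the two stay consistent. Continuing the sketch above (same hypothetical priors and samples):

ln_p = priors.ln_prob(samples, axis=0)
keep = np.isfinite(ln_p)
# Where samples pass the constraint, ln_prob agrees with log(prob);
# rejected samples are -inf in log space and 0 in probability.
assert np.allclose(ln_p[keep], np.log(p[keep]))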