Commit 9844f808 authored by Kipp Cannon

far.py: populate_prob_of_instruments_given_signal() no longer requires segs

- now that the horizon distances are set to 0 in the histories when the instruments are off, populate_prob_of_instruments_given_signal() no longer requires segment lists to do its job
parent 98f22ba7
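To make the change concrete, here is a small toy sketch (not gstlal code; the instrument names, history values, and the "on" test are invented for illustration) of how a horizon-distance history that records 0 while an instrument is off lets a single marginalization over that history produce instrument-combination probabilities that already include duty cycle, with no segment lists involved:

# Toy illustration only -- not the gstlal implementation.  Horizon-distance
# samples are invented; 0.0 marks an epoch during which the instrument is off.
horizon_history = {
	"H1": [120.0, 115.0, 0.0, 118.0],
	"L1": [100.0,   0.0, 0.0,  95.0],
}

# for each epoch, the combination of instruments that could see a signal
# (here simply "horizon distance > 0"); the zeroed entries drop an instrument
# from the combination, so off-time is accounted for automatically
combos = {}
for i in range(len(horizon_history["H1"])):
	on = frozenset(ifo for ifo, hist in horizon_history.items() if hist[i] > 0.0)
	if on:
		combos[on] = combos.get(on, 0) + 1

# normalize to probabilities over the epochs in which a detection was possible
total = float(sum(combos.values()))
P = dict((combo, count / total) for combo, count in combos.items())
print(P)	# ~{frozenset(['H1', 'L1']): 0.67, frozenset(['H1']): 0.33}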
@@ -146,7 +146,7 @@ if options.verbose:
 	print >>sys.stderr, "total livetime:\n\t%s" % ",\n\t".join("%s = %s s" % (instrument, str(abs(segs))) for instrument, segs in seglists.items())
 # compute the probability of instruments given signal
-coincparamsdistributions.populate_prob_of_instruments_given_signal(segs = seglists, n = 1.0, verbose = options.verbose)
+coincparamsdistributions.populate_prob_of_instruments_given_signal(n = 1.0, verbose = options.verbose)
 # compute the instrument combination counts
 coincparamsdistributions.add_instrument_combination_counts(segs = seglists, verbose = options.verbose)
@@ -115,7 +115,7 @@ if options.verbose:
 	print >>sys.stderr, "total livetime:\n\t%s" % ",\n\t".join("%s = %s s" % (instrument, str(abs(segs))) for instrument, segs in seglists.items())
 # Compute the probability of instruments given signal
-coincparamsdistributions.populate_prob_of_instruments_given_signal(segs = seglists, n = 1.0, verbose = options.verbose)
+coincparamsdistributions.populate_prob_of_instruments_given_signal(n = 1.0, verbose = options.verbose)
 # Compute the intrument combination counts
 coincparamsdistributions.add_instrument_combination_counts(segs = seglists, verbose = options.verbose)
@@ -331,7 +331,7 @@ def get_likelihood_funcs(likelihood_files, options):
 	# Compute the probability of instruments given signal and the
 	# instrument combination counts
 	#
-	coinc_params_distributions.populate_prob_of_instruments_given_signal(segs = seglists, n = 1.0, verbose = options.verbose)
+	coinc_params_distributions.populate_prob_of_instruments_given_signal(n = 1.0, verbose = options.verbose)
 	coinc_params_distributions.add_instrument_combination_counts(segs = seglists, verbose = options.verbose)
 	return coinc_params_distributions.coinc_params, snglcoinc.LnLikelihoodRatio(coinc_params_distributions)
@@ -186,7 +186,7 @@ for n in range(2, len(horizon_distances) + 1):
 pdf = rate.InterpBinnedArray(lnbinnedarray, fill_value = float("-inf"))
 diststats.snr_joint_pdf_cache[key] = pdf, binnedarray, 0
-diststats.populate_prob_of_instruments_given_signal(segs)
+diststats.populate_prob_of_instruments_given_signal()
 #
 # Finished with this class
@@ -477,32 +477,17 @@ class ThincaCoincParamsDistributions(snglcoinc.CoincParamsDistributions):
 	def add_foreground_snrchi_prior(self, n, prefactors_range = (0.01, 0.25), df = 40, inv_snr_pow = 4., verbose = False):
 		self.add_snrchi_prior(self.injection_rates, n, prefactors_range, df, inv_snr_pow = inv_snr_pow, verbose = verbose)
-	def populate_prob_of_instruments_given_signal(self, segs, n = 1., verbose = False):
+	def populate_prob_of_instruments_given_signal(self, n = 1., verbose = False):
 		#
 		# populate instrument combination binning
 		#
-		assert len(segs) > 1
-		assert set(self.horizon_history) <= set(segs)
 		# probability that a signal is detectable by each of the
-		# instrument combinations
+		# instrument combinations. because the horizon distance is
+		# 0'ed when an instrument is off, this marginalization over
+		# horizon distance histories also accounts for duty cycles
 		P = P_instruments_given_signal(self.horizon_history)
-		# multiply by probability that enough instruments are on to
-		# form each of those combinations
-		#
-		# FIXME: if when an instrument is off it has its horizon
-		# distance set to 0 in the horizon history, then this step
-		# will not be needed because the marginalization over
-		# horizon histories will already reflect the duty cycles.
-		P_live = snglcoinc.CoincSynthesizer(segmentlists = segs).P_live
-		for instruments in P:
-			P[instruments] *= sum(sorted(p for on_instruments, p in P_live.items() if on_instruments >= instruments))
-		# renormalize
-		total = sum(sorted(P.values()))
-		for instruments in P:
-			P[instruments] /= total
 		# populate binning from probabilities
 		for instruments, p in P.items():
 			self.injection_rates["instruments"][instruments,] += n * p
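For comparison with the P_live weighting removed above, the same kind of toy history (again with invented numbers, not the gstlal API) shows that the per-instrument live fraction, which snglcoinc.CoincSynthesizer(segmentlists = segs).P_live used to supply from segment lists, is already implicit in a horizon-distance history whose entries are zeroed while an instrument is off; that is why the multiplication and renormalization steps could be dropped:

# Toy illustration only -- not the gstlal implementation.  With the horizon
# distance zeroed while an instrument is off, each instrument's duty cycle can
# be read straight off the history, so no separate segment lists are needed.
horizon_history = {
	"H1": [120.0, 115.0, 0.0, 118.0],
	"L1": [100.0,   0.0, 0.0,  95.0],
}
n_epochs = len(horizon_history["H1"])
live_fraction = dict(
	(ifo, sum(1.0 for d in hist if d > 0.0) / n_epochs)
	for ifo, hist in horizon_history.items()
)
print(live_fraction)	# {'H1': 0.75, 'L1': 0.5}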