Commit e6b5d7a0 authored by Kipp Cannon

inspiral_lr: re-enable P(t) in numerator

- This reverts commit 8d438d4a and fixes the feature.
- The problem was start-up transients in the whitener leading to bad
  PSDs at the start of analysis jobs.  This was never a problem before
  because we were averaging over the entire experiment, but now the
  ranking statistic believes there is a brief period of insanely high
  sensitivity at the start of segments, and anything found during that
  time was given far too high a significance.  The fix is to have
  lloidparts check the n-samples property of the whitener that provides
  the PSD and disregard PSDs until it gets close to the configured
  average-samples value.
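A minimal sketch of the gate this commit adds, assuming a hypothetical
"whitener" handle standing in for the GStreamer whitener element (the
property names "n-samples" and "average-samples" and the 0.3 threshold
are taken from the diff below):

	# Fraction of the configured averaging timescale the whitener has
	# consumed so far.  Until this "stability" exceeds the threshold,
	# the PSD is considered unconverged and is disregarded by reporting
	# a horizon distance of 0, i.e. the instrument is treated as off.
	def psd_is_converged(whitener, threshold = 0.3):
		stability = float(whitener.get_property("n-samples")) / whitener.get_property("average-samples")
		return stability > threshold

For example, with average-samples = 64 (a made-up figure), PSD messages
would be disregarded until n-samples exceeds about 19.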
parent a65b32a7
@@ -291,21 +291,36 @@ class Handler(simplehandler.Handler):
 		if message.type == Gst.MessageType.ELEMENT:
 			if message.get_structure().get_name() == "spectrum":
 				# get the instrument, psd, and timestamp.
-				# NOTE: epoch is used for the timestamp, this
-				# is the middle of the most recent FFT interval
-				# used to obtain this PSD
+				# the "stability" is a measure of the
+				# fraction of the configured averaging
+				# timescale that has been used to yield
+				# this measurement.
+				# NOTE: epoch is used for the timestamp,
+				# this is the middle of the most recent FFT
+				# interval used to obtain this PSD
 				instrument = message.src.get_name().split("_")[-1]
 				psd = pipeio.parse_spectrum_message(message)
 				timestamp = psd.epoch
+				stability = float(message.src.get_property("n-samples")) / message.src.get_property("average-samples")
 				# save
 				self.psds[instrument] = psd
-				# update horizon distance history
-				#
-				# FIXME: get canonical masses from the template bank bin that we're analyzing
-				horizon_distance = reference_psd.HorizonDistance(10.0, 0.85 * (psd.f0 + (len(psd.data.data) - 1) * psd.deltaF), psd.deltaF, 1.4, 1.4)(psd, 8.0)[0]
-				assert not (math.isnan(horizon_distance) or math.isinf(horizon_distance))
+				# update horizon distance history. if the
+				# whitener's average is not satisfactorily
+				# converged, claim the horizon distance is
+				# 0 (equivalent to claiming the instrument
+				# to be off at this time). this has the
+				# effect of vetoing candidates from these
+				# times.
+				if stability > 0.3:
+					# FIXME: get canonical masses from
+					# the template bank bin that we're
+					# analyzing
+					horizon_distance = reference_psd.HorizonDistance(10.0, 0.85 * (psd.f0 + (len(psd.data.data) - 1) * psd.deltaF), psd.deltaF, 1.4, 1.4)(psd, 8.0)[0]
+					assert not (math.isnan(horizon_distance) or math.isinf(horizon_distance))
+				else:
+					horizon_distance = 0.
 				self.record_horizon_distance(instrument, float(timestamp), horizon_distance)
 			return True
 		elif message.type == Gst.MessageType.APPLICATION:
@@ -272,15 +272,15 @@ class LnSignalDensity(LnLRDensity):
 		if len(snrs) < self.min_instruments:
 			return float("-inf")
-		# evaluate SNR PDF. use volume-weighted average horizon
-		# distance over duration of event to estimate sensitivity
+		# use volume-weighted average horizon distance over
+		# duration of event to estimate sensitivity
 		assert all(segments.values()), "encountered trigger with duration = 0"
 		horizons = dict((instrument, (self.horizon_history[instrument].functional_integral(map(float, seg), lambda d: d**3.) / float(abs(seg)))**(1./3.)) for instrument, seg in segments.items())
-		lnP = self.SNRPDF.lnP_instruments(snrs.keys(), horizons, self.min_instruments) + self.SNRPDF.lnP_snrs(snrs, horizons, self.min_instruments)
-		# P(t) \propto volume within which a signal will produce a
-		# candidate \propto cube of distance to which the minimum
-		# required number of instruments can see (I think). we
+		# compute P(t). P(t) \propto volume within which a signal
+		# will produce a candidate * number of trials \propto cube
+		# of distance to which the minimum required number of
+		# instruments can see (I think) * number of templates. we
 		# measure the distance in multiples of 150 Mpc just to get
 		# a number that will be, typically, a little closer to 1,
 		# in lieu of properly normalizing this factor. we can't
@@ -297,11 +297,13 @@
 		# overall scale: ln L = 0 is not a special value, as it
 		# would be if the numerator and denominator were both
 		# normalized properly.
-		# FIXME: disabled for now
-		#horizon = sorted(horizons.values())[-self.min_instruments] / 150.
-		#if not horizon:
-		#	return float("-inf")
-		#lnP += 3. * math.log(sorted(horizons.values())[-self.min_instruments] / 150.)
+		horizon = sorted(horizons.values())[-self.min_instruments] / 150.
+		if not horizon:
+			return float("-inf")
+		lnP = 3. * math.log(horizon) + math.log(len(self.template_ids))
+		# multiply by P(instruments | t) * P(SNRs | t, instruments).
+		lnP += self.SNRPDF.lnP_instruments(snrs.keys(), horizons, self.min_instruments) + self.SNRPDF.lnP_snrs(snrs, horizons, self.min_instruments)
 		# evaluate dt and dphi parameters
 		lnP += inspiral_extrinsics.lnP_dt_dphi_signal(snrs, phase, dt, horizons, self.delta_t)
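To make the re-enabled factor concrete, in the informal notation of the
comments above (D being the horizon distance of the min_instruments-th
most sensitive instrument, and N the number of templates),

	P(t) \propto (volume in which a signal yields a candidate) * (number of trials)
	     \propto D(t)^3 * N

so, up to the unknown normalization the comments mention,

	ln P(t) = 3 ln(D(t) / 150 Mpc) + ln N + const.

which is exactly the form of the lnP assignment added above.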
@@ -349,16 +351,14 @@
 		parameter sets the nominal signal rate in units of Gpc^-3
 		a^-1.
 		"""
-		# FIXME: disabled for now
-		return 1.
 		# FIXME: this needs to understand a mass distribution
 		# model and what part of the mass space this numerator PDF
 		# is for
 		seg = (self.horizon_history.minkey(), self.horizon_history.maxkey())
 		V_times_t = self.horizon_history.functional_integral(seg, lambda horizons: sorted(horizons.values())[-self.min_instruments]**3.)
-		# Mpc**3 --> Gpc**3
-		V_times_t *= 1e-9
-		return V_times_t * rate
+		# Mpc**3 --> Gpc**3, seconds --> years
+		V_times_t *= 1e-9 / (86400. * 365.25)
+		return V_times_t * rate * len(self.template_ids)
 
 	def random_sim_params(self, sim, horizon_distance = None, snr_efficiency = 1.0, coinc_only = True):
 		"""
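As a quick check of the units in the new return value (all numbers here
are made up for illustration, not taken from the source):

	# V_times_t is accumulated in Mpc^3 * s; the diff converts it to
	# Gpc^3 * yr so that multiplying by a rate in Gpc^-3 a^-1 (and by
	# the number of templates) yields a dimensionless expected count.
	V_times_t = 300.**3. * 86400.		# one day at a 300 Mpc horizon, hypothetical
	V_times_t *= 1e-9 / (86400. * 365.25)	# Mpc^3 s --> Gpc^3 yr
	rate = 1000.				# hypothetical rate, Gpc^-3 a^-1
	n_templates = 1				# hypothetical template count
	print(V_times_t * rate * n_templates)	# expected count, ~0.074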