diff --git a/gstlal-inspiral/bin/gstlal_inspiral b/gstlal-inspiral/bin/gstlal_inspiral
index 5c431f58e7da19ede8a5d2a236932d895565e639..2a8c156dd0a36a7516df54957d840252f5585bf8 100755
--- a/gstlal-inspiral/bin/gstlal_inspiral
+++ b/gstlal-inspiral/bin/gstlal_inspiral
@@ -253,6 +253,7 @@ def parse_command_line():
 	parser.add_option_group(group)
 
 	group = OptionGroup(parser, "Ranking Statistic Options", "Adjust ranking statistic behaviour")
+	group.add_option("--cap-singles", action = "store_true", help = "Cap singles to 1 / livetime if computing FAR. No effect otherwise")
 	group.add_option("--chisq-type", metavar = "type", default = "autochisq", help = "Choose the type of chisq computation to perform. Must be one of (autochisq|timeslicechisq). The default is autochisq.")
 	group.add_option("--coincidence-threshold", metavar = "seconds", type = "float", default = 0.005, help = "Set the coincidence window in seconds (default = 0.005 s). The light-travel time between instruments will be added automatically in the coincidence test.")
 	group.add_option("--min-instruments", metavar = "count", type = "int", default = 2, help = "Set the minimum number of instruments that must contribute triggers to form a candidate (default = 2).")
@@ -835,6 +836,7 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
 		tag = options.job_tag,
 		kafka_server = options.output_kafka_server,
 		cluster = True,#options.data_source in ("lvshm", "framexmit"),# If uncommented, we only cluster when running online
+		cap_singles = options.cap_singles,
 		verbose = options.verbose
 	)
 	if options.verbose:
diff --git a/gstlal-inspiral/python/lloidhandler.py b/gstlal-inspiral/python/lloidhandler.py
index 930da3fad40d31e3bfc84fb741306dfdf7e8187b..cd093226604ac13d05a21df935618cdd3f53a43b 100644
--- a/gstlal-inspiral/python/lloidhandler.py
+++ b/gstlal-inspiral/python/lloidhandler.py
@@ -620,7 +620,7 @@ class Handler(simplehandler.Handler):
 	dumps of segment information, trigger files and background
 	distribution statistics.
 	"""
-	def __init__(self, mainloop, pipeline, coincs_document, rankingstat, horizon_distance_func, gracedbwrapper, zerolag_rankingstatpdf_url = None, rankingstatpdf_url = None, ranking_stat_output_url = None, ranking_stat_input_url = None, likelihood_snapshot_interval = None, sngls_snr_threshold = None, tag = "", kafka_server = "10.14.0.112:9092", cluster = False, verbose = False):
+	def __init__(self, mainloop, pipeline, coincs_document, rankingstat, horizon_distance_func, gracedbwrapper, zerolag_rankingstatpdf_url = None, rankingstatpdf_url = None, ranking_stat_output_url = None, ranking_stat_input_url = None, likelihood_snapshot_interval = None, sngls_snr_threshold = None, tag = "", kafka_server = "10.14.0.112:9092", cluster = False, cap_singles = False, verbose = False):
 		"""!
 		@param mainloop The main application's event loop
 		@param pipeline The gstreamer pipeline that is being
@@ -645,6 +645,7 @@ class Handler(simplehandler.Handler):
 		self.likelihood_snapshot_interval = likelihood_snapshot_interval
 		self.likelihood_snapshot_timestamp = None
 		self.cluster = cluster
+		self.cap_singles = cap_singles
 
 		self.gracedbwrapper = gracedbwrapper
 		# FIXME: detangle this
@@ -1093,7 +1094,7 @@ class Handler(simplehandler.Handler):
 			if not self.stream_thinca.push(instrument, [event for event in events if event.ifo == instrument], buf_timestamp):
 				continue
 
-		flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, cluster = self.cluster)
+		flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, cluster = self.cluster, cap_singles = self.cap_singles)
 		self.coincs_document.commit()
 
 		# do GraceDB alerts and update eye candy
@@ -1292,7 +1293,7 @@ class Handler(simplehandler.Handler):
 		# whatever triggers remain in the queues, and processes
 		# them
 
-		flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, flush = True, cluster = self.cluster)
+		flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, flush = True, cluster = self.cluster, cap_singles = self.cap_singles)
 		self.coincs_document.commit()
 
 		# do GraceDB alerts
diff --git a/gstlal-inspiral/python/streamthinca.py b/gstlal-inspiral/python/streamthinca.py
index 170ee6bb22165468fa70c128c0031f4311d86b5e..184f4c4c72f1b5428fc7fec893b0c99ce2f39ef1 100644
--- a/gstlal-inspiral/python/streamthinca.py
+++ b/gstlal-inspiral/python/streamthinca.py
@@ -257,7 +257,7 @@ class StreamThinca(object):
 		return self.time_slide_graph.push(instrument, events, t_complete)
 
-	def pull(self, rankingstat, fapfar = None, zerolag_rankingstatpdf = None, coinc_sieve = None, flush = False, cluster = False, cap_singles = True):
+	def pull(self, rankingstat, fapfar = None, zerolag_rankingstatpdf = None, coinc_sieve = None, flush = False, cluster = False, cap_singles = False):
 		# NOTE: rankingstat is not used to compute the ranking
 		# statistic, it supplies the detector livetime segment
 		# lists to determine which triggers are eligible for
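
For context on what the new --cap-singles flag is meant to do (its help text reads "Cap singles to 1 / livetime if computing FAR"), the following is a minimal, hypothetical sketch of that capping rule. The function name, arguments, and livetime handling are illustrative assumptions and are not code from this patch or from StreamThinca.pull():

# Hypothetical illustration only -- not part of the patch.
# A FAR (false-alarm rate) below 1 / livetime would claim a single-detector
# candidate is rarer than once per analyzed livetime, so the cap floors it there.
def capped_far(far, livetime_seconds, cap_singles, is_single):
	# Assumed inputs: far in Hz, livetime in seconds, and a flag saying
	# whether the candidate is a single-detector event.
	if cap_singles and is_single and far is not None:
		return max(far, 1.0 / livetime_seconds)
	return far

# e.g. with one week of livetime, a single's FAR cannot drop below ~1.65e-6 Hz
print(capped_far(1e-9, 7 * 86400.0, cap_singles = True, is_single = True))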