Skip to content
Snippets Groups Projects
Commit ebdf928b authored by Chad Hanna's avatar Chad Hanna
Browse files

cap_singles: allow command line to control if singles have their far capped to 1 / livetime in an online-like mode

cap_singles: allow command line to control if singles have their far capped to 1 / livetime in an online-like mode
parent e7f5401f
No related branches found
No related tags found
No related merge requests found
...@@ -253,6 +253,7 @@ def parse_command_line(): ...@@ -253,6 +253,7 @@ def parse_command_line():
parser.add_option_group(group) parser.add_option_group(group)
group = OptionGroup(parser, "Ranking Statistic Options", "Adjust ranking statistic behaviour") group = OptionGroup(parser, "Ranking Statistic Options", "Adjust ranking statistic behaviour")
group.add_option("--cap-singles", action = "store_true", help = "Cap singles to 1 / livetime if computing FAR. No effect otherwise")
group.add_option("--chisq-type", metavar = "type", default = "autochisq", help = "Choose the type of chisq computation to perform. Must be one of (autochisq|timeslicechisq). The default is autochisq.") group.add_option("--chisq-type", metavar = "type", default = "autochisq", help = "Choose the type of chisq computation to perform. Must be one of (autochisq|timeslicechisq). The default is autochisq.")
group.add_option("--coincidence-threshold", metavar = "seconds", type = "float", default = 0.005, help = "Set the coincidence window in seconds (default = 0.005 s). The light-travel time between instruments will be added automatically in the coincidence test.") group.add_option("--coincidence-threshold", metavar = "seconds", type = "float", default = 0.005, help = "Set the coincidence window in seconds (default = 0.005 s). The light-travel time between instruments will be added automatically in the coincidence test.")
group.add_option("--min-instruments", metavar = "count", type = "int", default = 2, help = "Set the minimum number of instruments that must contribute triggers to form a candidate (default = 2).") group.add_option("--min-instruments", metavar = "count", type = "int", default = 2, help = "Set the minimum number of instruments that must contribute triggers to form a candidate (default = 2).")
...@@ -835,6 +836,7 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url, ...@@ -835,6 +836,7 @@ for output_file_number, (svd_bank_url_dict, output_url, ranking_stat_output_url,
tag = options.job_tag, tag = options.job_tag,
kafka_server = options.output_kafka_server, kafka_server = options.output_kafka_server,
cluster = True,#options.data_source in ("lvshm", "framexmit"),# If uncommented, we only cluster when running online cluster = True,#options.data_source in ("lvshm", "framexmit"),# If uncommented, we only cluster when running online
cap_singles = options.cap_singles,
verbose = options.verbose verbose = options.verbose
) )
if options.verbose: if options.verbose:
......
...@@ -620,7 +620,7 @@ class Handler(simplehandler.Handler): ...@@ -620,7 +620,7 @@ class Handler(simplehandler.Handler):
dumps of segment information, trigger files and background dumps of segment information, trigger files and background
distribution statistics. distribution statistics.
""" """
def __init__(self, mainloop, pipeline, coincs_document, rankingstat, horizon_distance_func, gracedbwrapper, zerolag_rankingstatpdf_url = None, rankingstatpdf_url = None, ranking_stat_output_url = None, ranking_stat_input_url = None, likelihood_snapshot_interval = None, sngls_snr_threshold = None, tag = "", kafka_server = "10.14.0.112:9092", cluster = False, verbose = False): def __init__(self, mainloop, pipeline, coincs_document, rankingstat, horizon_distance_func, gracedbwrapper, zerolag_rankingstatpdf_url = None, rankingstatpdf_url = None, ranking_stat_output_url = None, ranking_stat_input_url = None, likelihood_snapshot_interval = None, sngls_snr_threshold = None, tag = "", kafka_server = "10.14.0.112:9092", cluster = False, cap_singles = False, verbose = False):
"""! """!
@param mainloop The main application's event loop @param mainloop The main application's event loop
@param pipeline The gstreamer pipeline that is being @param pipeline The gstreamer pipeline that is being
...@@ -645,6 +645,7 @@ class Handler(simplehandler.Handler): ...@@ -645,6 +645,7 @@ class Handler(simplehandler.Handler):
self.likelihood_snapshot_interval = likelihood_snapshot_interval self.likelihood_snapshot_interval = likelihood_snapshot_interval
self.likelihood_snapshot_timestamp = None self.likelihood_snapshot_timestamp = None
self.cluster = cluster self.cluster = cluster
self.cap_singles = cap_singles
self.gracedbwrapper = gracedbwrapper self.gracedbwrapper = gracedbwrapper
# FIXME: detangle this # FIXME: detangle this
...@@ -1093,7 +1094,7 @@ class Handler(simplehandler.Handler): ...@@ -1093,7 +1094,7 @@ class Handler(simplehandler.Handler):
if not self.stream_thinca.push(instrument, [event for event in events if event.ifo == instrument], buf_timestamp): if not self.stream_thinca.push(instrument, [event for event in events if event.ifo == instrument], buf_timestamp):
continue continue
flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, cluster = self.cluster) flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, cluster = self.cluster, cap_singles = self.cap_singles)
self.coincs_document.commit() self.coincs_document.commit()
# do GraceDB alerts and update eye candy # do GraceDB alerts and update eye candy
...@@ -1292,7 +1293,7 @@ class Handler(simplehandler.Handler): ...@@ -1292,7 +1293,7 @@ class Handler(simplehandler.Handler):
# whatever triggers remain in the queues, and processes # whatever triggers remain in the queues, and processes
# them # them
flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, flush = True, cluster = self.cluster) flushed_sngls = self.stream_thinca.pull(self.rankingstat, fapfar = self.fapfar, zerolag_rankingstatpdf = self.zerolag_rankingstatpdf, coinc_sieve = self.rankingstat.fast_path_cut_from_triggers, flush = True, cluster = self.cluster, cap_singles = self.cap_singles)
self.coincs_document.commit() self.coincs_document.commit()
# do GraceDB alerts # do GraceDB alerts
......
...@@ -257,7 +257,7 @@ class StreamThinca(object): ...@@ -257,7 +257,7 @@ class StreamThinca(object):
return self.time_slide_graph.push(instrument, events, t_complete) return self.time_slide_graph.push(instrument, events, t_complete)
def pull(self, rankingstat, fapfar = None, zerolag_rankingstatpdf = None, coinc_sieve = None, flush = False, cluster = False, cap_singles = True): def pull(self, rankingstat, fapfar = None, zerolag_rankingstatpdf = None, coinc_sieve = None, flush = False, cluster = False, cap_singles = False):
# NOTE: rankingstat is not used to compute the ranking # NOTE: rankingstat is not used to compute the ranking
# statistic, it supplies the detector livetime segment # statistic, it supplies the detector livetime segment
# lists to determine which triggers are eligible for # lists to determine which triggers are eligible for
......
0% — Loading, or the page failed to load.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment