From 272b53e76ba9e79161fd056ec74065339a86f8fb Mon Sep 17 00:00:00 2001 From: Chad Hanna <crh184@psu.edu> Date: Mon, 24 Oct 2016 14:43:23 -0700 Subject: [PATCH] gstlal_ll_inspiral_aggregator: allow routes to be specified on the command line --- gstlal-ugly/bin/gstlal_ll_inspiral_aggregator | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/gstlal-ugly/bin/gstlal_ll_inspiral_aggregator b/gstlal-ugly/bin/gstlal_ll_inspiral_aggregator index 043de4a834..ea01c57a6d 100755 --- a/gstlal-ugly/bin/gstlal_ll_inspiral_aggregator +++ b/gstlal-ugly/bin/gstlal_ll_inspiral_aggregator @@ -54,10 +54,11 @@ def parse_command_line(): # directory to put everything in parser.add_argument("--base-dir", action="store", default="aggregator", help="Specify output path") - parser.add_argument("--dump-period", type = float, default = 180., help = "Wait this many seconds between dumps of the URLs (default = 180., set to 0 to disable)") + parser.add_argument("--route", action="append", help="Specify routes to download. Can be given multiple times.") + parser.add_argument("--dump-period", type = float, default = 180., help = "Wait this many seconds between dumps of the URLs (default = 180., set to 0 to disable)") parser.add_argument("--num-jobs", action="store", type=int, default=10, help="number of running jobs") parser.add_argument("--job-tag", help = "Collect URLs for jobs reporting this job tag (default = collect all gstlal_inspiral URLs).") - parser.add_argument("--num-threads", type = int, default = 16, help = "Number of threads to use concurrently") + parser.add_argument("--num-threads", type = int, default = 8, help = "Number of threads to use concurrently, default 8.") args = parser.parse_args() @@ -79,7 +80,7 @@ if __name__ == '__main__': # FIXME don't hardcode some of these? 
datatypes = [("min", min), ("max", max), ("median", aggregator.median)] jobs = ["%04d" % b for b in numpy.arange(0, options.num_jobs)] - routes = ["latency_history", "snr_history"] + routes = options.route if options.route else ["latency_history", "snr_history"] logging.basicConfig(level = logging.INFO, format = "%(asctime)s %(levelname)s:%(processName)s(%(process)d):%(funcName)s: %(message)s") @@ -99,7 +100,6 @@ if __name__ == '__main__': # Then reduce the data across jobs at each level mapargs = [] for start, end in zip(*aggregator.job_expanse(dataspan)): - logging.info("processing reduced data in span [%d,%d] at level %d" % (start, end)) # FIXME don't hardcode this range for level in range(DIRS): this_level_dir = "/".join([options.base_dir, aggregator.gps_to_leaf_directory(start, level = level)]) @@ -110,6 +110,7 @@ mapargs.append((jobs, this_level_dir, typ, route, func, level, start, end)) pool.map(aggregator.reduce_across_jobs, mapargs) #map(aggregator.reduce_across_jobs, mapargs) + logging.info("processed reduced data up to %d" % int(aggregator.now())) # # always end on an error so that condor won't think we're done and will -- GitLab