Commit 272b53e7 authored by Chad Hanna

gstlal_ll_inspiral_aggregator: allow routes to be specified on the command line

parent 6d14d17d
@@ -54,10 +54,11 @@ def parse_command_line():
 # directory to put everything in
 parser.add_argument("--base-dir", action="store", default="aggregator", help="Specify output path")
+parser.add_argument("--route", action="append", help="Specify routes to download. Can be given multiple times.")
 parser.add_argument("--dump-period", type = float, default = 180., help = "Wait this many seconds between dumps of the URLs (default = 180., set to 0 to disable)")
 parser.add_argument("--num-jobs", action="store", type=int, default=10, help="number of running jobs")
 parser.add_argument("--job-tag", help = "Collect URLs for jobs reporting this job tag (default = collect all gstlal_inspiral URLs).")
-parser.add_argument("--num-threads", type = int, default = 16, help = "Number of threads to use concurrently")
+parser.add_argument("--num-threads", type = int, default = 8, help = "Number of threads to use concurrently, default 8.")
 args = parser.parse_args()
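
For reference, a minimal sketch (not part of this commit) of how argparse's action="append" behaves: each occurrence of --route on the command line appends to a list, and the attribute is None when the flag is never given.

# Minimal illustration of argparse's action="append" behavior; not from the commit.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--route", action="append", help="Specify routes to download. Can be given multiple times.")

args = parser.parse_args(["--route", "latency_history", "--route", "snr_history"])
print(args.route)                   # ['latency_history', 'snr_history']
print(parser.parse_args([]).route)  # None when --route is never passed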
@@ -79,7 +80,7 @@ if __name__ == '__main__':
 # FIXME don't hardcode some of these?
 datatypes = [("min", min), ("max", max), ("median", aggregator.median)]
 jobs = ["%04d" % b for b in numpy.arange(0, options.num_jobs)]
-routes = ["latency_history", "snr_history"]
+routes = options.route
 logging.basicConfig(level = logging.INFO, format = "%(asctime)s %(levelname)s:%(processName)s(%(process)d):%(funcName)s: %(message)s")
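
With routes = options.route, running the aggregator without any --route flag leaves routes set to None. A hypothetical guard (not in this commit) could fall back to the previously hardcoded routes:

# Hypothetical fallback, not part of the commit: an append-style option is
# None when never supplied, so the old hardcoded routes could be restored.
route_option = None  # stand-in for options.route when --route is absent
routes = route_option or ["latency_history", "snr_history"]
print(routes)        # ['latency_history', 'snr_history']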
@@ -99,7 +100,6 @@ if __name__ == '__main__':
 # Then reduce the data across jobs at each level
 mapargs = []
 for start, end in zip(*aggregator.job_expanse(dataspan)):
-logging.info("processing reduced data in span [%d,%d] at level %d" % (start, end))
 # FIXME don't hardcode this range
 for level in range(DIRS):
 this_level_dir = "/".join([options.base_dir, aggregator.gps_to_leaf_directory(start, level = level)])
@@ -110,6 +110,7 @@ if __name__ == '__main__':
 mapargs.append((jobs, this_level_dir, typ, route, func, level, start, end))
 pool.map(aggregator.reduce_across_jobs, mapargs)
 #map(aggregator.reduce_across_jobs, mapargs)
+logging.info("processed reduced data up to %d" % int(aggregator.now()))
 #
 # always end on an error so that condor won't think we're done and will
......
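
The last two hunks move the progress logging out of the per-span loop and leave the reductions dispatched through a process pool. A minimal, hypothetical sketch of that fan-out pattern follows; reduce_one, the tuple values, and the directory names are stand-ins, not the real aggregator.reduce_across_jobs API.

# Hypothetical sketch of the fan-out used above: build one argument tuple per
# (jobs, directory, data type, route, ...) combination and map a one-argument
# worker over the list with a process pool. All names here are illustrative.
import multiprocessing

def reduce_one(mapargs):
	# unpack the single tuple argument, mirroring the mapargs layout in the diff
	jobs, level_dir, typ, route, func, level, start, end = mapargs
	# a real worker would read each job's series under level_dir and write the
	# reduced (min/max/median) series for the span [start, end) at this level
	return (route, typ, level, start, end)

if __name__ == '__main__':
	pool = multiprocessing.Pool(2)
	mapargs = [
		(["0000", "0001"], "aggregator/0", "min", "latency_history", min, 0, 0, 10000),
		(["0000", "0001"], "aggregator/1", "max", "snr_history", max, 0, 0, 10000),
	]
	print(pool.map(reduce_one, mapargs))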