diff --git a/gstlal-inspiral/bin/gstlal_ll_inspiral_daily_page b/gstlal-inspiral/bin/gstlal_ll_inspiral_daily_page
index 3eabf04052fc61c791492507a426a49b26b3bf96..67f9366e6385e1e0a86ce4b104e9127d9babdb53 100755
--- a/gstlal-inspiral/bin/gstlal_ll_inspiral_daily_page
+++ b/gstlal-inspiral/bin/gstlal_ll_inspiral_daily_page
@@ -27,141 +27,148 @@ from optparse import OptionParser
 from glue.text_progress_bar import ProgressBar
 from pylal.datatypes import LIGOTimeGPS
 import lal
+from multiprocessing import Pool
 
 def now():
 	return LIGOTimeGPS(lal.UTCToGPS(time.gmtime()), 0)
 
-parser = OptionParser()
-parser.add_option("--directory", default = ".", help = "directory to start looking for results")
-parser.add_option("--injection-file", default = ".", help = "The injection xml file that corresponds to the low latency injections")
-parser.add_option("--web-dir", help = "set the output path to write the ''offline'' style web page to")
-options, massbins = parser.parse_args()
+def process_mass_bin(args):
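+	"""
+	Merge the trigger files for a single mass bin in one result directory
+	into a clustered SQLite database and return the path to that database.
+	"""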
+	massbin, result_dirs, n, d, options, tag, typecode, cluster_file = args
 
+	print >> sys.stderr, "processing mass bin %s, tag %s in directory %d of %d: %s" % (massbin, tag, n+1, len(result_dirs), d)
 
-# FIXME should be more clever than this
-# Match 5 digit directories
-dir_pattern = re.compile('[0-9]{5}')
+	# FIXME don't hard code H1L1
+	# FIXME assumes 10 digit GPS
+	db = os.path.join(os.path.join(options.directory, d), 'H1L1-%s_%s-%s00000-100000.sqlite.tmp' % (massbin, tag, d))
+	if os.path.exists(db):
+		os.remove(db)
+	dbfinal = db.replace(".tmp","")
 
-seg_files = glob.glob('%s/total_cumulative_segments.xml.gz' % options.directory)
+	# FIXME we really have to change this hacky convention for injections to start at 1000
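+	# typecode supplies the leading digit of the trigger file name:
+	# 0 selects the non-injection files, 1 selects the injection files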
+	pattern = re.compile('.*-%d%s_LLOID-.*.xml.gz' % (typecode, massbin[1:]))
 
-noninj_files_to_merge = []
-inj_files_to_merge = []
+	# Merge and cluster every trigger file that matches this mass bin and tag
+	files = sorted([os.path.join(os.path.join(options.directory, d), xml) for xml in os.listdir(os.path.join(options.directory, d)) if pattern.match(xml)])
+	for f in files:
+		subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % db, "%s" % f])
+		subprocess.check_call(["lalapps_run_sqlite", "--tmp-space", "/dev/shm", "--sql-file", cluster_file,  "%s" % db])
 
-# FIXME assume that the correct low latency cluster file is in the working
-# directory. Perhaps this should be a command line argument.
-cluster_file = os.path.join(options.directory, "ll_simplify_and_cluster.sql")
-simplify_file = os.path.join(options.directory, "ll_simplify.sql")
+	# rename the temporary database to its final name
+	if os.path.exists(db):
+		os.rename(db, dbfinal)
 
-result_dirs = sorted([d for d in os.listdir(options.directory) if dir_pattern.match(d)])
+	return dbfinal
 
-for n, d in enumerate(result_dirs):
 
-	for num_massbin, massbin in enumerate(massbins):
-	
-		print >> sys.stderr, "processing mass bin %d of %d in directory %d of %d: %s" % (num_massbin, len(massbins), n+1, len(result_dirs), d)
+if __name__ == '__main__':
 
-		# FIXME don't hard code H1L1
-		# FIXME assumes 10 digit GPS
-		noninjdb = os.path.join(os.path.join(options.directory, d), 'H1L1-%s_ALL_LLOID-%s00000-100000.sqlite.tmp' % (massbin, d))
-		injdb = os.path.join(os.path.join(options.directory, d), 'H1L1-%s_ALL_LLOID_INJ-%s00000-100000.sqlite.tmp' % (massbin, d))
-		noninjdbfinal = noninjdb.replace(".tmp","")
-		injdbfinal = injdb.replace(".tmp","")
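+	# size the worker pool from the GSTLAL_LL_INSPIRAL_DAILY_PAGE_CORES
+	# environment variable, falling back to 2 processes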
+	try:
+		pool = Pool(int(os.environ['GSTLAL_LL_INSPIRAL_DAILY_PAGE_CORES']))
+	except KeyError:
+		pool = Pool(2)
+
+	parser = OptionParser()
+	parser.add_option("--directory", default = ".", help = "directory to start looking for results")
+	parser.add_option("--injection-file", default = ".", help = "The injection xml file that corresponds to the low latency injections")
+	parser.add_option("--web-dir", help = "set the output path to write the ''offline'' style web page to")
+	options, massbins = parser.parse_args()
+
+
+	# FIXME should be more clever than this
+	# Match 5 digit directories
+	dir_pattern = re.compile('[0-9]{5}')
+
+	seg_files = glob.glob('%s/total_cumulative_segments.xml.gz' % options.directory)
+
+	noninj_files_to_merge = []
+	inj_files_to_merge = []
+
+	# FIXME assume that the correct low latency cluster file is in the working
+	# directory. Perhaps this should be a command line argument.
+	cluster_file = os.path.join(options.directory, "ll_simplify_and_cluster.sql")
+	simplify_file = os.path.join(options.directory, "ll_simplify.sql")
+
+	result_dirs = sorted([d for d in os.listdir(options.directory) if dir_pattern.match(d)])
+
+	for n, d in enumerate(result_dirs):
+
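+		# per-directory databases combining all mass bins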
+		noninjdball = os.path.join(os.path.join(options.directory, d), 'H1L1-ALL_LLOID-%s00000-100000.sqlite' % (d,))
+		injdball = os.path.join(os.path.join(options.directory, d), 'H1L1-ALL_LLOID_INJ-%s00000-100000.sqlite' % (d,))
 
 		# see if this directory has been processed and is old enough to not have to worry about it any more
-		if float(now()) - float("%s00000" % d) > 125000 and os.path.exists(noninjdbfinal) and os.path.exists(injdbfinal):
+		if float(now()) - float("%s00000" % d) > 125000 and os.path.exists(injdball) and os.path.exists(noninjdball):
 			print >> sys.stderr, "directory is greater than 125000 seconds old and has already been processed...continuing"
-			noninj_files_to_merge.append(noninjdbfinal)
-			inj_files_to_merge.append(injdbfinal)
+			noninj_files_to_merge.append(noninjdball)
+			inj_files_to_merge.append(injdball)
 			continue
 
-		# FIXME we really have to change this hacky convention for injections to start at 1000
-		non_inj_pattern = re.compile('.*-%s_LLOID-.*.xml.gz' % massbin)
-		inj_pattern = re.compile('.*-1%s_LLOID-.*.xml.gz' % massbin[1:])
-
-		# First do non injections
-		files = sorted([os.path.join(os.path.join(options.directory, d), xml) for xml in os.listdir(os.path.join(options.directory, d)) if non_inj_pattern.match(xml)])
-		progressbar = ProgressBar("Non injection files processed", len(files))
-		for f in files:
-			subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % noninjdb, "%s" % f])
-			subprocess.check_call(["lalapps_run_sqlite", "--tmp-space", "/dev/shm", "--sql-file", cluster_file,  "%s" % noninjdb])
-			progressbar.increment()
-		del progressbar
-
-		# Then injections
-		files = sorted([os.path.join(os.path.join(options.directory, d), xml) for xml in os.listdir(os.path.join(options.directory, d)) if inj_pattern.match(xml)])
-		progressbar = ProgressBar("Injection files processed", len(files))
-		for f in files:
-			subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % injdb, "%s" % f])
-			subprocess.check_call(["lalapps_run_sqlite", "--tmp-space", "/dev/shm", "--sql-file", cluster_file,  "%s" % injdb])
-			progressbar.increment()
-		del progressbar
-
-
-		# rename files
-		for db,dbf in ((noninjdb, noninjdbfinal), (injdb, injdbfinal)):
-			if os.path.exists(db):
-				os.rename(db, dbf)
-
-		if os.path.exists(noninjdbfinal) and os.path.exists(injdbfinal):
-			noninj_files_to_merge.append(noninjdbfinal)
-			inj_files_to_merge.append(injdbfinal)
-
-
-# FIXME only add *new* files
-noninjdb = os.path.join(options.directory, 'H1L1-ALL_LLOID-0-2000000000.sqlite.tmp')
-injdb = os.path.join(options.directory, 'H1L1-ALL_LLOID_INJ-0-2000000000.sqlite.tmp')
-
-if os.path.exists(noninjdb):
-	os.remove(noninjdb)
-if os.path.exists(injdb):
-	os.remove(injdb)
-
-progressbar = ProgressBar("Merge noninjection files", len(noninj_files_to_merge) + len(seg_files))
-for f in noninj_files_to_merge + seg_files:
-	subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % noninjdb, "%s" % f])
-	# Do not cluster! the online analysis doesn't do a global clustering stage!!
-	subprocess.check_call(["lalapps_run_sqlite", "--tmp-space", "/dev/shm", "--sql-file", simplify_file,  "%s" % noninjdb])
+		# Reduce each mass bin in parallel: merge and cluster its trigger files
+		args = ([massbin, result_dirs, n, d, options, "ALL_LLOID", 0, cluster_file] for massbin in massbins)
+		mass_bin_files_to_merge = [f for f in pool.map(process_mass_bin, args) if os.path.exists(f)]
+		args = ([massbin, result_dirs, n, d, options, "ALL_LLOID_INJ", 1, cluster_file] for massbin in massbins)
+		inj_mass_bin_files_to_merge = [f for f in pool.map(process_mass_bin, args) if os.path.exists(f)]
+
+		# Merge the files of this directory
+		subprocess.check_call(["ligolw_sqlite", "--replace", "--verbose", "--database", "%s" % noninjdball] + mass_bin_files_to_merge)
+		subprocess.check_call(["ligolw_sqlite", "--replace", "--verbose", "--database", "%s" % injdball] + inj_mass_bin_files_to_merge)	
+		noninj_files_to_merge.append(noninjdball)
+		inj_files_to_merge.append(injdball)
+
+	# FIXME only add *new* files
+	noninjdb = os.path.join(options.directory, 'H1L1-ALL_LLOID-0-2000000000.sqlite.tmp')
+	injdb = os.path.join(options.directory, 'H1L1-ALL_LLOID_INJ-0-2000000000.sqlite.tmp')
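+	# cumulative databases combining every processed directory plus the segment files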
+
+	if os.path.exists(noninjdb):
+		os.remove(noninjdb)
+	if os.path.exists(injdb):
+		os.remove(injdb)
+
+	progressbar = ProgressBar("Merge noninjection files", len(noninj_files_to_merge) + len(seg_files))
+	for f in noninj_files_to_merge + seg_files:
+		subprocess.check_call(["ligolw_sqlite", "--database", "%s" % noninjdb, "%s" % f])
+		# Do not cluster! the online analysis doesn't do a global clustering stage!!
+		subprocess.check_call(["lalapps_run_sqlite", "--sql-file", simplify_file,  "%s" % noninjdb])
+		progressbar.increment()
+	del progressbar
+
+	progressbar = ProgressBar("Merge injection files", len(inj_files_to_merge) + len(seg_files))
+	for f in inj_files_to_merge + seg_files:
+		subprocess.check_call(["ligolw_sqlite", "--database", "%s" % injdb, "%s" % f])
+		# Do not cluster! the online analysis doesn't do a global clustering stage!!
+		subprocess.check_call(["lalapps_run_sqlite", "--sql-file", simplify_file,  "%s" % injdb])
+		progressbar.increment()
+	del progressbar
+
+	# Find injections: load the injection set, extract the database to XML, run
+	# ligolw_inspinjfind to associate injections with coincidences, then load the result back
+	progressbar = ProgressBar("Find injections", 4)
+	subprocess.check_call(["ligolw_sqlite", "--database", "%s" % injdb, "%s" % options.injection_file])
 	progressbar.increment()
-del progressbar
-
-progressbar = ProgressBar("Merge injection files", len(inj_files_to_merge) + len(seg_files))
-for f in inj_files_to_merge + seg_files:
-	subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % injdb, "%s" % f])
-	# Do not cluster! the online analysis doesn't do a global clustering stage!!
-	subprocess.check_call(["lalapps_run_sqlite", "--tmp-space", "/dev/shm", "--sql-file", simplify_file,  "%s" % injdb])
+	subprocess.check_call(["ligolw_sqlite", "--database", "%s" % injdb, "--extract", "%s.xml" % injdb])
+	progressbar.increment()
+	subprocess.check_call(["ligolw_inspinjfind", "%s.xml" % injdb])
+	progressbar.increment()
+	subprocess.check_call(["ligolw_sqlite", "--database", "%s" % injdb, "--replace", "%s.xml" % injdb])
 	progressbar.increment()
-del progressbar
-
-# Find injections
-progressbar = ProgressBar("Find injections", 4)
-subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % injdb, "%s" % options.injection_file])
-progressbar.increment()
-subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % injdb, "--extract", "%s.xml" % injdb])
-progressbar.increment()
-subprocess.check_call(["ligolw_inspinjfind", "%s.xml" % injdb])
-progressbar.increment()
-subprocess.check_call(["ligolw_sqlite", "--tmp-space", "/dev/shm", "--database", "%s" % injdb, "--replace", "%s.xml" % injdb])
-progressbar.increment()
 
 
-# Make plots and such
-if os.path.exists(os.path.join(options.directory, "plots")):
-	os.rename(os.path.join(options.directory, "plots"), "%s.%s" % (os.path.join(options.directory, "plots"), str(now())))
-os.mkdir(os.path.join(options.directory, "plots"))
+	# Archive any existing plots directory, then make new plots
+	if os.path.exists(os.path.join(options.directory, "plots")):
+		os.rename(os.path.join(options.directory, "plots"), "%s.%s" % (os.path.join(options.directory, "plots"), str(now())))
+	os.mkdir(os.path.join(options.directory, "plots"))
 
-pattern = re.compile("(?P<id>[0-9]{4})_prior.xml.gz")
-for d in os.listdir(options.directory):
-	m = pattern.match(d)
-	if m:
-		subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", os.path.join(options.directory, "plots"), "--user-tag", m.group("id"), "--verbose", d])
-subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", os.path.join(options.directory, "plots"), "--user-tag", "ALL", "--database", noninjdb, "--verbose", "marginalized_likelihood.xml.gz"])
-subprocess.check_call(["gstlal_inspiral_plotsummary", "--tmp-space", "/dev/shm", "--segments-name", "statevectorsegments", "--user-tag", "ALL_LLOID_COMBINED", "--output-dir", "%s" % os.path.join(options.directory, "plots"), "%s" % noninjdb, "%s" % injdb])
+	pattern = re.compile("(?P<id>[0-9]{4})_prior.xml.gz")
+	for d in os.listdir(options.directory):
+		m = pattern.match(d)
+		if m:
+			subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", os.path.join(options.directory, "plots"), "--user-tag", m.group("id"), "--verbose", d])
+	subprocess.check_call(["gstlal_inspiral_plot_background", "--output-dir", os.path.join(options.directory, "plots"), "--user-tag", "ALL", "--database", noninjdb, "--verbose", "marginalized_likelihood.xml.gz"])
+	subprocess.check_call(["gstlal_inspiral_plotsummary", "--segments-name", "statevectorsegments", "--user-tag", "ALL_LLOID_COMBINED", "--output-dir", "%s" % os.path.join(options.directory, "plots"), "%s" % noninjdb, "%s" % injdb])
 
-subprocess.check_call(["gstlal_inspiral_plot_sensitivity", "--output-dir", os.path.join(options.directory, "plots"), "--bin-by-chirp-mass", "--tmp-space", "/dev/shm",  "--zero-lag-database", noninjdb, "--dist-bins", "200", "--bin-by-total-mass",  "--user-tag", "ALL_LLOID_COMBINED", "--include-play",  "--bin-by-mass-ratio",  "--bin-by-mass1-mass2",  "--data-segments-name", "statevectorsegments", injdb])
+	subprocess.check_call(["gstlal_inspiral_plot_sensitivity", "--output-dir", os.path.join(options.directory, "plots"), "--bin-by-chirp-mass", "--zero-lag-database", noninjdb, "--dist-bins", "200", "--bin-by-total-mass",  "--user-tag", "ALL_LLOID_COMBINED", "--include-play",  "--bin-by-mass-ratio",  "--bin-by-mass1-mass2",  "--data-segments-name", "statevectorsegments", injdb])
 
-subprocess.check_call(["gstlal_inspiral_summary_page", "--open-box", "--output-user-tag", "ALL_LLOID_COMBINED",  "--glob-path", "%s" % os.path.join(options.directory, "plots"), "--webserver-dir", options.web_dir, "--title", "gstlal-online"])
+	subprocess.check_call(["gstlal_inspiral_summary_page", "--open-box", "--output-user-tag", "ALL_LLOID_COMBINED",  "--glob-path", "%s" % os.path.join(options.directory, "plots"), "--webserver-dir", options.web_dir, "--title", "gstlal-online"])
 
 
-# copy the working files back
-os.rename(noninjdb, noninjdb.replace(".tmp",""))
-os.rename(injdb, injdb.replace(".tmp",""))
+	# rename the working databases to their final (non-.tmp) names
+	os.rename(noninjdb, noninjdb.replace(".tmp",""))
+	os.rename(injdb, injdb.replace(".tmp",""))