Skip to content
Snippets Groups Projects
Commit 3307ab0e authored by Patrick Godwin's avatar Patrick Godwin Committed by Patrick Godwin
Browse files

port python2 print statements in various scripts/modules

parent f5f294fb
No related branches found
No related tags found
1 merge request!122Assorted fixes
This commit is part of merge request !122. Comments created here will be created in the context of that merge request.
......@@ -125,7 +125,7 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
if options.verbose:
print >>sys.stderr, "Cleaning up exact coincidences"
print("Cleaning up exact coincidences", file=sys.stderr)
# clean up exact coincidences
......@@ -138,7 +138,7 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
cursor.execute('DELETE FROM coinc_event WHERE coinc_event_id == ?', (coinc_event_id,))
if options.verbose:
print >>sys.stderr, "Cleaning up nearby coincidences"
print("Cleaning up nearby coincidences", file=sys.stderr)
# clean up nearby coincidences
for coinc_event_id, in cursor.execute('SELECT coinc_event_id FROM coinc_event WHERE coinc_def_id == ?', nearby_inj_coinc_def_id).fetchall():
......@@ -149,7 +149,7 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
cursor.execute('DELETE FROM coinc_event WHERE coinc_event_id == ?', (coinc_event_id,))
if options.verbose:
print >>sys.stderr, "Cleaning up sim_inspiral sngl_inspiral coincidences"
print("Cleaning up sim_inspiral sngl_inspiral coincidences", file=sys.stderr)
# clean up sim_inspiral sngl_inspiral coincs
for coinc_event_id, in cursor.execute('SELECT coinc_event_id FROM coinc_event WHERE coinc_def_id == ?', inj_trigger_coinc_def_id).fetchall():
......@@ -160,7 +160,7 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
cursor.execute('DELETE FROM coinc_event WHERE coinc_event_id == ?', (coinc_event_id,))
if options.verbose:
print >>sys.stderr, "Cleaning up sim_inspiral coinc_inspiral coincidences"
print("Cleaning up sim_inspiral coinc_inspiral coincidences", file=sys.stderr)
# clean up coinc_inspirals
for coinc_event_id, in cursor.execute('SELECT coinc_event_id FROM coinc_event WHERE coinc_def_id == ?', coinc_inspiral_coinc_def_id).fetchall():
......@@ -192,25 +192,25 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
cursor.execute('DELETE FROM coinc_event WHERE coinc_event_id == ?', (coinc_event_id,))
if options.verbose:
print >>sys.stderr, "Cleaning up coinc_inspiral table"
print("Cleaning up coinc_inspiral table", file=sys.stderr)
# now clean up coinc_inspiral and coinc_event_map tables
cursor.execute('DELETE FROM coinc_inspiral WHERE coinc_event_id NOT IN (SELECT coinc_event_id FROM coinc_event)')
if options.verbose:
print >>sys.stderr, "Cleaning up coinc_event_map table"
print("Cleaning up coinc_event_map table", file=sys.stderr)
cursor.execute('DELETE FROM coinc_event_map WHERE coinc_event_id NOT IN (SELECT coinc_event_id FROM coinc_event)')
if options.verbose:
print >>sys.stderr, "Cleaning up sngl_inspiral table"
print("Cleaning up sngl_inspiral table", file=sys.stderr)
# clean up sngl_inspiral table
cursor.execute('DELETE FROM sngl_inspiral WHERE event_id NOT IN (SELECT event_id FROM coinc_event_map WHERE table_name == "sngl_inspiral")')
if options.verbose:
print >>sys.stderr, "Cleaning up sim_inspiral table"
print("Cleaning up sim_inspiral table", file=sys.stderr)
# finally, clean up the sim_inspiral table
cursor.executemany('DELETE FROM sim_inspiral WHERE simulation_id == ?', [(sim_id,) for sim_id in irrelevant_sim_ids])
if options.verbose:
print >>sys.stderr, "Vacuuming"
print("Vacuuming", file=sys.stderr)
cursor.execute('VACUUM')
# Set process end time
......@@ -218,7 +218,7 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
cursor.execute("UPDATE process SET end_time = ? WHERE process_id == ?", (process.end_time, process.process_id))
if options.verbose:
print >>sys.stderr, "Committing"
print("Committing", file=sys.stderr)
connection.commit()
new_number_coinc_events += int(connection.execute('SELECT count(*) FROM coinc_event').fetchone()[0])
connection.close()
......@@ -230,4 +230,4 @@ for i, sim_key in enumerate(sim_id_dict.keys()):
if original_number_coinc_events != new_number_coinc_events:
raise RuntimeError("Number of entries in coinc_event table in original document does not match the number of entries in the output coinc_event_tables. There were %d entries originally, and %d were output. Something has gone terribly wrong, and the output documents should not be trusted." % (original_number_coinc_events, new_number_coinc_events))
else:
print >>sys.stderr, "Confirmed there were %d entries in the original coinc_event table, and %d entries were written to disk in the new coinc_event tables." % (original_number_coinc_events, new_number_coinc_events)
print("Confirmed there were %d entries in the original coinc_event table, and %d entries were written to disk in the new coinc_event tables." % (original_number_coinc_events, new_number_coinc_events), file=sys.stderr)
......@@ -114,7 +114,7 @@ if rankingstat is not None:
fig = plotfar.plot_snr_chi_pdf(rankingstat, "H1", "background_pdf", options.max_snr)
plotname = "coinc_params_binning%s" % options.output_format
if options.verbose:
print >>sys.stderr, "writing %s" % plotname
print("writing %s" % plotname, file=sys.stderr)
fig.savefig(plotname)
......
......@@ -284,7 +284,7 @@ zerolag_ln_likelihood_ratios = load_search_results(filenames, ln_likelihood_rati
if options.verbose:
print >>sys.stderr, "calculating rate posteriors using %d likelihood ratios ..." % len(zerolag_ln_likelihood_ratios)
print("calculating rate posteriors using %d likelihood ratios ..." % len(zerolag_ln_likelihood_ratios), file=sys.stderr)
kwargs = {}
if options.chain_file is not None:
kwargs["chain_file"] = h5py.File(options.chain_file)
......@@ -304,17 +304,17 @@ signal_rate_ln_pdf, noise_rate_ln_pdf = rate_estimation.calculate_rate_posterior
if options.credible_intervals:
if options.verbose:
print >>sys.stderr, "determining credible intervals ..."
print("determining credible intervals ...", file=sys.stderr)
credible_intervals = dict((cred, rate_estimation.confidence_interval_from_lnpdf(signal_rate_ln_pdf, cred)) for cred in options.credible_intervals)
else:
credible_intervals = None
if options.verbose and credible_intervals is not None:
print >>sys.stderr, "rate posterior mean = %g signals/experiment" % rate_estimation.mean_from_lnpdf(signal_rate_ln_pdf)
print >>sys.stderr, "rate posterior median = %g signals/experiment" % rate_estimation.median_from_lnpdf(signal_rate_ln_pdf)
print("rate posterior mean = %g signals/experiment" % rate_estimation.mean_from_lnpdf(signal_rate_ln_pdf), file=sys.stderr)
print("rate posterior median = %g signals/experiment" % rate_estimation.median_from_lnpdf(signal_rate_ln_pdf), file=sys.stderr)
# all modes are the same, pick one and report it
print >>sys.stderr, "maximum-likelihood rate = %g signals/experiment" % list(credible_intervals.values())[0][0]
print("maximum-likelihood rate = %g signals/experiment" % list(credible_intervals.values())[0][0], file=sys.stderr)
for cred, (mode, lo, hi) in sorted(credible_intervals.items()):
print >>sys.stderr, "%g%% credible interval = [%g, %g] signals/experiment" % (cred * 100., lo, hi)
print("%g%% credible interval = [%g, %g] signals/experiment" % (cred * 100., lo, hi), file=sys.stderr)
#
......@@ -334,8 +334,8 @@ ligolw_utils.write_filename(xmldoc, filename, verbose = options.verbose)
fig = plot_rates(signal_rate_ln_pdf, credible_intervals = credible_intervals)
for filename in ("rate_posteriors.png", "rate_posteriors.pdf"):
if options.verbose:
print >>sys.stderr, "writing %s ..." % filename
print("writing %s ..." % filename, file=sys.stderr)
fig.savefig(filename)
if options.verbose:
print >>sys.stderr, "done"
print("done", file=sys.stderr)
......@@ -75,7 +75,7 @@ def parse_command_line():
try:
svd_banks = [inspiral.parse_svdbank_string(svdbank) for svdbank in options.svd_bank]
except ValueError as e:
print "Unable to parse svd banks"
print("Unable to parse svd banks")
raise
options.likelihood_files= []
......
......@@ -78,8 +78,8 @@ def parse_command_line():
options, process_params, segs = parse_command_line()
if options.verbose:
print >> sys.stderr, "Livetime: ", abs(segs.extent_all())
print >> sys.stderr, "Extent: ", segs.extent_all()
print("Livetime: ", abs(segs.extent_all()), file=sys.stderr)
print("Extent: ", segs.extent_all(), file=sys.stderr)
like_cpd, like_rd = far.parse_likelihood_control_doc(ligolw_utils.load_filename(options.background_ranking_file, verbose = options.verbose, contenthandler = far.RankingStat.LIGOLWContentHandler))
......@@ -104,7 +104,7 @@ if not options.skip_seg_and_rd_reset:
zlike_rd.zero_lag_lr_lnpdf.array *= num_zero_lag / zlike_rd.zero_lag_lr_lnpdf.array.sum()
# make the counts integers
if options.verbose:
print >> sys.stderr, "replacing with %f zero lag counts" % zlike_rd.zero_lag_lr_lnpdf.array.sum()
print("replacing with %f zero lag counts" % zlike_rd.zero_lag_lr_lnpdf.array.sum(), file=sys.stderr)
# write out the background file
......
......@@ -559,7 +559,7 @@ class DataBaseSummary(object):
for f in filelist:
if verbose:
print >> sys.stderr, "Gathering stats from: %s...." % (f,)
print("Gathering stats from: %s...." % (f,), file=sys.stderr)
with dbtables.workingcopy(f, tmp_path = tmp_path, discard = True, verbose = verbose) as working_filename:
connection = sqlite3.connect(str(working_filename))
xmldoc = dbtables.get_xml(connection)
......@@ -612,7 +612,7 @@ class DataBaseSummary(object):
segments_to_consider_for_these_injections = self.this_injection_segments.intersection(instruments_set) - self.this_injection_segments.union(set(self.this_injection_segments.keys()) - instruments_set)
found, total, missed = get_min_far_inspiral_injections(connection, segments = segments_to_consider_for_these_injections, table_name = self.table_name)
if verbose:
print >> sys.stderr, "%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed))
print("%s total injections: %d; Found injections %d: Missed injections %d" % (instruments, len(total), len(found), len(missed)), file=sys.stderr)
self.found_injections_by_instrument_set.setdefault(instruments_set, []).extend(found)
self.total_injections_by_instrument_set.setdefault(instruments_set, []).extend(total)
self.missed_injections_by_instrument_set.setdefault(instruments_set, []).extend(missed)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment