Commit 0fddc7a5 authored by Duncan Macleod's avatar Duncan Macleod Committed by Adam Mercer

lalinference: use range instead of xrange

parent 6da92340
......@@ -22,6 +22,8 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
from six.moves import range
import matplotlib
matplotlib.use('Agg') #sets backend to not need to open windows
......@@ -128,7 +130,7 @@ def downsample_and_evidence(data_hdf5, deltaLogP=None, fixedBurnin=None, nDownsa
'Temperature ladder spread over multiple hdf5 files. Use cbcBayesCombinePTMCMCh5s.py to combine.'
highTchains = []
for i in xrange(1,int(posterior_samples['nTemps'][0])):
for i in range(1,int(posterior_samples['nTemps'][0])):
ps, samps = peparser.parse(data_hdf5, deltaLogP=deltaLogP, fixedBurnins=fixedBurnin,
nDownsample=nDownsample, tablename='chain_'+str('%02.f' %i))
highTchains.append(apt.Table(samps, names=ps))
......@@ -140,7 +142,7 @@ def downsample_and_evidence(data_hdf5, deltaLogP=None, fixedBurnin=None, nDownsa
betas[0] = 1./np.median(posterior_samples['temperature'])
logls[0] = np.median(posterior_samples['logl'])
for i in xrange(len(highTchains)):
for i in range(len(highTchains)):
betas[i+1] = 1./np.median(highTchains[i]['temperature'])
logls[i+1] = np.median(highTchains[i]['logl'])
......@@ -185,7 +187,7 @@ def weight_and_combine(pos_chains, verbose=False):
log_evs = np.zeros(len(pos_chains))
log_noise_evs = np.zeros_like(log_evs)
for i in xrange(len(pos_chains)):
for i in range(len(pos_chains)):
log_evs[i] = pos_chains[i]['chain_log_evidence'][0]
log_noise_evs[i] = pos_chains[i]['chain_log_noise_evidence'][0]
if verbose: print('Computed log_evidences: %s'%(str(log_evs)))
......@@ -195,14 +197,14 @@ def weight_and_combine(pos_chains, verbose=False):
fracs=[np.exp(log_ev-max_log_ev) for log_ev in log_evs]
if verbose: print('Relative weights of input files: %s'%(str(fracs)))
Ns=[fracs[i]/len(pos_chains[i]) for i in xrange(len(fracs))]
Ns=[fracs[i]/len(pos_chains[i]) for i in range(len(fracs))]
Ntot=max(Ns)
fracs=[n/Ntot for n in Ns]
if verbose: print('Relative weights of input files taking into account their length: %s'%(str(fracs)))
final_posterior = pos_chains[0][np.random.uniform(size=len(pos_chains[0]))<fracs[0]]
for i in xrange(1,len(pos_chains)):
for i in range(1,len(pos_chains)):
final_posterior = apt.vstack([final_posterior,
pos_chains[i][np.random.uniform(size=len(pos_chains[i]))<fracs[i]]])
......@@ -269,7 +271,7 @@ if __name__ == '__main__':
chain_posteriors = []
for i in xrange(len(datafiles)):
for i in range(len(datafiles)):
chain_posteriors.append(downsample_and_evidence(datafiles[i],
deltaLogP=opts.deltaLogP, fixedBurnin=fixedBurnins[i], nDownsample=opts.downsample, verbose=opts.verbose))
......
......@@ -30,6 +30,7 @@
Populate a sim_inspiral table with random draws from an ASCII table.
"""
from optparse import Option, OptionParser
from six.moves import range
import numpy as np
from glue.ligolw import ligolw
from glue.ligolw import lsctables
......@@ -176,7 +177,7 @@ if __name__ == "__main__":
injections['time_geocent_gps_ns'] = np.modf(est_time)[0] * 10**9
# Populate structured array
injections['waveform'] = [opts.approx for i in xrange(N)]
injections['waveform'] = [opts.approx for i in range(N)]
try:
injections['frequency'] = samples['frequency']
except:
......@@ -205,7 +206,7 @@ if __name__ == "__main__":
xmldoc.childNodes[0].appendChild(sim_table)
# Add empty rows to the sim_inspiral table
for inj in xrange(N):
for inj in range(N):
row = sim_table.RowType()
for slot in row.__slots__: setattr(row, slot, 0)
sim_table.append(row)
......
......@@ -26,6 +26,7 @@
Populate a sim_inspiral table with random draws from an ASCII table.
"""
from optparse import Option, OptionParser
from six.moves import range
import numpy as np
from glue.ligolw import ligolw
from glue.ligolw import lsctables
......@@ -249,11 +250,11 @@ if __name__ == "__main__":
else: # executed if no exception is raised
print(('f_low given in both input file and command line.'
' Using command line argument: %r' % opts.flow))
flow = [opts.flow for i in xrange(N)]
flow = [opts.flow for i in range(N)]
# Populate structured array
injections['waveform'] = [opts.approx for i in xrange(N)]
injections['taper'] = [opts.taper for i in xrange(N)]
injections['waveform'] = [opts.approx for i in range(N)]
injections['taper'] = [opts.taper for i in range(N)]
injections['f_lower'] = flow
injections['mchirp'] = mc
injections['eta'] = eta
......@@ -273,8 +274,8 @@ if __name__ == "__main__":
injections['spin2x'] = s2x
injections['spin2y'] = s2y
injections['spin2z'] = s2z
injections['amp_order'] = [opts.amporder for i in xrange(N)]
injections['numrel_data'] = [ "" for _ in xrange(N)]
injections['amp_order'] = [opts.amporder for i in range(N)]
injections['numrel_data'] = [ "" for _ in range(N)]
# Create a new XML document
xmldoc = ligolw.Document()
......@@ -283,7 +284,7 @@ if __name__ == "__main__":
xmldoc.childNodes[0].appendChild(sim_table)
# Add empty rows to the sim_inspiral table
for inj in xrange(N):
for inj in range(N):
row = sim_table.RowType()
for slot in row.__slots__: setattr(row, slot, 0)
sim_table.append(row)
......
......@@ -19,6 +19,7 @@ from itertools import permutations
import shutil
import numpy as np
import math
from six.moves import range
# We use the GLUE pipeline utilities to construct classes for each
# type of job. Each class has inputs and outputs, which are used to
......@@ -1240,7 +1241,7 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
combinenodes[i].set_pos_output_file(input_file[:input_file_split_index]+'combine_'+input_file[input_file_split_index:])
combinenodes[i].add_var_arg(input_file)
number_of_mpi_jobs = self.config.getint('mpi','mpi_task_count')
for j in xrange(1,number_of_mpi_jobs):
for j in range(1,number_of_mpi_jobs):
combinenodes[i].add_var_arg(input_file+".%02d" % j)
self.add_node(combinenodes[i])
mergenode=MergeNode(self.merge_job,parents=combinenodes,engine='mcmc')
......
......@@ -19,6 +19,8 @@ Module of routines to compute an effective Fisher matrix and related utilities,
such as finding a region of interest and laying out a grid over it
"""
from six.moves import range
from . import lalsimutils as lsu
import numpy as np
from scipy.optimize import leastsq, brentq
......@@ -61,7 +63,7 @@ def evaluate_ip_on_grid(hfSIG, P, IP, param_names, grid):
Npts = len(grid)
assert len(grid[0])==Nparams
return np.array([update_params_ip(hfSIG, P, IP, param_names, grid[i])
for i in xrange(Npts)])
for i in range(Npts)])
def update_params_ip(hfSIG, P, IP, param_names, vals):
......@@ -276,7 +278,7 @@ def multi_dim_flatgrid(*arrs):
[2,4,6,2,4,6,2,4,6]
"""
outarrs = multi_dim_meshgrid(*arrs)
return tuple([ outarrs[i].flatten() for i in xrange(len(outarrs)) ])
return tuple([ outarrs[i].flatten() for i in range(len(outarrs)) ])
def multi_dim_grid(*arrs):
"""
......
......@@ -25,6 +25,8 @@ import copy
import types
from math import cos, sin, sqrt
from six.moves import range
import numpy as np
from scipy import interpolate
from scipy import signal
......@@ -335,7 +337,7 @@ class InnerProduct(object):
WFD.data.data[:] = np.sqrt(self.weights) # W_FD is 1/sqrt(S_n(f))
WFD.data.data[0] = WFD.data.data[-1] = 0. # zero 0, f_Nyq bins
lal.REAL8FreqTimeFFT(WTD, WFD, revplan) # IFFT to TD
for i in xrange(N_spec/2, self.len2side - N_spec/2):
for i in range(N_spec/2, self.len2side - N_spec/2):
WTD.data.data[i] = 0. # Zero all but T_spec/2 ends of W_TD
lal.REAL8TimeFreqFFT(WFD, WTD, fwdplan) # FFT back to FD
WFD.data.data[0] = WFD.data.data[-1] = 0. # zero 0, f_Nyq bins
......
......@@ -15,6 +15,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import math
from six.moves import range
import numpy
from lal.rate import BinnedDensity, NDBins, IrregularBins, LinearBins
......@@ -79,7 +81,7 @@ def get_adaptive_binning(samples, edges, nbins=100, bintype='linear'):
ordering = numpy.argsort(samples)
stride = len(samples) / nbins
bins = [samples[ordering[i]] for i in xrange(stride, nbins*stride, stride)]
bins = [samples[ordering[i]] for i in range(stride, nbins*stride, stride)]
bins.insert(0, edges[0])
bins.append(edges[1])
return BinnedArray(NDBins((IrregularBins(bins),)))
......
......@@ -48,6 +48,7 @@ from sys import exit, stdout, version_info, float_info
from time import time
from numpy import sqrt, array, empty, hstack, min, max, reshape, shape, loadtxt, vstack, append, arange, random, column_stack, concatenate, savetxt, log, exp, size, zeros, argmax, argsort, sort, sum, subtract, array_split
from re import findall
from six.moves import range
py_version = version_info[:2]
if py_version < (2, 7):
......@@ -182,7 +183,7 @@ def TigerPostProcess(configfile):
# FETCHING DATA
tigerruns = []
for i in xrange(len(fetch_type)):
for i in range(len(fetch_type)):
if fetch_type[i] == 'source':
# READING LOCATION FILES
locfp = open(fetch_loc[i],'r')
......@@ -204,7 +205,7 @@ def TigerPostProcess(configfile):
# SAVING DATA BEFORE CUT
html_files_sources = []
stdout.write('Saving data\n')
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
html_files_sources.append(path.join(localdest,tigerruns[i].label+'.pickle'))
tigerruns[i].savetopickle(html_files_sources[-1])
stdout.write('... Saving data: %s\n' % html_files_sources[-1])
......@@ -225,7 +226,7 @@ def TigerPostProcess(configfile):
# SAVING DATA AFTER CUT
#print 'Saving data - after cut'
#for i in xrange(len(tigerruns)):
#for i in range(len(tigerruns)):
#html_files_sources.append(path.join(localdest,tigerruns[i].label+'_cut'+'.pickle'))
#tigerruns[i].savetopickle(html_files_sources[-1])
#html_files_sources.append(path.join(localdest,tigerruns[i].label+'_cut'+'.dat'))
......@@ -305,7 +306,7 @@ def TigerPostProcess(configfile):
html_files_cumfreq = [[]]*len(tigerruns)
if plot_cumfreq == True:
stdout.write("... Cumulative frequency\n")
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
html_files_cumfreq[i] = []
clf()
fig = figure()
......@@ -321,11 +322,11 @@ def TigerPostProcess(configfile):
stdout.write("... Cumulative Bayes\n")
if plot_cumbayes == True:
N_sources_cats = array(N_sources[N_sources!=1])
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
html_files_cumbayes[i] = [[]]*len(N_sources_cats)
for j in xrange(len(N_sources_cats)):
for j in range(len(N_sources_cats)):
html_files_cumbayes[i][j] = []
for k in xrange(tigerruns[i].nsources/N_sources_cats[j]):
for k in range(tigerruns[i].nsources/N_sources_cats[j]):
clf()
fig = figure()
ax = fig.add_subplot(111)
......@@ -391,11 +392,11 @@ def TigerPostProcess(configfile):
htmltxt+='</tr>'
background = 0
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
if i != background: # hardcoded for the time being
htmltxt+='<tr>'
htmltxt+='<td>'+tigerruns[i].label+'</td>'
for j in xrange(len(N_sources)):
for j in range(len(N_sources)):
if i < background:
effstr="%.2f "%html_data_efficiencies[j,i,0]+' | '+ '%.2f'% html_data_efficiencies[j,i,1]
else:
......@@ -417,8 +418,8 @@ def TigerPostProcess(configfile):
<td>p value</td>
</tr>
"""
for i in xrange(len(tigerruns)-1):
for j in xrange(len(N_sources)):
for i in range(len(tigerruns)-1):
for j in range(len(N_sources)):
htmltxt+='<tr>'
htmltxt+='<td>'+'N='+str(N_sources[j])+'</td>'
htmltxt+='<td>'+'%.2f'%html_data_ks[i][j][0]+'</td>'
......@@ -451,7 +452,7 @@ def TigerPostProcess(configfile):
"""
<h2>Individual runs</h2>
"""
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
htmltxt+="<a href='"+tigerruns[i].label+".html"+"'>"+tigerruns[i].label+"</a>&nbsp;&nbsp;&nbsp;&nbsp;"
# CLOSE
......@@ -462,7 +463,7 @@ def TigerPostProcess(configfile):
"""
# CREATE PAGES FOR INIDVIDUAL RUNS
for i in xrange(len(tigerruns)):
for i in range(len(tigerruns)):
indhtmlfile=open(path.join(localdest,tigerruns[i].label+'.html'), 'w')
indhtmltxt=\
"""
......@@ -494,7 +495,7 @@ def TigerPostProcess(configfile):
<h2>Cumulative Bayes factors (per catalog)</h2>
"""
N_sources_cats = array(N_sources[N_sources!=1])
for j in xrange(len(html_files_cumbayes[i])):
for j in range(len(html_files_cumbayes[i])):
indhtmltxt+="<h2>"+str(N_sources_cats[j])+" sources per catalog"+"</h2>"
for item in html_files_cumbayes[i][j]:
if ".png" in item:
......@@ -610,7 +611,7 @@ class TigerRun:
# GET SNR FILES
#snrfilename = "snr_%s_%.1f.dat"%(self.bayesfiles[k].split('_')[1],int(self.gps[k]))
#self.snrfiles = array(["snr_%s_%.1f.dat"%(self.bayesfiles[k].split('_')[1],int(self.gps[k])) for k in xrange(len(self.bayesfiles))])
#self.snrfiles = array(["snr_%s_%.1f.dat"%(self.bayesfiles[k].split('_')[1],int(self.gps[k])) for k in range(len(self.bayesfiles))])
#self.snrfiles = array(["snr_%s_%.1f.dat"%(x,int(y)) for x,y in zip(self.detectors, self.gps)])
self.snrfiles = array([k.replace('_','-').replace('-B.txt','_snr.txt').replace('posterior','lalinferencenest-%s'%(k.split('-')[1].split('.')[0])).replace('%s'%(k.split('-')[0].split('_')[-1]),'%.1f'%(float(k.split('-')[0].split('_')[-1]))) for k in self.bayesfiles])
#self.snrfiles = array(["snr_H1L1V1_"+item+".0.dat" for item in self.gps])
......@@ -673,7 +674,7 @@ class TigerRun:
self.snr = zeros(self.nsources)
return
count = 0
for k in xrange(len(self.gps)):
for k in range(len(self.gps)):
ndet = len(self.detectors[k])/2
if ndet == 1:
self.snr.append(float(snrrawdata[count+ndet-1].strip('%s:'%(self.detectors[k])).strip()))
......@@ -755,7 +756,7 @@ class TigerSet:
# CALCULATING SUBHYPOTHESES
self.testcoef = testcoef # testing coefficients
self.subhyp=[]
for i in xrange(len(self.testcoef)):
for i in range(len(self.testcoef)):
s = combinations(testcoef, i+1)
self.subhyp.extend(["".join(j) for j in s])
self.subhyp.insert(0,'GR')
......@@ -824,7 +825,7 @@ class TigerSet:
CALCULATE THE TOTAL ODDS FROM DATA STRUCTURE
"""
odds = empty(0)
for i in xrange(self.nsources/N):
for i in range(self.nsources/N):
odds = append(odds,OddsFromBayes(self.bayes[i*N:(i+1)*N,:], len(self.testcoef)))
return odds
......@@ -973,7 +974,7 @@ def OddsFromBayes(matrix, Nhyp):
TotalOdds = -float_info.max
# ONE SMALLER THAN THE WIDTH OF THE MATRIX FOR GR
for j in xrange(shape(matrix)[1]-1):
for j in range(shape(matrix)[1]-1):
TotalOdds = lnPLUS(TotalOdds, sum(matrix[:, j+1]-matrix[:, 0]))
TotalOdds -= log(pow(2,Nhyp)-1)
......@@ -983,7 +984,7 @@ def TigerCreateHistogram(list_tigerruns, axis, N=1, xlim=None,bins=50):
"""
CREATE TIGER HISTOGRAMS FROM A LIST OF TIGERRUNS
"""
for i in xrange(len(list_tigerruns)):
for i in range(len(list_tigerruns)):
if N == 1:
# PLOT HISTOGRAMS
axis.hist(list_tigerruns[i].odds(N), \
......@@ -1026,7 +1027,7 @@ def TigerCalculateEfficiency(list_tigerruns, N=1, beta=[0.95], background=0):
efficiencies = []
OddsBeta=[mquantiles(list_tigerruns[background].odds(N),prob=[b]) for b in beta]
efficiencies = empty((len(list_tigerruns)-1,len(beta)))
for i in xrange(len(list_tigerruns)):
for i in range(len(list_tigerruns)):
if N>list_tigerruns[i].nsources:
stdout.write("... Warning: Not sufficient events (%s) to calculate the efficiency for %s sources. Writing zeros\n"%(list_tigerruns[i].nsources,N))
if i < background:
......@@ -1036,7 +1037,7 @@ def TigerCalculateEfficiency(list_tigerruns, N=1, beta=[0.95], background=0):
continue
if i != background:
tmp = list_tigerruns[i].odds(N)
for j in xrange(len(OddsBeta)):
for j in range(len(OddsBeta)):
msk = tmp>OddsBeta[j]
nmsk = tmp<OddsBeta[j]
nabovebeta=len(tmp[msk])
......@@ -1070,7 +1071,7 @@ def TigerCreateSNRvsOdds(list_tigerruns, axis):
axis.set_xlabel("$\mathrm{SNR}$")
axis.set_ylabel("$\ln{O_{\mathrm{GR}}^{\mathrm{modGR}}}$")
for i in xrange(len(list_tigerruns)):
for i in range(len(list_tigerruns)):
axis.scatter(list_tigerruns[i].snr, list_tigerruns[i].odds(), \
color=colors[i], \
marker=markers[i], \
......@@ -1095,12 +1096,12 @@ def TigerCreateCumFreq(tigerrun, axis):
nsnrsteps = len(snrrange)
freqs = zeros((nsnrsteps,tigerrun.nsubhyp))
for i in xrange(nsnrsteps):
for i in range(nsnrsteps):
if snrrange[i] > min(tigerrun.snr):
snrmask = tigerrun.snr<snrrange[i]
bayesmasked = tigerrun.bayes[snrmask,:]
maxarray = list(argmax(bayesmasked, axis=1))
for j in xrange(tigerrun.nsubhyp):
for j in range(tigerrun.nsubhyp):
freqs[i,j] = maxarray.count(j)
# CHECK IF TOTAL ADDS UP TO TOTAL NUMBER OF SOURCES
......@@ -1113,7 +1114,7 @@ def TigerCreateCumFreq(tigerrun, axis):
markerstyles = cycle(['+','*','.','<','d', '^', 'x', 's'])
colorstyles = cycle(["b", "g","r","c","m","y","k"])
for i in xrange(tigerrun.nsubhyp):
for i in range(tigerrun.nsubhyp):
if tigerrun.subhyp[i].split("_")[-1] =="GR":
axis.plot(snrrange, freqs[:,i], label='$\mathcal{H}_{\mathrm{GR}}$', color="black", linewidth=3.0)
else:
......@@ -1155,8 +1156,8 @@ def TigerCreateCumBayes(tigerrun,axis,nthcat,nsourcespercat):
sortorder = argsort(snrcat)
snrcatsorted = sort(snrcat)
bayescatsorted = bayescat[sortorder,:]
cumodds = array([OddsFromBayes(bayescatsorted[:i+1,:],len(tigerrun.testcoef)) for i in xrange(nsourcespercat)])
cumbayes = array([sum(subtract(bayescatsorted[:i+1,1:].T,bayescatsorted[:i+1,0]).T,axis=0) for i in xrange(nsourcespercat)])
cumodds = array([OddsFromBayes(bayescatsorted[:i+1,:],len(tigerrun.testcoef)) for i in range(nsourcespercat)])
cumbayes = array([sum(subtract(bayescatsorted[:i+1,1:].T,bayescatsorted[:i+1,0]).T,axis=0) for i in range(nsourcespercat)])
markerstyles = cycle(['+','*','.','<','d', '^', 'x', 's'])
......@@ -1164,7 +1165,7 @@ def TigerCreateCumBayes(tigerrun,axis,nthcat,nsourcespercat):
axis.set_ylabel("$\ln{{}^{(cat)}B^{i_1 i_2 \ldots i_k}_{\mathrm{GR}}}$")
# PLOT BAYESFACTORS
for i in xrange(tigerrun.nsubhyp-1):
for i in range(tigerrun.nsubhyp-1):
if tigerrun.engine == 'lalnest':
curlabel = "$H_{"+''.join(tigerrun.subhyp[i+1].split("_")[-1].split('dphi'))+"}$"
......
......@@ -27,6 +27,8 @@ import stat
from functools import partial
from argparse import ArgumentParser
from six.moves import range
import numpy as np
import lal
......@@ -312,7 +314,7 @@ else: # do random, uniform placement
cart_grid, sph_grid = eff.uniform_random_ellipsoid(Nrandpts, r1, r2)
# Rotate to get coordinates in parameter basis
cart_grid = np.array([ np.real( np.dot(rot, cart_grid[i]))
for i in xrange(len(cart_grid)) ])
for i in range(len(cart_grid)) ])
# Put in convenient units,
# change from parameter differential (i.e. dtheta)
# to absolute parameter value (i.e. theta = theta_true + dtheta)
......@@ -353,7 +355,7 @@ if opts.save_ellipsoid_data:
# Convert to m1, m2
m1m2_grid = np.array([lsu.m1m2(cart_grid[i][0], cart_grid[i][1])
for i in xrange(len(cart_grid))])
for i in range(len(cart_grid))])
m1m2_grid /= lal.MSUN_SI
if opts.mass_points_xml:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment