Commit b9c1bdbc authored by Rachel Gray's avatar Rachel Gray

Merge branch 'O3a_jacobian_simone_inside' into 'master'

O3a jacobian simone inside

See merge request lscsoft/gwcosmo!55
parents 643b708d efce7330
Pipeline #169596 passed with stage
in 24 seconds
......@@ -62,7 +62,9 @@ parser = OptionParser(
Option("--combine", default=None,
help="Directory of constant_H0 Pdets to combine into single Pdet pickle."),
Option("--outputfile", default=None,
help="Name of output pdet file.")
help="Name of output pdet file."),
Option("--snr", default='12.0', type=float,
help="Network SNR threshold.")
])
opts, args = parser.parse_args()
print(opts)
......@@ -90,7 +92,7 @@ linear = str2bool(opts.linear_cosmology)
basic = str2bool(opts.basic_pdet)
full_waveform = str2bool(opts.full_waveform)
Nsamps = int(opts.Nsamps)
network_snr_threshold = float(opts.snr)
constant_H0 = str2bool(opts.constant_H0)
pdet_path = str(opts.combine)
......@@ -103,7 +105,7 @@ if full_waveform is True:
kind = 'full_waveform'
else:
kind = 'inspiral'
if opts.combine is None:
if mass_distribution == 'BBH-powerlaw':
print("Calculating Pdet with a " + mass_distribution + " mass distribution with alpha = " + str(alpha)
......@@ -114,7 +116,7 @@ if opts.combine is None:
pdet = gwcosmo.detection_probability.DetectionProbability(mass_distribution=mass_distribution, asd=psd, basic=basic,
linear=linear, alpha=alpha, Mmin=Mmin, Mmax=Mmax,
full_waveform=full_waveform, Nsamps=Nsamps,
constant_H0=constant_H0, H0=H0)
constant_H0=constant_H0, H0=H0, network_snr_threshold=network_snr_threshold)
if opts.outputfile is None:
if mass_distribution == 'BBH-powerlaw':
......@@ -128,6 +130,16 @@ if opts.combine is None:
else:
probs = {}
for file in os.listdir(pdet_path):
if file.endswith(".p"):
pdets = pickle.load(open(os.path.join(pdet_path,str(file)), 'rb'))
psd = pdets.asd
alpha = pdets.alpha
Mmin = pdets.Mmin
Mmax = pdets.Mmax
mass_distribution = pdets.mass_distribution
break
for h0 in H0:
try:
pdets = pickle.load(open(pdet_path+'/pdet_'+psd+'_'+str(alpha)+'_'+str(int(h0))+'.p', 'rb'))
......@@ -142,13 +154,53 @@ else:
pdet = gwcosmo.likelihood.detection_probability.DetectionProbability(
mass_distribution=mass_distribution, alpha=alpha,
asd=psd, detectors=['H1', 'L1'], Nsamps=2,
network_snr_threshold=12, Omega_m=0.308,
network_snr_threshold=12.0, Omega_m=0.308,
linear=False, basic=False, M1=50., M2=50.,
constant_H0=False, H0=H0vec, full_waveform=True)
Nsamps = pdets.Nsamps
RAs = pdets.RAs
Decs = pdets.Decs
incs = pdets.incs
psis = pdets.psis
phis = pdets.phis
mass_distribution = pdets.mass_distribution
dl_array = pdets.dl_array
m1 = pdets.m1/1.988e30
m2 = pdets.m2/1.988e30
alpha = pdets.alpha
Mmin = pdets.Mmin
Mmax = pdets.Mmax
psd = pdets.asd
full_waveform = pdets.full_waveform
network_snr_threshold = pdets.snr_threshold
Omega_m = pdets.Omega_m
linear = pdets.linear
seed = pdets.seed
detectors = pdets.detectors
if full_waveform is True:
kind = 'full_waveform'
else:
kind = 'inspiral'
pdet.H0vec = H0vec
pdet.Nsamps = Nsamps
pdet.prob = prob
pdet.RAs = RAs
pdet.Decs = Decs
pdet.incs = incs
pdet.psis = psis
pdet.phis = phis
pdet.mass_distribution = mass_distribution
pdet.dl_array = dl_array
pdet.m1 = m1
pdet.m2 = m2
pdet.Omega_m = Omega_m
pdet.linear = linear
pdet.seed = seed
pdet.detectors = detectors
pdet.network_snr_threshold = network_snr_threshold
logit_prob=logit(prob)
for i in range (len(logit_prob)):
......@@ -157,11 +209,11 @@ else:
pdet.interp_average = interp_average
if mass_distribution == 'BBH-powerlaw':
pdet_path = '{}PSD_{}_alpha_{}_Mmin_{}_Mmax_{}_Nsamps{}_{}.p'.format(psd, mass_distribution,
pdet_path = '{}PSD_{}_alpha_{}_Mmin_{}_Mmax_{}_Nsamps{}_{}_snr_{}.p'.format(psd, mass_distribution,
str(alpha), str(Mmin), str(Mmax),
str(Nsamps), kind)
str(Nsamps), kind,network_snr_threshold)
else:
pdet_path = '{}PSD_{}_Nsamps{}_{}.p'.format(psd, mass_distribution, str(Nsamps), kind)
pdet_path = '{}PSD_{}_Nsamps{}_{}_snr_{}.p'.format(psd, mass_distribution, str(Nsamps), kind,network_snr_threshold)
pickle.dump( pdet, open( pdet_path, "wb" ) )
......@@ -12,7 +12,7 @@ import pickle
import multiprocessing as mp
#Global Imports
import matplotlib
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
matplotlib.rcParams['font.family']= 'Times New Roman'
......@@ -71,7 +71,7 @@ parser = OptionParser(
Option("--galaxy_catalog", default=None,
help="Load galaxy catalog in pickle format"),
Option("--psd", default=None,
help="Select between 'O1' and 'O2' PSDs, by default we use aLIGO at design sensitivity (default=None)."),
help="Select between 'O1' and 'O2' PSDs, by default we use aLIGO at design sensitivity (default=None)."),
Option("--galaxy_weighting", default='False',
help="Set galaxy catalog weighting"),
Option("--catalog_band", default='B', type=str,
......@@ -103,10 +103,16 @@ parser = OptionParser(
Option("--full_waveform", default='True',
help="Use detection probability calculated using full waveforms (default=True) otherwise just use the inspiral part \
(only use full_waveform = False for O2-H0 backwards compatibility"),
Option("--Kcorrections", default='False',
help="Apply K-corrections."),
Option("--reweight", default='True',
help="Reweight samples to pop model."),
Option("--outputfile", default='Posterior',
help="Name of output file"),
Option("--plot", default=None,
help="Plot .npz file")
help="Plot .npz file"),
Option("--snr", default='12.0', type=float,
help="Network SNR threshold.")
])
opts, args = parser.parse_args()
print(opts)
......@@ -134,7 +140,7 @@ if opts.plot is not None:
posterior_uniform_norm = data[2]
posterior_log_norm = data[3]
outputfile = filename[:-4]
else:
if (opts.posterior_samples is None and opts.skymap is None):
parser.error('Provide either posterior samples or skymap.')
......@@ -144,7 +150,7 @@ else:
if (opts.galaxy_catalog is None and opts.method == 'statistical'):
parser.error('The statistical method requires a galaxy catalog. Please provide one.')
if opts.psd is None:
parser.error('Please provide a PSD.')
......@@ -167,7 +173,7 @@ else:
if opts.method == 'counterpart':
galaxy_weighting = False
completion = True
if (opts.counterpart is not None and opts.counterpart[-2:] == '.p'):
if (opts.counterpart is not None and opts.counterpart[-5:] == '.hdf5'):
counterpart = gwcosmo.prior.catalog.galaxyCatalog(catalog_file=opts.counterpart)
else:
if opts.counterpart_ra is not None:
......@@ -183,7 +189,7 @@ else:
min_H0 = float(opts.min_H0)
max_H0 = float(opts.max_H0)
bins_H0 = float(opts.bins_H0)
uncertainty = str2bool(opts.uncertainty)
linear = str2bool(opts.linear_cosmology)
basic = str2bool(opts.basic_pdet)
......@@ -191,12 +197,16 @@ else:
Lambda = float(opts.Lambda)
psd = str(opts.psd)
alpha = float(opts.powerlaw_slope)
Mmin = float(opts.minimum_mass)
Mmax = float(opts.maximum_mass)
mmin = float(opts.minimum_mass)
mmax = float(opts.maximum_mass)
Nsamps = 20000
new_skypatch=str2bool(opts.new_skypatch)
band = str(opts.catalog_band)
full_waveform=str2bool(opts.full_waveform)
Kcorr = str2bool(opts.Kcorrections)
reweight = str2bool(opts.reweight)
network_snr_threshold = float(opts.snr)
population_params = dict(mass_distribution=mass_distribution,alpha=alpha,mmin=mmin,mmax=mmax,Lambda=Lambda)
options_string = opts.method
outputfile = str(opts.outputfile)
......@@ -204,73 +214,61 @@ else:
"Compute P(H0)"
H0 = np.linspace(min_H0, max_H0, bins_H0)
dH0 = H0[1] - H0[0]
if opts.posterior_samples is not None:
print("Loading posterior samples.")
samples = gwcosmo.likelihood.posterior_samples.posterior_samples(posterior_samples)
if opts.skymap is not None:
print("Loading 2D skymap.")
skymap = gwcosmo.likelihood.skymap.skymap(skymap)
if opts.posterior_samples is None:
print("Loading 3D skymap.")
skymap = gwcosmo.likelihood.skymap.skymap(skymap)
samples = None
if opts.method == 'statistical':
if galaxy_catalog[-2:] == '.p':
catalog = gwcosmo.prior.catalog.galaxyCatalog(catalog_file=galaxy_catalog)
if galaxy_catalog[-5:] == '.hdf5':
catalog = gwcosmo.prior.catalog.galaxyCatalog(catalog_file=galaxy_catalog,band=band,Kcorr=Kcorr)
print("Using "+str(band)+"-band maggies.")
else:
print('Not a compatible catalog.')
if catalog.skypatch['allsky'] is not None:
allsky = False
radeclims = catalog.skypatch['allsky']
else:
allsky = True
radeclims = None
counterpart = None
population = False
if opts.method == 'population':
population = True
new_skypatch = False
galaxy_weighting = False
completion = False
catalog = None
allsky = True
radeclims = None
catalog = None
counterpart = None
if (opts.method == "counterpart" and opts.counterpart is None):
catalog = None
counterpart = gwcosmo.prior.catalog.galaxyCatalog()
counterpart = gwcosmo.prior.catalog.galaxy()
if (opts.counterpart_ra is None or
opts.counterpart_dec is None or
opts.counterpart_z is None):
parser.error('The counterpart method requires the ra, dec, and z of the galaxy.')
else:
counterpart.get_galaxy(0).ra = counterpart_ra*np.pi/180.
counterpart.get_galaxy(0).dec = counterpart_dec*np.pi/180.
counterpart.ra = counterpart_ra*np.pi/180.
counterpart.dec = counterpart_dec*np.pi/180.
if (counterpart_z > 0) & (counterpart_z < 3):
counterpart.get_galaxy(0).z = zhelio_to_zcmb(counterpart.ra, counterpart.dec, counterpart_z)
counterpart.z = counterpart_z
else:
counterpart.get_galaxy(0).z = zhelio_to_zcmb(counterpart.ra, counterpart.dec, counterpart_z/speed_of_light)
counterpart.z = counterpart_z/speed_of_light
if (counterpart_v > 0) & (counterpart_v < 1):
counterpart.get_galaxy(0).sigmaz = counterpart_v
counterpart.sigmaz = counterpart_v
else:
counterpart.get_galaxy(0).sigmaz = counterpart_v/speed_of_light
allsky = True
radeclims = None
counterpart.sigmaz = counterpart_v/speed_of_light
population = False
elif (opts.method == "counterpart" and opts.counterpart is not None):
catalog = None
allsky = True
radeclims = None
population = False
if full_waveform is True:
......@@ -279,28 +277,29 @@ else:
kind = 'inspiral'
if mass_distribution == 'BBH-powerlaw':
pdet_path = data_path + '{}PSD_{}_alpha_{}_Mmin_{}_Mmax_{}_Nsamps{}_{}.p'.format(psd, mass_distribution, alpha, Mmin, Mmax, Nsamps, kind)
pdet_path = data_path + '{}PSD_{}_alpha_{}_Mmin_{}_Mmax_{}_Nsamps{}_{}_snr_{}.p'.format(psd, mass_distribution, alpha, mmin, mmax, Nsamps, kind,network_snr_threshold)
print(pdet_path)
if os.path.isfile(pdet_path) is True:
pdet = pickle.load(open(pdet_path, 'rb'))
print('Loading precomputed pdet with a {} mass distribution (alpha = {}) at {} sensitivity using the {}'.format(mass_distribution, alpha, psd, kind))
else:
print('Calculating detection probability from scratch. This will take a while... Consider using a precomputed one for speed.')
pdet = gwcosmo.detection_probability.DetectionProbability(mass_distribution=mass_distribution, asd=psd,
basic=basic, full_waveform=full_waveform)
pdet = gwcosmo.detection_probability.DetectionProbability(mass_distribution=mass_distribution,asd=psd,
basic=basic,full_waveform=full_waveform,H0=H0,network_snr_threshold=network_snr_threshold)
if (mass_distribution == 'BNS' or mass_distribution == 'NSBH'):
pdet_path = data_path + '{}PSD_{}_Nsamps{}_{}.p'.format(psd, mass_distribution, Nsamps, kind)
pdet_path = data_path + '{}PSD_{}_Nsamps{}_{}_snr_{}.p'.format(psd, mass_distribution, Nsamps, kind,network_snr_threshold)
if os.path.isfile(pdet_path) is True:
pdet = pickle.load(open(pdet_path, 'rb'))
print('Loading precomputed pdet with a {} mass distribution at {} sensitivity.'.format(mass_distribution, psd))
me = gwcosmo.gwcosmo.gwcosmoLikelihood(samples,skymap,catalog,pdet,EM_counterpart=counterpart,
linear=linear,weighted=galaxy_weighting,whole_cat=allsky,
radec_lim=radeclims,uncertainty=uncertainty,basic=basic,
rate=rate_evolution,Lambda=Lambda,band=band)
likelihood_original,pxG,pDG,pGD,catalog_fraction, pxnG,pDnG,pnGD, pxnG_rest_of_sky,pDnG_rest_of_sky,rest_fraction = me.likelihood(H0,complete=completion,counterpart_case='direct',new_skypatch=new_skypatch,population=population)
me = gwcosmo.gwcosmo.gwcosmoLikelihood(
H0,samples,skymap,catalog,pdet,reweight=reweight,EM_counterpart=counterpart,
linear=linear,weighted=galaxy_weighting,uncertainty=uncertainty,basic=basic,
rate=rate_evolution,population_params=population_params,Kcorr=Kcorr)
likelihood_original,pxG,pDG,pGD,catalog_fraction,pxnG,pDnG,pnGD,pxnG_rest_of_sky,pDnG_rest_of_sky,rest_fraction = me.likelihood(H0,complete=completion,counterpart_case='direct',new_skypatch=new_skypatch,population=population)
likelihood = np.array(likelihood_original)/np.sum(np.array(likelihood_original)*dH0)
......@@ -308,12 +307,12 @@ else:
posterior_uniform = prior_uniform*likelihood
prior_log = gwcosmo.prior.priors.pH0(H0,prior='log')
posterior_log= prior_log*likelihood
prior_uniform_norm = prior_uniform/np.sum(prior_uniform*dH0)
posterior_uniform_norm = posterior_uniform/np.sum(posterior_uniform*dH0)
prior_log_norm = prior_log/np.sum(prior_log*dH0)
posterior_log_norm = posterior_log/np.sum(posterior_log*dH0)
np.savez(outputfile+'.npz',[H0,likelihood,posterior_uniform_norm,posterior_log_norm,opts])
np.savez(outputfile+'_likelihood_breakdown.npz',[H0,likelihood_original, pxG,pDG,pGD,catalog_fraction, pxnG,pDnG,pnGD,pxnG_rest_of_sky,pDnG_rest_of_sky,rest_fraction])
......@@ -323,7 +322,7 @@ MAP_uniform = confidence_uniform.map
a_uniform = confidence_uniform.lower_level
b_uniform = confidence_uniform.upper_level
print('H0 = %.0f + %.0f - %.0f (MAP and 68.3 percent HDI)' %(MAP_uniform,b_uniform-MAP_uniform,MAP_uniform-a_uniform))
print("Log Prior")
confidence_log = confidence_interval(posterior_log_norm,H0,level=0.683)
MAP_log = confidence_log.map
......
This diff is collapsed.
......@@ -12,7 +12,8 @@ from scipy.stats import ncx2
from scipy.special import logit, expit
import healpy as hp
from gwcosmo.utilities.standard_cosmology import *
from gwcosmo.prior.priors import *
from gwcosmo.prior.priors import mass_sampling as mass_prior
import pickle
import time
import progressbar
......@@ -82,7 +83,7 @@ class DetectionProbability(object):
self.full_waveform = full_waveform
self.constant_H0 = constant_H0
self.seed = seed
np.random.seed(seed)
ASD_data = {}
......@@ -114,18 +115,18 @@ class DetectionProbability(object):
self.psis = np.random.rand(N)*2.0*np.pi
self.phis = np.random.rand(N)*2.0*np.pi
if self.mass_distribution == 'BNS':
m1, m2 = BNS_uniform_distribution(N, mmin=1.0, mmax=3.0)
mass_priors = mass_prior(self.mass_distribution)
self.dl_array = np.linspace(1.0e-100, 1000.0, 500)
if self.mass_distribution == 'NSBH':
m1, _ = BBH_mass_distribution(N, mmin=self.Mmin, mmax=self.Mmax, alpha=self.alpha)
_, m2 = BNS_uniform_distribution(N, mmin=1.0, mmax=3.0)
mass_priors = mass_prior(self.mass_distribution, self.alpha, self.Mmin, self.Mmax)
self.dl_array = np.linspace(1.0e-100, 1000.0, 500)
if self.mass_distribution == 'BBH-powerlaw':
m1, m2 = BBH_mass_distribution(N, mmin=self.Mmin, mmax=self.Mmax, alpha=self.alpha)
mass_priors = mass_prior(self.mass_distribution, self.alpha, self.Mmin, self.Mmax)
self.dl_array = np.linspace(1.0e-100, 15000.0, 500)
if self.mass_distribution == 'BBH-constant':
m1, m2 = BBH_constant_mass(N, M1=self.M1, M2=self.M2)
mass_priors = mass_prior(self.mass_distribution, self.M1, self.M2)
self.dl_array = np.linspace(1.0e-100, 15000.0, 500)
m1, m2 = mass_priors.sample(N)
self.m1 = m1*1.988e30
self.m2 = m2*1.988e30
......
......@@ -10,6 +10,26 @@ from astropy import units as u
from astropy import constants as const
from astropy.table import Table
import h5py
from bilby.core.prior import Uniform, PowerLaw, PriorDict, Constraint
from bilby import gw
from ..utilities.standard_cosmology import z_dlH0, fast_cosmology
from ..prior.priors import *
from astropy.cosmology import FlatLambdaCDM, z_at_value
import astropy.units as u
import astropy.constants as constants
from scipy.interpolate import interp1d
# Fiducial matter density parameter used as the default throughout this module.
Om0 = 0.308
# Redshift grid bounds for the luminosity-distance -> redshift interpolation
# built in compute_source_frame_samples.
zmin = 0.0001
zmax = 10
zs = np.linspace(zmin, zmax, 10000)
# Flat-cosmology helper evaluated at the fiducial Om0.
cosmo = fast_cosmology(Omega_m=Om0)
def constrain_m1m2(parameters):
    """Add a derived 'm1m2' = mass_1 - mass_2 entry to a parameter dict.

    Intended for use as a bilby conversion function so that a Constraint
    prior can enforce mass ordering. The input mapping is not mutated; a
    shallow copy with the extra key is returned.
    """
    mass_difference = parameters['mass_1'] - parameters['mass_2']
    converted = parameters.copy()
    converted['m1m2'] = mass_difference
    return converted
class posterior_samples(object):
......@@ -27,10 +47,26 @@ class posterior_samples(object):
except:
print("No posterior samples were specified")
def E(self, z, Om):
    """Dimensionless Hubble parameter E(z) = H(z)/H0 for flat LCDM."""
    matter = Om * (1.0 + z) ** 3
    dark_energy = 1.0 - Om
    return np.sqrt(matter + dark_energy)
def dL_by_z_H0(self,z,H0,Om0):
    """Derivative of luminosity distance with respect to redshift, d(dL)/dz.

    Computed as dL/(1+z) + c(1+z)/(H0 E(z)), which equals D_C + (1+z) dD_C/dz
    for dL = (1+z) D_C — assumes fast_cosmology.dl_zH0 returns the luminosity
    distance in Mpc (c is taken in km/s, so H0 is in km/s/Mpc); confirm against
    fast_cosmology. Used to convert distance densities to redshift densities
    when reweighting samples.
    """
    speed_of_light = constants.c.to('km/s').value
    cosmo = fast_cosmology(Omega_m=Om0)
    return cosmo.dl_zH0(z, H0)/(1+z) + speed_of_light*(1+z)/(H0*self.E(z,Om0))
def jacobian_times_prior(self,z,H0,Om0=0.308):
    """Combined Jacobian and sampling-prior factor divided out when reweighting.

    Returns (1+z)^2 * d(dL)/dz * dL^2. The dL^2 term undoes the standard
    luminosity-distance-squared sampling prior, d(dL)/dz converts a distance
    density into a redshift density, and the (1+z)^2 factor is presumably the
    detector-frame to source-frame Jacobian for the two component masses —
    TODO confirm against the population model conventions.
    """
    cosmo = fast_cosmology(Omega_m=Om0)
    jacobian = np.power(1+z,2)*self.dL_by_z_H0(z,H0,Om0)
    dl = cosmo.dl_zH0(z, H0)
    return jacobian*(dl**2)
def load_posterior_samples(self):
"""
Method to handle different types of posterior samples file formats.
Currently it supports .dat (LALinference), .hdf5 (GWTC-1),
Currently it supports .dat (LALinference), .hdf5 (GWTC-1),
.h5 (PESummary) and .hdf (pycbcinference) formats.
"""
if self.posterior_samples[-3:] == 'dat':
......@@ -95,14 +131,79 @@ class posterior_samples(object):
self.nsamples = len(self.distance)
file.close()
def marginalized_sky(self):
    """
    Computes the marginalized sky localization posterior KDE.

    Returns a 2-D Gaussian KDE over (ra, dec) built from the posterior
    samples.
    """
    # Note: the stale `def marginalized_distance(self):` header left over
    # from the diff is removed; marginalized_distance is defined separately.
    return gaussian_kde(np.vstack((self.ra, self.dec)))
def compute_source_frame_samples(self, H0):
    """Convert detector-frame samples to the source frame for a given H0.

    Builds a z(dL) interpolant from a flat LCDM cosmology evaluated on the
    module-level redshift grid, maps the sampled luminosity distances to
    redshifts, and de-redshifts the detector-frame masses.
    """
    reference_cosmology = FlatLambdaCDM(H0=H0, Om0=Om0)
    grid_distances = reference_cosmology.luminosity_distance(zs).to(u.Mpc).value
    redshift = interp1d(grid_distances, zs)(self.distance)
    scale = 1 + redshift
    mass_1_source = self.mass_1 / scale
    mass_2_source = self.mass_2 / scale
    return redshift, mass_1_source, mass_2_source
def reweight_samples(self, H0, name, alpha=1.6, mmin=5, mmax=100, seed=1):
    """Rejection-sample the posterior samples to the target population model.

    Weights are the ratio of the new mass prior (evaluated on source-frame
    masses for this H0) to the distance prior used in the original LVC
    analysis; a sample is kept where its weight exceeds a uniform draw.

    Returns
    -------
    dl : array
        Luminosity distances of the retained samples.
    weights : array
        Weights of *all* samples (not only the retained ones); the caller
        uses sum(weights)/len(weights) as a normalisation factor.
    """
    # Prior distribution used in the LVC analysis
    prior = distance_distribution(name=name)
    # Prior distribution used in this work
    new_prior = mass_distribution(name=name, alpha=alpha, mmin=mmin, mmax=mmax)
    # Get source frame masses
    redshift, mass_1_source, mass_2_source = self.compute_source_frame_samples(H0)
    # Re-weight
    weights = new_prior.joint_prob(mass_1_source, mass_2_source) / prior.prob(self.distance)
    np.random.seed(seed)  # fixed seed: reproducible rejection sampling
    draws = np.random.uniform(0, max(weights), weights.shape)
    keep = weights > draws
    # The original also extracted m1det/m2det here but never used or returned
    # them; those dead locals are removed.
    dl = self.distance[keep]
    return dl, weights
def marginalized_redshift_reweight(self, H0, name, alpha=1.6, mmin=5, mmax=100):
    """
    Computes the marginalized redshift posterior KDE, reweighted to the
    target population model.

    Returns the weighted Gaussian KDE over redshift and the sum of the
    weights (used by the caller as a normalisation factor).
    """
    # Stale diff remnants removed: an early `return gaussian_kde(self.distance)`
    # made the body below unreachable, and a leftover nested
    # `def marginalized_sky(self):` header shadowed the real method.
    # Prior distribution used in this work
    new_prior = mass_distribution(name=name, alpha=alpha, mmin=mmin, mmax=mmax)
    # Get source frame masses
    redshift, mass_1_source, mass_2_source = self.compute_source_frame_samples(H0)
    # Re-weight: apply the new mass prior and divide out the sampling prior
    # and distance->redshift Jacobian.
    weights = new_prior.joint_prob(mass_1_source, mass_2_source)/self.jacobian_times_prior(redshift, H0)
    norm = np.sum(weights)
    return gaussian_kde(redshift, weights=weights), norm
def marginalized_redshift(self, H0):
    """
    Computes the marginalized redshift posterior KDE (no population
    reweighting).

    Returns the weighted Gaussian KDE over redshift and the sum of the
    weights.
    """
    # Stale diff remnants removed: a duplicated docstring line and an early
    # `return gaussian_kde(np.vstack((self.ra, self.dec)))` that made the
    # body below unreachable.
    # Get source frame masses
    redshift, mass_1_source, mass_2_source = self.compute_source_frame_samples(H0)
    # remove dl^2 prior and include dz/ddL jacobian
    weights = 1/(self.dL_by_z_H0(redshift, H0, Om0)*cosmo.dl_zH0(redshift, H0)**2)
    norm = np.sum(weights)
    return gaussian_kde(redshift, weights=weights), norm
def marginalized_distance_reweight(self, H0, name, alpha=1.6, mmin=5, mmax=100, seed=1):
    """
    Computes the marginalized distance posterior KDE after rejection
    sampling to the population model.

    Returns the KDE over the retained luminosity distances together with
    the mean sample weight as a normalisation factor.
    """
    kept_distances, sample_weights = self.reweight_samples(H0, name, alpha=alpha, mmin=mmin, mmax=mmax, seed=seed)
    normalisation = np.sum(sample_weights) / len(sample_weights)
    return gaussian_kde(kept_distances), normalisation
def marginalized_distance(self, H0):
    """
    Computes the marginalized distance posterior KDE.

    No reweighting is applied, so the normalisation factor is trivially 1.
    """
    return gaussian_kde(self.distance), 1
......@@ -7,17 +7,61 @@ import numpy as np
import healpy as hp
import pandas as pd
from ligo.skymap.moc import nest2uniq, uniq2nest
import pickle
import h5py
import pkg_resources
import time
import progressbar
from ..utilities.standard_cosmology import *
from ..utilities.schechter_function import *
# Global
catalog_data_path = pkg_resources.resource_filename('gwcosmo',
                                                    'data/catalog_data/')

# Per-band lookup tables for applying K-corrections.
# Maps each photometric band to the band whose K-correction table is used
# (presumably; verify against the K-correction implementation).
Kcorr_bands = {'B':'B', 'K':'K', 'u':'r', 'g':'r', 'r':'g', 'i':'g', 'z':'r'}
# Sign applied to the tabulated K-correction for each band.
Kcorr_signs = {'B':1, 'K':1, 'u':1, 'g':1, 'r':-1, 'i':-1, 'z':-1}
# Colour used when evaluating the K-correction (None: no colour required).
color_names = {'B':None, 'K':None, 'u':'u - r', 'g':'g - r', 'r':'g - r', 'i':'g - i', 'z':'r - z'}
# Colour ranges outside which the K-correction is presumably not trusted —
# TODO confirm how out-of-range colours are handled.
color_limits = {'u - r':[-0.1,2.9], 'g - r':[-0.1,1.9], 'g - i':[0,3], 'r - z':[0,1.5]}
def redshiftUncertainty(ra, dec, z, sigmaz, m, tempsky, luminosity_weights):
"""
A function which "smears" out galaxies in the catalog, therefore
incorporating redshift uncertainties.
"""
sort = np.argsort(m)
ra, dec, z, sigmaz, m, tempsky = ra[sort], dec[sort], z[sort], sigmaz[sort], m[sort], tempsky[sort]
if luminosity_weights is not 'constant':
mlim = np.percentile(m,0.01) # more draws for galaxies in brightest 0.01 percent