diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c426003447b82dd1da2bd02b34f241ccab88e6ad..f453fec2fe3eeaabc396f4da60e71eb48b5b7415 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -5,5 +5,6 @@ test-basic:
   stage: test
   image: ligo/software
   script:
-    - python netsim.py
-    - python rates.py
+    - python setup.py install
+    - netsim
+    - rates
diff --git a/check_coincidence b/bin/check_coincidence
similarity index 96%
rename from check_coincidence
rename to bin/check_coincidence
index caa9a1050c5c465124ab4a82b4867984a043d2f5..c4c4638d535402d2cbca1c37f237d2e740e1e113 100755
--- a/check_coincidence
+++ b/bin/check_coincidence
@@ -1,13 +1,14 @@
 #!/usr/bin/env python
+
 import argparse
 import random
 
 import numpy
 import h5py
 
-from eventgen import EventGenerator
-from light_curve_sampling import generate_obs
-import rates
+from gw_event_gen.eventgen import EventGenerator
+from gw_event_gen.light_curve_sampling import generate_obs
+from gw_event_gen import rates
 
 argp = argparse.ArgumentParser()
 argp.add_argument("event_listing", help="Event list detected by LIGO")
diff --git a/bin/em_bright b/bin/em_bright
new file mode 100755
index 0000000000000000000000000000000000000000..b361a9ab9241307ced5e3bccb8a3e4547f00849c
--- /dev/null
+++ b/bin/em_bright
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+#-------------------------------------------------
+
+from gw_event_gen import em_bright
+
+print em_bright.bns_ejecta_mass()
+print em_bright.nsbh_ejecta_mass()
+
+#-------------------------------------------------
+
+import matplotlib
+matplotlib.use("agg")
+from matplotlib import pyplot
+import numpy
+
+samples = 1000
+m_bh = numpy.random.uniform(3., 10., samples)
+m_ns = numpy.random.uniform(1., 2., samples)
+chi_bh = numpy.random.uniform(0., 1., samples)
+#r_ns = numpy.random.uniform(10.e3, 16.e3, samples)
+r_ns = numpy.ones(samples) * 13.5e3
+dist = numpy.random.power(3, samples) * 1000 # Mpc
+
+dm = []
+for mb, mn, cb, rn in numpy.transpose(numpy.asarray((m_bh, m_ns, chi_bh, r_ns))):
+    dm.append(em_bright.frac_disk_mass(mb, mn, cb, rn))
+dm = numpy.asarray(dm)
+
+pyplot.figure(figsize=(20, 5))
+mask = dm > 0
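+# Plot all sampled binaries in black, then overlay those with nonzero disk mass,
+# colored by the frac_disk_mass value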
+pyplot.scatter(m_bh, m_ns, c='k', edgecolor='none')
+pyplot.scatter(m_bh[mask], m_ns[mask], c=dm[mask], vmin=0., edgecolor='none')
+pyplot.colorbar()
+pyplot.xlabel("BH mass")
+pyplot.ylabel("NS mass")
+pyplot.savefig("em_bright_test.png")
diff --git a/generate_events b/bin/generate_events
similarity index 99%
rename from generate_events
rename to bin/generate_events
index c45b66d86f15ea41770c7dce22f8793b029e68fe..b2cb2ee07a127e905030db75370ecef202799614 100755
--- a/generate_events
+++ b/bin/generate_events
@@ -7,8 +7,8 @@ import argparse
 
 from functools import partial
 
-import eventgen
-from netsim import Network, list_instruments, parse_psd
+from gw_event_gen import eventgen
+from gw_event_gen.netsim import Network, list_instruments, parse_psd
 import numpy
 from numpy.random import uniform
 
@@ -105,7 +105,7 @@ def snr_threshold_reject(e, nifo=args.nifo_abovethreshold, threshold=args.snr_th
     return sorted(snrs, reverse=True)[nifo-1] > threshold
 
 def em_bright(e):
-    from em_bright import frac_disk_mass
+    from gw_event_gen.em_bright import frac_disk_mass
     return (max(frac_disk_mass(e.mass1, e.mass2, e.chi_eff), 0.),)
 
 def em_bright_reject(e):
diff --git a/generate_obs b/bin/generate_obs
similarity index 97%
rename from generate_obs
rename to bin/generate_obs
index 7280ad4593c82701a03222359615ca3ad1509072..95389bdbbf58b13c61fcae9d1910ea3eba436d27 100755
--- a/generate_obs
+++ b/bin/generate_obs
@@ -11,9 +11,9 @@ import healpy
 
 from lalinference.io import fits
 
-import eventgen
-import scan
-from photometry import lsst_bands
+from gw_event_gen import eventgen
+from gw_event_gen import scan
+from gw_event_gen.photometry import lsst_bands
 lsst_bands = list(lsst_bands)
 
 # https://www.lsst.org/sites/default/files/docs/sciencebook/SB_1.pdf
@@ -116,7 +116,7 @@ for i, arg in enumerate(args.light_curves):
     # "depth first", skymap only
     # Explore all viable parts of the map, starting with the highest
     # probability
-    import scan
+    from gw_event_gen import scan
     pos = scan.sky_brute(smap)
 
     # Sum over the angular distance traversed
diff --git a/generate_psd b/bin/generate_psd
similarity index 95%
rename from generate_psd
rename to bin/generate_psd
index af403e4c237eb067054cd359a16e2d080ab6690d..bbbf47193de58e7c019b0f1e4f2433d741121b0e 100755
--- a/generate_psd
+++ b/bin/generate_psd
@@ -5,7 +5,7 @@ import lalsimulation
 import lal
 from lal import series
 from glue.ligolw import utils, ligolw
-import netsim
+from gw_event_gen import netsim
 
 _default_network = ("H1", "L1", "V1")
 
diff --git a/localization_statistics.py b/bin/localization_statistics
old mode 100644
new mode 100755
similarity index 96%
rename from localization_statistics.py
rename to bin/localization_statistics
index d7940f2529dca5ab319763b16926c3709e41ed4c..3306cd6829c5fca32c9ecd3fc2d4b419e749f1f2
--- a/localization_statistics.py
+++ b/bin/localization_statistics
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 from __future__ import division
 import matplotlib
 matplotlib.use("agg")
@@ -5,12 +7,12 @@ matplotlib.use("agg")
 from argparse import ArgumentParser
 from lalinference.io import fits
 
-import skyutils
 import numpy as np
 import matplotlib.pyplot as plt
 import glob
 import healpy
 
+from gw_event_gen import skyutils
 
 # Given an input of names of localized fits files to loop over 
 argp = ArgumentParser()
@@ -41,4 +43,3 @@ for filename in glob.glob(args.input_loc[0]):
     output.write("%s, %.2f, %.2f \n" % (event_num, prb50 * pix_size, prb90 * pix_size))
 
 output.close()
-
diff --git a/localize_events b/bin/localize_events
similarity index 95%
rename from localize_events
rename to bin/localize_events
index b802535f0134cf47347fadc097945b2c78d78649..07a1a47a80bdab5ec28de19f9533879c2974084b 100755
--- a/localize_events
+++ b/bin/localize_events
@@ -9,10 +9,10 @@ import numpy
 
 from lalinference.io import fits
 
-import eventgen
-from netsim import Network, list_instruments
-import kilonovae
-import em_bright
+from gw_event_gen import eventgen
+from gw_event_gen.netsim import Network, list_instruments
+from gw_event_gen import kilonovae
+from gw_event_gen import em_bright
 
 def save_light_curve(t, lc, freq, flux, meta, fname, root="/light_curves/"):
     import h5py
@@ -112,7 +112,7 @@ for i, e in enumerate(event_list._events):
         # FIXME: move standard generation to photometry module
         freq = kilonovae.C_CGS / numpy.logspace(-5, -3, 10000)
 
-        from photometry import _bands
+        from gw_event_gen.photometry import _bands
         lc = {}
         for bnd in _bands:
             mag = kilonovae.bandlimited_mag_at_earth(flux, band=bnd, \
diff --git a/localize_via_bs.sh b/bin/localize_via_bs
similarity index 100%
rename from localize_via_bs.sh
rename to bin/localize_via_bs
diff --git a/bin/netsim b/bin/netsim
new file mode 100755
index 0000000000000000000000000000000000000000..aacf70dd8df8859525deec0d060fa0f22d744ac7
--- /dev/null
+++ b/bin/netsim
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+
+import os
+import math
+
+from functools import partial
+
+import numpy
+from scipy.interpolate import interp1d
+from scipy.integrate import trapz
+from scipy.optimize import brentq
+
+import lal
+_detectors = dict([(d.frDetector.prefix, d) for d in lal.CachedDetectors])
+import lalsimulation
+
+from gw_event_gen import eventgen
+from gw_event_gen import netsim
+
+import matplotlib
+matplotlib.use("agg")
+from matplotlib import pyplot
+
+aligo = netsim.Network(("H1",))
+
+# net Theta^2 testing
+x = numpy.linspace(0, 1, 100)
+pyplot.plot(x, netsim._fit_theta(x), label="analytic")
+
+zh = aligo.redshift_horizon(mass1=10., mass2=10.)["H1"]
+y = netsim._fit_theta(x) * aligo.snr_correction(mass1=10., mass2=10.0)(x * zh)
+pyplot.plot(x, y, label="analytic, 10+10 corrected")
+
+th_sq, ccdf = aligo._theta_sq_net()
+pyplot.plot(x, netsim._fit_theta(x, x[0], x[-1], interp1d(th_sq, ccdf)), label="1 det")
+
+aligo.add_psd("L1")
+th_sq, ccdf = aligo._theta_sq_net()
+pyplot.plot(x, netsim._fit_theta(x, x[0], x[-1], interp1d(th_sq, ccdf)), label="2 det")
+
+aligo.add_psd("V1")
+th_sq, ccdf = aligo._theta_sq_net()
+pyplot.plot(x, netsim._fit_theta(x, x[0], x[-1], interp1d(th_sq, ccdf)), label="3 det")
+
+pyplot.legend()
+#pyplot.savefig("net_resp.png")
+
+#
+# Usage and unit testing
+#
+network = netsim.Network(("H1", "L1", "V1"))
+
+# Have to cut off our sampling in distance somehow
+_uniform_comoving = partial(eventgen.uniform_comoving, z_max = 0.5)
+
+# Generate non-spinning NSBH in comoving volume
+generator = eventgen.EventGenerator()
+generator.append_generator(("mass1", "mass2"), eventgen.uniform_nsbh_comp_mass)
+generator.append_generator(eventgen._spins_cart, eventgen.zero_spin)
+generator.append_generator(("distance",), _uniform_comoving)
+generator.append_generator(eventgen._extr, eventgen.random_orientation)
+generator.append_generator(("eccentricity",), lambda: (0.0,))
+generator.append_generator(("event_time",), lambda: (1e9,))
+
+# Add post filters
+generator.append_post(("mchirp", "eta"), eventgen.mchirp_eta)
+
+# Reject anything without a coincidence
+generator.append_conditional(partial(netsim.snr_threshold_reject, network))
+
+for e in generator.generate_events(n_itr=1):
+    print e.mass1, e.eta, e.distance, e.inclination
+    e.gen_waveform()
+
+    print network.snr(e)
+
+#generator.to_hdf5("test.hdf", "events")
+#generator.to_xml("test.xml.gz")
diff --git a/bin/photometry b/bin/photometry
new file mode 100755
index 0000000000000000000000000000000000000000..a2d92c33390a0172ad819ea009851c65c7bde4c2
--- /dev/null
+++ b/bin/photometry
@@ -0,0 +1,7 @@
+#!/usr/bin/env python
+
+from gw_event_gen import photometry
+import numpy
+
+print photometry.get_spectrum()
+print photometry.lal.C_SI / numpy.logspace(-7, -5, 10000)
diff --git a/plot_all_light_curves b/bin/plot_all_light_curves
similarity index 98%
rename from plot_all_light_curves
rename to bin/plot_all_light_curves
index 027b8cacdb973b60c76ddfee63b58701f9ede9c2..21f70518361f433e703767f2fdbf1bb1384caee8 100755
--- a/plot_all_light_curves
+++ b/bin/plot_all_light_curves
@@ -28,7 +28,7 @@ plot_colors = {
     "y": cm.jet(1.0)
 }
 
-from photometry import lsst_bands, sort_by_band
+from gw_event_gen.photometry import lsst_bands, sort_by_band
 lsst_bands -= set(args.exclude_band or [])
 lsst_bands = list(lsst_bands)
 n_bands = len(lsst_bands)
diff --git a/plot_loc_stats.py b/bin/plot_loc_stats
old mode 100644
new mode 100755
similarity index 99%
rename from plot_loc_stats.py
rename to bin/plot_loc_stats
index 5ebef6105db048a0319eae92a73ec8984273f64b..5c23fe1dd3a870892ec13aa29a2ed5cee74798ac
--- a/plot_loc_stats.py
+++ b/bin/plot_loc_stats
@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 from __future__ import division
 import matplotlib
 matplotlib.use("agg")
@@ -189,5 +191,3 @@ print err_med, err_90
 plt.suptitle(args.title)
 
 plt.savefig('%s_stacked_hist.png' % args.output)
-
-
diff --git a/bin/rates b/bin/rates
new file mode 100755
index 0000000000000000000000000000000000000000..d39ddbcc9c8e1111befa9e0deecb3af601941aa7
--- /dev/null
+++ b/bin/rates
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+
+from gw_event_gen import rates
+
+import matplotlib
+matplotlib.use("agg")
+from matplotlib import pyplot
+
+import json
+
+#
+# Neutron Star Black Holes
+#
+
+#
+# Adapted from: https://arxiv.org/pdf/1003.2480.pdf
+#
+# numbers in the above publication are Mpc^-3 Myr^-1; we convert to Gpc^-3 yr^-1
+GW_RATES_PREDICTION = "https://arxiv.org/abs/1003.2480"
+NSBH_RATES_LOW = ((6e-4 * 1e-6) * rates._invmpc3yr).to(rates._invgpc3yr)
+NSBH_RATES_LIKELY = ((3e-2 * 1e-6) * rates._invmpc3yr).to(rates._invgpc3yr)
+NSBH_RATES_HIGH = ((1. * 1e-6) * rates._invmpc3yr).to(rates._invgpc3yr)
+
+NSBH_MODELLED = rates.RateDistribution("lognorm", low=NSBH_RATES_LOW, \
+                                high=NSBH_RATES_HIGH, \
+                                url=GW_RATES_PREDICTION)
+
+# FIXME: post-O1 UL
+
+# https://arxiv.org/abs/1811.12907
+GWTC1 = "https://arxiv.org/abs/1811.12907"
+NSBH_RATE_UL = json.loads("""{
+    "5": {
+        "isotropic": {
+            "gstlal": 610,
+            "pycbc": 580
+        },
+        "aligned": {
+            "gstlal": 540,
+            "pycbc": 520
+        }
+    },
+    "10": {
+        "isotropic": {
+            "gstlal": 470,
+            "pycbc": 410
+        },
+        "aligned": {
+            "gstlal": 340,
+            "pycbc": 300
+        }
+    },
+    "30": {
+        "isotropic": {
+            "gstlal": 400,
+            "pycbc": 340
+        },
+        "aligned": {
+            "gstlal": 220,
+            "pycbc": 200
+        }
+    }
+}""")
+
+# gstlal isotropic
+NSBH_RATES_UL_BH_5 = NSBH_RATE_UL["5"]["isotropic"]["gstlal"] * rates._invgpc3yr
+NSBH_RATES_UL_BH_10 = NSBH_RATE_UL["10"]["isotropic"]["gstlal"] * rates._invgpc3yr
+NSBH_RATES_UL_BH_30 = NSBH_RATE_UL["30"]["isotropic"]["gstlal"] * rates._invgpc3yr
+
+NSBH_LIMIT_BH_5 = rates.RateDistribution("expon", high=NSBH_RATES_UL_BH_5, \
+                                     url=GWTC1)
+NSBH_LIMIT_BH_10 = rates.RateDistribution("expon", high=NSBH_RATES_UL_BH_10, \
+                                     url=GWTC1)
+NSBH_LIMIT_BH_30 = rates.RateDistribution("expon", high=NSBH_RATES_UL_BH_30, \
+                                     url=GWTC1)
+
+#
+# Binary Neutron Stars
+#
+
+# Adapted from: https://arxiv.org/pdf/1003.2480.pdf
+BNS_RATES_LOW = 1e-2 * rates._invgpc3yr
+BNS_RATES_LIKELY = 1. * rates._invgpc3yr
+BNS_RATES_HIGH = 1e1 * rates._invgpc3yr
+
+BNS_MODELLED = rates.RateDistribution("lognorm", low=BNS_RATES_LOW, \
+                                high=BNS_RATES_HIGH, \
+                                url=GW_RATES_PREDICTION)
+
+# From GW170817 discovery
+# https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.119.161101
+GW170817_DISCOVERY = "https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.119.161101"
+BNS_RATES_LIKELY = 1540 * rates._invgpc3yr
+BNS_RATES_LOW = (1540 - 1220) * rates._invgpc3yr
+BNS_RATES_HIGH = (1540 + 3200) * rates._invgpc3yr
+
+BNS_GW_OBS = rates.RateDistribution("lognorm", low=BNS_RATES_LOW, \
+                                high=BNS_RATES_HIGH, \
+                                url=GW170817_DISCOVERY)
+
+BNS_RATES = json.loads("""{
+    "astro": {
+        "pycbc": [170, 1210, 4440],
+        "gstlal": [130, 920, 3140],
+        "combined": [250, 1050, 3570]
+    },
+    "broad": {
+        "pycbc": [120, 800, 2770],
+        "gstlal": [97, 662, 2271],
+        "combined": [170, 730, 2360]
+    },
+    "union": [110, 3840]
+}""")
+
+BNS_RATES_UNI = rates.RateDistribution("lognorm", \
+                        low=BNS_RATES["broad"]["gstlal"][0] * rates._invgpc3yr, \
+                        high=BNS_RATES["broad"]["gstlal"][-1] * rates._invgpc3yr, \
+                        url=GWTC1)
+
+BNS_RATES_NRM = rates.RateDistribution("lognorm", \
+                        low=BNS_RATES["astro"]["gstlal"][0] * rates._invgpc3yr, \
+                        high=BNS_RATES["astro"]["gstlal"][-1] * rates._invgpc3yr, \
+                        url=GWTC1)
+
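+# Supersede the discovery-paper estimate with the GWTC-1 "union" interval; the
+# old distribution is retained in the new one's _related set (CitedObject.__ilshift__)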
+BNS_GW_OBS <<= rates.RateDistribution("lognorm", \
+                        low=BNS_RATES["union"][0] * rates._invgpc3yr, \
+                        high=BNS_RATES["union"][-1] * rates._invgpc3yr, \
+                        url=GWTC1)
+
+#
+# Binary Black Holes
+#
+
+# FIXME: Add GW150914, post-O1
+
+# From GW170104
+# https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.118.221101
+GW170104_DISCOVERY = "https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.118.221101"
+# numbers in the above publication are already Gpc^-3 yr^-1
+BBH_RATES_LOW = 12 * rates._invgpc3yr
+BBH_RATES_HIGH = 213 * rates._invgpc3yr
+# NOTE: "likely" isn't accurate since we merged two distributions
+BBH_RATES_LIKELY = 103 * rates._invgpc3yr
+
+BBH_GW_OBS = rates.RateDistribution("lognorm", low=BBH_RATES_LOW, \
+                                high=BBH_RATES_HIGH, \
+                                url=GW170104_DISCOVERY)
+
+# Aligned = gstlal and isotropic = pycbc
+BBH_RATES = json.loads("""{
+    "uniform_in_log": {
+        "aligned": [9.4, 18.1, 31.0],
+        "isotropic": [9.8, 19.5, 34.7],
+        "combined": [10.8, 19, 32]
+    },
+    "power_law": {
+        "aligned": [29, 56, 101],
+        "isotropic": [28, 57, 104],
+        "combined": [32, 57, 97]
+    },
+    "union": [9.7, 101]
+}""")
+
+BBH_RATES_PL = rates.RateDistribution("lognorm", \
+                low=BBH_RATES["power_law"]["combined"][0] * rates._invgpc3yr,
+                high=BBH_RATES["power_law"]["combined"][-1] * rates._invgpc3yr,
+                url=GWTC1)
+
+BBH_RATES_ULM = rates.RateDistribution("lognorm", \
+                low=BBH_RATES["uniform_in_log"]["combined"][0] * rates._invgpc3yr,
+                high=BBH_RATES["uniform_in_log"]["combined"][-1] * rates._invgpc3yr,
+                url=GWTC1)
+
+# Union from GWTC-1
+BBH_GW_OBS <<= rates.RateDistribution("lognorm", \
+                                low=BBH_RATES["union"][0] * rates._invgpc3yr, \
+                                high=BBH_RATES["union"][-1] * rates._invgpc3yr, \
+                                url=GWTC1)
+
+# https://arxiv.org/abs/1811.12907
+GW_BBH_POPULATIONS = "https://arxiv.org/abs/1811.12907"
+# O1O2 Populations analysis, Model B: https://arxiv.org/abs/1811.12940
+BBH_RATES_LIKELY = 53 * rates._invgpc3yr
+BBH_RATES_LOW = (53 - 29) * rates._invgpc3yr
+BBH_RATES_HIGH = (53 + 59) * rates._invgpc3yr
+
+BBH_GW_OBS <<= rates.RateDistribution("lognorm", low=BBH_RATES_LOW, \
+                                high=BBH_RATES_HIGH, \
+                                url=GW_BBH_POPULATIONS)
+
+# https://arxiv.org/abs/1704.04628
+GW_IMBH_O1 = "https://arxiv.org/abs/1704.04628"
+# O1 IMBH 100 + 100 nonspinning
+IMBH_RATES_UL_100_100 = 2 * rates._invgpc3yr
+
+#
+# Intermediate Mass Binary Black Holes
+#
+IMBH_GW_OBS = rates.RateDistribution("expon", high=IMBH_RATES_UL_100_100, \
+                                url=GW_IMBH_O1)
+
+print BBH_GW_OBS
+dist = BBH_GW_OBS.distr
+low, high = BBH_GW_OBS._low, BBH_GW_OBS._high
+print "BBH rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
+
+print BNS_GW_OBS
+dist = BNS_GW_OBS.distr
+low, high = BNS_GW_OBS._low, BNS_GW_OBS._high
+print "BNS rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
+
+print NSBH_LIMIT_BH_5
+dist = NSBH_LIMIT_BH_5.distr
+high = NSBH_LIMIT_BH_5._high
+print "NSBH UL rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}".format(high, dist.cdf(high))
+
+print NSBH_MODELLED
+dist = NSBH_MODELLED.distr
+low, high = NSBH_MODELLED._low, NSBH_MODELLED._high
+print "NSBH model rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
+
+print IMBH_GW_OBS
+dist = IMBH_GW_OBS.distr
+low, high = IMBH_GW_OBS._low, IMBH_GW_OBS._high
+print "IMBH UL rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}".format(high, dist.cdf(high))
+
+x = NSBH_MODELLED._range
+pyplot.subplot(2, 1, 1)
+pyplot.plot(x, dist.pdf(x))
+rvs = dist.rvs(100000)
+
+# FIXME: Reenable when CI runners have more modern matplotlib
+#pyplot.hist(rvs, bins=x, histtype='stepfilled', density=True)
+pyplot.hist(rvs, bins=x, histtype='stepfilled', normed=True)
+pyplot.semilogx()
+pyplot.subplot(2, 1, 2)
+pyplot.plot(x, dist.cdf(x))
+pyplot.semilogx()
+pyplot.legend()
+pyplot.savefig("rates_test.png")
diff --git a/bin/scan b/bin/scan
new file mode 100755
index 0000000000000000000000000000000000000000..3ec2c8a261367523ee3d4f35979b56675963dc3b
--- /dev/null
+++ b/bin/scan
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+from gw_event_gen import scan
+import numpy
+import healpy
+
+# Generate random healpix compatible sky map
+smap = numpy.random.uniform(0, 1, 12 * 128**2)
+smap /= smap.sum()
+
+# Resample down to a different resolution
+smap_ds = scan.resample_sky_map(smap, verbose=True)
+ns = healpy.npix2nside(len(smap_ds))
+
+# Brute force, start at highest prob and work downwards
+pos = scan.sky_brute(smap_ds)
+
+# Sum over the angular distance traversed
+xyz_pos = healpy.ang2vec(*pos)
+ang_trav = numpy.asarray([scan.ang_dist(*p) for p in \
+                zip(xyz_pos[:-1], xyz_pos[1:])])
+print ang_trav.sum()
diff --git a/gw_event_gen/__init__.py b/gw_event_gen/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/em_bright.py b/gw_event_gen/em_bright.py
similarity index 85%
rename from em_bright.py
rename to gw_event_gen/em_bright.py
index 92229af0c9a820b91673c2fee23ff76b8835bcc4..543c752cfb4826309c28dbf3046c9deb8638dd84 100644
--- a/em_bright.py
+++ b/gw_event_gen/em_bright.py
@@ -188,39 +188,3 @@ def bns_ejecta_mass(m_ns1=1.4, m_ns2=1.75, r_ns1=10e3, r_ns2=10e3, mb_ns1=0., mb
 
     return ((a* (m_ns2 / m_ns1)**(1 / 3.) * (1 - 2 * c1) / c1 + b * (m_ns2 / m_ns1)**n + c * (1.- m_ns1 / mb_ns1)) * mb_ns1  \
            + (a * (m_ns1 / m_ns2)**(1/3.) * (1 -2 * c2) / c2 + b * (m_ns1 / m_ns2)**n + c * (1. - m_ns2 / mb_ns2)) * mb_ns2 + d) * 10**-3
-
-#
-# Testing
-#
-if __name__ == "__main__":
-    #print r_isco(0.0), r_isco(-0.5), r_isco(0.9)
-    #print solve_xi_tidal(10., 1.4, 0.5, 13.5e3)
-    #print frac_disk_mass(m_bh=1.4 * 4, m_ns=1.4, chi_bh=0.8, r_ns=13.5e3)
-
-    print bns_ejecta_mass()
-    print nsbh_ejecta_mass()
-
-
-    from matplotlib import pyplot
-    from scipy.stats import uniform
-    samples = 1000
-    m_bh = numpy.random.uniform(3., 10., samples)
-    m_ns = numpy.random.uniform(1., 2., samples)
-    chi_bh = numpy.random.uniform(0., 1., samples)
-    #r_ns = numpy.random.uniform(10.e3, 16.e3, samples)
-    r_ns = numpy.ones(samples) * 13.5e3
-    dist = numpy.random.power(3, samples) * 1000 # Mpc
-
-    dm = []
-    for mb, mn, cb, rn in numpy.transpose(numpy.asarray((m_bh, m_ns, chi_bh, r_ns))):
-        dm.append(frac_disk_mass(mb, mn, cb, rn))
-    dm = numpy.asarray(dm)
-
-    pyplot.figure(figsize=(20, 5))
-    mask = dm > 0
-    pyplot.scatter(m_bh, m_ns, c='k', edgecolor='none')
-    pyplot.scatter(m_bh[mask], m_ns[mask], c=dm[mask], vmin=0., edgecolor='none')
-    pyplot.colorbar()
-    pyplot.xlabel("BH mass")
-    pyplot.ylabel("NS mass")
-    pyplot.savefig("em_bright_test.png")
diff --git a/eventgen.py b/gw_event_gen/eventgen.py
similarity index 99%
rename from eventgen.py
rename to gw_event_gen/eventgen.py
index 89bf2ff11baf6b047621030376b3e80654ee1c6e..fc8c12b166699df2901ec592ec9e720df84472d6 100644
--- a/eventgen.py
+++ b/gw_event_gen/eventgen.py
@@ -1023,12 +1023,3 @@ def _mp_generate_events(egen, np=1, n_itr=None, n_event=None, \
         egen._events.extend(elist)
 
     return egen
-    
-if __name__ == "__main__":
-    # Multiprocessing tests
-    #egen = EventGenerator()
-    #egen.append_generator(("mass1", "mass2"), uniform_bns_comp_mass)
-    #_mp_generate_events(egen, np=6, n_event=1000, verbose=True)
-
-    import doctest
-    doctest.testmod()
diff --git a/kilonovae.py b/gw_event_gen/kilonovae.py
similarity index 100%
rename from kilonovae.py
rename to gw_event_gen/kilonovae.py
diff --git a/light_curve_sampling.py b/gw_event_gen/light_curve_sampling.py
similarity index 100%
rename from light_curve_sampling.py
rename to gw_event_gen/light_curve_sampling.py
diff --git a/netsim.py b/gw_event_gen/netsim.py
similarity index 88%
rename from netsim.py
rename to gw_event_gen/netsim.py
index ae318917f3ca652aa2734721dd90306ca64dff9a..094ef9e1cffff2627478b62ad3020d50c5028024 100644
--- a/netsim.py
+++ b/gw_event_gen/netsim.py
@@ -10,7 +10,7 @@ import lal
 _detectors = dict([(d.frDetector.prefix, d) for d in lal.CachedDetectors])
 import lalsimulation
 
-import eventgen
+from . import eventgen
 
 #
 # Analytic volume utilities
@@ -531,78 +531,11 @@ class Network(object):
             return numpy.sqrt(ant) / n
         return numpy.sqrt(ant)
 
-if __name__ == "__main__":
-
-    import matplotlib
-    matplotlib.use("agg")
-    from matplotlib import pyplot
-    from scipy.interpolate import interp1d
-
-    aligo = Network(("H1",))
-
-    # net Theta^2 testing
-    x = numpy.linspace(0, 1, 100)
-    pyplot.plot(x, _fit_theta(x), label="analytic")
-
-    zh = aligo.redshift_horizon(mass1=10., mass2=10.)["H1"]
-    y = _fit_theta(x) * aligo.snr_correction(mass1=10., mass2=10.0)(x * zh)
-    pyplot.plot(x, y, label="analytic, 10+10 corrected")
-
-    th_sq, ccdf = aligo._theta_sq_net()
-    pyplot.plot(x, _fit_theta(x, x[0], x[-1], \
-                                interp1d(th_sq, ccdf)), label="1 det")
-
-    aligo.add_psd("L1")
-    th_sq, ccdf = aligo._theta_sq_net()
-    pyplot.plot(x, _fit_theta(x, x[0], x[-1], \
-                                interp1d(th_sq, ccdf)), label="2 det")
-
-    aligo.add_psd("V1")
-    th_sq, ccdf = aligo._theta_sq_net()
-    pyplot.plot(x, _fit_theta(x, x[0], x[-1], \
-                                interp1d(th_sq, ccdf)), label="3 det")
-
-    pyplot.legend()
-    #pyplot.savefig("net_resp.png")
-
-    #
-    # Usage and unit testing
-    #
-    network = Network(("H1", "L1", "V1"))
-
-    def snr_threshold_reject(e, nifo=2, threshold=5.5):
-        """
-        Reject an event that does not have at least 'threshold' SNR in 'nifo' instruments.
-        """
-        snrs = network.snr(e).values()
-        if sorted(snrs)[nifo-1] < threshold:
-            return False
-        return True
-
-    # Have to cut off our sampling in distance somehow
-    from functools import partial
-    _uniform_comoving = partial(eventgen.uniform_comoving, z_max = 0.5)
-
-    # Generate non-spinning NSBH in comoving volume
-    generator = eventgen.EventGenerator()
-    generator.append_generator(("mass1", "mass2"), eventgen.uniform_nsbh_comp_mass)
-    generator.append_generator(eventgen._spins_cart, eventgen.zero_spin)
-    generator.append_generator(("distance",), _uniform_comoving)
-    generator.append_generator(eventgen._extr, eventgen.random_orientation)
-    generator.append_generator(("eccentricity",), lambda: (0.0,))
-    generator.append_generator(("event_time",), lambda: (1e9,))
-
-    # Add post filters
-    generator.append_post(("mchirp", "eta"), eventgen.mchirp_eta)
-
-    # Reject anything without a coincidence
-    generator.append_conditional(snr_threshold_reject)
-
-    for e in generator.generate_events(n_itr=1):
-        print e.mass1, e.eta, e.distance, e.inclination
-        e.gen_waveform()
-
-        print network.snr(e)
-
-    #generator.to_hdf5("test.hdf", "events")
-    #generator.to_xml("test.xml.gz")
+def snr_threshold_reject(network, e, nifo=2, threshold=5.5):
+    """
+    Reject an event that does not have at least 'threshold' SNR in 'nifo' instruments.
+    """
+    snrs = network.snr(e).values()
+    if sorted(snrs)[nifo-1] < threshold:
+        return False
+    return True
diff --git a/photometry.py b/gw_event_gen/photometry.py
similarity index 93%
rename from photometry.py
rename to gw_event_gen/photometry.py
index bf54feef56a10c46933f439c8c7921ca09f471e1..104662b7ec22d2c8174257e1747e325fa42ae92a 100644
--- a/photometry.py
+++ b/gw_event_gen/photometry.py
@@ -81,8 +81,3 @@ def get_spectrum(bands=None, nbins=10000):
             l = min(l, bp_l)
             u = max(u, bp_u)
     return lal.C_SI / numpy.logspace(numpy.log10(l), numpy.log10(u), nbins)
-
-if __name__ == "__main__":
-    print get_spectrum()
-    print lal.C_SI / numpy.logspace(-7, -5, 10000)
-    # Make a 'rainbow' plot of the filters
diff --git a/gw_event_gen/rates.py b/gw_event_gen/rates.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ac7be6014b2775828bec13f91c7019f9092944b
--- /dev/null
+++ b/gw_event_gen/rates.py
@@ -0,0 +1,186 @@
+#!/usr/bin/env python
+import json
+import itertools
+
+import numpy
+
+from scipy.special import erfinv
+from scipy.stats import lognorm, poisson, expon
+from scipy.integrate import fixed_quad, trapz, cumtrapz
+
+from astropy import units
+_invmpc3yr = units.Unit("Mpc^-3 yr^-1")
+_invgpc3yr = units.Unit("Gpc^-3 yr^-1")
+
+#
+# Utilities for generating rate distributions
+#
+
+def solve_lognorm_params(low, high):
+    """
+    Obtain parameters for the scipy lognorm distribution by fitting mu and sigma to the low and high numbers for a given category of rates. Thanks to Will Farr for some guidance in this matter.
+    """
+
+    try:
+        mu = 0.5 * (numpy.log(high) + numpy.log(low))
+        # Nominally, we used 0.25 (2 sigma) here, but this is a better
+        # approximation
+        sigma = 0.25 * (numpy.log(high) - numpy.log(low))
+    except (units.core.UnitTypeError, TypeError):
+        mu = 0.5 * (numpy.log(high.value) + numpy.log(low.value))
+        # Nominally, we used 0.25 (2 sigma) here, but this is a better
+        # approximation
+        sigma = 0.25 * (numpy.log(high.value) - numpy.log(low.value))
+
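+    # The values above are superseded below: (low, high) are treated as the 5%
+    # and 95% quantiles of the lognormal, so mu is the midpoint of the log-rates
+    # and sigma follows from the normal quantile spread via erfinv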
+    dR = numpy.log(high.value) - numpy.log(low.value)
+    derf = erfinv(2 * 0.95 - 1) - erfinv(2 * 0.05 - 1)
+    sigma = dR / derf
+
+    dR = numpy.log(high.value) + numpy.log(low.value)
+    derf = erfinv(2 * 0.95 - 1) + erfinv(2 * 0.05 - 1)
+    mu = dR + derf * sigma
+
+    sigma /= numpy.sqrt(2)
+    mu /= 2
+
+    return mu, sigma
+
+def generate_rate_prior(low, high):
+    # Deprecated
+    mu, sigma = solve_lognorm_params(low, high)
+    return lognorm(sigma, scale=numpy.exp(mu))
+
+def n_given_rate(r, n, vt):
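+    # Poisson probability of observing n events given rate r and sensitive time-volume vt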
+    _lambda = r * vt
+    return poisson(_lambda).pmf(n)
+
+#
+# Distributional objects
+#
+
+class CitedObject(object):
+    def __init__(self, url=None):
+        super(CitedObject, self).__init__()
+        self._url = url
+        # related or superseding
+        self._related = set()
+
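+    # "a << b" adds a to b's set of related/superseded objects and leaves the
+    # name a bound to a; "a <<= b" does the same but rebinds the name a to b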
+    def __lshift__(self, value):
+        if not isinstance(value, CitedObject):
+            raise TypeError("Attempt to append an object not of type `CitedObject`")
+
+        value._related.add(self)
+        return self
+
+    def __ilshift__(self, value):
+        if not isinstance(value, CitedObject):
+            raise TypeError("Attempt to append an object not of type `CitedObject`")
+
+        value._related.add(self)
+        return value
+
+#
+# Rate Distributions
+#
+class RateDistribution(CitedObject):
+    def __init__(self, dtype, low=None, high=None, url=None):
+        super(RateDistribution, self).__init__(url)
+
+        self._low, self._high = low, high
+
+        # lognormal or UL (exponential like)
+        if dtype == "lognorm":
+            mu, sigma = solve_lognorm_params(low, high)
+            self.distr = lognorm(sigma, scale=numpy.exp(mu))
+            try:
+                self._range = numpy.logspace(numpy.log10(low) - 1, \
+                                    numpy.log10(high) + 1, 100) * _invgpc3yr
+            except (units.core.UnitTypeError, TypeError):
+                self._range = numpy.logspace(numpy.log10(low.value) - 1, \
+                                numpy.log10(high.value) + 1, 100) * _invgpc3yr
+
+        elif dtype == "expon":
+            # FIXME: high is assumed 90% CI
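+            # Exponential with 90% of its mass below high: solve 1 - exp(-high/scale) = 0.9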
+            scale = -high / numpy.log(1 - .9)
+            self.distr = expon(scale=scale)
+            try:
+                self._range = numpy.logspace(numpy.log10(scale) - 2, \
+                                            numpy.log10(scale) + 2, 100)
+            except (units.core.UnitTypeError, TypeError):
+                self._range = numpy.logspace(numpy.log10(scale.value) - 2, \
+                                numpy.log10(scale.value) + 2, 100) * _invgpc3yr
+        else:
+            raise NotImplementedError("Distributional type {0} not yet supported.".format(dtype))
+
+    def __str__(self):
+        return "{0}: ({1:.2f}, {2:.2f})".format(self.distr.dist.name, \
+                                                self._low or 0., self._high)
+
+    def integrate_rates(self, n, vt):
+
+        # This is our rate distribution
+        rate_distr = self.distr
+
+        # We're integrating the rate prior against p(n|R), likely a Poissonian
+        # distribution with the Poisson mean given by RVT
+        def integrand(r, n, vt):
+            return rate_distr.pdf(r) * n_given_rate(r, n, vt=vt)
+
+        intg = [integrand(self._range, _n, vt) for _n in n]
+
+        p_n = numpy.array([trapz(intg, self._range)]).flatten()
+
+        return p_n, numpy.asarray(intg)
+
+#
+# Rate integration
+#
+
+def integrate_rates(r, n, rlow, rhigh, vt):
+    # Deprecated
+
+    # This is our rate distribution
+    rate_distr = generate_rate_prior(rlow, rhigh)
+
+    # We're integrating the rate prior against p(n|R), likely a Poissonian
+    # distribution with the Poisson mean given by RVT
+    def integrand(r, n, vt):
+        return rate_distr.pdf(r) * n_given_rate(r, n, vt=vt)
+
+    intg = [integrand(r, _n, vt) for _n in n]
+
+    p_n = numpy.array([trapz(intg, r)]).flatten()
+
+    return p_n, numpy.asarray(intg)
+
+#
+# Event based detection probabilities
+#
+
+def _prb_n_detect(prb):
+    nprb = 1. - prb
+    idx = set(range(len(prb)))
+    n_prb = numpy.zeros(len(prb)+1)
+    n_prb[0] = numpy.prod(nprb)
+    n_prb[-1] = numpy.prod(prb)
+    for i in range(1, len(prb)):
+        # Detection probs
+        for comb in itertools.combinations(idx, i):
+            det = numpy.asarray(comb)
+            # Complement is misses
+            miss = numpy.asarray(list(idx - set(det)))
+            n_prb[i] += numpy.prod(prb[det]) * numpy.prod(nprb[miss])
+
+    return n_prb
+
+def prb_n_detect(prb):
+    """
+    The straightforward way of calculating P(n_obs|{p}_i) is computationally unfeasible for large i \\in N. This is the "discrete Fourier transform" method explained in https://en.wikipedia.org/wiki/Poisson_binomial_distribution . It is a refinement of a recursive method which is much faster, but numerically unstable.
+    """
+    n = numpy.arange(0, len(prb)+1)
+    c_n = numpy.exp(2j * numpy.pi / len(n))
+    c_l = c_n**(n + 1)
+    n_prb = 1 + numpy.outer(prb, c_l - 1)
+    c_k = numpy.asarray([c_l**-k for k in n])
+    n_prb = c_k * n_prb.prod(axis=0)
+    return (1. / len(n)) * numpy.real_if_close(n_prb.sum(axis=1))
diff --git a/scan.py b/gw_event_gen/scan.py
similarity index 92%
rename from scan.py
rename to gw_event_gen/scan.py
index 0f5d8f5bd1cdc75b0b32d9cf0bf60692124034f2..597fb867afff6a74c186378ef73b6914a3c67d28 100644
--- a/scan.py
+++ b/gw_event_gen/scan.py
@@ -225,27 +225,3 @@ def resample_sky_map(smap, new_res=3.5**2, verbose=False):
 
     # resample skymap to be closer to instrument FoV
     return healpy.pixelfunc.ud_grade(smap, ns_ds, power=-2)
-
-
-if __name__ == "__main__":
-
-    # Generate random healpix compatible sky map
-    smap = numpy.random.uniform(0, 1, 12 * 128**2)
-    smap /= smap.sum()
-
-    # Resample down to a different resolution
-    smap_ds = resample_sky_map(smap, verbose=True)
-    ns = healpy.npix2nside(len(smap_ds))
-
-    # Brute force, start at highest prob and work downwards
-    pos = sky_brute(smap_ds)
-
-    # Sum over the angular distance traversed
-    xyz_pos = healpy.ang2vec(*pos)
-    ang_trav = numpy.asarray([ang_dist(*p) for p in \
-                    zip(xyz_pos[:-1], xyz_pos[1:])])
-    print ang_trav.sum()
-
-    smap_graph = construct_sm_graph(smap_ds)
-    for pix_node in smap_graph:
-        [d for l, d in pix_node]
diff --git a/skyutils.py b/gw_event_gen/skyutils.py
similarity index 100%
rename from skyutils.py
rename to gw_event_gen/skyutils.py
diff --git a/rates.py b/rates.py
deleted file mode 100644
index 58f979830f2f34ce5783b419f0635df7e4e6204d..0000000000000000000000000000000000000000
--- a/rates.py
+++ /dev/null
@@ -1,422 +0,0 @@
-#!/usr/bin/env python
-import json
-import itertools
-
-import numpy
-
-from scipy.special import erfinv
-from scipy.stats import lognorm, poisson, expon
-from scipy.integrate import fixed_quad, trapz, cumtrapz
-
-from astropy import units
-_invmpc3yr = units.Unit("Mpc^-3 yr^-1")
-_invgpc3yr = units.Unit("Gpc^-3 yr^-1")
-
-#
-# Utilities for generating rate distributions
-#
-
-def solve_lognorm_params(low, high):
-    """
-    Obtain parameters for the scipy lognorm distribution by fitting mu and sigm to the low and high numbers for a given category of rates. Thanks to Will Farr for some guidance in this matter.
-    """
-
-    try:
-        mu = 0.5 * (numpy.log(high) + numpy.log(low))
-        # Nominally, we used 0.25 (2 sigma) here, but this is a better
-        # approximation
-        sigma = 0.25 * (numpy.log(high) - numpy.log(low))
-    except (units.core.UnitTypeError, TypeError):
-        mu = 0.5 * (numpy.log(high.value) + numpy.log(low.value))
-        # Nominally, we used 0.25 (2 sigma) here, but this is a better
-        # approximation
-        sigma = 0.25 * (numpy.log(high.value) - numpy.log(low.value))
-
-    dR = numpy.log(high.value) - numpy.log(low.value)
-    derf = erfinv(2 * 0.95 - 1) - erfinv(2 * 0.05 - 1)
-    sigma = dR / derf
-
-    dR = numpy.log(high.value) + numpy.log(low.value)
-    derf = erfinv(2 * 0.95 - 1) + erfinv(2 * 0.05 - 1)
-    mu = dR + derf * sigma
-
-    sigma /= numpy.sqrt(2)
-    mu /= 2
-
-    return mu, sigma
-
-def generate_rate_prior(low, high):
-    # Deprecated
-    mu, sigma = solve_lognorm_params(low, high)
-    return lognorm(sigma, scale=numpy.exp(mu))
-
-def n_given_rate(r, n, vt):
-    _lambda = r * vt
-    return poisson(_lambda).pmf(n)
-
-#
-# Distributional objects
-#
-
-class CitedObject(object):
-    def __init__(self, url=None):
-        super(CitedObject, self).__init__()
-        self._url = url
-        # related or superceding
-        self._related = set()
-
-    def __lshift__(self, value):
-        if not isinstance(value, CitedObject):
-            raise TypeError("Attempt to append an object not of type `CitedObject`")
-
-        value._related.add(self)
-        return self
-
-    def __ilshift__(self, value):
-        if not isinstance(value, CitedObject):
-            raise TypeError("Attempt to append an object not of type `CitedObject`")
-
-        value._related.add(self)
-        return value
-
-#
-# Rate Distributions
-#
-class RateDistribution(CitedObject):
-    def __init__(self, dtype, low=None, high=None, url=None):
-        super(RateDistribution, self).__init__(url)
-
-        self._low, self._high = low, high
-
-        # lognormal or UL (exponential like)
-        if dtype == "lognorm":
-            mu, sigma = solve_lognorm_params(low, high)
-            self.distr = lognorm(sigma, scale=numpy.exp(mu))
-            try:
-                self._range = numpy.logspace(numpy.log10(low) - 1, \
-                                    numpy.log10(high) + 1, 100) * _invgpc3yr
-            except (units.core.UnitTypeError, TypeError):
-                self._range = numpy.logspace(numpy.log10(low.value) - 1, \
-                                numpy.log10(high.value) + 1, 100) * _invgpc3yr
-
-        elif dtype == "expon":
-            # FIXME: high is assumed 90% CI
-            scale = -high / numpy.log(1 - .9)
-            self.distr = expon(scale=scale)
-            try:
-                self._range = numpy.logspace(numpy.log10(scale) - 2, \
-                                            numpy.log10(scale) + 2, 100)
-            except (units.core.UnitTypeError, TypeError):
-                self._range = numpy.logspace(numpy.log10(scale.value) - 2, \
-                                numpy.log10(scale.value) + 2, 100) * _invgpc3yr
-        else:
-            raise NotImplementedError("Distributional type {0} not yet supported.".format(dtype))
-
-    def __str__(self):
-        return "{0}: ({1:.2f}, {2:.2f})".format(self.distr.dist.name, \
-                                                self._low or 0., self._high)
-
-    def integrate_rates(self, n, vt):
-
-        # This is our rate distribution
-        rate_distr = self.distr
-
-        # We're integrating the rate prior against p(n|R), likely a poissionian
-        # distribution with the Poisson mean given by RVT
-        def integrand(r, n, vt):
-            return rate_distr.pdf(r) * n_given_rate(r, n, vt=vt)
-
-        intg = [integrand(self._range, _n, vt) for _n in n]
-
-        p_n = numpy.array([trapz(intg, self._range)]).flatten()
-
-        return p_n, numpy.asarray(intg)
-
-#
-# Neutron Star Black Holes
-#
-
-#
-# Adapted from: https://arxiv.org/pdf/1003.2480.pdf
-#
-# numbers in above publication are Mpc^-3 Myr^-1, we convert to Mpc^-3 yr^-1
-GW_RATES_PREDICTION = "https://arxiv.org/abs/1003.2480"
-NSBH_RATES_LOW = ((6e-4 * 1e-6) * _invmpc3yr).to(_invgpc3yr)
-NSBH_RATES_LIKELY = ((3e-2 * 1e-6) * _invmpc3yr).to(_invgpc3yr)
-NSBH_RATES_HIGH = ((1. * 1e-6) * _invmpc3yr).to(_invgpc3yr)
-
-NSBH_MODELLED = RateDistribution("lognorm", low=NSBH_RATES_LOW, \
-                                high=NSBH_RATES_HIGH, \
-                                url=GW_RATES_PREDICTION)
-
-# FIXME: post-O1 UL
-
-# https://arxiv.org/abs/1811.12907
-GWTC1 = "https://arxiv.org/abs/1811.12907"
-NSBH_RATE_UL = json.loads("""{
-    "5": {
-        "isotropic": {
-            "gstlal": 610,
-            "pycbc": 580
-        },
-        "aligned": {
-            "gstlal": 540,
-            "pycbc": 520
-        }
-    },
-    "10": {
-        "isotropic": {
-            "gstlal": 470,
-            "pycbc": 410
-        },
-        "aligned": {
-            "gstlal": 340,
-            "pycbc": 300
-        }
-    },
-    "30": {
-        "isotropic": {
-            "gstlal": 400,
-            "pycbc": 340
-        },
-        "aligned": {
-            "gstlal": 220,
-            "pycbc": 200
-        }
-    }
-}""")
-
-# gstlal isotropic
-NSBH_RATES_UL_BH_5 = NSBH_RATE_UL["5"]["isotropic"]["gstlal"] * _invgpc3yr
-NSBH_RATES_UL_BH_10 = NSBH_RATE_UL["10"]["isotropic"]["gstlal"] * _invgpc3yr
-NSBH_RATES_UL_BH_30 = NSBH_RATE_UL["30"]["isotropic"]["gstlal"] * _invgpc3yr
-
-NSBH_LIMIT_BH_5 = RateDistribution("expon", high=NSBH_RATES_UL_BH_5, \
-                                     url=GWTC1)
-NSBH_LIMIT_BH_10 = RateDistribution("expon", high=NSBH_RATES_UL_BH_10, \
-                                     url=GWTC1)
-NSBH_LIMIT_BH_30 = RateDistribution("expon", high=NSBH_RATES_UL_BH_30, \
-                                     url=GWTC1)
-
-#
-# Binary Neutron Stars
-#
-
-# Adapted from: https://arxiv.org/pdf/1003.2480.pdf
-BNS_RATES_LOW = 1e-2 * _invgpc3yr
-BNS_RATES_LIKELY = 1. * _invgpc3yr
-BNS_RATES_HIGH = 1e1 * _invgpc3yr
-
-BNS_MODELLED = RateDistribution("lognorm", low=BNS_RATES_LOW, \
-                                high=BNS_RATES_HIGH, \
-                                url=GW_RATES_PREDICTION)
-
-# From GW170817 discovery
-# https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.119.161101
-GW170817_DISCOVERY = "https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.119.161101"
-BNS_RATES_LIKELY = 1540 * _invgpc3yr
-BNS_RATES_LOW = (1540 - 1220) * _invgpc3yr
-BNS_RATES_HIGH = (1540 + 3200) * _invgpc3yr
-
-BNS_GW_OBS = RateDistribution("lognorm", low=BNS_RATES_LOW, \
-                                high=BNS_RATES_HIGH, \
-                                url=GW170817_DISCOVERY)
-
-BNS_RATES = json.loads("""{
-    "astro": {
-        "pycbc": [170, 1210, 4440],
-        "gstlal": [130, 920, 3140],
-        "combined": [250, 1050, 3570]
-    },
-    "broad": {
-        "pycbc": [120, 800, 2770],
-        "gstlal": [97, 662, 2271],
-        "combined": [170, 730, 2360]
-    },
-    "union": [110, 3840]
-}""")
-
-BNS_RATES_UNI = RateDistribution("lognorm", \
-                        low=BNS_RATES["broad"]["gstlal"][0] * _invgpc3yr, \
-                        high=BNS_RATES["broad"]["gstlal"][-1] * _invgpc3yr, \
-                        url=GWTC1)
-
-BNS_RATES_NRM = RateDistribution("lognorm", \
-                        low=BNS_RATES["astro"]["gstlal"][0] * _invgpc3yr, \
-                        high=BNS_RATES["astro"]["gstlal"][-1] * _invgpc3yr, \
-                        url=GWTC1)
-
-BNS_GW_OBS <<= RateDistribution("lognorm", \
-                        low=BNS_RATES["union"][0] * _invgpc3yr, \
-                        high=BNS_RATES["union"][-1] * _invgpc3yr, \
-                        url=GWTC1)
-
-#
-# Binary Black Holes
-#
-
-# FIXME: Add GW150914, post-O1
-
-# From GW170104
-# https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.118.221101
-GW170104_DISCOVERY = "https://journals.aps.org/prl/abstract/10.1103/PhysRevLett.118.221101"
-# numbers in above publication are Gpc^-3 yr^-1, we convert to Mpc^-3 yr^-1
-BBH_RATES_LOW = 12 * _invgpc3yr
-BBH_RATES_HIGH = 213 * _invgpc3yr
-# NOTE: "likely" isn't accurate since we merged two distributions
-BBH_RATES_LIKELY = 103 * _invgpc3yr
-
-BBH_GW_OBS = RateDistribution("lognorm", low=BBH_RATES_LOW, \
-                                high=BBH_RATES_HIGH, \
-                                url=GW170104_DISCOVERY)
-
-# Aligned = gstlal and isotropic = pycbc
-BBH_RATES = json.loads("""{
-    "uniform_in_log": {
-        "aligned": [9.4, 18.1, 31.0],
-        "isotropic": [9.8, 19.5, 34.7],
-        "combined": [10.8, 19, 32]
-    },
-    "power_law": {
-        "aligned": [29, 56, 101],
-        "isotropic": [28, 57, 104],
-        "combined": [32, 57, 97]
-    },
-    "union": [9.7, 101]
-}""")
-
-BBH_RATES_PL = RateDistribution("lognorm", \
-                low=BBH_RATES["power_law"]["combined"][0] * _invgpc3yr,
-                high=BBH_RATES["power_law"]["combined"][-1] * _invgpc3yr,
-                url=GWTC1)
-
-BBH_RATES_ULM = RateDistribution("lognorm", \
-                low=BBH_RATES["uniform_in_log"]["combined"][0] * _invgpc3yr,
-                high=BBH_RATES["uniform_in_log"]["combined"][-1] * _invgpc3yr,
-                url=GWTC1)
-
-# Union from GWTC-1
-BBH_GW_OBS <<= RateDistribution("lognorm", \
-                                low=BBH_RATES["union"][0] * _invgpc3yr, \
-                                high=BBH_RATES["union"][-1] * _invgpc3yr, \
-                                url=GWTC1)
-
-# https://arxiv.org/abs/1811.12907
-GW_BBH_POPULATIONS = "https://arxiv.org/abs/1811.12907"
-# O1O2 Populations analysis, Model B: https://arxiv.org/abs/1811.12940
-BBH_RATES_LIKELY = 53 * _invgpc3yr
-BBH_RATES_LOW = (53 - 29) * _invgpc3yr
-BBH_RATES_HIGH = (53 + 59) * _invgpc3yr
-
-BBH_GW_OBS <<= RateDistribution("lognorm", low=BBH_RATES_LOW, \
-                                high=BBH_RATES_HIGH, \
-                                url=GW_BBH_POPULATIONS)
-
-# https://arxiv.org/abs/1704.04628
-GW_IMBH_O1 = "https://arxiv.org/abs/1704.04628"
-# O1 IMBH 100 + 100 nonspinning
-IMBH_RATES_UL_100_100 = 2 * _invgpc3yr
-
-#
-# Intermediate Mass Binary Black Holes
-#
-IMBH_GW_OBS = RateDistribution("expon", high=IMBH_RATES_UL_100_100, \
-                                url=GW_IMBH_O1)
-
-#
-# Rate integration
-#
-
-def integrate_rates(r, n, rlow, rhigh, vt):
-    # Deprecated
-
-    # This is our rate distribution
-    rate_distr = generate_rate_prior(rlow, rhigh)
-
-    # We're integrating the rate prior against p(n|R), likely a poissionian
-    # distribution with the Poisson mean given by RVT
-    def integrand(r, n, vt):
-        return rate_distr.pdf(r) * n_given_rate(r, n, vt=vt)
-
-    intg = [integrand(r, _n, vt) for _n in n]
-
-    p_n = numpy.array([trapz(intg, r)]).flatten()
-
-    return p_n, numpy.asarray(intg)
-
-#
-# Event based detection probabilities
-#
-
-def _prb_n_detect(prb):
-    nprb = 1. - prb
-    idx = set(range(len(prb)))
-    n_prb = numpy.zeros(len(prb)+1)
-    n_prb[0] = numpy.prod(nprb)
-    n_prb[-1] = numpy.prod(prb)
-    for i in range(1, len(prb)):
-        # Detection probs
-        for comb in itertools.combinations(idx, i):
-            det = numpy.asarray(comb)
-            # Complement is misses
-            miss = numpy.asarray(list(idx - set(det)))
-            n_prb[i] += numpy.prod(prb[det]) * numpy.prod(nprb[miss])
-
-    return n_prb
-
-def prb_n_detect(prb):
-    """
-    The straightforward way of calculating the P(n_obs|{p}_i) is computation unfeasinble for large i \\in N. This is the "discrete Fourier transform" method, explained in https://en.wikipedia.org/wiki/Poisson_binomial_distribution . It is a refinement of a recursive method which is much faster, but numerically unstable.
-    """
-    n = numpy.arange(0, len(prb)+1)
-    c_n = numpy.exp(2j * numpy.pi / len(n))
-    c_l = c_n**(n + 1)
-    n_prb = 1 + numpy.outer(prb, c_l - 1)
-    c_k = numpy.asarray([c_l**-k for k in n])
-    n_prb = c_k * n_prb.prod(axis=0)
-    return (1. / len(n)) * numpy.real_if_close(n_prb.sum(axis=1))
-
-
-if __name__ == "__main__":
-    import matplotlib
-    matplotlib.use("agg")
-    from matplotlib import pyplot
-
-    print BBH_GW_OBS
-    dist = BBH_GW_OBS.distr
-    low, high = BBH_GW_OBS._low, BBH_GW_OBS._high
-    print "BBH rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
-
-    print BNS_GW_OBS
-    dist = BNS_GW_OBS.distr
-    low, high = BNS_GW_OBS._low, BNS_GW_OBS._high
-    print "BNS rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
-
-    print NSBH_LIMIT_BH_5
-    dist = NSBH_LIMIT_BH_5.distr
-    high = NSBH_LIMIT_BH_5._high
-    print "NSBH UL rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}".format(high, dist.cdf(high))
-
-    print NSBH_MODELLED
-    dist = NSBH_MODELLED.distr
-    low, high = NSBH_MODELLED._low, NSBH_MODELLED._high
-    print "NSBH model rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}, {2.value:1.3e} {2.unit}: {3:1.3f}".format(low, dist.cdf(low), high, dist.cdf(high))
-
-    print IMBH_GW_OBS
-    dist = IMBH_GW_OBS.distr
-    low, high = IMBH_GW_OBS._low, IMBH_GW_OBS._high
-    print "IMBH UL rates, cdf at {0.value:1.3e} {0.unit}: {1:1.3f}".format(high, dist.cdf(high))
-
-    x = NSBH_MODELLED._range
-    pyplot.subplot(2, 1, 1)
-    pyplot.plot(x, dist.pdf(x))
-    rvs = dist.rvs(100000)
-    # FIXME: Reenable when CI runners have more modern matplotlib
-    #pyplot.hist(rvs, bins=x, histtype='stepfilled', density=True)
-    pyplot.hist(rvs, bins=x, histtype='stepfilled', normed=True)
-    pyplot.semilogx()
-    pyplot.subplot(2, 1, 2)
-    pyplot.plot(x, dist.cdf(x))
-    pyplot.semilogx()
-    pyplot.legend()
-    pyplot.savefig("rates_test.png")
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..d7d95886ee1f89093aac3c10dcd6f4b37e9b7a7e
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+__usage__ = "setup.py command [--options]"
+__description__ = "standard install script"
+__author__ = "Chris Pankow (chris.pankow@ligo.org)"
+
+#-------------------------------------------------
+
+from setuptools import (setup, find_packages)
+import glob
+
+setup(
+    name = 'gw_event_gen',
+    version = '0.0',
+    url = 'https://git.ligo.org/chris-pankow/gw_event_gen',
+    author = __author__,
+    author_email = 'chris.pankow@ligo.org',
+    description = __description__,
+    license = 'MIT',
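+    # everything under bin/ is installed as an executable, which is what lets
+    # the CI call netsim and rates directly (see .gitlab-ci.yml)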
+    scripts = glob.glob('bin/*'),
+    packages = find_packages(),
+    data_files = [],
+    requires = [],
+)