Skip to content
Snippets Groups Projects
Commit efaad0b1 authored by chad.hanna's avatar chad.hanna
Browse files

gstlal_inspiral_activation_counts: improve initial version

parent 8f4b0e86
No related branches found
No related tags found
No related merge requests found
Pipeline #83735 failed
......@@ -12,6 +12,7 @@ dist_bin_SCRIPTS = \
gstlal_mock_data_server \
gstlal_ilwdify \
gstlal_inspiral_activation_counts \
gstlal_inspiral_activation_counts_dag \
gstlal_inspiral_add_metric_overlaps \
gstlal_inspiral_best_coinc_file \
gstlal_inspiral_calc_likelihood_by_bin \
......
......@@ -33,8 +33,9 @@ parser = argparse.ArgumentParser()
parser.add_argument("--output", help = "provide the output file")
parser.add_argument("--psd-xml-file", help = "provide a psd xml file")
parser.add_argument("--svd-file", help = "provide the bank file for which overlaps will be calculated")
parser.add_argument("--m1", action = "append", help = "provide the start:stop:num parameters for mass 1, e.g., 1:3:100")
parser.add_argument("--m2", action = "append", help = "provide the start:stop:num parameters for mass 2, e.g., 1:3:100")
parser.add_argument("--m1", action = "append", help = "provide the num:start:stop parameters for mass 1, e.g., 100:1:3")
parser.add_argument("--m2", action = "append", help = "provide the num:start:stop parameters for mass 2, e.g., 100:1:3")
parser.add_argument("--s1", action = "append", help = "provide the num:start:stop parameters for spin 1, e.g., 11:-1:1")
args = parser.parse_args()
......@@ -66,46 +67,45 @@ def x_y_z_zn_from_row(row):
print >> outfile, "m1 start, m1 stop, m2 start, m2 stop, activation counts"
# compute template match functions:
# put the templates in a useful form
tmps = numpy.array([x_y_z_zn_from_row(row) for row in sngl_inspiral_table])
print "computing metrics"
metrics = [(tmp, g_ij(tmp)[0]) for tmp in tmps]
match_funcs = []
for tmp in (x_y_z_zn_from_row(row) for row in sngl_inspiral_table):
def match(vec2, vec1 = tmp, g = g_ij(tmp)[0]):
return g_ij.pseudo_match(g, vec1, vec2)
match_funcs.append(match)
for m1, m2 in zip(args.m1, args.m2):
snrs = numpy.linspace(8., 20., 13)
snrnorm = sum(snrs**-4)
print m1, m2
for m1, m2, s1 in zip(args.m1, args.m2, args.s1):
m1s,m1e,m1n = [float(x) for x in m1.split(":")]
m2s,m2e,m2n = [float(x) for x in m2.split(":")]
print m1, m2
m1n,m1s,m1e = [float(x) for x in m1.split(":")]
m2n,m2s,m2e = [float(x) for x in m2.split(":")]
s1n,s1s,s1e = [float(x) for x in s1.split(":")]
# uniform population in m1 / m2 space
signals = []
weights = []
len_signals = 0
print "calculating signal pop"
for m1,m2 in itertools.product(numpy.logspace(numpy.log10(m1s), numpy.log10(m1e), m1n), numpy.logspace(numpy.log10(m2s), numpy.log10(m2e), m2n)):
for m1,m2,s1 in itertools.product(numpy.logspace(numpy.log10(m1s), numpy.log10(m1e), m1n), numpy.logspace(numpy.log10(m2s), numpy.log10(m2e), m2n), numpy.linspace(s1s, s1e, s1n)):
# assume zero spin
len_signals += 1
if mchirp(m1, m2) <= max_mchirp and mchirp(m1, m2) >= min_mchirp:
signals.append(numpy.array([metric_module.x_from_m1_m2_s1_s2(m1, m2, 0, 0), metric_module.y_from_m1_m2_s1_s2(m1, m2, 0, 0), 0, 0]))
signals.append(numpy.array([metric_module.x_from_m1_m2_s1_s2(m1, m2, s1, 0), metric_module.y_from_m1_m2_s1_s2(m1, m2, s1, 0), metric_module.z_from_m1_m2_s1_s2(m1, m2, s1, 0), 0]))
# Correct for the logarithmic distribution
weights.append(m1*m2)
weights = numpy.array(weights)
signals = numpy.array(signals)
count = 0
print "computing counts for %d signals" % len(signals)
for n, (tmp, g) in enumerate(metrics):
print "tmp %d/%d" % (n, len(metrics))
def match(vec2, vec1 = tmp, g = g):
return g_ij.pseudo_match(g, vec1, vec2)
# Assume an SNR 10 signal...What should this be?? Some function like this?
matches = map(match, signals)
snrs = numpy.linspace(8., 20., 13)
for rho in snrs:
count += rho**-4 * sum(numpy.exp(-rho**2*(1.0 - x)**2/2.) * w for x, w in zip(matches, weights) if x <= 1.0) / len_signals / sum(snrs**-4)
if len(signals) > 0:
for n, match_func in enumerate(match_funcs):
matches = numpy.apply_along_axis(match_func, 1, signals)
for rho in snrs:
count += rho**-4 / snrnorm / len_signals * (numpy.exp(-rho**2*(1.0 - matches)**2/2.) * weights).sum()
print >> outfile, "%f, %f, %f, %f, %e" % (m1s, m1e, m2s, m2e, count)
#!/usr/bin/python
#
# Build a Condor DAG that runs one gstlal_inspiral_activation_counts job per
# SVD bank listed in a LAL cache file.  The mass/spin grid options are passed
# through verbatim to every job; each job writes its own NNNN-COUNTS.txt file.

import os
import argparse

from gstlal import dagparts
from lal.utils import CacheEntry

# The m1/m2/s1 options are repeatable "num:start:stop" grid specifications,
# forwarded unmodified to gstlal_inspiral_activation_counts.
parser = argparse.ArgumentParser()
parser.add_argument("--output", help = "provide the output file")
parser.add_argument("--psd-xml-file", help = "provide a psd xml file")
parser.add_argument("--svd-cache", help = "provide the svd bank cache.")
parser.add_argument("--m1", action = "append", help = "provide the num:start:stop parameters for mass 1, e.g., 100:1:3")
parser.add_argument("--m2", action = "append", help = "provide the num:start:stop parameters for mass 2, e.g., 100:1:3")
parser.add_argument("--s1", action = "append", help = "provide the num:start:stop parameters for spin 1, e.g., 11:-1:1")
parser.add_argument("--accounting-group", help = "set accounting group")
parser.add_argument("--accounting-group-user", help = "set accounting group user")
args = parser.parse_args()

# Condor log directory.  Tolerate it already existing, but do not hide other
# failures (permissions, read-only filesystem): the original bare "except"
# swallowed every exception type.
try:
	os.mkdir("logs")
except OSError:
	pass

dag = dagparts.DAG("ac_counts")
acJob = dagparts.DAGJob("gstlal_inspiral_activation_counts", condor_commands = {"accounting_group": args.accounting_group, "accounting_group_user": args.accounting_group_user})

# One independent DAG node per SVD bank in the cache.  "svd_bin" avoids
# shadowing the builtin bin(); the cache file is closed deterministically.
with open(args.svd_cache) as cache_file:
	for svd_bin, line in enumerate(cache_file):
		dagparts.DAGNode(acJob,
			dag,
			parent_nodes = [],
			opts = {"m1": args.m1, "m2": args.m2, "s1": args.s1},
			input_files = {"psd-xml-file": args.psd_xml_file, "svd-file": CacheEntry(line).path},
			output_files = {"output": "%04d-COUNTS.txt" % svd_bin}
		)

# Emit the submit files, the DAG itself, a shell-script rendition, and the
# output cache.
dag.write_sub_files()
dag.write_dag()
dag.write_script()
dag.write_cache()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment