Commit 1d6e4f39 authored by Patrick Godwin, committed by ChiWai Chan

switch offline workflow to computing expected SNR of injections as part of separate program, fix issues with rerank/injection dags
parent 2d7b8597
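For orientation, here is the reworked injection handling this commit introduces into the offline DAG generation, assembled from the call sites in the diff below. Splitting injections, computing their expected SNR, and matching them to templates now run as their own DAG layers before filtering, and the pre-matched injections are reused downstream instead of being recomputed during rerank. This is a reading aid using the dag methods exactly as they appear in the diff, not a standalone script:

    # new injection stages, built once per workflow
    if config.filter.injections:
        injections = dag.split_injections()                          # gstlal_injsplitter
        injections = dag.calc_expected_snr(median_psd, injections)   # gstlal_inspiral_injection_snr
        lnlr_injections = dag.match_injections(injections)           # gstlal_inspiral_injection_template_match

    # downstream, clustering now takes the split injections directly,
    # and the LnLR CDF measurement consumes the pre-matched set
    triggers = dag.cluster(triggers, injections)
    lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, lnlr_injections)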
@@ -53,7 +53,7 @@ if args.command == "init":
if os.path.exists(os.path.join(os.getcwd(), "Makefile")) and not args.force:
print("Makefile already exists. To overwrite, run with --force", file=sys.stderr)
else:
- write_makefile(config, "Makefile.offline_inspiral_template", workflow=args.workflow)
+ write_makefile(config, f"Makefile.offline_inspiral_{args.workflow}_template", workflow=args.workflow)
# generate dag
elif args.command == "create":
@@ -76,6 +76,11 @@ elif args.command == "create":
median_psd = dag.median_psd(ref_psd)
svd_bank = dag.svd_bank(median_psd, split_bank)
+ if config.filter.injections:
+ injections = dag.split_injections()
+ injections = dag.calc_expected_snr(median_psd, injections)
+ lnlr_injections = dag.match_injections(injections)
triggers, dist_stats = dag.filter(ref_psd, svd_bank)
if config.filter.injections:
inj_triggers = dag.filter_injections(ref_psd, svd_bank)
@@ -87,10 +92,10 @@ elif args.command == "create":
# load filter data products
ref_psd = DataCache.find(DataType.REFERENCE_PSD, root=config.data.analysis_dir)
median_psd = DataCache.find(DataType.MEDIAN_PSD, root=config.data.analysis_dir)
- injections = DataCache.find(DataType.MATCHED_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
+ injections = DataCache.find(DataType.SPLIT_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
+ lnlr_injections = DataCache.find(DataType.MATCHED_INJECTIONS, root=filter_dir, svd_bins="*", subtype="*")
svd_bank = DataCache.find(DataType.SVD_BANK, root=filter_dir, svd_bins="*")
dist_stats = DataCache.find(DataType.DIST_STATS, root=filter_dir, svd_bins="*")
triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*", subtype="*")
inj_triggers = DataCache.find(DataType.TRIGGERS, root=filter_dir, svd_bins="*")
triggers += inj_triggers
@@ -104,12 +109,11 @@ elif args.command == "create":
pdfs = dag.marginalize_pdf(pdfs)
triggers = dag.calc_likelihood(triggers, dist_stats)
- triggers = dag.cluster(triggers)
+ triggers = dag.cluster(triggers, injections)
if config.filter.injections:
- triggers, injections = dag.find_injections(triggers)
- injections = dag.match_injections(injections)
- lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, injections)
+ triggers, inj_triggers = dag.find_injections(triggers)
+ lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, lnlr_injections)
triggers, pdfs = dag.compute_far(triggers, pdfs)
@@ -124,10 +128,11 @@ elif args.command == "create":
elif args.workflow == "injection":
# input data products
ref_psd = DataCache.find(DataType.REFERENCE_PSD, root=config.data.analysis_dir)
+ median_psd = DataCache.find(DataType.MEDIAN_PSD, root=config.data.analysis_dir)
svd_bank = DataCache.find(DataType.SVD_BANK, root=filter_dir, svd_bins="*")
dist_stats = DataCache.find(DataType.MARG_DIST_STATS, root=rank_dir, svd_bins="*")
pdfs = DataCache.find(DataType.DIST_STAT_PDFS, root=rank_dir)
- orig_zl_triggers = DataCache.find(DataType.TRIGGER_DATABASE, root=rank_dir)
+ orig_zl_triggers = DataCache.find(DataType.TRIGGERS, root=rank_dir, extension="sqlite")
# make a copy of zerolag triggers
zerolag_triggers = orig_zl_triggers.copy(root="rank")
@@ -135,16 +140,17 @@ elif args.command == "create":
shutil.copy2(src_trg, dest_trg)
# generate injection-only dag layers
- injections = dag.match_injections()
+ injections = dag.split_injections()
+ injections = dag.calc_expected_snr(median_psd, injections)
+ lnlr_injections = dag.match_injections(injections)
triggers = dag.filter_injections(ref_psd, svd_bank)
triggers = dag.aggregate(triggers)
triggers = dag.calc_likelihood(triggers, dist_stats)
- triggers = dag.cluster(triggers)
+ triggers = dag.cluster(triggers, injections)
triggers, injections = dag.find_injections(triggers)
- injections = dag.match_injections(injections)
- lnlr_cdfs = dags.measure_lnlr_cdf(dist_stats, injections)
+ lnlr_cdfs = dag.measure_lnlr_cdf(dist_stats, lnlr_injections)
triggers += zerolag_triggers
triggers, pdfs = dag.compute_far(triggers, pdfs)
@@ -575,7 +575,12 @@ def calc_likelihood_layer(config, dag, trigger_cache, dist_stat_cache):
calc_triggers = calc_trigger_cache.groupby("bin", "subtype", "dirname")
dist_stats = dist_stat_cache.groupby("bin")
for (svd_bin, inj_type, dirname), triggers in trigger_cache.groupby("bin", "subtype", "dirname").items():
- calc_dirname = dirname.replace("filter", "rank")
+ # find path relative to current directory
+ # where assigned triggers will reside
+ split_dirname = dirname.split(os.sep)
+ dir_idx = split_dirname.index("filter")
+ calc_dirname = os.path.join(*split_dirname[dir_idx:]).replace("filter", "rank")
arguments = [
Option("force"),
@@ -606,7 +611,7 @@ def calc_likelihood_layer(config, dag, trigger_cache, dist_stat_cache):
return calc_trigger_cache
- def cluster_layer(config, dag, trigger_cache):
+ def cluster_layer(config, dag, trigger_cache, injection_cache):
# cluster triggers by likelihood
combine_layer = Layer(
"ligolw_add",
@@ -681,6 +686,7 @@ def cluster_layer(config, dag, trigger_cache):
dag.attach(cluster_round1_layer)
# combine/cluster triggers across time
+ injections = injection_cache.groupby("subtype")
combined_triggers_by_time = combined_trigger_cache.groupby("subtype")
for inj_type, trigger_dbs_by_time in trigger_db_cache.groupby("subtype").items():
for span, trigger_dbs in trigger_dbs_by_time.groupby_bins("time", config.time_boundaries).items():
@@ -689,8 +695,7 @@ def cluster_layer(config, dag, trigger_cache):
# add input files for sqlite jobs
xml_files = []
if inj_type:
- injection_file = config.filter.injections[inj_type.lower()]["file"]
- xml_files.append(injection_file)
+ xml_files.extend(injections[inj_type].files)
xml_files.extend(combined_triggers[span].files)
xml_files.append(config.source.frame_segments_file)
@@ -741,7 +746,8 @@ def find_injections_layer(config, dag, trigger_db_cache):
# set up data caches
grouped_trigger_dbs = trigger_db_cache.groupby("subtype")
- grouped_trigger_dbs.pop("")
+ if "" in grouped_trigger_dbs:
+ grouped_trigger_dbs.pop("")
inj_trigger_cache = DataCache.generate(
DataType.TRIGGERS,
config.all_ifos,
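The guard fixes a crash in injection-only runs, where the subtype grouping contains no empty-string (non-injection) key and the previous unconditional pop("") raised KeyError. A minimal dict sketch of the same pattern, with hypothetical keys:

    # injection-only grouping: no "" (zerolag) key is present
    grouped_trigger_dbs = {"BNS": ["bns.sqlite"], "BBH": ["bbh.sqlite"]}

    # grouped_trigger_dbs.pop("") would raise KeyError here; the guard does not
    if "" in grouped_trigger_dbs:
        grouped_trigger_dbs.pop("")

    # equivalent one-liner: pop with a default never raises
    grouped_trigger_dbs.pop("", None)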
@@ -837,77 +843,132 @@ def compute_far_layer(config, dag, trigger_cache, pdf_cache):
return trigger_cache, post_pdf_cache
- def match_injections_layer(config, dag, injection_cache):
- split_layer = Layer(
+ def split_injections_layer(config, dag):
+ layer = Layer(
"gstlal_injsplitter",
name="split_injections",
requirements={"request_cpus": 1, "request_memory": 2000, **config.condor.submit},
transfer_files=config.condor.transfer_files,
)
- match_layer = Layer(
+ # split up injections across time, tuned for job runtime
+ num_splits = time_to_num_split_injections(config.span)
+ # split injections up
+ injection_cache = DataCache(DataType.SPLIT_INJECTIONS)
+ for inj_type, inj_params in config.filter.injections.items():
+ inj_tag = inj_type.upper()
+ inj_file = inj_params["file"]
+ # infer metadata from injection filename
+ try:
+ entry = CacheEntry.from_T050017(inj_file)
+ except ValueError:
+ ifos = "H1K1L1V1"
+ span = segment(0, 0)
+ else:
+ ifos = entry.observatory
+ span = entry.segment
+ # create split injection jobs
+ split_injections = DataCache.generate(
+ DataType.SPLIT_INJECTIONS,
+ ifos,
+ span,
+ svd_bins=[f"{i:04d}" for i in range(num_splits)],
+ subtype=inj_tag,
+ root="filter",
+ )
+ injection_cache += split_injections
+ out_path = DataType.SPLIT_INJECTIONS.directory(root="filter", start=span[0])
+ layer += Node(
+ arguments = [
+ Option("nsplit", num_splits),
+ Option("usertag", inj_tag),
+ ],
+ inputs = Argument("injection-file", inj_file),
+ outputs = [
+ Option("output-path", out_path, remap=False),
+ Argument("split-injections", split_injections.files, suppress=True),
+ ],
+ )
+ dag.attach(layer)
+ return injection_cache
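The try/except in split_injections_layer infers per-file metadata from the LIGO T050017 filename convention (OBSERVATORY-DESCRIPTION-GPSSTART-DURATION.ext), falling back to placeholder values for files that do not follow it. A small sketch, assuming CacheEntry from lal.utils and a hypothetical filename:

    from lal.utils import CacheEntry

    # hypothetical T050017-style injection filename
    entry = CacheEntry.from_T050017("H1L1-INJECTIONS_BNS-1187000000-100000.xml.gz")
    print(entry.observatory)  # H1L1
    print(entry.segment)      # [1187000000 ... 1187100000)

    # a non-conforming name raises ValueError, triggering the fallback branch above
    try:
        CacheEntry.from_T050017("my_injections.xml")
    except ValueError:
        print("fell back to default ifos/span")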
+ def match_injections_layer(config, dag, injection_cache):
+ layer = Layer(
"gstlal_inspiral_injection_template_match",
name="match_injections",
requirements={"request_cpus": 1, "request_memory": 2000, **config.condor.submit},
transfer_files=config.condor.transfer_files,
)
- # roughly one file per 100000 seconds
- num_splits = int(abs(config.span)) // 100000
+ # split up injections across time, tuned for job runtime
+ num_splits = time_to_num_split_injections(config.span)
inj_types = list(config.filter.injections.keys())
- split_inj_cache = DataCache(DataType.SPLIT_INJECTIONS)
match_inj_cache = DataCache.generate(
DataType.MATCHED_INJECTIONS,
config.all_ifos,
config.span,
svd_bins=[f"{i:04d}" for i in range(num_splits)],
subtype=[inj_type.upper() for inj_type in inj_types],
root="rank",
root="filter",
)
- # split injections up
- for inj_type, injections in injection_cache.groupby("subtype").items():
- inj_tag = inj_type.upper()
- for inj_entry in injections.cache:
- split_injections = DataCache.generate(
- DataType.SPLIT_INJECTIONS,
- inj_entry.observatory,
- inj_entry.segment,
- svd_bins=[f"{i:04d}" for i in range(num_splits)],
- subtype=inj_tag,
- root="rank",
- )
- split_inj_cache += split_injections
- out_path = DataType.SPLIT_INJECTIONS.directory(root="rank", start=inj_entry.segment[0])
- split_layer += Node(
- arguments = [
- Option("nsplit", num_splits),
- Option("usertag", inj_tag),
- ],
- inputs = Argument("injection-file", inj_entry.path),
- outputs = [
- Option("output-path", out_path, remap=False),
- Argument("split-injections", split_injections.files, suppress=True),
- ],
- )
- dag.attach(split_layer)
+ if isinstance(config.data.template_bank, Mapping):
+ template_banks = list(config.data.template_bank.values())
+ else:
+ template_banks = config.data.template_bank
# match injections to templates
matched_injections = match_inj_cache.groupby("subtype", "bin")
- for (inj_type, split_bin), injections in split_inj_cache.groupby("subtype", "bin").items():
- match_layer += Node(
+ for (inj_type, split_bin), injections in injection_cache.groupby("subtype", "bin").items():
+ layer += Node(
inputs = [
Option("injection-file", injections.files),
Option("template-bank", config.data.template_bank),
Option("template-bank", template_banks),
],
outputs = Option("output", matched_injections[(inj_type, split_bin)].files),
)
- dag.attach(match_layer)
+ dag.attach(layer)
return match_inj_cache
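The new Mapping check normalizes the two config shapes config.data.template_bank can take, a single bank path or a name-to-path mapping of sub-banks, into what the --template-bank option receives. A minimal sketch with hypothetical paths:

    from collections.abc import Mapping

    def normalize_banks(template_bank):
        # mirror the branch above: mappings contribute their values,
        # anything else passes through unchanged
        if isinstance(template_bank, Mapping):
            return list(template_bank.values())
        return template_bank

    print(normalize_banks({"bns": "bns_bank.xml.gz", "bbh": "bbh_bank.xml.gz"}))
    # ['bns_bank.xml.gz', 'bbh_bank.xml.gz']
    print(normalize_banks("all_banks.xml.gz"))
    # all_banks.xml.gz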
+ def calc_expected_snr_layer(config, dag, psd_cache, injection_cache):
+ layer = Layer(
+ "gstlal_inspiral_injection_snr",
+ name="calc_expected_snr",
+ requirements={"request_cpus": 2, "request_memory": 2000, **config.condor.submit},
+ transfer_files=config.condor.transfer_files,
+ )
+ arguments = []
+ if hasattr(config, "injections") and config.injections.expected_snr:
+ if "f_low" in config.injections.expected_snr:
+ arguments.append(Option("flow", config.injections.expected_snr.f_low))
+ if "f_high" in config.injections.expected_snr:
+ arguments.append(Option("fmax", config.injections.expected_snr.f_high))
+ # calculate expected snr for injections
+ for (inj_type, split_bin), injections in injection_cache.groupby("subtype", "bin").items():
+ layer += Node(
+ arguments = arguments,
+ inputs = [
+ Option("injection-file", injections.files),
+ Option("reference-psd", psd_cache.files),
+ ],
+ outputs = Argument("calc-injection-file", injections.files, suppress=True),
+ )
+ dag.attach(layer)
+ return injection_cache
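calc_expected_snr_layer passes optional frequency bounds through to gstlal_inspiral_injection_snr as --flow/--fmax. The config schema is not shown in this diff; inferring from the attribute accesses above, a fragment like the following (sketched here as a plain dict) would enable both options:

    # hypothetical stand-in for config.injections.expected_snr
    expected_snr = {"f_low": 15.0, "f_high": 1024.0}

    # same guards as the layer above, minus the Option wrapper
    arguments = []
    if "f_low" in expected_snr:
        arguments.append(("flow", expected_snr["f_low"]))
    if "f_high" in expected_snr:
        arguments.append(("fmax", expected_snr["f_high"]))
    print(arguments)  # [('flow', 15.0), ('fmax', 1024.0)]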
def measure_lnlr_cdf_layer(config, dag, dist_stats_cache, injection_cache):
layer = Layer(
"gstlal_inspiral_lnlrcdf_signal",
@@ -981,7 +1042,7 @@ def plot_analytic_vt_layer(config, dag, trigger_cache, pdf_cache, lnlr_cdf_cache
inputs = [
Argument("lnlr-cdfs", lnlr_cdfs.files),
Option("ranking-stat-pdf", pdf_cache.files),
Option("injection-files", injection_file),
Option("injection-files", os.path.join(config.data.analysis_dir, injection_file)),
],
outputs = Option("output-dir", "plots"),
)
@@ -1731,6 +1792,12 @@ def ifo_to_string(ifos):
return "".join(sorted(list(ifos)))
+ def time_to_num_split_injections(span, time_per_split=20000):
+ """Determine how many injection splits given analysis start/end times.
+ """
+ return int(abs(span)) // time_per_split
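The new helper replaces the previous hard-coded one split per 100000 seconds with one per 20000 seconds, keeping the injection-splitting jobs short. A quick check, assuming segment from ligo.segments (whose abs() is the segment length, as already used in abs(config.span) above):

    from ligo.segments import segment

    def time_to_num_split_injections(span, time_per_split=20000):
        """Determine how many injection splits given analysis start/end times."""
        return int(abs(span)) // time_per_split

    # hypothetical one-week analysis span
    span = segment(1187000000, 1187604800)     # 604800 seconds
    print(time_to_num_split_injections(span))  # 30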
@plugins.register
def layers():
return {
@@ -1747,8 +1814,10 @@ def layers():
"calc_likelihood": calc_likelihood_layer,
"cluster": cluster_layer,
"compute_far": compute_far_layer,
"split_injections": split_injections_layer,
"find_injections": find_injections_layer,
"match_injections": match_injections_layer,
"calc_expected_snr": calc_expected_snr_layer,
"measure_lnlr_cdf": measure_lnlr_cdf_layer,
"plot_analytic_vt": plot_analytic_vt_layer,
"plot_horizon_distance": plot_horizon_distance_layer,
@@ -2,7 +2,10 @@ pkgpythondir = $(pkgpyexecdir)
templatesdir = $(pkgpythondir)/workflows/templates
templates_PYTHON = \
- Makefile.offline_inspiral_template
+ Makefile.offline_inspiral_filter_template \
+ Makefile.offline_inspiral_full_template \
+ Makefile.offline_inspiral_injection_template \
+ Makefile.offline_inspiral_rerank_template
EXTRA_DIST = \
__init__.py
@@ -36,8 +36,8 @@ vetoes.xml.gz : {{ config.segments.vetoes.category }}_vetoes.xml.gz
cp $< $@
@echo ""
- *_vetoes.xml.gz :
- gstlal_query_gwosc_veto_segments -o $@ {{ config.start }} {{ config.stop }} {% for instrument in config.ifos %} {{ instrument }}{% endfor %} --category $* --cumulative
+ {{ config.segments.vetoes.category }}_vetoes.xml.gz :
+ gstlal_query_gwosc_veto_segments -o $@ {{ config.start }} {{ config.stop }} {% for instrument in config.ifos %} {{ instrument }}{% endfor %} --category {{ config.segments.vetoes.category }} --cumulative
@echo ""
{% elif config.segments.backend == 'dqsegdb' %}
segments.xml.gz : CAT1_vetoes.xml.gz
all : dag
.PHONY: launch
launch : {{ workflow }}_inspiral_dag.dag
condor_submit_dag $<
.PHONY: dag
dag : {{ workflow }}_inspiral_dag.dag
@echo ""
.PHONY: summary
summary :
mkdir -p {{ config.summary.webdir }}
gstlal_inspiral_summary_page \
--title gstlal-{{ config.start }}-{{ config.stop }}-closed-box \
--webserver-dir {{ config.summary.webdir }} \
--output-user-tag ALL_COMBINED \
--output-user-tag PRECESSION_COMBINED \
{% for inj_name in config.filter.injections.keys() %}
--output-user-tag {{ inj_name.upper() }}_INJECTION \
--output-user-tag {{ inj_name.upper() }}_INJECTION_PRECESSION \
{% endfor %}
--glob-path plots
{% if config.segments.backend == 'gwosc' %}
segments.xml.gz : CAT1_vetoes.xml.gz
gstlal_query_gwosc_segments -o $@ {{ config.start }} {{ config.stop }}{% for instrument in config.ifos %} {{ instrument }}{% endfor %}
gstlal_segments_operations --segment-name vetoes --output-segment-name datasegments --union --output-file CAT1_vetoes_renamed.xml.gz $< $<
gstlal_segments_operations --diff --output-file $@ $@ CAT1_vetoes_renamed.xml.gz
gstlal_segments_trim --trim 0 --gps-start-time {{ config.start }} --gps-end-time {{ config.stop }} --min-length 512 --output $@ $@
rm CAT1_vetoes_renamed.xml.gz
@echo ""
vetoes.xml.gz : {{ config.segments.vetoes.category }}_vetoes.xml.gz
cp $< $@
@echo ""
{{ config.segments.vetoes.category }}_vetoes.xml.gz :
gstlal_query_gwosc_veto_segments -o $@ {{ config.start }} {{ config.stop }} {% for instrument in config.ifos %} {{ instrument }}{% endfor %} --category {{ config.segments.vetoes.category }} --cumulative
@echo ""
{% elif config.segments.backend == 'dqsegdb' %}
segments.xml.gz : CAT1_vetoes.xml.gz
gstlal_query_dqsegdb_segments -o $@ {{ config.start }} {{ config.stop }}{% for instrument in config.ifos %} {{ instrument }}{% endfor %} {% for instrument, flag in config.segments.science.items() %} -f {{ "{}:{}".format(instrument, flag) }}{% endfor %}
gstlal_segments_operations --segment-name vetoes --output-segment-name datasegments --union --output-file CAT1_vetoes_renamed.xml.gz $< $<
gstlal_segments_operations --diff --output-file $@ $@ CAT1_vetoes_renamed.xml.gz
gstlal_segments_trim --trim 0 --gps-start-time {{ config.start }} --gps-end-time {{ config.stop }} --min-length 512 --output $@ $@
rm CAT1_vetoes_renamed.xml.gz
@echo ""
vetoes.xml.gz : {{ config.segments.vetoes.category }}_vetoes.xml.gz
cp $< $@
@echo ""
{{ config.segments.vetoes.category }}_vetoes.xml.gz : {{ config.segments.vetoes.veto_definer.file }}
gstlal_query_dqsegdb_veto_segments -o $@ {{ config.start }} {{ config.stop }} {% for instrument in config.ifos %} {{ instrument }}{% endfor %} --category {{ config.segments.vetoes.category }} --cumulative --veto-definer-file $<
@echo ""
{{ config.segments.vetoes.veto_definer.file }} :
git archive --remote=git@git.ligo.org:detchar/veto-definitions.git {{ config.segments.vetoes.veto_definer.version }}:cbc/{{ config.segments.vetoes.veto_definer.epoch }} $@ | tar -x
ligolw_no_ilwdchar $@
{% endif %}
tisi.xml : inj_tisi.xml
lalapps_gen_timeslides {% for instrument, slides in config.filter.time_slides.items() %} --instrument={{ instrument }}={{ slides }}{% endfor %} bg_tisi.xml
ligolw_add --output $@ bg_tisi.xml $<
@echo ""
inj_tisi.xml :
lalapps_gen_timeslides {% for instrument in config.ifos %} --instrument={{ instrument }}=0:0:0{% endfor %} $@
@echo ""
{% if config.data.template_bank is mapping %}
{{ config.svd.manifest }} :{% for bank_file in config.data.template_bank.values() %} {{ bank_file }}{% endfor %}
{% else %}
{{ config.svd.manifest }} : {{ config.data.template_bank }}
{% endif %}
mkdir -p split_bank
{% if config.svd.sub_banks %}
{% for bank_name, params in config.svd.sub_banks.items() %}
gstlal_inspiral_bank_splitter \
--f-low {{ params.f_low }} \
--group-by-chi {{ params.num_chi_bins }} \
--output-path split_bank \
{% for approx in config.svd.approximant %}
--approximant {{ approx }} \
{% endfor %}
--overlap {{ params.overlap }} \
--instrument {% for instrument in config.ifos %}{{ instrument }}{% endfor %} \
--n {{ params.num_split_templates }} \
--sort-by template_duration \
--f-final {{ config.svd.max_f_final }} \
--num-banks {{ params.num_banks }} \
--stats-file $@ \
--bank-name {{ bank_name }} \
{{ config.data.template_bank[bank_name] }}
{% endfor %}
{% else %}
gstlal_inspiral_bank_splitter \
--f-low {{ config.svd.f_low }} \
--group-by-chi {{ config.svd.num_chi_bins }} \
--output-path split_bank \
{% for approx in config.svd.approximant %}
--approximant {{ approx }} \
{% endfor %}
--overlap {{ config.svd.overlap }} \
--instrument {% for instrument in config.ifos %}{{ instrument }}{% endfor %} \
--n {{ config.svd.num_split_templates }} \
--sort-by template_duration \
--f-final {{ config.svd.max_f_final }} \
--num-banks {{ config.svd.num_banks }} \
--stats-file $@ \
$<
{% endif %}
@echo ""
%_inspiral_dag.dag : {{ config.svd.manifest }} vetoes.xml.gz segments.xml.gz tisi.xml x509_proxy plots {% for inj in config.filter.injections.values() %} {{ inj.file }}{% endfor %}
gstlal_inspiral_workflow create -c config.yml --workflow $*
{% if config.injections.sets %}
{% for inj_name, params in config.injections.sets.items() %}
{{ params.file }} :
lalapps_inspinj \
--gps-start-time {{ config.start + params.time.shift }} \
--gps-end-time {{ config.stop }} \
--enable-spin \
--aligned \
--i-distr uniform \
--l-distr random \
--t-distr uniform \
--dchirp-distr uniform \
--m-distr {{ params.mass_distr }} \
{% for param in ['mass1', 'mass2', 'spin1', 'spin2', 'distance'] %}
{% for stat, val in params[param].items() %}
--{{ stat }}-{{ param }} {{ val }} \
{% endfor %}
{% endfor %}
--f-lower {{ params.f_low }} \
--waveform {{ params.waveform }} \
--time-step {{ params.time.step }} \
--time-interval {{ params.time.interval }} \
--taper-injection startend \
--seed {{ params.seed }} \
--output $@
ligolw_no_ilwdchar $@
@echo ""
{% endfor %}
{% endif %}
{% if config.injections.combine %}
{{ config.injections.combined_file }} : {% for inj in config.injections.sets.values() %} {{ inj.file }}{% endfor %}
gstlal_inspiral_combine_injection_sets $^ --single-output -o $(basename {{ config.injections.combined_file }})
rm injection_str.txt
{% endif %}
x509_proxy :
cp /tmp/x509up_u$(shell id -u $$USER) x509_proxy
plots :
mkdir -p $@
clean :
rm -rf segments.xml.gz *tisi.xml x509_proxy
rm -rf split_bank *vetoes.xml.gz {{ config.svd.manifest }}
rm -rf reference_psd median_psd
rm -rf filter rank plots
rm -rf logs *inspiral_dag.dag* *inspiral_dag.sh *.sub _condor_stdout
rm -rf {% for inj in config.filter.injections.values() %} {{ inj.file }}{% endfor %}
{% if config.injections.sets %}
rm -rf {% for inj in config.injections.sets.values() %} {{ inj.file }}{% endfor %}
{% endif %}
clean-lite :
rm -rf logs/* *inspiral_dag.dag* *inspiral_dag.sh *.sub
all : dag
.PHONY: launch
launch : {{ workflow }}_inspiral_dag.dag
condor_submit_dag $<
.PHONY: dag
dag : {{ workflow }}_inspiral_dag.dag
@echo ""
.PHONY: summary
summary :
mkdir -p {{ config.summary.webdir }}
gstlal_inspiral_summary_page \
--title gstlal-{{ config.start }}-{{ config.stop }}-closed-box \
--webserver-dir {{ config.summary.webdir }} \
--output-user-tag ALL_COMBINED \
--output-user-tag PRECESSION_COMBINED \
{% for inj_name in config.filter.injections.keys() %}
--output-user-tag {{ inj_name.upper() }}_INJECTION \
--output-user-tag {{ inj_name.upper() }}_INJECTION_PRECESSION \
{% endfor %}
--glob-path plots
segments.xml.gz :
cp {{ config.data.analysis_dir }}/$@ $@
@echo ""
vetoes.xml.gz :
cp {{ config.data.analysis_dir }}/$@ $@
@echo ""
{{ config.svd.manifest }} :
cp {{ config.data.analysis_dir }}/$@ $@
tisi.xml : inj_tisi.xml
lalapps_gen_timeslides {% for instrument, slides in config.filter.time_slides.items() %} --instrument={{ instrument }}={{ slides }}{% endfor %} bg_tisi.xml
ligolw_add --output $@ bg_tisi.xml $<
@echo ""
inj_tisi.xml :
lalapps_gen_timeslides {% for instrument in config.ifos %} --instrument={{ instrument }}=0:0:0{% endfor %} $@
@echo ""
%_inspiral_dag.dag : {{ config.svd.manifest }} vetoes.xml.gz segments.xml.gz tisi.xml x509_proxy plots {% for inj in config.filter.injections.values() %} {{ inj.file }}{% endfor %}
gstlal_inspiral_workflow create -c config.yml --workflow $*
{% if config.injections.sets %}
{% for inj_name, params in config.injections.sets.items() %}
{{ params.file }} :
lalapps_inspinj \
--gps-start-time {{ config.start + params.time.shift }} \
--gps-end-time {{ config.stop }} \
--enable-spin \
--aligned \
--i-distr uniform \
--l-distr random \
--t-distr uniform \
--dchirp-distr uniform \
--m-distr {{ params.mass_distr }} \
{% for param in ['mass1', 'mass2', 'spin1', 'spin2', 'distance'] %}
{% for stat, val in params[param].items() %}
--{{ stat }}-{{ param }} {{ val }} \
{% endfor %}
{% endfor %}
--f-lower {{ params.f_low }} \
--waveform {{ params.waveform }} \
--time-step {{ params.time.step }} \
--time-interval {{ params.time.interval }} \
--taper-injection startend \
--seed {{ params.seed }} \
--output $@
ligolw_no_ilwdchar $@
@echo ""
{% endfor %}
{% endif %}
{% if config.injections.combine %}
{{ config.injections.combined_file }} : {% for inj in config.injections.sets.values() %} {{ inj.file }}{% endfor %}
gstlal_inspiral_combine_injection_sets $^ --single-output -o $(basename {{ config.injections.combined_file }})
rm injection_str.txt
{% endif %}
x509_proxy :
cp /tmp/x509up_u$(shell id -u $$USER) x509_proxy
plots :
mkdir -p $@
clean :
rm -rf segments.xml.gz *tisi.xml x509_proxy
rm -rf split_bank *vetoes.xml.gz
rm -rf reference_psd median_psd
rm -rf filter rank plots
rm -rf logs *inspiral_dag.dag* *inspiral_dag.sh *.sub _condor_stdout
rm -rf {% for inj in config.filter.injections.values() %} {{ inj.file }}{% endfor %}
{% if config.injections.sets %}
rm -rf {% for inj in config.injections.sets.values() %} {{ inj.file }}{% endfor %}
{% endif %}
clean-lite :
rm -rf logs/* *inspiral_dag.dag* *inspiral_dag.sh *.sub
all : dag
.PHONY: launch
launch : {{ workflow }}_inspiral_dag.dag
condor_submit_dag $<
.PHONY: dag
dag : {{ workflow }}_inspiral_dag.dag
@echo ""
.PHONY: summary
summary :
mkdir -p {{ config.summary.webdir }}
gstlal_inspiral_summary_page \
--title gstlal-{{ config.start }}-{{ config.stop }}-closed-box \
--webserver-dir {{ config.summary.webdir }} \
--output-user-tag ALL_COMBINED \
--output-user-tag PRECESSION_COMBINED \
{% for inj_name in config.filter.injections.keys() %}
--output-user-tag {{ inj_name.upper() }}_INJECTION \
--output-user-tag {{ inj_name.upper() }}_INJECTION_PRECESSION \
{% endfor %}
--glob-path plots
segments.xml.gz :
cp {{ config.data.analysis_dir }}/$@ $@
@echo ""
vetoes.xml.gz :
cp {{ config.data.analysis_dir }}/$@ $@
@echo ""
{{ config.svd.manifest }} :
cp {{ config.data.analysis_dir }}/$@ $@
%_inspiral_dag.dag : {{ config.svd.manifest }} vetoes.xml.gz segments.xml.gz x509_proxy plots
gstlal_inspiral_workflow create -c config.yml --workflow $*
x509_proxy :
cp /tmp/x509up_u$(shell id -u $$USER) x509_proxy
plots :
mkdir -p $@
clean :
rm -rf segments.xml.gz *tisi.xml x509_proxy
rm -rf rank plots
rm -rf logs *inspiral_dag.dag* *inspiral_dag.sh *.sub _condor_stdout
clean-lite :
rm -rf logs/* *inspiral_dag.dag* *inspiral_dag.sh *.sub