Commit 7007cd22 authored by Gregory Ashton


Merge branch '591-get_event_data-still-used-in-some-examples-despite-being-removed-as-a-deprecated-function' into 'master'

Resolve "`get_event_data` still used in some examples despite being removed as a deprecated function"

Closes #591

See merge request !1050
parents 55ce2652 714d3dc0
Pipeline #354615 passed
Showing 215 additions and 346 deletions
@@ -9,7 +9,7 @@ repos:
hooks:
- id: black
language_version: python3
files: ^bilby/bilby_mcmc/
files: ^(bilby/bilby_mcmc/|examples/gw_examples/data_examples)
- repo: https://github.com/codespell-project/codespell
rev: v2.1.0
hooks:
@@ -20,4 +20,4 @@ repos:
hooks:
- id: isort # sort imports alphabetically and separates import into sections
args: [-w=88, -m=3, -tc, -sp=setup.cfg ]
files: ^bilby/bilby_mcmc/
files: ^(bilby/bilby_mcmc/|examples/gw_examples/data_examples)
mass_ratio = Uniform(name='mass_ratio', minimum=0.125, maximum=1, boundary='reflective')
chirp_mass = Uniform(name='chirp_mass', minimum=25, maximum=35, unit='$M_{\odot}$', boundary='reflective')
chirp_mass = bilby.gw.prior.UniformInComponentsChirpMass(name='chirp_mass', minimum=25, maximum=35, unit='$M_{\odot}$')
mass_ratio = bilby.gw.prior.UniformInComponentsMassRatio(name='mass_ratio', minimum=0.125, maximum=1)
mass_1 = Constraint(name='mass_1', minimum=10, maximum=80)
mass_2 = Constraint(name='mass_2', minimum=10, maximum=80)
a_1 = Uniform(name='a_1', minimum=0, maximum=0.99, boundary='reflective')
a_2 = Uniform(name='a_2', minimum=0, maximum=0.99, boundary='reflective')
tilt_1 = Sine(name='tilt_1', boundary='reflective')
tilt_2 = Sine(name='tilt_2', boundary='reflective')
a_1 = Uniform(name='a_1', minimum=0, maximum=0.99)
a_2 = Uniform(name='a_2', minimum=0, maximum=0.99)
tilt_1 = Sine(name='tilt_1')
tilt_2 = Sine(name='tilt_2')
phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic')
phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic')
luminosity_distance = PowerLaw(alpha=2, name='luminosity_distance', minimum=50, maximum=2000, unit='Mpc', latex_label='$d_L$')
dec = Cosine(name='dec', boundary='reflective')
dec = Cosine(name='dec')
ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
theta_jn = Sine(name='theta_jn', boundary='reflective')
theta_jn = Sine(name='theta_jn')
psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
geocent_time = Uniform(name='geocent_time', minimum=1126259460.4, maximum=1126259464.4)
@@ -13,12 +13,16 @@ import bilby
from gwpy.timeseries import TimeSeries
logger = bilby.core.utils.logger
outdir = 'outdir'
label = 'GW150914'
# Data set up
trigger_time = 1126259462
outdir = "outdir"
label = "GW150914"
# Note you can get trigger times using the gwosc package, e.g.:
# > from gwosc import datasets
# > datasets.event_gps("GW150914")
trigger_time = 1126259462.4
detectors = ["H1", "L1"]
maximum_frequency = 512
minimum_frequency = 20
roll_off = 0.4 # Roll off duration of tukey window in seconds, default is 0.4s
duration = 4 # Analysis segment duration
post_trigger_duration = 2 # Time between trigger time and end of segment
@@ -31,7 +35,7 @@ psd_end_time = start_time
# We now use gwpy to obtain analysis and psd data and create the ifo_list
ifo_list = bilby.gw.detector.InterferometerList([])
for det in ["H1", "L1"]:
for det in detectors:
logger.info("Downloading analysis data for ifo {}".format(det))
ifo = bilby.gw.detector.get_empty_interferometer(det)
data = TimeSeries.fetch_open_data(det, start_time, end_time)
@@ -41,13 +45,13 @@ for det in ["H1", "L1"]:
psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
psd_alpha = 2 * roll_off / duration
psd = psd_data.psd(
fftlength=duration,
overlap=0,
window=("tukey", psd_alpha),
method="median"
fftlength=duration, overlap=0, window=("tukey", psd_alpha), method="median"
)
ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
frequency_array=psd.frequencies.value, psd_array=psd.value)
frequency_array=psd.frequencies.value, psd_array=psd.value
)
ifo.maximum_frequency = maximum_frequency
ifo.minimum_frequency = minimum_frequency
ifo_list.append(ifo)
logger.info("Saving data plots to {}".format(outdir))
@@ -59,7 +63,12 @@ ifo_list.plot_data(outdir=outdir, label=label)
# The prior is printed to the terminal at run-time.
# You can overwrite this using the syntax below in the file,
# or choose a fixed value by just providing a float value as the prior.
priors = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
priors = bilby.gw.prior.BBHPriorDict(filename="GW150914.prior")
# Add the geocent time prior
priors["geocent_time"] = bilby.core.prior.Uniform(
trigger_time - 0.1, trigger_time + 0.1, name="geocent_time"
)
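# Illustration only (not part of this analysis): any entry of the prior
# dictionary can be replaced in the same way, and a parameter can be fixed by
# providing a float value as its prior, e.g.
# priors["chirp_mass"] = bilby.core.prior.Uniform(25, 35, name="chirp_mass")
# priors["psi"] = 1.5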
# In this step we define a `waveform_generator`. This is the object which
# creates the frequency-domain strain. In this instance, we are using the
@@ -69,19 +78,36 @@ priors = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
waveform_generator = bilby.gw.WaveformGenerator(
frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters,
waveform_arguments={'waveform_approximant': 'IMRPhenomPv2',
'reference_frequency': 50})
waveform_arguments={
"waveform_approximant": "IMRPhenomPv2",
"reference_frequency": 50,
},
)
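# For reference, the sampled (chirp_mass, mass_ratio) pair used in the prior
# file relates to the component masses (with m2 <= m1) via
#     chirp_mass = (m1 * m2) ** (3 / 5) / (m1 + m2) ** (1 / 5)
#     mass_ratio = m2 / m1
# The parameter conversion above maps between these parameterisations.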
# In this step, we define the likelihood. Here we use the standard likelihood
# function, passing it the data and the waveform generator.
# Note, phase_marginalization is formally invalid with a precessing waveform such as IMRPhenomPv2
likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
ifo_list, waveform_generator, priors=priors, time_marginalization=True,
phase_marginalization=True, distance_marginalization=True)
ifo_list,
waveform_generator,
priors=priors,
time_marginalization=True,
phase_marginalization=False,
distance_marginalization=True,
)
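# Hedged note: with the marginalisations enabled above, the marginalised
# parameters are typically reconstructed in post-processing and so still
# appear in the final posterior; the distance marginalisation builds its
# look-up table at run time (see the bilby documentation for details).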
# Finally, we run the sampler. This function takes the likelihood and prior
# along with some options for how to do the sampling and how to save the data
result = bilby.run_sampler(
likelihood, priors, sampler='dynesty', outdir=outdir, label=label,
nlive=1000, walks=100, n_check_point=10000, check_point_plot=True,
conversion_function=bilby.gw.conversion.generate_all_bbh_parameters)
likelihood,
priors,
sampler="dynesty",
outdir=outdir,
label=label,
nlive=1000,
check_point_delta_t=600,
check_point_plot=True,
npool=1,
conversion_function=bilby.gw.conversion.generate_all_bbh_parameters,
)
result.plot_corner()
#!/usr/bin/env python
"""
This tutorial includes advanced specifications for analysing known events.
Here GW150914 is used as an example.
LIST OF AVAILABLE EVENTS:
>> from gwosc import datasets
>> for event in datasets.find_datasets(type="event"):
... print(event, datasets.event_gps(event))
For the list of events known to bilby, run -> help(bilby.gw.utils.get_event_time)
"""
import bilby
from gwpy.timeseries import TimeSeries
outdir = 'outdir'
label = 'GW150914'
logger = bilby.core.utils.logger
time_of_event = bilby.gw.utils.get_event_time(label)
bilby.core.utils.setup_logger(outdir=outdir, label=label)
# GET DATA FROM INTERFEROMETER
interferometer_names = ['H1', 'L1'] # include 'V1' for appropriate O2 events
duration = 4 # length of data segment containing the signal
post_trigger_duration = 2 # time between trigger time and end of segment
end_time = time_of_event + post_trigger_duration
start_time = end_time - duration
roll_off = 0.4 # smoothness in a tukey window, default is 0.4s
# This determines the time window used to fetch open data
psd_duration = 32 * duration
psd_start_time = start_time - psd_duration
psd_end_time = start_time
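# For the values above (duration = 4 s, roll_off = 0.4 s) this gives a 128 s
# PSD segment ending where the analysis segment starts, and a Tukey shape
# parameter psd_alpha = 2 * 0.4 / 4 = 0.2 in the psd() call below.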
filter_freq = None # low pass filter frequency to cut signal content above
# Nyquist frequency. The condition is 2 * filter_freq >= sampling_frequency
ifo_list = bilby.gw.detector.InterferometerList([])
for det in interferometer_names:
logger.info("Downloading analysis data for ifo {}".format(det))
ifo = bilby.gw.detector.get_empty_interferometer(det)
data = TimeSeries.fetch_open_data(det, start_time, end_time)
ifo.set_strain_data_from_gwpy_timeseries(data)
# Additional arguments you might need to pass to TimeSeries.fetch_open_data:
# - sample_rate = 4096, most data are stored by LOSC at this frequency
# there may be event-related data releases with a 16384Hz rate.
# - tag = 'CLN' for clean data; C00/C01 for raw data (different releases)
# note that for O2 events a "tag" is required to download the data.
# - channel = {'H1': 'H1:DCS-CALIB_STRAIN_C02',
# 'L1': 'L1:DCS-CALIB_STRAIN_C02',
# 'V1': 'V1:FAKE_h_16384Hz_4R'}
# For some events you can specify channels: the source data stream for LOSC data.
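# Sketch only (values illustrative, not used in this script): the call above
# could pass such options explicitly, e.g.
# data = TimeSeries.fetch_open_data(det, start_time, end_time,
#                                   sample_rate=4096, tag='CLN')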
logger.info("Downloading psd data for ifo {}".format(det))
psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
psd_alpha = 2 * roll_off / duration # shape parameter of tukey window
psd = psd_data.psd(
fftlength=duration,
overlap=0,
window=("tukey", psd_alpha),
method="median")
ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
frequency_array=psd.frequencies.value, psd_array=psd.value)
ifo_list.append(ifo)
logger.info("Saving data plots to {}".format(outdir))
bilby.core.utils.check_directory_exists_and_if_not_mkdir(outdir)
ifo_list.plot_data(outdir=outdir, label=label)
# CHOOSE PRIOR FILE
# DEFAULT PRIOR FILES: GW150914.prior, binary_black_holes.prior,
# binary_neutron_stars.prior (if bns, use BNSPriorDict)
# Needs to specify path if you want to use any other prior file.
prior = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
# GENERATE WAVEFORM
sampling_frequency = 4096.  # same frequency at which the data is stored
conversion = bilby.gw.conversion.convert_to_lal_binary_black_hole_parameters
# OVERVIEW OF APPROXIMANTS:
# https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/Waveforms/Overview
waveform_arguments = {
'waveform_approximant': 'IMRPhenomPv2',
'reference_frequency': 50 # most sensitive frequency
}
waveform_generator = bilby.gw.WaveformGenerator(
parameter_conversion=conversion,
frequency_domain_source_model=bilby.gw.source.lal_binary_black_hole,
waveform_arguments=waveform_arguments)
# CHOOSE LIKELIHOOD FUNCTION
# Time marginalisation uses FFT and can use a prior file.
# Distance marginalisation uses a look up table calculated at run time.
# Phase marginalisation is done analytically using a Bessel function.
# If a prior is given, it is used in the distance and phase marginalisation.
likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
interferometers=ifo_list, waveform_generator=waveform_generator,
priors=prior, time_marginalization=False, distance_marginalization=False,
phase_marginalization=False)
# RUN SAMPLER
# Can use log_likelihood_ratio, rather than just the log_likelihood.
# A function can be specified in 'conversion_function' and applied
# to posterior to generate additional parameters e.g. source-frame masses.
# Implemented Samplers:
# LIST OF AVAILABLE SAMPLERS: Run -> bilby.sampler.implemented_samplers
npoints = 512 # number of live points for the nested sampler
n_steps = 100  # minimum number of steps before proposing a new live point,
# defaults to `ndim * 10`
sampler = 'dynesty'
# Different samplers can have different additional kwargs,
# visit https://lscsoft.docs.ligo.org/bilby/samplers.html for details.
result = bilby.run_sampler(
likelihood, prior, outdir=outdir, label=label,
sampler=sampler, nlive=npoints, use_ratio=False,
walks=n_steps, n_check_point=10000, check_point_plot=True,
conversion_function=bilby.gw.conversion.generate_all_bbh_parameters)
result.plot_corner()
#!/usr/bin/env python
"""
Tutorial to demonstrate the minimum number of steps required to run parameter
estimation on GW150914 using open data.
"""
import bilby
prior = bilby.gw.prior.BBHPriorDict(filename='GW150914.prior')
interferometers = bilby.gw.detector.get_event_data("GW150914")
likelihood = bilby.gw.likelihood.get_binary_black_hole_likelihood(interferometers)
result = bilby.run_sampler(likelihood, prior, label='GW150914')
result.plot_corner()
#!/usr/bin/env python
"""
This tutorial includes advanced specifications
for analysing binary neutron star event data.
Here GW170817 is used as an example.
"""
import bilby
outdir = 'outdir'
label = 'GW170817'
time_of_event = bilby.gw.utils.get_event_time(label)
bilby.core.utils.setup_logger(outdir=outdir, label=label)
# GET DATA FROM INTERFEROMETER
# include 'V1' for appropriate O2 events
interferometer_names = ['H1', 'L1', 'V1']
duration = 32
roll_off = 0.2  # smoothness of the transition from no signal
# to max signal in a Tukey window.
psd_offset = -512 # PSD is estimated using data from
# `center_time+psd_offset` to `center_time+psd_offset + psd_duration`
# This determines the time window used to fetch open data.
psd_duration = 1024
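# With psd_offset = -512 and psd_duration = 1024, the PSD segment runs from
# 512 s before to 512 s after the event time.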
coherence_test = False # coherence between detectors
filter_freq = None # low pass filter frequency to cut signal content above
# Nyquist frequency. The condition is 2 * filter_freq >= sampling_frequency
# All keyword arguments are passed to
# `gwpy.timeseries.TimeSeries.fetch_open_data()'
kwargs = {}
# Data are stored by LOSC at 4096 Hz, however
# there may be event-related data releases with a 16384 Hz rate.
kwargs['sample_rate'] = 4096
# For O2 events a "tag" is required to download the data.
# CLN = clean data; C02 = raw data
kwargs['tag'] = 'C02'
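# For reference (not a change to this script), these keyword arguments are
# forwarded to gwpy, equivalent to a direct call such as
# gwpy.timeseries.TimeSeries.fetch_open_data(ifo, start, end,
#                                            sample_rate=4096, tag='C02')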
interferometers = bilby.gw.detector.get_event_data(
label,
interferometer_names=interferometer_names,
duration=duration,
roll_off=roll_off,
psd_offset=psd_offset,
psd_duration=psd_duration,
cache=True,
filter_freq=filter_freq,
**kwargs)
# CHOOSE PRIOR FILE
prior = bilby.gw.prior.BNSPriorDict(filename='GW170817.prior')
deltaT = 0.1
prior['geocent_time'] = bilby.core.prior.Uniform(
minimum=time_of_event - deltaT / 2,
maximum=time_of_event + deltaT / 2,
name='geocent_time',
latex_label='$t_c$',
unit='$s$')
# GENERATE WAVEFORM
# OVERVIEW OF APPROXIMANTS:
# https://www.lsc-group.phys.uwm.edu/ligovirgo/cbcnote/Waveforms/Overview
duration = None # duration and sampling frequency will be overwritten
# to match the ones in interferometers.
sampling_frequency = kwargs['sample_rate']
start_time = 0 # set the starting time of the time array
waveform_arguments = {
'waveform_approximant': 'IMRPhenomPv2_NRTidal', 'reference_frequency': 20}
source_model = bilby.gw.source.lal_binary_neutron_star
convert_bns = bilby.gw.conversion.convert_to_lal_binary_neutron_star_parameters
waveform_generator = bilby.gw.WaveformGenerator(
duration=duration,
sampling_frequency=sampling_frequency,
start_time=start_time,
frequency_domain_source_model=source_model,
parameter_conversion=convert_bns,
waveform_arguments=waveform_arguments,)
# CHOOSE LIKELIHOOD FUNCTION
# Time marginalisation uses FFT.
# Distance marginalisation uses a look up table calculated at run time.
# Phase marginalisation is done analytically using a Bessel function.
likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
interferometers,
waveform_generator,
time_marginalization=False,
distance_marginalization=False,
phase_marginalization=False,)
# RUN SAMPLER
# Implemented Samplers:
# LIST OF AVAILABLE SAMPLERS: Run -> bilby.sampler.implemented_samplers
# conversion function = bilby.gw.conversion.generate_all_bns_parameters
npoints = 512
sampler = 'dynesty'
result = bilby.run_sampler(
likelihood,
prior,
outdir=outdir,
label=label,
sampler=sampler,
npoints=npoints,
use_ratio=False,
conversion_function=bilby.gw.conversion.generate_all_bns_parameters)
result.plot_corner()
chirp_mass = bilby.gw.prior.UniformInComponentsChirpMass(name='chirp_mass', minimum=1.485, maximum=1.49, unit='$M_{\odot}$')
mass_ratio = bilby.gw.prior.UniformInComponentsMassRatio(name='mass_ratio', minimum=0.125, maximum=1)
mass_1 = Constraint(name='mass_1', minimum=1, maximum=2.5)
mass_2 = Constraint(name='mass_2', minimum=1, maximum=2.5)
a_1 = Uniform(name='a_1', minimum=0, maximum=0.05)
a_2 = Uniform(name='a_2', minimum=0, maximum=0.05)
tilt_1 = Sine(name='tilt_1')
tilt_2 = Sine(name='tilt_2')
phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic')
phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic')
luminosity_distance = PowerLaw(alpha=2, name='luminosity_distance', minimum=10, maximum=1000, unit='Mpc', latex_label='$d_L$')
dec = Cosine(name='dec')
ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
theta_jn = Sine(name='theta_jn')
psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
lambda_1 = Uniform(name='lambda_1', minimum=0, maximum=5000)
lambda_2 = Uniform(name='lambda_2', minimum=0, maximum=5000)
#!/usr/bin/env python
"""
Tutorial to demonstrate running parameter estimation on GW190425
This example estimates all 17 parameters of the binary neutron star system using
commonly used prior distributions. This will take several hours to run. The
data is obtained using gwpy, see [1] for information on how to access data on
the LIGO Data Grid instead.
[1] https://gwpy.github.io/docs/stable/timeseries/remote-access.html
"""
import bilby
from gwpy.timeseries import TimeSeries
logger = bilby.core.utils.logger
outdir = "outdir"
label = "GW190425"
# Note you can get trigger times using the gwosc package, e.g.:
# > from gwosc import datasets
# > datasets.event_gps("GW190425")
trigger_time = 1240215503.0
detectors = ["L1", "V1"]
maximum_frequency = 512
minimum_frequency = 20
roll_off = 0.4 # Roll off duration of tukey window in seconds, default is 0.4s
duration = 128 # Analysis segment duration
post_trigger_duration = 2 # Time between trigger time and end of segment
end_time = trigger_time + post_trigger_duration
start_time = end_time - duration
psd_duration = 1024
psd_start_time = start_time - psd_duration
psd_end_time = start_time
# We now use gwpy to obtain analysis and psd data and create the ifo_list
ifo_list = bilby.gw.detector.InterferometerList([])
for det in detectors:
logger.info("Downloading analysis data for ifo {}".format(det))
ifo = bilby.gw.detector.get_empty_interferometer(det)
data = TimeSeries.fetch_open_data(det, start_time, end_time)
ifo.strain_data.set_from_gwpy_timeseries(data)
logger.info("Downloading psd data for ifo {}".format(det))
psd_data = TimeSeries.fetch_open_data(det, psd_start_time, psd_end_time)
psd_alpha = 2 * roll_off / duration
psd = psd_data.psd(
fftlength=duration, overlap=0, window=("tukey", psd_alpha), method="median"
)
ifo.power_spectral_density = bilby.gw.detector.PowerSpectralDensity(
frequency_array=psd.frequencies.value, psd_array=psd.value
)
ifo.maximum_frequency = maximum_frequency
ifo.minimum_frequency = minimum_frequency
ifo_list.append(ifo)
logger.info("Saving data plots to {}".format(outdir))
bilby.core.utils.check_directory_exists_and_if_not_mkdir(outdir)
ifo_list.plot_data(outdir=outdir, label=label)
# We now define the prior.
# We have defined our prior distribution in a local file, GW190425.prior
# The prior is printed to the terminal at run-time.
# You can overwrite this using the syntax below in the file,
# or choose a fixed value by just providing a float value as the prior.
priors = bilby.gw.prior.BBHPriorDict(filename="GW190425.prior")
# Add the geocent time prior
priors["geocent_time"] = bilby.core.prior.Uniform(
trigger_time - 0.1, trigger_time + 0.1, name="geocent_time"
)
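# Illustration only (not part of this analysis): a parameter can be fixed by
# providing a float value as its prior, e.g. to pin the secondary tidal
# deformability one could set
# priors["lambda_2"] = 0.0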
# In this step we define a `waveform_generator`. This is the object which
# creates the frequency-domain strain. In this instance, we are using the
# `lal_binary_neutron_star` source model. We also pass other parameters:
# the waveform approximant and reference frequency and a parameter conversion
# which allows us to sample in chirp mass and mass ratio rather than component masses
waveform_generator = bilby.gw.WaveformGenerator(
frequency_domain_source_model=bilby.gw.source.lal_binary_neutron_star,
parameter_conversion=bilby.gw.conversion.convert_to_lal_binary_neutron_star_parameters,
waveform_arguments={
"waveform_approximant": "IMRPhenomPv2_NRTidalv2",
"reference_frequency": 20,
},
)
# In this step, we define the likelihood. Here we use the standard likelihood
# function, passing it the data and the waveform generator.
# Note, phase_marginalization is formally invalid with a precessing waveform such as IMRPhenomPv2
likelihood = bilby.gw.likelihood.GravitationalWaveTransient(
ifo_list,
waveform_generator,
priors=priors,
time_marginalization=True,
phase_marginalization=False,
distance_marginalization=True,
)
# Finally, we run the sampler. This function takes the likelihood and prior
# along with some options for how to do the sampling and how to save the data
result = bilby.run_sampler(
likelihood,
priors,
sampler="dynesty",
outdir=outdir,
label=label,
nlive=1000,
check_point_delta_t=600,
check_point_plot=True,
npool=1,
conversion_function=bilby.gw.conversion.generate_all_bbh_parameters,
)
result.plot_corner()
#!/usr/bin/env python
""" Helper script to facilitate downloading data from LOSC
Usage: To download the GW150914 data from https://losc.ligo.org/events/
$ python get_LOSC_event_data -e GW150914
"""
import numpy as np
import os
import argparse
parser = argparse.ArgumentParser(description='Script to download LOSC data.')
parser.add_argument('-e', '--event', metavar='event', type=str)
parser.add_argument('-o', '--outdir', metavar='outdir',
default='tutorial_data')
args = parser.parse_args()
url_dictionary = dict(
GW150914="https://losc.ligo.org/s/events/GW150914/{}-{}1_LOSC_4_V2-1126259446-32.txt.gz",
LVT151012="https://losc.ligo.org/s/events/LVT151012/{}-{}1_LOSC_4_V2-1128678884-32.txt.gz",
GW151226="https://losc.ligo.org/s/events/GW151226/{}-{}1_LOSC_4_V2-1135136334-32.txt.gz",
GW170104="https://losc.ligo.org/s/events/GW170104/{}-{}1_LOSC_4_V1-1167559920-32.txt.gz",
GW170608="https://losc.ligo.org/s/events/GW170608/{}-{}1_LOSC_CLN_4_V1-1180922478-32.txt.gz",
GW170814="https://dcc.ligo.org/public/0146/P1700341/001/{}-{}1_LOSC_CLN_4_V1-1186741845-32.txt.gz",
GW170817="https://dcc.ligo.org/public/0146/P1700349/001/{}-{}1_LOSC_CLN_4_V1-1187007040-2048.txt.gz")
outdir = 'tutorial_data'
data = []
for det in ['H', 'L']:
url = url_dictionary[args.event].format(det, det)
filename = os.path.basename(url)
if os.path.isfile(filename.rstrip('.gz')) is False:
print("Downloading data from {}".format(url))
os.system("wget {} ".format(url))
os.system("gunzip {}".format(filename))
filename = filename.rstrip('.gz')
data.append(np.loadtxt(filename))
with open(filename, 'r') as f:
header = f.readlines()[:3]
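# The fixed word positions below assume the standard three-line header of
# the LOSC ASCII strain files (event and detector on the first line,
# sampling frequency on the second, start time and duration on the third);
# other data releases may format the header differently.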
event = header[0].split(' ')[5]
detector = header[0].split(' ')[7]
sampling_frequency = header[1].split(' ')[4]
starttime = header[2].split(' ')[3]
duration = header[2].split(' ')[5]
print('Loaded data for event={}, detector={}, sampling_frequency={}'
', starttime={}, duration={}'.format(
event, detector, sampling_frequency, starttime, duration))
os.remove(filename)
time = np.arange(0, int(duration), 1 / int(sampling_frequency)) + int(starttime)
arr = [time] + data
outfile = '{}/{}_strain_data.npy'.format(args.outdir, args.event)
np.save(outfile, arr)
print("Saved data to {}".format(outfile))
@@ -8,28 +8,28 @@ This tutorial must be run on a LIGO cluster.
import bilby
label = 'GW170608'
outdir = label + '_out'
label = "GW170608"
outdir = label + "_out"
# List of detectors involved in the event. GW170608 is pre-August 2017 so only
# H1 and L1 were involved
detectors = ['H1', 'L1']
detectors = ["H1", "L1"]
# Names of the channels and query types to use, and the event GraceDB ID - ref.
# https://ldas-jobs.ligo.caltech.edu/~eve.chase/monitor/online_pe/C02_clean/
# C02_HL_Pv2/lalinferencenest/IMRPhenomPv2pseudoFourPN/config.ini
channel_names = ['H1:DCH-CLEAN_STRAIN_C02', 'L1:DCH-CLEAN_STRAIN_C02']
gracedb = 'G288686'
channel_names = ["H1:DCH-CLEAN_STRAIN_C02", "L1:DCH-CLEAN_STRAIN_C02"]
gracedb = "G288686"
# Duration of data around the event to use
duration = 16
# Duration of PSD data
psd_duration = 1024
# Minimum frequency and reference frequency
minimum_frequency = 10 # Hz
sampling_frequency = 2048.
sampling_frequency = 2048.0
# Get trigger time
candidate = bilby.gw.utils.gracedb_to_json(gracedb, outdir=outdir)
trigger_time = candidate['gpstime']
gps_start_time = trigger_time + 2. - duration
trigger_time = candidate["gpstime"]
gps_start_time = trigger_time + 2.0 - duration
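# With duration = 16 s this places the analysis segment from 14 s before to
# 2 s after the trigger time.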
# Load the PSD data starting after the segment you want to analyze
psd_start_time = gps_start_time + duration
@@ -39,9 +39,14 @@ interferometers = bilby.gw.detector.InterferometerList([])
for channel_name in channel_names:
interferometer = bilby.gw.detector.load_data_by_channel_name(
start_time=gps_start_time, segment_duration=duration,
psd_duration=psd_duration, psd_start_time=psd_start_time,
channel_name=channel_name, sampling_frequency=sampling_frequency, outdir=outdir)
start_time=gps_start_time,
segment_duration=duration,
psd_duration=psd_duration,
psd_start_time=psd_start_time,
channel_name=channel_name,
sampling_frequency=sampling_frequency,
outdir=outdir,
)
interferometer.minimum_frequency = minimum_frequency
interferometers.append(interferometer)
@@ -8,17 +8,17 @@ This tutorial must be run on a LIGO cluster.
import bilby
label = 'GW170608'
outdir = label + '_out'
label = "GW170608"
outdir = label + "_out"
# List of detectors involved in the event. GW170608 is pre-August 2017 so only
# H1 and L1 were involved
detectors = ['H1', 'L1']
detectors = ["H1", "L1"]
# Names of the channels and query types to use, and the event GraceDB ID - ref.
# https://ldas-jobs.ligo.caltech.edu/~eve.chase/monitor/online_pe/C02_clean/
# C02_HL_Pv2/lalinferencenest/IMRPhenomPv2pseudoFourPN/config.ini
channel_names = ['H1:DCH-CLEAN_STRAIN_C02', 'L1:DCH-CLEAN_STRAIN_C02']
query_types = ['H1_CLEANED_HOFT_C02', 'L1_CLEANED_HOFT_C02']
gracedb = 'G288686'
channel_names = ["H1:DCH-CLEAN_STRAIN_C02", "L1:DCH-CLEAN_STRAIN_C02"]
query_types = ["H1_CLEANED_HOFT_C02", "L1_CLEANED_HOFT_C02"]
gracedb = "G288686"
# Calibration number to inform bilby's guess of query type if those provided are
# not recognised
calibration = 2
@@ -33,14 +33,16 @@ minimum_frequency = 10 # Hz
# Get frame caches
candidate, frame_caches = bilby.gw.utils.get_gracedb(
gracedb, outdir, duration, calibration, detectors, query_types)
gracedb, outdir, duration, calibration, detectors, query_types
)
# Set up interferometer objects from the cache files
interferometers = bilby.gw.detector.InterferometerList([])
for cache_file, channel_name in zip(frame_caches, channel_names):
interferometer = bilby.gw.detector.load_data_from_cache_file(
cache_file, trigger_time, duration, psd_duration, channel_name)
cache_file, trigger_time, duration, psd_duration, channel_name
)
interferometer.minimum_frequency = minimum_frequency
interferometers.append(interferometer)
@@ -21,3 +21,6 @@ gw =
astropy
gwpy
lalsuite
[isort]
known_third_party = astropy,attr,bilby,deepdish,gwin,gwinc,gwpy,lal,lalsimulation,matplotlib,mock,nflows,numpy,packaging,pandas,past,pycbc,pymc3,pytest,scipy,setuptools,skimage,torch