Commit 323c49fd authored by Gregory Ashton

Clean up GW150914.py analysis script

- Removes cached data
- Adds script to automate downloading of data
- Adds comments into analysis script
- Updates to run under recent changes in the API
- Removes obsolete notebook
parent d1584202
%% Cell type:markdown id: tags:
# GW150914 analysis
%% Cell type:markdown id: tags:
Analyse GW150914 data using TUPAK
%% Cell type:code id: tags:
``` python
%pylab inline
# %matplotlib notebook
import numpy as np
import pylab as plt
%load_ext autoreload
%autoreload 2
import peyote
import corner
import logging
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel('DEBUG')
import matplotlib.mlab as mlab
```
%% Output
Populating the interactive namespace from numpy and matplotlib
%% Cell type:markdown id: tags:
## load and plot GW150914 data
Data has been downloaded from the LIGO Open Science Center (LOSC) and pre-formatted.
%% Cell type:code id: tags:
``` python
data_file = 'tutorial_data/GW150914_strain_data.npy'
time_series, strain_H1, strain_L1 = np.load(data_file)
time_of_event = 1126259462.44
plt.plot(time_series - time_of_event, strain_H1 * 1e18, 'r', label='H1')
plt.plot(time_series - time_of_event, strain_L1 * 1e18, 'b', label='L1')
plt.xlabel('time [s] since GW150914')
plt.ylabel(r'strain $[10^{-18}]$')
plt.legend(loc='lower right')
```
%% Output
<matplotlib.legend.Legend at 0x1111c7d50>
%% Cell type:code id: tags:
``` python
sampling_frequency = np.int(peyote.utils.sampling_frequency(time_series))
NFFT = 4 * sampling_frequency
power_spectral_density_H1, frequency_series = mlab.psd(strain_H1, Fs=sampling_frequency, NFFT=NFFT)
power_spectral_density_L1, frequency_series = mlab.psd(strain_L1, Fs=sampling_frequency, NFFT=NFFT)
## smoothed power spectral density -- see LOSC tutorial (https://losc.ligo.org/s/events/GW150914/LOSC_Event_tutorial_GW150914.html)
# this can be used as the psd in the searches instead
power_spectral_density_smoothed = (1.e-22*(18./(0.1+frequency_series))**2)**2 + 0.7e-23**2 + ((frequency_series/2000.)*4.e-23)**2
plt.loglog(frequency_series, np.sqrt(power_spectral_density_H1), 'r', label='H1')
plt.loglog(frequency_series, np.sqrt(power_spectral_density_L1), 'b', label='L1')
plt.loglog(frequency_series, np.sqrt(power_spectral_density_smoothed), 'k')
plt.grid('on')
plt.ylabel(r'amplitude spectral density [strain/$\sqrt{\rm Hz}$]')
plt.xlabel(r'frequency [Hz]')
plt.ylim(1e-24, 1e-19)
plt.xlim(20, 2000)
plt.legend(loc='best')
```
%% Output
<matplotlib.legend.Legend at 0x112a629d0>
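The comment in the cell above notes that the smoothed, analytic PSD from the LOSC tutorial "can be used as the psd in the searches instead", but does not show how. Below is a minimal sketch of one way to do that, assuming the `peyote.detector.PowerSpectralDensity(psd_file=...)` interface used in the script further down; the frequency grid and output file name are illustrative assumptions, not part of the original code.

``` python
# Sketch only: write the analytic (smoothed) PSD from the LOSC tutorial to a
# text file so it could be passed to peyote.detector.PowerSpectralDensity in
# place of the measured PSDs. The frequency grid and file name are assumed.
import numpy as np

frequency_series = np.linspace(0, 2048, 8193)  # assumed frequency grid [Hz]
power_spectral_density_smoothed = (
    (1.e-22 * (18. / (0.1 + frequency_series))**2)**2
    + 0.7e-23**2
    + ((frequency_series / 2000.) * 4.e-23)**2)

with open('GW150914_PSD_smoothed.txt', 'w+') as file:
    for f, p in zip(frequency_series, power_spectral_density_smoothed):
        file.write('{} {}\n'.format(f, p))

# It could then be substituted for a measured PSD, e.g.
# H1.power_spectral_density = peyote.detector.PowerSpectralDensity(
#     psd_file='./GW150914_PSD_smoothed.txt')
```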
# coding: utf-8

# # GW150914 analysis
# Analyse GW150914 data using TUPAK
from __future__ import division
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import peyote
import corner

peyote.setup_logger()

event = 'GW150914'
outdir = 'GW150914_results'
if os.path.isdir(outdir) is False:
    os.mkdir(outdir)
# Load in the data, downloading it first if it is not already present
data_file = 'tutorial_data/GW150914_strain_data.npy'
if os.path.isfile(data_file) is False:
    os.system('python get_LOSC_event_data.py -e GW150914 -o tutorial_data')
time_series, strain_H1, strain_L1 = np.load(data_file)
time_duration = time_series[-1] - time_series[0]
time_of_event = 1126259462.44

# Create and save the PSDs
sampling_frequency = np.int(peyote.utils.sampling_frequency(time_series))
NFFT = 4 * sampling_frequency
psd_H1, psd_frequencies = mlab.psd(strain_H1, Fs=sampling_frequency, NFFT=NFFT)
psd_L1, psd_frequencies = mlab.psd(strain_L1, Fs=sampling_frequency, NFFT=NFFT)
with open('GW150914_PSD_H1.txt', 'w+') as file:
    for f, p in zip(psd_frequencies, psd_H1):
        file.write('{} {}\n'.format(f, p))
with open('GW150914_PSD_L1.txt', 'w+') as file:
    for f, p in zip(psd_frequencies, psd_L1):
        file.write('{} {}\n'.format(f, p))
# Cut out a 1 s segment of data around the event and create the interferometers
search_idxs = (time_series > time_of_event - 0.5) * (time_series < time_of_event + 0.5)
time_series = time_series[search_idxs]
strain_H1 = strain_H1[search_idxs]
strain_L1 = strain_L1[search_idxs]
time_duration = time_series[-1] - time_series[0]

H1 = peyote.detector.H1
H1.power_spectral_density = peyote.detector.PowerSpectralDensity(
    psd_file='./GW150914_PSD_H1.txt')
H1.set_data(sampling_frequency, time_duration,
            frequency_domain_strain=peyote.utils.nfft(
                strain_H1, sampling_frequency)[0])

L1 = peyote.detector.L1
L1.power_spectral_density = peyote.detector.PowerSpectralDensity(
    psd_file='./GW150914_PSD_L1.txt')
L1.set_data(sampling_frequency, time_duration,
            frequency_domain_strain=peyote.utils.nfft(
                strain_L1, sampling_frequency)[0])

IFOs = [H1, L1]
# Plot the data and PSDs
fig, axes = plt.subplots(nrows=2, figsize=(8, 8))
for ax, IFO in zip(axes, IFOs):
    ax.loglog(IFO.frequency_array, IFO.data, '-C0', label=IFO.name, lw=1.5)
    ax.loglog(IFO.frequency_array,
              np.abs(IFO.amplitude_spectral_density_array), '-C1', lw=0.5,
              label=IFO.name + ' PSD')
    ax.grid('on')
    ax.set_ylabel(r'amplitude spectral density [strain/$\sqrt{\rm Hz}$]')
    ax.set_xlabel(r'frequency [Hz]')
    ax.set_ylim(1e-24, 1e-19)
    ax.set_xlim(20, 2000)
    ax.legend(loc='best')
fig.savefig('{}/frequency_domain_data.png'.format(outdir))
# Create the waveform generator
waveformgenerator = peyote.source.WaveformGenerator(
    'BBH', sampling_frequency, time_duration, peyote.source.LALBinaryBlackHole)

# Define the prior: all parameters are fixed except the two masses
prior = dict(spin_1=[0, 0, 0], spin_2=[0, 0, 0], luminosity_distance=410.,
             iota=2.97305, phase=1.145, waveform_approximant='IMRPhenomPv2',
             reference_frequency=50., ra=1.375, dec=-1.2108,
             geocent_time=1126259462.413, psi=2.659, mass_1=32, mass_2=32)
prior['mass_1'] = peyote.parameter.Parameter(
    'mass_1', prior=peyote.prior.Uniform(lower=32, upper=41),
    latex_label='$m_1$')
prior['mass_2'] = peyote.parameter.Parameter(
    'mass_2', prior=peyote.prior.Uniform(lower=25, upper=33),
    latex_label='$m_2$')

# Define the likelihood
likelihood = peyote.likelihood.Likelihood(IFOs, waveformgenerator)

# Run the sampler
result = peyote.run_sampler(likelihood, prior, sampler='pymultinest',
                            n_live_points=400, verbose=True, outdir=outdir)
fig = corner.corner(result.samples)
fig.savefig('test')
""" Helper script to faciliate downloading data from LOSC
Usage: To download the GW150914 data from https://losc.ligo.org/events/
$ python get_LOSC_event_data -e GW150914
"""
from __future__ import division
import numpy as np
import os
import argparse
parser = argparse.ArgumentParser(description='Script to download LOSC data.')
parser.add_argument('-e', '--event', metavar='event', type=str)
parser.add_argument('-o', '--outdir', metavar='outdir',
                    default='tutorial_data')
args = parser.parse_args()

url_dictionary = dict(
    GW150914="https://losc.ligo.org/s/events/GW150914/{}-{}1_LOSC_4_V2-1126259446-32.txt.gz",
    LVT151012="https://losc.ligo.org/s/events/LVT151012/{}-{}1_LOSC_4_V2-1128678884-32.txt.gz",
    GW151226="https://losc.ligo.org/s/events/GW151226/{}-{}1_LOSC_4_V2-1135136334-32.txt.gz",
    GW170104="https://losc.ligo.org/s/events/GW170104/{}-{}1_LOSC_4_V1-1167559920-32.txt.gz",
    GW170608="https://losc.ligo.org/s/events/GW170608/{}-{}1_LOSC_CLN_4_V1-1180922478-32.txt.gz",
    GW170814="https://dcc.ligo.org/public/0146/P1700341/001/{}-{}1_LOSC_CLN_4_V1-1186741845-32.txt.gz",
    GW170817="https://dcc.ligo.org/public/0146/P1700349/001/{}-{}1_LOSC_CLN_4_V1-1187007040-2048.txt.gz")
data = []
for det in ['H', 'L']:
    url = url_dictionary[args.event].format(det, det)
    filename = os.path.basename(url)
    if os.path.isfile(filename.rstrip('.gz')) is False:
        print("Downloading data from {}".format(url))
        os.system("wget {} ".format(url))
        os.system("gunzip {}".format(filename))
    filename = filename.rstrip('.gz')
    data.append(np.loadtxt(filename))
    with open(filename, 'r') as f:
        header = f.readlines()[:3]
    event = header[0].split(' ')[5]
    detector = header[0].split(' ')[7]
    sampling_frequency = header[1].split(' ')[4]
    starttime = header[2].split(' ')[3]
    duration = header[2].split(' ')[5]
    print('Loaded data for event={}, detector={}, sampling_frequency={}'
          ', starttime={}, duration={}'.format(
              event, detector, sampling_frequency, starttime, duration))
    os.remove(filename)
time = np.arange(0, int(duration), 1/int(sampling_frequency)) + int(starttime)
arr = [time] + data
outfile = '{}/{}_strain_data.npy'.format(args.outdir, args.event)
np.save(outfile, arr)
print("Saved data to {}".format(outfile))