Commit 55629c63 authored by Gregory Ashton's avatar Gregory Ashton

Merge branch 'master' of git.ligo.org:Monash/tupak

parents 59832b92 662bcaa6
Pipeline #25414 passed with stages
in 1 minute and 35 seconds
......@@ -373,7 +373,7 @@ class TestInterferometerStrainData(unittest.TestCase):
time_domain_strain, sampling_frequency)
self.assertTrue(np.all(
frequency_domain_strain == self.ifosd.frequency_domain_strain))
self.ifosd.frequency_domain_strain == frequency_domain_strain * self.ifosd.frequency_mask))
#def test_sampling_duration_init(self):
# self.assertIsNone(self.ifo.duration)
......
......@@ -671,6 +671,10 @@ class LogUniform(PowerLaw):
if self.minimum <= 0:
logger.warning('You specified a uniform-in-log prior with minimum={}'.format(self.minimum))
def __repr__(self):
"""Return the repr string produced by Prior._subclass_repr_helper for this subclass."""
return Prior._subclass_repr_helper(self)
class Cosine(Prior):
......
......@@ -408,7 +408,10 @@ class Result(dict):
typeB = type(B)
if typeA == typeB:
if typeA in [str, float, int, dict, list]:
return A == B
try:
return A == B
except ValueError:
return False
elif typeA in [np.ndarray]:
return np.all(A == B)
return False
......@@ -461,6 +464,8 @@ def plot_multiple(results, filename=None, labels=None, colours=None,
c = colours[i]
else:
c = 'C{}'.format(i)
hist_kwargs = kwargs.get('hist_kwargs', dict())
hist_kwargs['color'] = c
fig = result.plot_corner(fig=fig, save=False, color=c, **kwargs)
default_filename += '_{}'.format(result.label)
lines.append(matplotlib.lines.Line2D([0], [0], color=c))
......
......@@ -13,6 +13,7 @@ logger = logging.getLogger('tupak')
speed_of_light = 299792458.0 # speed of light in m/s
parsec = 3.085677581 * 1e16
solar_mass = 1.98855 * 1e30
radius_of_earth = 6371 * 1e3 # metres
def get_sampling_frequency(time_series):
......@@ -452,12 +453,13 @@ else:
logger.debug('No $DISPLAY environment variable found, so importing \
matplotlib.pyplot with non-interactive "Agg" backend.')
import matplotlib
import matplotlib.pyplot as plt
non_gui_backends = matplotlib.rcsetup.non_interactive_bk
for backend in non_gui_backends:
try:
logger.debug("Trying backend {}".format(backend))
matplotlib.use(backend, warn=False)
matplotlib.pyplot.switch_backend(backend)
plt.switch_backend(backend)
break
except Exception as e:
print(traceback.format_exc())
......@@ -9,14 +9,14 @@ try:
from astropy.cosmology import z_at_value, Planck15
import astropy.units as u
except ImportError:
logger.warning("You do not have astropy installed currently. You will "
" not be able to use some of the prebuilt functions.")
logger.warning("You do not have astropy installed currently. You will"
" not be able to use some of the prebuilt functions.")
try:
import lalsimulation as lalsim
except ImportError:
logger.warning("You do not have lalsuite installed currently. You will "
" not be able to use some of the prebuilt functions.")
logger.warning("You do not have lalsuite installed currently. You will"
" not be able to use some of the prebuilt functions.")
def redshift_to_luminosity_distance(redshift):
......@@ -378,7 +378,8 @@ def generate_all_bbh_parameters(sample, likelihood=None, priors=None):
output_sample['waveform_approximant'] = likelihood.waveform_generator.waveform_arguments['waveform_approximant']
output_sample = fill_from_fixed_priors(output_sample, priors)
output_sample, _ = convert_to_lal_binary_black_hole_parameters(output_sample, [key for key in output_sample.keys()], remove=False)
output_sample, _ = convert_to_lal_binary_black_hole_parameters(
output_sample, [key for key in output_sample.keys()], remove=False)
output_sample = generate_non_standard_parameters(output_sample)
output_sample = generate_component_spins(output_sample)
compute_snrs(output_sample, likelihood)
......@@ -457,23 +458,23 @@ def generate_component_spins(sample):
output_sample = sample.copy()
spin_conversion_parameters = ['iota', 'phi_jl', 'tilt_1', 'tilt_2', 'phi_12', 'a_1', 'a_2', 'mass_1',
'mass_2', 'reference_frequency', 'phase']
if all(key in output_sample.keys() for key in spin_conversion_parameters) and isinstance(output_sample, dict):
if all(key in output_sample.keys() for key in spin_conversion_parameters)\
and isinstance(output_sample, dict):
output_sample['iota'], output_sample['spin_1x'], output_sample['spin_1y'], output_sample['spin_1z'], \
output_sample['spin_2x'], output_sample['spin_2y'], output_sample['spin_2z'] = \
lalsim.SimInspiralTransformPrecessingNewInitialConditions(output_sample['iota'], output_sample['phi_jl'],
output_sample['tilt_1'], output_sample['tilt_2'],
output_sample['phi_12'], output_sample['a_1'],
output_sample['a_2'], output_sample['mass_1']
* tupak.core.utils.solar_mass,
output_sample['mass_2']
* tupak.core.utils.solar_mass,
output_sample['reference_frequency'],
output_sample['phase'])
lalsim.SimInspiralTransformPrecessingNewInitialConditions(
output_sample['iota'], output_sample['phi_jl'],
output_sample['tilt_1'], output_sample['tilt_2'],
output_sample['phi_12'], output_sample['a_1'], output_sample['a_2'],
output_sample['mass_1'] * tupak.core.utils.solar_mass,
output_sample['mass_2'] * tupak.core.utils.solar_mass,
output_sample['reference_frequency'], output_sample['phase'])
output_sample['phi_1'] = np.arctan(output_sample['spin_1y'] / output_sample['spin_1x'])
output_sample['phi_2'] = np.arctan(output_sample['spin_2y'] / output_sample['spin_2x'])
elif all(key in output_sample.keys() for key in spin_conversion_parameters) and isinstance(output_sample, pd.DataFrame):
elif all(key in output_sample.keys() for key in spin_conversion_parameters)\
and isinstance(output_sample, pd.DataFrame):
logger.debug('Extracting component spins.')
new_spin_parameters = ['spin_1x', 'spin_1y', 'spin_1z', 'spin_2x', 'spin_2y', 'spin_2z']
new_spins = {name: np.zeros(len(output_sample)) for name in new_spin_parameters}
......@@ -482,9 +483,11 @@ def generate_component_spins(sample):
new_spins['iota'], new_spins['spin_1x'][ii], new_spins['spin_1y'][ii], new_spins['spin_1z'][ii], \
new_spins['spin_2x'][ii], new_spins['spin_2y'][ii], new_spins['spin_2z'][ii] = \
lalsim.SimInspiralTransformPrecessingNewInitialConditions(
output_sample['iota'][ii], output_sample['phi_jl'][ii], output_sample['tilt_1'][ii], output_sample['tilt_2'][ii],
output_sample['iota'][ii], output_sample['phi_jl'][ii],
output_sample['tilt_1'][ii], output_sample['tilt_2'][ii],
output_sample['phi_12'][ii], output_sample['a_1'][ii], output_sample['a_2'][ii],
output_sample['mass_1'][ii] * tupak.core.utils.solar_mass, output_sample['mass_2'][ii] * tupak.core.utils.solar_mass,
output_sample['mass_1'][ii] * tupak.core.utils.solar_mass,
output_sample['mass_2'][ii] * tupak.core.utils.solar_mass,
output_sample['reference_frequency'][ii], output_sample['phase'][ii])
for name in new_spin_parameters:
......@@ -532,8 +535,9 @@ def compute_snrs(sample, likelihood):
for ii in range(len(temp_sample)):
for key in set(temp_sample.keys()).intersection(likelihood.waveform_generator.parameters.keys()):
likelihood.waveform_generator.parameters[key] = temp_sample[key][ii]
for key in likelihood.waveform_generator.non_standard_sampling_parameter_keys:
likelihood.waveform_generator.parameters[key] = temp_sample[key][ii]
if likelihood.waveform_generator.non_standard_sampling_parameter_keys is not None:
for key in likelihood.waveform_generator.non_standard_sampling_parameter_keys:
likelihood.waveform_generator.parameters[key] = temp_sample[key][ii]
signal_polarizations = likelihood.waveform_generator.frequency_domain_strain()
for interferometer in all_interferometers:
signal = interferometer.get_detector_response(signal_polarizations,
......
This diff is collapsed.
# The proposed Einstein Telescope located at the site of Virgo.
# LIGO-T980044-10
# https://dcc.ligo.org/LIGO-P1600143/public
name = 'ET'
# Design sensitivity curve read from the bundled ET-B PSD file.
power_spectral_density = PowerSpectralDensity(psd_file='ET_B_psd.txt')
# Arm length -- presumably km; confirm units against the detector loader.
length = 10
# Frequency band over which the detector data are analysed, in Hz.
minimum_frequency = 10
maximum_frequency = 2048
# Geographic coordinates: degrees + minutes + seconds folded into decimal degrees.
latitude = 43 + 37. / 60 + 53.0921 / 3600
longitude = 10 + 30. / 60 + 16.1878 / 3600
# Elevation -- presumably metres above the reference ellipsoid; confirm.
elevation = 51.884
# Arm azimuths; the two arms differ by exactly 60 degrees, consistent with
# the triangular geometry declared below. Reference direction (north?) -- TODO confirm.
xarm_azimuth = 70.5674
yarm_azimuth = 130.5674
# NOTE(review): 'Triangle' presumably selects the triangular three-detector
# layout in the detector reader -- verify against the code that consumes this file.
shape = 'Triangle'
......@@ -146,7 +146,8 @@ class GravitationalWaveTransient(likelihood.Likelihood):
matched_filter_snr_squared = 0
optimal_snr_squared = 0
matched_filter_snr_squared_tc_array = np.zeros(self.interferometers.frequency_array[0:-1].shape, dtype=np.complex128)
matched_filter_snr_squared_tc_array = np.zeros(
self.interferometers.frequency_array[0:-1].shape, dtype=np.complex128)
for interferometer in self.interferometers:
signal_ifo = interferometer.get_detector_response(waveform_polarizations,
self.waveform_generator.parameters)
......@@ -157,14 +158,15 @@ class GravitationalWaveTransient(likelihood.Likelihood):
signal_ifo, interferometer, self.waveform_generator.duration)
if self.time_marginalization:
interferometer.time_marginalization = self.time_marginalization
matched_filter_snr_squared_tc_array += 4. * (1. / interferometer.duration) * np.fft.ifft(
matched_filter_snr_squared_tc_array += 4. * (1. / self.waveform_generator.duration) * np.fft.ifft(
signal_ifo.conjugate()[0:-1] * interferometer.frequency_domain_strain[0:-1]
/ interferometer.power_spectral_density_array[0:-1]) * len(interferometer.frequency_domain_strain[0:-1])
/ interferometer.power_spectral_density_array[0:-1])\
* len(interferometer.frequency_domain_strain[0:-1])
if self.time_marginalization:
delta_tc = 1. / self.waveform_generator.sampling_frequency
tc_log_norm = np.log(self.interferometers.duration * delta_tc)
tc_log_norm = np.log(self.waveform_generator.duration * delta_tc)
if self.distance_marginalization:
......@@ -216,8 +218,7 @@ class GravitationalWaveTransient(likelihood.Likelihood):
self.waveform_generator.parameters['luminosity_distance'] ** 2 \
/ self.ref_dist ** 2.
rho_mf_ref = matched_filter_snr_squared * \
self.waveform_generator.parameters['luminosity_distance'] \
/ self.ref_dist
self.waveform_generator.parameters['luminosity_distance'] / self.ref_dist
return rho_mf_ref, rho_opt_ref
def log_likelihood(self):
......@@ -233,15 +234,17 @@ class GravitationalWaveTransient(likelihood.Likelihood):
self.distance_prior_array = np.array([self.prior['luminosity_distance'].prob(distance)
for distance in self.distance_array])
### Make the lookup table ###
# Make the lookup table
logger.info('Building lookup table for distance marginalisation.')
self.ref_dist = 1000 # 1000 Mpc
self.dist_margd_loglikelihood_array = np.zeros((400, 800))
self.rho_opt_ref_array = np.logspace(-3, 4, self.dist_margd_loglikelihood_array.shape[
0]) # optimal filter snr at fiducial distance of ref_dist Mpc
self.rho_mf_ref_array = np.hstack((-np.logspace(2, -3, self.dist_margd_loglikelihood_array.shape[1] / 2), \
np.logspace(-3, 4, self.dist_margd_loglikelihood_array.shape[
1] / 2))) # matched filter snr at fiducial distance of ref_dist Mpc
# optimal filter snr at fiducial distance of ref_dist Mpc
self.rho_opt_ref_array = np.logspace(-3, 4, self.dist_margd_loglikelihood_array.shape[0])
# matched filter snr at fiducial distance of ref_dist Mpc
self.rho_mf_ref_array = np.hstack(
(-np.logspace(2, -3, self.dist_margd_loglikelihood_array.shape[1] / 2),
np.logspace(-3, 4, self.dist_margd_loglikelihood_array.shape[1] / 2)))
for ii, rho_opt_ref in enumerate(self.rho_opt_ref_array):
......@@ -249,15 +252,14 @@ class GravitationalWaveTransient(likelihood.Likelihood):
optimal_snr_squared_array = rho_opt_ref * self.ref_dist ** 2. \
/ self.distance_array ** 2
matched_filter_snr_squared_array = rho_mf_ref * self.ref_dist \
/ self.distance_array
matched_filter_snr_squared_array = rho_mf_ref * self.ref_dist / self.distance_array
self.dist_margd_loglikelihood_array[ii][jj] = \
logsumexp(matched_filter_snr_squared_array - \
optimal_snr_squared_array / 2, \
logsumexp(matched_filter_snr_squared_array -
optimal_snr_squared_array / 2,
b=self.distance_prior_array * self.delta_distance)
log_norm = logsumexp(0. / self.distance_array - 0. / self.distance_array ** 2., \
log_norm = logsumexp(0. / self.distance_array - 0. / self.distance_array ** 2.,
b=self.distance_prior_array * self.delta_distance)
self.dist_margd_loglikelihood_array -= log_norm
......
from __future__ import division
import os
import numpy as np
......@@ -295,7 +296,7 @@ def get_event_time(event):
def get_open_strain_data(
name, t1, t2, outdir, cache=False, buffer_time=0, **kwargs):
name, start_time, end_time, outdir, cache=False, buffer_time=0, **kwargs):
""" A function which accesses the open strain data
This uses `gwpy` to download the open data and then saves a cached copy for
......@@ -305,7 +306,7 @@ def get_open_strain_data(
----------
name: str
The name of the detector to get data for
t1, t2: float
start_time, end_time: float
The GPS time of the start and end of the data
outdir: str
The output directory to place data in
......@@ -321,26 +322,26 @@ def get_open_strain_data(
strain: gwpy.timeseries.TimeSeries
"""
filename = '{}/{}_{}_{}.txt'.format(outdir, name, t1, t2)
filename = '{}/{}_{}_{}.txt'.format(outdir, name, start_time, end_time)
if buffer_time < 0:
raise ValueError("buffer_time < 0")
t1 = t1 - buffer_time
t2 = t2 + buffer_time
start_time = start_time - buffer_time
end_time = end_time + buffer_time
if os.path.isfile(filename) and cache:
logger.info('Using cached data from {}'.format(filename))
strain = TimeSeries.read(filename)
else:
logger.info('Fetching open data from {} to {} with buffer time {}'
.format(t1, t2, buffer_time))
strain = TimeSeries.fetch_open_data(name, t1, t2, **kwargs)
.format(start_time, end_time, buffer_time))
strain = TimeSeries.fetch_open_data(name, start_time, end_time, **kwargs)
logger.info('Saving data to {}'.format(filename))
strain.write(filename)
return strain
def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
def read_frame_file(file_name, start_time, end_time, channel=None, buffer_time=1, **kwargs):
""" A function which accesses the open strain data
This uses `gwpy` to download the open data and then saves a cached copy for
......@@ -350,10 +351,10 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
----------
file_name: str
The name of the frame to read
t1, t2: float
start_time, end_time: float
The GPS time of the start and end of the data
buffer_time: float
Read in data with `t1-buffer_time` and `t2+buffer_time`
Read in data with `start_time-buffer_time` and `end_time+buffer_time`
channel: str
The name of the channel being searched for, some standard channel names are attempted
if channel is not specified or if specified channel is not found.
......@@ -369,7 +370,7 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
strain = None
if channel is not None:
try:
strain = TimeSeries.read(source=file_name, channel=channel, start=t1, end=t2, **kwargs)
strain = TimeSeries.read(source=file_name, channel=channel, start=start_time, end=end_time, **kwargs)
loaded = True
logger.info('Successfully loaded {}.'.format(channel))
except RuntimeError:
......@@ -380,7 +381,7 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
if loaded:
continue
try:
strain = TimeSeries.read(source=file_name, channel=channel, start=t1-buffer_time, end=t2+buffer_time, **kwargs)
strain = TimeSeries.read(source=file_name, channel=channel, start=start_time, end=end_time, **kwargs)
loaded = True
logger.info('Successfully loaded {}.'.format(channel))
except RuntimeError:
......@@ -391,38 +392,3 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
else:
logger.warning('No data loaded.')
return None
def process_strain_data(strain, alpha=0.25, filter_freq=1024):
    """Low-pass filter, Tukey-window, and FFT a strain time series.

    Parameters
    ----------
    strain: gwpy.timeseries.TimeSeries
        Strain data to be processed.
    alpha: float
        The Tukey window shape parameter passed to `scipy.signal.windows.tukey`.
    filter_freq: float
        Low pass filter frequency.

    Returns
    -------
    frequency_domain_strain, frequencies: array_like
        The frequency-domain strain and the corresponding array of frequencies.
        (Note: despite earlier documentation, this function does NOT return an
        Interferometer instance.)
    """
    sampling_frequency = int(strain.sample_rate.value)

    # Low pass filter to remove content above filter_freq.
    bp = filter_design.lowpass(filter_freq, strain.sample_rate)
    strain = strain.filter(bp, filtfilt=True)
    # Crop one second from each end to discard filter edge transients.
    strain = strain.crop(*strain.span.contract(1))

    time_series = strain.times.value

    # Apply a Tukey window to taper the edges before the FFT.
    strain = strain * signal.windows.tukey(len(time_series), alpha=alpha)

    frequency_domain_strain, frequencies = nfft(strain.value, sampling_frequency)

    return frequency_domain_strain, frequencies
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment