utils.py 18 KB
Newer Older
Colm Talbot's avatar
Colm Talbot committed
1
from __future__ import division
Gregory Ashton's avatar
Gregory Ashton committed
2
import logging
3
import os
4
import numpy as np
5
from math import fmod
6
from gwpy.timeseries import TimeSeries
7
import argparse
Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
8

Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
9 10
# Physical constants, all in SI units
speed_of_light = 299792458.0  # speed of light in m/s
parsec = 3.085677581 * 1e16  # parsec in m
solar_mass = 1.98855 * 1e30  # solar mass in kg
13

14
def get_sampling_frequency(time_series):
    """Return the sampling frequency of an evenly sampled time array.

    Parameters
    ----------
    time_series: array
        Array of sample times.

    Raises
    ------
    ValueError
        If the sample spacing is not uniform to within 1e-10.
    """
    tolerance = 1e-10
    spacings = np.diff(time_series)
    # every spacing must agree with every other to within the tolerance
    if np.ptp(spacings) > tolerance:
        raise ValueError("Your time series was not evenly sampled")
    return 1. / (time_series[1] - time_series[0])
23 24


25 26 27
def create_time_series(sampling_frequency, duration, starting_time=0.):
    """Create an evenly sampled array of sample times.

    Parameters
    ----------
    sampling_frequency: float
        Sampling frequency in Hz.
    duration: float
        Length of the time series in seconds.
    starting_time: float, optional
        Time of the first sample (default 0).

    Returns
    -------
    array: sample times starting at starting_time, spaced by 1/sampling_frequency.
    """
    # BUGFIX: the original passed `duration` as the absolute end point of
    # np.arange, so any non-zero starting_time silently shortened (or
    # emptied) the series; the end point must be starting_time + duration.
    return np.arange(starting_time, starting_time + duration, 1. / sampling_frequency)

Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
28

29
def ra_dec_to_theta_phi(ra, dec, gmst):
    """Convert equatorial coordinates to polar angles on the celestial sphere.

    Parameters
    ----------
    ra: float
        Right ascension in radians.
    dec: float
        Declination in radians.
    gmst: float
        Greenwich mean sidereal time of arrival of the signal, in radians.

    Returns
    -------
    theta, phi: float
        Zenith and azimuthal angles in radians.
    """
    zenith = np.pi / 2 - dec
    azimuth = ra - gmst
    return zenith, azimuth
Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
43 44


45
def gps_time_to_gmst(gps_time):
    """Convert a GPS time to Greenwich mean sidereal time in radians.

    Assumes the Earth has rotated at a constant rate since
    00:00:00, 1 Jan. 2000; an additive correction makes the value exact
    at 00:00:00, 1 Jan. 2018.  The residual error accumulates at roughly
    0.0001 radians per decade.

    Parameters
    ----------
    gps_time: float
        GPS time in seconds.

    Returns
    -------
    float: Greenwich mean sidereal time in radians, reduced modulo 2*pi.
    """
    # sidereal rate: one extra revolution per year on top of the solar day
    omega_earth = 2 * np.pi * (1 / 365.2425 + 1) / 86400.
    gps_2000 = 630720013.  # GPS time at 00:00:00, 1 Jan. 2000
    gmst_2000 = (6 + 39. / 60 + 51.251406103947375 / 3600) * np.pi / 12
    correction_2018 = -0.00017782487379358614
    elapsed = gps_time - gps_2000
    sidereal_time = omega_earth * elapsed + gmst_2000 + correction_2018
    return fmod(sidereal_time, 2 * np.pi)
Nikhil Sarin's avatar
Nikhil Sarin committed
65

Gregory Ashton's avatar
Gregory Ashton committed
66

Colm Talbot's avatar
Colm Talbot committed
67 68
def create_fequency_series(sampling_frequency, duration):
    """Build the frequency array matching an FFT of real-valued data.

    Parameters
    ----------
    sampling_frequency: float
        Sampling frequency of the data.
    duration: float
        Duration of the data in seconds.

    Returns
    -------
    array: frequencies from 0 up to (at most) the Nyquist frequency.
    """
    num_samples = int(np.round(duration * sampling_frequency))

    # positive non-zero frequency bins below Nyquist
    num_frequencies = (num_samples - 1) // 2
    delta_freq = 1. / duration
    frequencies = delta_freq * np.linspace(1, num_frequencies, num_frequencies)

    if len(frequencies) % 2 == 1:
        # even number of samples: DC and Nyquist bins both present
        return np.concatenate(([0], frequencies, [sampling_frequency / 2.]))
    # odd number of samples: there is no Nyquist bin, only DC
    return np.concatenate(([0], frequencies))


Nikhil Sarin's avatar
Nikhil Sarin committed
93
def create_white_noise(sampling_frequency, duration):
    """Generate frequency-domain Gaussian white noise.

    The noise is flat in frequency, normalised to units of
    strain/sqrt(Hz), and can subsequently be coloured by a PSD.

    Parameters
    ----------
    sampling_frequency: float
        Sampling frequency of the data.
    duration: float
        Duration of the data in seconds.

    Returns
    -------
    white_noise, frequencies: tuple of arrays
        Complex frequency-domain noise and the matching frequency array.
    """
    num_samples = int(np.round(duration * sampling_frequency))
    delta_freq = 1. / duration

    frequencies = create_fequency_series(sampling_frequency, duration)

    # standard deviation chosen so the one-sided spectrum is white
    sigma = 0.5 * (1. / delta_freq) ** 0.5
    # NOTE: real parts are drawn before imaginary parts to keep the
    # random stream identical to the historical implementation
    real_part = np.random.normal(0, sigma, len(frequencies))
    imag_part = np.random.normal(0, sigma, len(frequencies))
    htilde = real_part + 1j * imag_part

    # convolve the data with a (trivial) instrument transfer function
    otilde = htilde * 1.
    # zero the DC bin; the Nyquist bin only exists for even sample counts
    otilde[0] = 0
    if np.mod(num_samples, 2) == 0:
        otilde[-1] = 0

    # transpose for use with infft (a no-op for 1-D arrays)
    white_noise = np.transpose(otilde)
    frequencies = np.transpose(frequencies)

    return white_noise, frequencies
Gregory Ashton's avatar
Gregory Ashton committed
129 130 131


def nfft(ht, Fs):
    """FFT a real time series while keeping track of the frequency bins.

    Parameters
    ----------
    ht: array
        Real-valued time series.
    Fs: float
        Sampling frequency.

    Returns
    -------
    hf, f: tuple of arrays
        Single-sided FFT of ht normalised to units of strain/sqrt(Hz),
        and the frequencies associated with hf.
    """
    # zero-pad by one sample if the series length is odd
    if np.mod(len(ht), 2) == 1:
        ht = np.append(ht, 0)
    num_samples = len(ht)

    # frequency bins from DC up to Nyquist (Fs / 2)
    freqs = Fs / 2 * np.linspace(0, 1, int(num_samples / 2 + 1))

    # rfft exploits the real input (positive frequencies only);
    # dividing by Fs normalises to strain / sqrt(Hz)
    hf = np.fft.rfft(ht) / Fs

    return hf, freqs


def infft(hf, Fs):
    """Inverse of nfft: recover the time series from a single-sided FFT.

    eric.thrane@ligo.org

    Parameters
    ----------
    hf: array
        Single-sided FFT as produced by nfft.
    Fs: float
        Sampling frequency.

    Returns
    -------
    array: the real-valued time series.
    """
    # irfft assumes Hermitian symmetry, so positive frequencies suffice
    time_series = np.fft.irfft(hf)
    # undo the LAL/Lasky 1/Fs normalisation applied in nfft
    return time_series * Fs
Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
176 177


Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
178
def asd_from_freq_series(freq_data, df):
    """Amplitude spectral density of frequency-domain data.

    Parameters
    ----------
    freq_data: array
        Complex frequency-domain data, e.g. the output of gaussian_noise().
    df: float
        Frequency spacing of freq_data, i.e. 1/(segment length) used to
        generate the noise.

    Returns
    -------
    array: real-valued, normalised frequency-domain ASD.
    """
    return np.absolute(freq_data) * (2 * df) ** 0.5


Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
191
def psd_from_freq_series(freq_data, df):
    """Power spectral density of frequency-domain data.

    Delegates to asd_from_freq_series() and squares the result.

    Parameters
    ----------
    freq_data: array
        Complex frequency-domain data, e.g. the output of gaussian_noise().
    df: float
        Frequency spacing of freq_data, i.e. 1/(segment length) used to
        generate the noise.

    Returns
    -------
    array: real-valued, normalised frequency-domain PSD.
    """
    asd = asd_from_freq_series(freq_data, df)
    return asd ** 2


Sylvia Biscoveanu's avatar
Sylvia Biscoveanu committed
205
def time_delay_geocentric(detector1, detector2, ra, dec, time):
    """Time delay between two detectors, based on XLALArrivaTimeDiff in TimeDelay.c.

    Parameters
    ----------
    detector1: array
        Cartesian vertex coordinates of the first detector in the
        geocentric frame (e.g. Interferometer.vertex).
    detector2: array
        Cartesian vertex coordinates of the second detector; pass
        np.array([0, 0, 0]) to get the delay from the Earth centre.
    ra: float
        Right ascension of the source in radians.
    dec: float
        Declination of the source in radians.
    time: float
        GPS time in the geocentric frame.

    Returns
    -------
    float: time delay between the two detectors in the geocentric frame.
    """
    gmst = gps_time_to_gmst(time)
    theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
    # unit vector from the geocentre towards the source
    omega = np.array([np.sin(theta) * np.cos(phi),
                      np.sin(theta) * np.sin(phi),
                      np.cos(theta)])
    separation = detector2 - detector1
    return np.dot(omega, separation) / speed_of_light
225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249


def get_polarization_tensor(ra, dec, time, psi, mode):
    """Polarisation tensor for a given sky location, time, and mode.

    Tensor definitions follow Nishizawa et al. (2009) arXiv:0903.0528:
    [u, v, w] span the Earth frame and [m, n, omega] the wave frame.
    Note: there is a typo in the wave-frame definition in that paper.

    Parameters
    ----------
    ra: float
        Right ascension in radians.
    dec: float
        Declination in radians.
    time: float
        Geocentric GPS time.
    psi: float
        Binary polarisation angle, counter-clockwise about the direction
        of propagation.
    mode: str
        One of 'plus', 'cross', 'breathing', 'longitudinal', 'x', 'y'
        (case-insensitive).

    Returns
    -------
    array or None: the 3x3 polarisation tensor, or None (with a warning
    logged) for an unrecognised mode.
    """
    gmst = gps_time_to_gmst(time)
    theta, phi = ra_dec_to_theta_phi(ra, dec, gmst)
    u = np.array([np.cos(phi) * np.cos(theta), np.cos(theta) * np.sin(phi), -np.sin(theta)])
    v = np.array([-np.sin(phi), np.cos(phi), 0])
    m = -u * np.sin(psi) - v * np.cos(psi)
    n = -u * np.cos(psi) + v * np.sin(psi)

    polarization = mode.lower()
    # np.outer(a, b) is equivalent to einsum('i,j->ij', a, b) for 1-D inputs
    if polarization == 'plus':
        return np.outer(m, m) - np.outer(n, n)
    if polarization == 'cross':
        return np.outer(m, n) + np.outer(n, m)
    if polarization == 'breathing':
        return np.outer(m, m) + np.outer(n, n)

    # the remaining modes also need the propagation direction
    omega = np.cross(m, n)
    if polarization == 'longitudinal':
        return np.sqrt(2) * np.outer(omega, omega)
    if polarization == 'x':
        return np.outer(m, omega) + np.outer(omega, m)
    if polarization == 'y':
        return np.outer(n, omega) + np.outer(omega, n)

    logging.warning("{} not a polarization mode!".format(mode))
    return None
267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283


def get_vertex_position_geocentric(latitude, longitude, elevation):
    """Position of the IFO vertex in geocentric coordinates, in metres.

    Based on arXiv:gr-qc/0008066 Eqs. B11-B13, except for the typo in the
    definition of the local radius; see Section 2.1 of LIGO-T980044-10 for
    the correct expression.

    Parameters
    ----------
    latitude, longitude: float
        Geodetic coordinates of the vertex in radians.
    elevation: float
        Height above the reference ellipsoid in metres.

    Returns
    -------
    array: cartesian [x, y, z] position in metres.
    """
    semi_major_axis = 6378137  # ellipsoid model of Earth, in m
    semi_minor_axis = 6356752.314  # in m
    # local radius of curvature of the ellipsoid at this latitude
    radius = semi_major_axis**2 * (semi_major_axis**2 * np.cos(latitude)**2
                                   + semi_minor_axis**2 * np.sin(latitude)**2)**(-0.5)
    x_comp = (radius + elevation) * np.cos(latitude) * np.cos(longitude)
    y_comp = (radius + elevation) * np.cos(latitude) * np.sin(longitude)
    z_comp = ((semi_minor_axis / semi_major_axis)**2 * radius + elevation) * np.sin(latitude)
    return np.array([x_comp, y_comp, z_comp])
Gregory Ashton's avatar
Gregory Ashton committed
284 285


286
def setup_logger(outdir=None, label=None, log_level=None):
    """ Setup logging output: call at the start of the script to use

    Parameters
    ----------
    outdir, label: str
        If supplied, write the logging output to outdir/label.log
    log_level = ['debug', 'info', 'warning']
        Either a string from the list above, or an integer as specified
        in https://docs.python.org/2/library/logging.html#logging-levels
    """

    if type(log_level) is str:
        try:
            LEVEL = getattr(logging, log_level.upper())
        except AttributeError:
            raise ValueError('log_level {} not understood'.format(log_level))
    elif log_level is None:
        # fall back to the level requested on the command line
        LEVEL = command_line_args.log_level
    else:
        LEVEL = int(log_level)

    logger = logging.getLogger()
    logger.setLevel(LEVEL)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)-8s: %(message)s', datefmt='%H:%M')

    # BUGFIX: previously every call attached a fresh StreamHandler, so
    # calling setup_logger twice duplicated every log line.  Only add one
    # if none is attached yet (FileHandler subclasses StreamHandler,
    # hence the exact type check).
    if not any(type(h) is logging.StreamHandler for h in logger.handlers):
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        stream_handler.setLevel(LEVEL)
        logger.addHandler(stream_handler)

    if label:
        if outdir:
            check_directory_exists_and_if_not_mkdir(outdir)
        else:
            outdir = '.'
        log_file = '{}/{}.log'.format(outdir, label)
        # likewise, avoid attaching a second handler for the same file
        if not any(isinstance(h, logging.FileHandler) and
                   h.baseFilename == os.path.abspath(log_file)
                   for h in logger.handlers):
            file_handler = logging.FileHandler(log_file)
            file_handler.setFormatter(formatter)
            file_handler.setLevel(LEVEL)
            logger.addHandler(file_handler)

    version_file = os.path.join(os.path.dirname(__file__), '.version')
    with open(version_file, 'r') as f:
        version = f.readline()
    logging.info('Running tupak version: {}'.format(version))

334

335 336 337 338 339 340 341 342
def get_progress_bar(module='tqdm'):
    """Return a progress-bar wrapper, degrading gracefully if unavailable.

    Parameters
    ----------
    module: str
        Either 'tqdm' or 'tqdm_notebook'.

    Returns
    -------
    A callable that wraps an iterable; if tqdm is not installed the
    callable returns the iterable unchanged.  Returns None for an
    unknown module name.
    """
    if module == 'tqdm':
        try:
            from tqdm import tqdm
        except ImportError:
            # tqdm not installed: pass the iterable through untouched
            def tqdm(x, *args, **kwargs):
                return x
        return tqdm
    elif module == 'tqdm_notebook':
        try:
            from tqdm import tqdm_notebook as tqdm
        except ImportError:
            def tqdm(x, *args, **kwargs):
                return x
        return tqdm
350

351

Colm Talbot's avatar
Colm Talbot committed
352 353 354 355 356 357 358 359 360 361 362
def spherical_to_cartesian(radius, theta, phi):
    """Convert spherical coordinates to a cartesian vector.

    Parameters
    ----------
    radius: float
        Radial coordinate.
    theta: float
        Axial (polar) angle in radians.
    phi: float
        Azimuthal angle in radians.

    Returns
    -------
    list: the cartesian [x, y, z] components.
    """
    sin_theta = np.sin(theta)
    x_comp = radius * sin_theta * np.cos(phi)
    y_comp = radius * sin_theta * np.sin(phi)
    z_comp = radius * np.cos(theta)
    return [x_comp, y_comp, z_comp]
363 364


365 366 367 368 369 370
def check_directory_exists_and_if_not_mkdir(directory):
    """Ensure `directory` exists, creating it (and parents) if needed.

    Parameters
    ----------
    directory: str
        Path of the directory to create.
    """
    if os.path.isdir(directory):
        logging.debug('Directory {} exists'.format(directory))
        return
    try:
        os.makedirs(directory)
        logging.debug('Making directory {}'.format(directory))
    except OSError:
        # BUGFIX: the original exists-then-create pattern raced with other
        # processes creating the same directory; tolerate that case, but
        # re-raise genuine failures (e.g. the path exists as a plain file,
        # which the original silently accepted).
        if not os.path.isdir(directory):
            raise
371

moritz's avatar
moritz committed
372

373
def inner_product(aa, bb, frequency, PSD):
    """Matched-filter inner product of two frequency-domain signals.

    Parameters
    ----------
    aa, bb: array
        Single-sided Fourier transforms, created e.g. by the nfft
        function above.
    frequency: array
        Frequencies associated with aa and bb, also returned by nfft.
    PSD: object
        PSD object providing power_spectral_density_interpolated().

    Returns
    -------
    float: the matched-filter inner product of aa and bb.
    """
    psd_values = PSD.power_spectral_density_interpolated(frequency)

    # integrate conj(aa) * bb / S(f) over frequency
    integrand = np.conj(aa) * bb / psd_values
    delta_f = frequency[1] - frequency[0]
    integral = np.sum(integrand) * delta_f

    # factor of 4 from the one-sided spectrum convention
    return 4. * np.real(integral)
396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430


def noise_weighted_inner_product(aa, bb, power_spectral_density, time_duration):
    """
    Calculate the noise weighted inner product between two arrays.

    Parameters
    ----------
    aa: array
        Array that is complex conjugated in the product
    bb: array
        Array that is not complex conjugated
    power_spectral_density: array
        Power spectral density evaluated at the same frequencies
    time_duration: float
        time_duration of the data, used for the 4/T normalisation

    Return
    ------
    Noise-weighted inner product.
    """
    weighted = np.conj(aa) * bb / power_spectral_density
    return 4 / time_duration * np.sum(weighted)


def matched_filter_snr_squared(signal, interferometer, time_duration):
    """Matched-filter SNR^2 of a signal template against detector data."""
    data = interferometer.data
    psd = interferometer.power_spectral_density_array
    return noise_weighted_inner_product(signal, data, psd, time_duration)


def optimal_snr_squared(signal, interferometer, time_duration):
    """Optimal SNR^2: the signal template matched against itself."""
    psd = interferometer.power_spectral_density_array
    return noise_weighted_inner_product(signal, signal, psd, time_duration)
Colm Talbot's avatar
Colm Talbot committed
431 432 433 434 435 436 437 438


def get_event_time(event):
    """
    Get the merger time for known GW events.

    We currently know about:
        GW150914
        LVT151012
        GW151226
        GW170104
        GW170608
        GW170814
        GW170817

    Parameters
    ----------
    event: str
        Event descriptor, this can deal with some prefixes, e.g., '150914', 'GW150914', 'LVT151012'

    Return
    ------
    event_time: float
        Merger time, or None (with a warning logged) for unknown events.
    """
    event_times = {'150914': 1126259462.422, '151012': 1128678900.4443,  '151226': 1135136350.65,
                   '170104': 1167559936.5991, '170608': 1180922494.4902, '170814': 1186741861.5268,
                   '170817': 1187008882.4457}
    # BUGFIX: the original guard was `if 'GW' or 'LVT' in event:`, which is
    # always true because the non-empty string 'GW' is truthy.  Any prefix
    # is stripped by keying on the trailing six-digit date code instead.
    try:
        return event_times[event[-6:]]
    except KeyError:
        # use logging (consistent with the rest of this module) rather
        # than a bare print
        logging.warning('Unknown event {}.'.format(event))
        return None
468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488


def get_open_strain_data(
        name, t1, t2, outdir, cache=False, raw_data_file=None, **kwargs):
    """ A function which accesses the open strain data

    This uses `gwpy` to download the open data and then saves a cached copy for
    later use

    Parameters
    ----------
    name: str
        The name of the detector to get data for
    t1, t2: float
        The GPS time of the start and end of the data
    outdir: str
        The output directory to place data in
    cache: bool
        If true, cache the data
    raw_data_file: str
        If supplied, read the strain from this file instead of fetching,
        provided it covers the requested time span
    **kwargs:
        Passed to `gwpy.timeseries.TimeSeries.fetch_open_data`

    Returns
    -------
    strain: gwpy.timeseries.TimeSeries

    Raises
    ------
    ValueError
        If raw_data_file is supplied but does not cover [t1, t2]
    """
    filename = '{}/{}_{}_{}.txt'.format(outdir, name, t1, t2)
    if raw_data_file:
        # an explicit raw data file takes precedence over cache/download
        logging.info('Using raw_data_file {}'.format(raw_data_file))
        strain = TimeSeries.read(raw_data_file)
        if (t1 > strain.times[0].value) and (t2 < strain.times[-1].value):
            logging.info('Using supplied raw data file')
            strain = strain.crop(t1, t2)
        else:
            raise ValueError('Supplied file does not contain requested data')
    elif os.path.isfile(filename) and cache:
        # a previously saved copy of exactly this span
        logging.info('Using cached data from {}'.format(filename))
        strain = TimeSeries.read(filename)
    else:
        # last resort: download the open data and cache it for next time
        logging.info('Fetching open data ...')
        strain = TimeSeries.fetch_open_data(name, t1, t2, **kwargs)
        logging.info('Saving data to {}'.format(filename))
        strain.write(filename)
    return strain


516 517 518 519 520 521 522 523 524 525 526 527 528
def set_up_command_line_arguments():
    """Parse tupak's shared command line arguments.

    Unknown arguments are ignored (parse_known_args), so scripts can
    define their own options on top of these.

    Returns
    -------
    argparse.Namespace
        Parsed arguments, with an extra `log_level` attribute derived
        from the verbosity flags (quiet wins over verbose).
    """
    parser = argparse.ArgumentParser(
        description="Command line interface for tupak scripts")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help=("Increase output verbosity [logging.DEBUG]." +
                              " Overridden by script level settings"))
    # BUGFIX: "--quite" is a typo for "--quiet"; the old spelling is kept
    # for backward compatibility (it also fixes the attribute name,
    # args.quite) and the correct spelling is added as an alias.
    parser.add_argument("-q", "--quite", "--quiet", action="store_true",
                        help=("Decrease output verbosity [logging.WARNING]." +
                              " Overridden by script level settings"))
    parser.add_argument("-c", "--clean", action="store_true",
                        help="Force clean data, never use cached data")
    parser.add_argument("-u", "--use-cached", action="store_true",
                        help="Force cached data and do not check its validity")
    parser.add_argument("-d", "--detectors",  nargs='+',
                        default=['H1', 'L1', 'V1'],
                        help=("List of detectors to use in open data calls, "
                              "e.g. -d H1 L1 for H1 and L1"))
    args, _ = parser.parse_known_args()

    # map verbosity flags onto a logging level
    if args.quite:
        args.log_level = logging.WARNING
    elif args.verbose:
        args.log_level = logging.DEBUG
    else:
        args.log_level = logging.INFO

    return args


# Parse the shared command line arguments once at import time so other
# modules (and setup_logger's default log level) can consult them.
command_line_args = set_up_command_line_arguments()

# When no display is available (e.g. on a cluster), switch matplotlib to
# the non-interactive Agg backend before pyplot is imported anywhere,
# so plotting does not crash.
if 'DISPLAY' in os.environ:
    pass
else:
    logging.info('No $DISPLAY environment variable found, so importing \
                  matplotlib.pyplot with non-interactive "Agg" backend.')
    import matplotlib
    matplotlib.use('Agg')

555 556


557