
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (43); showing 636 additions and 279 deletions
......@@ -17,11 +17,10 @@ python-3.7:
stage: test
image: bilbydev/bilby_pipe-test-suite-python37
script:
- pip install lalsuite gwpy bilby black
- python setup.py install
# Run black check
- black --check bilby_pipe/
- black --check bilby_pipe/ --diff
# Run pyflakes
- flake8 .
......@@ -40,17 +39,24 @@ python-3.6:
stage: test
image: bilbydev/bilby_pipe-test-suite-python36
script:
- pip install lalsuite gwpy bilby
- python setup.py install
# Run tests
# run tests
- pytest
python-3.5:
stage: test
image: bilbydev/bilby_pipe-test-suite-python35
script:
- python setup.py install
# run tests
- pytest
documentation:
stage: test
image: bilbydev/bilby_pipe-test-suite-python37
script:
- pip install bilby
- python setup.py install
# Make the documentation
......
|pipeline status| |coverage report| |version|
|pipeline status| |coverage report| |pypi| |conda| |version|
bilby_pipe
==========
......@@ -16,4 +16,8 @@ A package for automating transient gravitational wave parameter estimation
.. |coverage report| image:: https://lscsoft.docs.ligo.org/bilby_pipe/coverage_badge.svg
:target: https://lscsoft.docs.ligo.org/bilby_pipe/htmlcov/
.. |version| image:: https://img.shields.io/pypi/pyversions/bilby-pipe.svg
:target: https://pypi.org/project/bilby_pipe/
.. |pypi| image:: https://badge.fury.io/py/bilby-pipe.svg
:target: https://pypi.org/project/bilby_pipe/
.. |conda| image:: https://img.shields.io/conda/vn/conda-forge/bilby_pipe.svg
:target: https://anaconda.org/conda-forge/bilby_pipe
......@@ -13,4 +13,4 @@ from . import bilbyargparser
from . import utils
from . import parser
__version__ = utils.get_version_information()
__version__ = utils.get_version_information().split(":", 1)[0]
......@@ -6,7 +6,7 @@ from numpy import linspace
import bilby
import matplotlib.pyplot as plt
from bilby_pipe.utils import duration_lookups
from bilby_pipe.utils import DURATION_LOOKUPS
def plot_SNRs(ax, label, prior, waveform_generator, n_samples=200):
......@@ -37,7 +37,7 @@ if "plot" in sys.argv:
snr_fig, snr_axes = plt.subplots(nrows=len(filenames), sharex=True, figsize=(5, 9))
for ii, filename in enumerate(filenames):
duration = duration_lookups[filename.rstrip(".prior")]
duration = DURATION_LOOKUPS[filename.rstrip(".prior")]
waveform_generator = bilby.gw.WaveformGenerator(
sampling_frequency=8192,
duration=duration,
......
chirp_mass = Uniform(name='chirp_mass', minimum={mc_min}, maximum={mc_max}, unit='$M_{{\\odot}}$', boundary='reflective')
mass_ratio = Uniform(name='mass_ratio', minimum=0.125, maximum=1, boundary='reflective')
mass_2 = Constraint(name='mass_2', minimum=1.001398, maximum=1000)
a_1 = Uniform(name='a_1', minimum=0, maximum=0.8, boundary='reflective')
a_2 = Uniform(name='a_2', minimum=0, maximum=0.8, boundary='reflective')
tilt_1 = Sine(name='tilt_1', boundary='reflective')
tilt_2 = Sine(name='tilt_2', boundary='reflective')
phi_12 = Uniform(name='phi_12', minimum=0, maximum=2 * np.pi, boundary='periodic')
phi_jl = Uniform(name='phi_jl', minimum=0, maximum=2 * np.pi, boundary='periodic')
luminosity_distance = bilby.gw.prior.UniformSourceFrame(name='luminosity_distance', minimum={d_min}, maximum={d_max}, unit='Mpc', boundary='reflective')
dec = Cosine(name='dec', boundary='reflective')
ra = Uniform(name='ra', minimum=0, maximum=2 * np.pi, boundary='periodic')
theta_jn = Sine(name='theta_jn', boundary='reflective')
psi = Uniform(name='psi', minimum=0, maximum=np.pi, boundary='periodic')
phase = Uniform(name='phase', minimum=0, maximum=2 * np.pi, boundary='periodic')
......@@ -35,11 +35,11 @@ except ImportError:
)
try:
import LDAStools # noqa
import LDAStools.frameCPP # noqa
except ImportError:
logger.warning(
"You do not have LDAStools (python-ldas-tools-framecpp) installed."
" You may experience problems accessing interferometer data."
"You do not have LDAStools.frameCPP (python-ldas-tools-framecpp) "
"installed. You may experience problems accessing interferometer data."
)
......@@ -200,7 +200,9 @@ class DataGenerationInput(Input):
)
)
else:
raise BilbyPipeError("Unable to set the psd length")
raise BilbyPipeError(
"Unable to set the psd duration from psd_length={}".format(psd_length)
)
@property
def psd_start_time(self):
......@@ -209,7 +211,11 @@ class DataGenerationInput(Input):
return self._psd_start_time
elif self.trigger_time is not None:
psd_start_time = -self.psd_duration
logger.info("Using default PSD start time {}".format(psd_start_time))
logger.info(
"Using default PSD start time {} relative to start time".format(
psd_start_time
)
)
return psd_start_time
else:
raise BilbyPipeError("PSD start time not set")
......@@ -270,26 +276,6 @@ class DataGenerationInput(Input):
"Detector {} not given in the channel-dict".format(det)
)
@property
def detectors(self):
""" A list of the detectors to search over, e.g., ['H1', 'L1'] """
return self._detectors
@detectors.setter
def detectors(self, detectors):
""" Handles various types of user input """
if isinstance(detectors, list):
if len(detectors) == 1:
det_list = self._convert_string_to_list(detectors[0])
else:
det_list = detectors
else:
raise ValueError("Input `detectors` = {} not understood".format(detectors))
det_list.sort()
det_list = [det.upper() for det in det_list]
self._detectors = det_list
@property
def sampling_frequency(self):
return self._sampling_frequency
......@@ -541,14 +527,34 @@ class DataGenerationInput(Input):
if data is None and self.data_dict is not None:
data = self._gwpy_read(det, channel, start_time, end_time)
if data is None:
data = self._gwpy_get(det, channel, start_time, end_time)
data = self._gwpy_get(channel, start_time, end_time)
if data is None:
data = self._gwpy_fetch_open_data(det, channel, start_time, end_time)
data = self._gwpy_fetch_open_data(det, start_time, end_time)
data = data.resample(self.sampling_frequency)
return data
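For illustration, the tiered lookup above (local frame files via TimeSeries.read, then proprietary data via TimeSeries.get over NDS2, then public GWOSC data via fetch_open_data) can be sketched standalone. This is a minimal sketch; the detector, channel, GPS times, and frame-file path are illustrative, not bilby_pipe defaults:

import gwpy.timeseries

def fetch_strain(det, channel, start, end, sampling_frequency=4096):
    """Minimal sketch of the tiered data lookup shown in the diff above."""
    data = None
    try:
        # 1) local frame file (the path is hypothetical)
        data = gwpy.timeseries.TimeSeries.read(
            "/data/{}.gwf".format(det), channel, start=start, end=end
        )
    except Exception:
        pass  # fall through to the next tier
    if data is None:
        try:
            # 2) proprietary data over NDS2
            data = gwpy.timeseries.TimeSeries.get(channel, start, end, verbose=False)
        except (RuntimeError, ImportError):
            pass  # fall through to open data
    if data is None:
        # 3) public data from GWOSC as a last resort
        data = gwpy.timeseries.TimeSeries.fetch_open_data(det, start, end)
    return data.resample(sampling_frequency)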
def _gwpy_read(self, det, channel, start_time, end_time):
def _gwpy_read(self, det, channel, start_time, end_time, dtype="float64"):
""" Wrapper function to gwpy.timeseries.TimeSeries.read()
Parameters
----------
det: str
The detector name corresponding to the key in data-dict
channel: str
The name of the channel to read, e.g. 'L1:GDS-CALIB_STRAIN'
start_time, end_time: float
GPS start and end time of required data
dtype: str or np.dtype
Data type requested
Returns
-------
data: TimeSeries
If successful, the data; otherwise None is returned
"""
logger.debug("data-dict provided, attempt read of data")
if det not in self.data_dict:
......@@ -558,21 +564,31 @@ class DataGenerationInput(Input):
if self.data_format is not None:
kwargs = dict(format=self.data_format)
logger.info(
"Calling TimeSeries.read('{}', '{}', start={}, end={}, format='{}')".format(
self.data_dict[det], channel, start_time, end_time, self.data_format
"Calling TimeSeries.read('{}', '{}', start={}, end={}, format='{}', dtype={})".format(
self.data_dict[det],
channel,
start_time,
end_time,
self.data_format,
dtype,
)
)
else:
kwargs = {}
logger.info(
"Calling TimeSeries.read('{}', '{}', start={}, end={})".format(
self.data_dict[det], channel, start_time, end_time
"Calling TimeSeries.read('{}', '{}', start={}, end={}, dtype={})".format(
self.data_dict[det], channel, start_time, end_time, dtype
)
)
try:
data = gwpy.timeseries.TimeSeries.read(
self.data_dict[det], channel, start=start_time, end=end_time, **kwargs
self.data_dict[det],
channel,
start=start_time,
end=end_time,
dtype=dtype,
**kwargs
)
if data.duration.value != self.duration:
logger.warning(
......@@ -587,16 +603,33 @@ class DataGenerationInput(Input):
logger.info("Reading of data failed with error {}".format(e))
return None
def _gwpy_get(self, det, channel, start_time, end_time):
def _gwpy_get(self, channel, start_time, end_time, dtype="float64"):
""" Wrapper function to gwpy.timeseries.TimeSeries.get()
Parameters
----------
channel: str
The name of the channel to read, e.g. 'L1:GDS-CALIB_STRAIN'
start_time, end_time: float
GPS start and end time of required data
dtype: str or np.dtype
Data type requested
Returns
-------
data: TimeSeries
If successful, the data; otherwise None is returned
"""
logger.debug("Attempt to locate data")
logger.info(
"Calling TimeSeries.get('{}', start={}, end={})".format(
channel, start_time, end_time
"Calling TimeSeries.get('{}', start={}, end={}, dtype={})".format(
channel, start_time, end_time, dtype
)
)
try:
data = gwpy.timeseries.TimeSeries.get(
channel, start_time, end_time, verbose=False
channel, start_time, end_time, verbose=False, dtype=dtype
)
return data
except RuntimeError as e:
......@@ -605,10 +638,31 @@ class DataGenerationInput(Input):
except ImportError:
logger.info("Unable to read data as NDS2 is not installed")
def _gwpy_fetch_open_data(self, det, channel, start_time, end_time):
def _gwpy_fetch_open_data(self, det, start_time, end_time):
""" Wrapper function to gwpy.timeseries.TimeSeries.fetch_open_data()
Parameters
----------
det: str
The detector name, e.g 'H1'
start_time, end_time: float
GPS start and end time of required data
Returns
-------
data: TimeSeries
If successful, the data; otherwise None is returned
"""
logger.info(
"Previous attempts to download data failed, trying with `fetch_open_data`"
)
logger.info(
"Calling TimeSeries.fetch_open_data('{}', start={}, end={})".format(
det, start_time, end_time
)
)
data = gwpy.timeseries.TimeSeries.fetch_open_data(det, start_time, end_time)
return data
......@@ -672,6 +726,10 @@ class DataGenerationInput(Input):
)
params = np.genfromtxt(self.roq_folder + "/params.dat", names=True)
params["flow"] *= self.roq_scale_factor
params["fhigh"] *= self.roq_scale_factor
params["seglen"] /= self.roq_scale_factor
if params["seglen"] != self.duration:
raise BilbyPipeError(
"Segment duration {} does not match ROQ basis seglen={}".format(
......@@ -679,10 +737,6 @@ class DataGenerationInput(Input):
)
)
params["flow"] *= self.roq_scale_factor
params["fhigh"] *= self.roq_scale_factor
params["seglen"] /= self.roq_scale_factor
freq_nodes_linear = np.load(self.roq_folder + "/fnodes_linear.npy")
freq_nodes_quadratic = np.load(self.roq_folder + "/fnodes_quadratic.npy")
freq_nodes_linear *= self.roq_scale_factor
......@@ -713,6 +767,8 @@ class DataGenerationInput(Input):
quadratic_matrix=basis_matrix_quadratic,
)
del basis_matrix_linear, basis_matrix_quadratic
if self.injection_parameters is not None:
likelihood.parameters.update(self.injection_parameters)
logger.info(
......
""" Tools for using gracedb events accessed through bilby_pipe_gracedb """
""" Tool for running online bilby PE using gracedb events
Much of the functionality of this utility assumes the user is running on the
CIT cluster, e.g. that the ROQ and calibration directories are in their usual places
"""
import argparse
import json
import os
import shutil
import numpy as np
import bilby
import bilby_pipe
from .utils import (
BilbyPipeError,
check_directory_exists_and_if_not_mkdir,
logger,
duration_lookups,
maximum_frequency_lookups,
DEFAULT_DISTANCE_LOOKUPS,
write_config_file,
run_command_line,
)
# Default channels set from: https://wiki.ligo.org/LSC/JRPComm/ObsRun3
DEFAULT_CHANNEL_DICT = dict(
H1="GDS-CALIB_STRAIN_CLEAN", L1="GDS-CALIB_STRAIN_CLEAN", V1="Hrec_hoft_16384Hz"
)
def x509userproxy(outdir):
""" Copies X509_USER_PROXY certificate from user's os.environ and
places it inside the outdir, if the X509_USER_PROXY exists.
......@@ -87,31 +99,169 @@ def read_from_gracedb(gracedb, gracedb_url, outdir):
return candidate
def read_from_coinc(coinc):
""" Read GraceDB events from json file with coinc contents
def read_from_json(json_file):
""" Read GraceDB events from json file
Parameters
----------
coinc: str
Filename of coinc json file output
json_file: str
Filename of the GraceDB json file
Returns
-------
candidate:
Contains contents of GraceDB event from coinc, json format
candidate: dict
Contents of the GraceDB event, loaded from the json file
"""
if os.path.isfile(json_file) is False:
raise FileNotFoundError("File {} not found".format(json_file))
try:
with open(coinc, "r") as file:
with open(json_file, "r") as file:
candidate = json.load(file)
except IOError:
print("Unable to load event contents of json file")
logger.warning("Unable to load event contents of json file")
return candidate
def create_config_file(candidate, gracedb, outdir):
def calibration_lookup(trigger_time, detector):
""" Lookup function for the relevant calibration file
Assumes that it is running on CIT where the calibration files are stored
under /home/cbc/pe/O3/calibrationenvelopes
Parameters
----------
trigger_time: float
The trigger time of interest
detector: str [H1, L1, V1]
Detector string
Returns
-------
filepath: str
The path to the relevant calibration envelope file. If no calibration
file can be determined, None is returned.
"""
base = "/home/cbc/pe/O3/calibrationenvelopes"
CALENVS_LOOKUP = dict(
H1=os.path.join(base, "LIGO_Hanford/H_CalEnvs.txt"),
L1=os.path.join(base, "LIGO_Livingston/L_CalEnvs.txt"),
V1=os.path.join(base, "Virgo/V_CalEnvs.txt"),
)
if os.path.isdir(base) is False:
raise BilbyPipeError("Unable to read from calibration folder {}".format(base))
calenv = CALENVS_LOOKUP[detector]
times = []
files = []
with open(calenv, "r") as f:
for line in f:
time, filename = line.rstrip("\n").split(" ")
times.append(float(time))
files.append(filename)
if trigger_time < times[0]:
raise BilbyPipeError(
"Requested trigger time prior to earliest calibration file"
)
for i, time in enumerate(times):
if trigger_time > time:
directory = os.path.dirname(calenv)
calib_file = "{}/{}".format(directory, files[i])
return os.path.abspath(calib_file)
def calibration_dict_lookup(trigger_time, detectors):
""" Dictionary lookup function for the relevant calibration files
Parameters
----------
trigger_time: float
The trigger time of interest
detectors: list
List of detector string
Returns
-------
calibration_model, calibration_dict: str, dict
Calibration model string and dictionary of paths to the relevant
calibration envelope file.
"""
try:
calibration_dict = {
det: calibration_lookup(trigger_time, det) for det in detectors
}
return "CubicSpline", calibration_dict
except BilbyPipeError:
return None, None
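Each line of a CalEnvs.txt lookup file pairs a GPS time with an envelope filename, e.g. "1126051217 2015_08_14_LHO.txt" (contents illustrative). A hedged usage sketch, meaningful only on CIT where the calibration folders exist:

# Hypothetical trigger time; the paths only resolve on the CIT cluster
model, calib_dict = calibration_dict_lookup(1187008882.43, ["H1", "L1", "V1"])
if model is not None:
    print(model)       # "CubicSpline"
    print(calib_dict)  # {"H1": "/home/cbc/pe/O3/calibrationenvelopes/...", ...}
else:
    print("calibration folder unavailable")  # (None, None) was returned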
def read_candidate(candidate):
""" Read a gracedb candidate json dictionary """
try:
chirp_mass = candidate["extra_attributes"]["CoincInspiral"]["mchirp"]
except KeyError:
raise BilbyPipeError(
"Unable to determine chirp mass for {} from GraceDB".format(
candidate["graceid"]
)
)
trigger_time = candidate["gpstime"]
singleinspiraltable = candidate["extra_attributes"]["SingleInspiral"]
ifos = [sngl["ifo"] for sngl in singleinspiraltable]
return chirp_mass, trigger_time, ifos
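A minimal, hypothetical candidate dictionary containing just the fields read_candidate uses:

candidate = {
    "graceid": "G000000",  # hypothetical event id
    "gpstime": 1187008882.43,
    "extra_attributes": {
        "CoincInspiral": {"mchirp": 1.1977},
        "SingleInspiral": [{"ifo": "H1"}, {"ifo": "L1"}, {"ifo": "V1"}],
    },
}
chirp_mass, trigger_time, ifos = read_candidate(candidate)
# -> (1.1977, 1187008882.43, ["H1", "L1", "V1"])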
def prior_lookup(duration, scale_factor, outdir):
""" Lookup the appropriate prior
Parameters
----------
duration: float
Inferred duration of the signal
scale_factor: float
outdir: str
Output directory
Returns
-------
prior_file, roq_folder: str
Path to the prior file to use (usually written to the outdir), and the
ROQ folder
minimum_frequency, maximum_frequency: int
The minimum and maximum frequency to use
"""
roq_folder = "/home/cbc/ROQ_data/IMRPhenomPv2/{}s".format(duration)
if os.path.isdir(roq_folder) is False:
logger.warning("Requested ROQ folder does not exist")
return "{}s".format(duration), None, 20, 1024
roq_params = np.genfromtxt(os.path.join(roq_folder, "params.dat"), names=True)
prior_file = generate_prior_from_template(
duration=duration,
roq_params=roq_params,
scale_factor=scale_factor,
outdir=outdir,
)
minimum_frequency = roq_params["flow"] * scale_factor
maximum_frequency = roq_params["fhigh"] * scale_factor
duration /= scale_factor
return prior_file, roq_folder, minimum_frequency, maximum_frequency
def create_config_file(candidate, gracedb, outdir, roq=True):
""" Creates ini file from defaults and candidate contents
Parameters
......@@ -121,7 +271,9 @@ def create_config_file(candidate, gracedb, outdir):
gracedb: str
GraceDB id of event
outdir: str
Output directory
Output directory where the ini file and all output is written
roq: bool
If True, use the default ROQ settings if required
Returns
-------
......@@ -130,54 +282,67 @@ def create_config_file(candidate, gracedb, outdir):
"""
try:
chirp_mass = candidate["extra_attributes"]["CoincInspiral"]["mchirp"]
except KeyError:
raise ValueError(
"Unable to determine chirp mass for {} from GraceDB".format(gracedb)
)
trigger_time = candidate["gpstime"]
singleinspiraltable = candidate["extra_attributes"]["SingleInspiral"]
chirp_mass, trigger_time, ifos = read_candidate(candidate)
ifos = [sngl["ifo"] for sngl in singleinspiraltable]
channels = [sngl["channel"] for sngl in singleinspiraltable]
ifo_channel = zip(ifos, channels)
channel_dict = {}
for ifo, channel in ifo_channel:
channel_dict[ifo] = channel
duration, scale_factor = determine_duration_and_scale_factor_from_parameters(
chirp_mass
)
distance_marginalization_lookup_table = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"data_files",
"{}s_distance_marginalization_lookup.npz".format(duration),
)
prior = determine_prior_file_from_parameters(chirp_mass)
prior_file, roq_folder, minimum_frequency, maximum_frequency = prior_lookup(
duration, scale_factor, outdir
)
calibration_model, calib_dict = calibration_dict_lookup(trigger_time, ifos)
config_dict = dict(
label=gracedb,
outdir=outdir,
accounting="ligo.dev.o3.cbc.pe.lalinference",
maximum_frequency=maximum_frequency_lookups[prior],
minimum_frequency=20,
sampling_frequency=maximum_frequency_lookups[prior] * 4,
maximum_frequency=maximum_frequency,
minimum_frequency=minimum_frequency,
sampling_frequency=16384,
reference_frequency=20,
trigger_time=trigger_time,
detectors="[H1, L1, V1]",
channel_dict=channel_dict,
detectors=ifos,
channel_dict=DEFAULT_CHANNEL_DICT,
deltaT=0.2,
prior_file=prior,
duration=duration_lookups[prior],
prior_file=prior_file,
duration=duration,
roq_scale_factor=scale_factor,
sampler="dynesty",
sampler_kwargs="{nlive: 1000, walks: 100, n_check_point: 5000}",
sampler_kwargs="{nlive: 1000, walks: 100, check_point_plot=True, n_check_point: 5000}",
create_plots=True,
local_generation=True,
local_plot=True,
transfer_files=False,
time_marginalization=True,
distance_marginalization=True,
phase_marginalization=True,
distance_marginalization_lookup_table=distance_marginalization_lookup_table,
n_parallel=4,
create_summary=True,
calibration_model=calibration_model,
spline_calibration_envelope_dict=calib_dict,
spline_calibration_nodes=5,
)
filename = "{}.ini".format(config_dict["label"])
write_config_file(config_dict, filename)
if roq and config_dict["duration"] > 4 and roq_folder is not None:
config_dict["likelihood-type"] = "ROQGravitationalWaveTransient"
config_dict["roq-folder"] = roq_folder
filename = "{}/{}.ini".format(outdir, config_dict["label"])
write_config_file(config_dict, filename, remove_none=True)
return filename
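Putting the pieces together, a hedged end-to-end sketch (the GraceDB id, json filename, and outdir are illustrative):

candidate = read_from_json("G000000.json")
filename = create_config_file(candidate, "G000000", "outdir_G000000", roq=True)
# -> "outdir_G000000/G000000.ini"; the ROQ likelihood settings are added only
#    when the duration exceeds 4s and a matching ROQ basis folder was found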
def determine_prior_file_from_parameters(chirp_mass):
""" Determine appropriate prior from chirp mass
def determine_duration_and_scale_factor_from_parameters(chirp_mass):
""" Determine appropriate duration and roq scale factor from chirp mass
Parameters
----------
......@@ -186,53 +351,111 @@ def determine_prior_file_from_parameters(chirp_mass):
Returns
-------
prior: str
A string repesentation of the appropriate prior to use
duration: int
roq_scale_factor: float
"""
if chirp_mass > 40:
prior = "high_mass"
roq_scale_factor = 1
if chirp_mass > 90:
duration = 4
roq_scale_factor = 4
elif chirp_mass > 35:
duration = 4
roq_scale_factor = 2
elif chirp_mass > 13.53:
prior = "4s"
duration = 4
elif chirp_mass > 8.73:
prior = "8s"
duration = 8
elif chirp_mass > 5.66:
prior = "16s"
duration = 16
elif chirp_mass > 3.68:
prior = "32s"
duration = 32
elif chirp_mass > 2.39:
prior = "64s"
duration = 64
elif chirp_mass > 1.43:
duration = 128
elif chirp_mass > 1.3:
duration = 128
roq_scale_factor = 1 / 1.6
else:
prior = "128s"
duration = 128
roq_scale_factor = 1 / 2
return duration, round(1 / roq_scale_factor, 1)
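Note that the returned scale factor is the reciprocal of the internal roq_scale_factor, rounded to one decimal place. Some illustrative values of the mapping:

for mc in (100, 40, 10, 1.2):
    print(mc, determine_duration_and_scale_factor_from_parameters(mc))
# 100 -> (4, 0.2)    4s basis scaled down by a factor of 4
# 40  -> (4, 0.5)    4s basis scaled down by a factor of 2
# 10  -> (8, 1.0)    unscaled 8s basis
# 1.2 -> (128, 2.0)  128s basis scaled up by a factor of 2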
def generate_prior_from_template(
duration, roq_params, scale_factor=1, outdir=".", template=None
):
""" Generate a prior file from a template and write it to file
return prior
Parameters:
duration: float
The segment duration
roq_params: dict
Dictionary of the ROQ params.dat file
scale_factor: float
Rescaling factor
outdir: str
Path to the outdir (the prior is written to outdir/online.prior)
template: str
Alternative template file to use, otherwise the
data_files/roq.prior.template file is used
"""
distance_bounds = DEFAULT_DISTANCE_LOOKUPS[str(duration) + "s"]
mc_min = roq_params["chirpmassmin"] / scale_factor
mc_max = roq_params["chirpmassmax"] / scale_factor
if template is None:
template = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "data_files/roq.prior.template"
)
with open(template, "r") as old_prior:
prior_string = old_prior.read().format(
mc_min=mc_min,
mc_max=mc_max,
d_min=distance_bounds[0],
d_max=distance_bounds[1],
)
prior_file = os.path.join(outdir, "online.prior")
with open(prior_file, "w") as new_prior:
new_prior.write(prior_string)
return prior_file
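A hypothetical call, assuming a params.dat with named columns including chirpmassmin and chirpmassmax; note that the doubled braces in the template shown earlier in this diff (e.g. $M_{{\\odot}}$) survive str.format as literal braces:

import numpy as np

roq_params = np.genfromtxt("params.dat", names=True)  # path assumed
prior_file = generate_prior_from_template(
    duration=128, roq_params=roq_params, scale_factor=1.6, outdir="outdir"
)
# writes outdir/online.prior, with mc_min/mc_max taken from the rescaled ROQ
# params and d_min/d_max from DEFAULT_DISTANCE_LOOKUPS["128s"]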
def main():
parser = argparse.ArgumentParser(prog="bilby_pipe gracedb access", usage="")
def create_parser():
parser = argparse.ArgumentParser(prog="bilby_pipe gracedb access", usage=__doc__)
group1 = parser.add_mutually_exclusive_group(required=True)
group1.add_argument("--gracedb", type=str, help="GraceDB event id")
group1.add_argument("--coinc", type=str, help="Path to coinc_file")
group1.add_argument("--json", type=str, help="Path to json gracedb file")
group2 = parser.add_mutually_exclusive_group(required=False)
group2.add_argument("--local", action="store_true", help="Run the job locally")
group2.add_argument("--submit", action="store_true", help="Submit the job")
parser.add_argument("--outdir", type=str, help="Output directory")
parser.add_argument(
"--outdir",
type=str,
help="Output directory where the ini file and all output is written",
)
parser.add_argument(
"--gracedb-url",
type=str,
help="GraceDB service url",
default="https://gracedb.ligo.org/api/",
)
return parser
def main(args=None):
args = parser.parse_args()
if args is None:
args = create_parser().parse_args()
if args.outdir:
outdir = args.outdir
if args.coinc:
coinc = args.coinc
candidate = read_from_coinc(coinc)
if args.json:
json = args.json
candidate = read_from_json(json)
gracedb = candidate["graceid"]
if args.outdir is None:
outdir = "outdir_{}".format(gracedb)
......
......@@ -37,7 +37,7 @@ class Input(object):
@known_detectors.setter
def known_detectors(self, known_detectors):
self._known_detectors = self._convert_detectors_input(known_detectors)
self._known_detectors = utils.convert_detectors_input(known_detectors)
@property
def detectors(self):
......@@ -46,26 +46,9 @@ class Input(object):
@detectors.setter
def detectors(self, detectors):
self._detectors = self._convert_detectors_input(detectors)
self._detectors = utils.convert_detectors_input(detectors)
self._check_detectors_against_known_detectors()
def _convert_detectors_input(self, detectors):
if isinstance(detectors, str):
det_list = self._split_string_by_space(detectors)
elif isinstance(detectors, list):
if len(detectors) == 1:
det_list = self._split_string_by_space(detectors[0])
else:
det_list = detectors
else:
raise BilbyPipeError(
"Input `detectors` = {} not understood".format(detectors)
)
det_list.sort()
det_list = [det.upper() for det in det_list]
return det_list
def _check_detectors_against_known_detectors(self):
for element in self.detectors:
if element not in self.known_detectors:
......@@ -485,7 +468,7 @@ class Input(object):
@property
def combined_default_prior_dicts(self):
d = bilby.core.prior.__dict__
d = bilby.core.prior.__dict__.copy()
d.update(bilby.gw.prior.__dict__)
return d
......
......@@ -4,17 +4,24 @@ bilby_pipe is a command line tool for taking user input (as command line
arguments or an ini file) and creating DAG files for submitting bilby parameter
estimation jobs.
"""
import itertools
from collections import namedtuple
import os
import sys
import shutil
import sys
import subprocess
import itertools
from collections import namedtuple
import pycondor
from .utils import logger, parse_args, BilbyPipeError, DataDump, ArgumentsString
from . import utils
from .utils import (
logger,
parse_args,
BilbyPipeError,
DataDump,
ArgumentsString,
get_command_line_arguments,
request_memory_generation_lookup,
)
from . import create_injections
from .input import Input
from .parser import create_parser
......@@ -59,8 +66,6 @@ class MainInput(Input):
logger.debug("Creating new Input object")
logger.info("Command line arguments: {}".format(args))
logger.debug("Known detector list = {}".format(self.known_detectors))
self.unknown_args = unknown_args
self.ini = args.ini
self.submit = args.submit
......@@ -68,7 +73,6 @@ class MainInput(Input):
self.singularity_image = args.singularity_image
self.outdir = args.outdir
self.label = args.label
self.queue = 1
self.create_summary = args.create_summary
self.accounting = args.accounting
self.sampler = args.sampler
......@@ -88,6 +92,7 @@ class MainInput(Input):
self.run_local = args.local
self.local_generation = args.local_generation
self.local_plot = args.local_plot
self.gps_file = args.gps_file
self.trigger_time = args.trigger_time
......@@ -114,7 +119,7 @@ class MainInput(Input):
@property
def initialdir(self):
return os.path.abspath(os.path.dirname(self.ini))
return os.getcwd()
@property
def singularity_image(self):
......@@ -239,7 +244,7 @@ class MainInput(Input):
def request_memory_generation(self, request_memory_generation):
if request_memory_generation is None:
roq = self.likelihood_type == "ROQGravitationalWaveTransient"
request_memory_generation = utils.request_memory_generation_lookup(
request_memory_generation = request_memory_generation_lookup(
self.duration, roq=roq
)
logger.info("request_memory_generation={}GB".format(request_memory_generation))
......@@ -503,7 +508,6 @@ class Dag(object):
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
extra_lines=extra_lines,
dag=self.dag,
arguments=arguments.print(),
......@@ -649,7 +653,6 @@ class Dag(object):
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
extra_lines=extra_lines,
dag=self.dag,
arguments=arguments.print(),
......@@ -693,7 +696,6 @@ class Dag(object):
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
dag=self.dag,
extra_lines=extra_lines,
arguments=self.inputs.postprocessing_arguments,
......@@ -753,7 +755,6 @@ class Dag(object):
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
dag=self.dag,
extra_lines=extra_lines,
arguments=arguments,
......@@ -768,6 +769,11 @@ class Dag(object):
def create_plot_jobs(self):
if self.inputs.local_plot:
universe = "local"
else:
universe = self.universe
if self.merged_runs:
files = [self.merged_runs_result_file]
parent_jobs = [self.merged_runs_job]
......@@ -796,13 +802,12 @@ class Dag(object):
name=job_name,
executable=shutil.which("bilby_pipe_plot"),
submit=self.inputs.submit_directory,
request_memory="32 GB",
request_memory="16 GB",
getenv=self.getenv,
universe=self.universe,
universe=universe,
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
dag=self.dag,
extra_lines=extra_lines,
arguments=arguments.print(),
......@@ -812,42 +817,6 @@ class Dag(object):
job.add_parent(parent_job)
logger.debug("Adding plot job")
@property
def summary_jobs_inputs(self):
""" Input for the summary jobs """
sampler = self.inputs.sampler
webdir = self.inputs.webdir
email = self.inputs.email
existing_dir = self.inputs.existing_dir
detectors_list = []
detectors_list.append(self.inputs.detectors)
if self.inputs.coherence_test:
for detector in self.inputs.detectors:
detectors_list.append([detector])
level_B_prod_list = self.inputs.sampler
level_A_jobs_numbers = range(self.inputs.n_level_A_jobs)
jobs_inputs = []
for idx in list(level_A_jobs_numbers):
for sampler in level_B_prod_list:
jobs_inputs.append(
JobInput(
idx=idx,
meta_label=self.inputs.level_A_labels[idx],
kwargs=dict(
detectors_list=detectors_list,
sampler=sampler,
webdir=webdir,
email=email,
existing_dir=existing_dir,
),
)
)
logger.debug("List of job inputs = {}".format(jobs_inputs))
return jobs_inputs
def create_summary_jobs(self):
""" Create a condor job for pesummary and add it to the dag """
logger.debug("Generating pesummary jobs")
......@@ -880,7 +849,9 @@ class Dag(object):
arguments.append(
"-a {}".format(" ".join([self.inputs.waveform_approximant] * len(files)))
)
arguments.append("--labels {}".format(" ".join(files)))
arguments.append(
"--labels {}".format(" ".join([os.path.basename(f) for f in files]))
)
if existing_dir is not None:
arguments.add("existing_webdir", existing_dir)
......@@ -896,7 +867,6 @@ class Dag(object):
initialdir=self.initialdir,
notification=self.notification,
requirements=self.requirements,
queue=self.inputs.queue,
extra_lines=extra_lines,
dag=self.dag,
arguments=arguments.print(),
......@@ -933,14 +903,10 @@ class Dag(object):
)
def create_main_parser():
return create_parser(top_level=True)
def main():
""" Top-level interface for bilby_pipe """
parser = create_main_parser()
args, unknown_args = parse_args(utils.get_command_line_arguments(), parser)
parser = create_parser(top_level=True)
args, unknown_args = parse_args(get_command_line_arguments(), parser)
inputs = MainInput(args, unknown_args)
......
......@@ -176,7 +176,7 @@ def create_parser(top_level=True):
det_parser.add(
"--psd-length",
default=32,
type=float,
type=int,
help=("Number of duration-lengths used to generate the PSD, default" " is 32."),
)
det_parser.add(
......@@ -266,6 +266,10 @@ def create_parser(top_level=True):
"data generation job locally."
),
)
submission_parser.add(
"--local-plot", action="store_true", help="Run the plot job locally"
)
submission_parser.add("--outdir", type=str, default=".", help="Output directory")
parser.add(
"--periodic-restart-time",
......
......@@ -39,15 +39,17 @@ def main():
label = result.label
result.plot_marginals(priors=True)
result.plot_calibration_posterior()
result.plot_corner()
logger.info("Generating source mass corner")
result.plot_corner(
["mass_1", "mass_2", "chirp_mass", "mass_ratio"],
["mass_1_source", "mass_2_source", "chirp_mass_source", "mass_ratio_source"],
filename="{}/{}_mass_corner.png".format(outdir, label),
)
logger.info("Generating distance sky time corner")
result.plot_corner(
["luminosity_distance", "theta_jn", "ra", "dec", "geocent_time"],
filename="{}/{}_distance-sky-time_corner.png".format(outdir, label),
)
logger.info("Generating waveform plots")
result.plot_waveform_posterior(interferometers=data_dump.interferometers)
result.plot_skymap(1000)
logger.info("Generating skymap")
result.plot_skymap(maxpts=2000)
......@@ -6,17 +6,19 @@ import glob
import json
import os
from bilby.core.result import read_in_result, make_pp_plot
from bilby.core.result import read_in_result, make_pp_plot, ResultList
import matplotlib.pyplot as plt
import matplotlib as mpl
import numpy as np
import tqdm
import matplotlib as mpl
from .utils import logger
mpl.rcParams.update(mpl.rcParamsDefault)
def main():
def create_parser():
parser = argparse.ArgumentParser(
prog="bilby_pipe PP test",
usage="Generates a pp plot from a directory containing a set of results",
......@@ -32,23 +34,34 @@ def main():
parser.add_argument(
"-n", type=int, help="Number of samples to truncate to", default=None
)
args, _ = parser.parse_known_args()
return parser
def get_results_filenames(args):
results_files = []
for extension in ["json", "h5", "hdf5"]:
glob_string = os.path.join(args.directory, "*result*" + extension)
results_files += glob.glob(glob_string)
results_files = [rf for rf in results_files if os.path.isfile(rf)]
if len(results_files) == 0:
raise ValueError("No results found in path {}".format(args.directory))
raise FileNotFoundError("No results found in path {}".format(args.directory))
if args.n is not None:
results_files = results_files[: args.n]
return results_files
def check_consistency(results):
results._check_consistent_sampler()
results._check_consistent_data()
results._check_consistent_parameters()
results._check_consistent_priors()
def read_in_result_list(args, results_filenames):
print("Reading in results ...")
results = []
for f in tqdm.tqdm(results_files):
for f in tqdm.tqdm(results_filenames):
try:
results.append(read_in_result(f))
except json.decoder.JSONDecodeError:
......@@ -70,32 +83,44 @@ def main():
print(
"List of result-labels: {}".format(sorted([res.label for res in results]))
)
return ResultList(results)
r0 = results[0]
sampler = r0.sampler
def get_basename(args):
if args.outdir is None:
args.outdir = args.directory
basename = "{}/{}".format(args.outdir, sampler)
basename = "{}/".format(args.outdir)
if args.label is not None:
basename += "_{}".format(args.label)
basename += "{}_".format(args.label)
return basename
def main(args=None):
if args is None:
args, _ = create_parser().parse_known_args()
results_filenames = get_results_filenames(args)
results = read_in_result_list(args, results_filenames)
check_consistency(results)
basename = get_basename(args)
print("Create the PP plot")
keys = r0.priors.keys()
print("Parameters = {}".format(keys))
make_pp_plot(results, filename="{}_pp.png".format(basename), keys=keys)
logger.info("Generating PP plot")
keys = results[0].priors.keys()
logger.info("Parameters = {}".format(keys))
make_pp_plot(results, filename="{}pp.png".format(basename), keys=keys)
print("Create sampling-time histogram")
logger.info("Create sampling-time histogram")
stimes = [r.sampling_time for r in results]
fig, ax = plt.subplots()
ax.hist(np.array(stimes) / 3600, bins=50)
ax.set_xlabel("Sampling time [hr]")
fig.tight_layout()
fig.savefig("{}_sampling_times.png".format(basename))
fig.savefig("{}sampling_times.png".format(basename))
print("Create optimal SNR plot")
logger.info("Create optimal SNR plot")
fig, ax = plt.subplots()
snrs = []
for det in ["H1", "L1"]:
detectors = list(results[0].meta_data["likelihood"]["interferometers"].keys())
for det in detectors:
snrs.append(
[
r.meta_data["likelihood"]["interferometers"][det]["optimal_SNR"]
......@@ -107,4 +132,4 @@ def main():
ax.hist(network_snr, bins=50, label=det)
ax.set_xlabel("Network optimal SNR")
fig.tight_layout()
fig.savefig("{}_optimal_SNR.png".format(basename))
fig.savefig("{}optimal_SNR.png".format(basename))
......@@ -7,8 +7,8 @@ import bilby
import bilby_pipe
from .utils import (
check_directory_exists_and_if_not_mkdir,
duration_lookups,
maximum_frequency_lookups,
DURATION_LOOKUPS,
MAXIMUM_FREQUENCY_LOOKUPS,
write_config_file,
run_command_line,
)
......@@ -75,7 +75,7 @@ def get_date_string():
def get_default_config_dict(args, review_name):
if args.duration is None:
args.duration = duration_lookups[args.prior]
args.duration = DURATION_LOOKUPS[args.prior]
base_label = "{}_{}".format(review_name, args.prior)
if args.roq:
......@@ -93,8 +93,8 @@ def get_default_config_dict(args, review_name):
sampler="dynesty",
sampler_kwargs="{nlive: 1000, walks: 100, n_check_point: 5000}",
create_plots=None,
sampling_frequency=4 * maximum_frequency_lookups[args.prior],
maximum_frequency=maximum_frequency_lookups[args.prior],
sampling_frequency=4 * MAXIMUM_FREQUENCY_LOOKUPS[args.prior],
maximum_frequency=MAXIMUM_FREQUENCY_LOOKUPS[args.prior],
time_marginalization=True,
distance_marginalization=True,
phase_marginalization=True,
......
......@@ -11,6 +11,7 @@ import ast
import urllib
import urllib.request
import subprocess
from pathlib import Path
class BilbyPipeError(Exception):
......@@ -86,7 +87,17 @@ class DataDump(object):
return res
duration_lookups = {
DEFAULT_DISTANCE_LOOKUPS = {
"high_mass": (1e2, 5e3),
"4s": (1e2, 5e3),
"8s": (1e2, 5e3),
"16s": (1e2, 4e3),
"32s": (1e2, 3e3),
"64s": (50, 2e3),
"128s": (1, 5e2),
}
DURATION_LOOKUPS = {
"high_mass": 4,
"4s": 4,
"8s": 8,
......@@ -97,7 +108,7 @@ duration_lookups = {
}
maximum_frequency_lookups = {
MAXIMUM_FREQUENCY_LOOKUPS = {
"high_mass": 1024,
"4s": 1024,
"8s": 2048,
......@@ -225,14 +236,13 @@ def setup_logger(outdir=None, label=None, log_level="INFO", print_version=False)
def get_version_information():
version_file = os.path.join(
os.path.dirname(os.path.dirname(__file__)), "bilby_pipe/.version"
)
version_file = Path(__file__).parent / ".version"
try:
with open(version_file, "r") as f:
return f.readline().rstrip()
except EnvironmentError:
except FileNotFoundError:
print("No version information file '.version' found")
return ""
def convert_string_to_dict(string, key):
......@@ -269,16 +279,23 @@ def convert_string_to_dict(string, key):
return dic
def write_config_file(config_dict, filename):
def write_config_file(config_dict, filename, remove_none=False):
""" Writes ini file
Parameters
----------
config_dict: dict
Dictionary of parameters for ini file
filename: str
Filename to write the config file to
remove_none: bool
If true, remove None's from the config_dict before writing otherwise
a ValueError is raised
"""
if remove_none:
config_dict = {key: val for key, val in config_dict.items() if val is not None}
if None in config_dict.values():
raise ValueError("config-dict is not complete")
with open(filename, "w+") as file:
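A hedged sketch of the new remove_none behaviour (keys and values illustrative; assumes the output directory exists):

config = dict(label="G000000", outdir="outdir", calibration_model=None)
write_config_file(config, "outdir/G000000.ini", remove_none=True)
# calibration_model is dropped before writing; with remove_none=False the
# same call would raise ValueError("config-dict is not complete")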
......@@ -289,7 +306,7 @@ def write_config_file(config_dict, filename):
def test_connection():
""" A generic test to see if the network is reachable """
try:
urllib.request.urlopen("https://google.com", timeout=0.1)
urllib.request.urlopen("https://google.com", timeout=1.0)
except urllib.error.URLError:
raise BilbyPipeError(
"It appears you are not connected to a network and so won't be "
......@@ -333,5 +350,41 @@ def request_memory_generation_lookup(duration, roq=False):
return 8
def convert_detectors_input(string):
""" Convert string inputs into a standard form for the detectors
Parameters
----------
string: str
A string representation to be converted
Returns
-------
detectors: list
A sorted list of detectors
"""
if string is None:
raise BilbyPipeError("No detector input")
if isinstance(string, list):
string = ",".join(string)
if isinstance(string, str) is False:
raise BilbyPipeError("Detector input {} not understood".format(string))
# Remove square brackets
string = string.replace("[", "").replace("]", "")
# Remove added quotes
string = strip_quotes(string)
# Replace multiple spaces with a single space
string = " ".join(string.split())
# Spaces can be either space or comma in input, convert to comma
string = string.replace(" ,", ",").replace(", ", ",").replace(" ", ",")
detectors = string.split(",")
detectors.sort()
detectors = [det.upper() for det in detectors]
return detectors
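All of the following (assumed) input styles normalise to the same sorted, upper-cased list:

for value in ("H1 L1", "[L1, H1]", "H1,L1", ["H1", "L1"]):
    assert convert_detectors_input(value) == ["H1", "L1"]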
setup_logger(print_version=True)
logger = logging.getLogger("bilby_pipe")
......@@ -36,9 +36,11 @@ RUN wget https://github.com/sylabs/singularity/releases/download/2.5.2/singulari
&& make install
# Install conda-installable programs
RUN conda install -y matplotlib numpy scipy pandas
RUN conda install -y matplotlib numpy scipy pandas astropy
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap bilby
RUN pip install pesummary # Temporary fix to get 0.17 installation
# Install requirements
RUN pip install --upgrade pip \
......@@ -46,11 +48,8 @@ RUN pip install --upgrade pip \
&& pip install future \
pycondor>=0.5 \
configargparse \
spython \
jinja2 \
flake8 \
urllib3 \
deepdish \
coverage \
pytest-cov \
coverage-badge
......@@ -36,9 +36,10 @@ RUN wget https://github.com/sylabs/singularity/releases/download/2.5.2/singulari
&& make install
# Install conda-installable programs
RUN conda install -y matplotlib numpy scipy pandas
RUN conda install -y matplotlib numpy scipy pandas astropy
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap bilby
RUN pip install pesummary # Temporary fix to get 0.17 installation
# Install requirements
RUN pip install --upgrade pip \
......@@ -46,11 +47,8 @@ RUN pip install --upgrade pip \
&& pip install future \
pycondor>=0.5 \
configargparse \
spython \
jinja2 \
flake8 \
urllib3 \
deepdish \
coverage \
pytest-cov \
coverage-badge
......@@ -33,9 +33,10 @@ RUN wget https://github.com/sylabs/singularity/releases/download/2.5.2/singulari
&& make install
# Install conda-installable programs
RUN conda install -y matplotlib numpy scipy pandas
RUN conda install -y matplotlib numpy scipy pandas astropy
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap
RUN conda install -c conda-forge ligo-gracedb gwpy lalsuite ligo.skymap bilby black
RUN pip install pesummary # Temporary fix to get 0.17 installation
# Install requirements
RUN pip install --upgrade pip \
......@@ -43,11 +44,8 @@ RUN pip install --upgrade pip \
&& pip install future \
pycondor>=0.5 \
configargparse \
spython \
jinja2 \
flake8 \
urllib3 \
deepdish \
coverage \
pytest-cov \
coverage-badge
......@@ -2,23 +2,35 @@
Installation
============
Installing bilby_pipe from conda (recommended)
----------------------------------------------
Installing bilby_pipe from release
----------------------------------
.. tabs::
(To be implemented)
.. tab:: conda
Installing bilby_pipe from pip
------------------------------
To install the latest :code:`bilby_pipe` release from `conda-forge
<https://anaconda.org/conda-forge/bilby_pipe>`_, run
To install the latest :code:`bilby_pipe` release from `PyPi
<https://pypi.org/project/bilby-pipe/>`_, run
.. code-block:: console
.. code-block:: console
$ conda install -c conda-forge bilby_pipe
Note, this is the recommended installation process as it ensures all
dependencies are met.
.. tab:: pypi
To install the latest :code:`bilby_pipe` release from `PyPi
<https://pypi.org/project/bilby-pipe/>`_, run
.. code-block:: console
$ pip install --upgrade bilby_pipe
$ pip install --upgrade bilby_pipe
Note that this released version usually lags the development version. If you
need to test the latest features, see below for how to obtain this.
WARNING: this is not the recommended installation process; some
dependencies (see below) are only installed automatically by the
conda installation method.
Install bilby_pipe for development
......@@ -93,7 +105,7 @@ Python installation
.. code-block:: console
$ source /cvmfs/ligo-containers.opensciencegrid.org/lscsoft/conda/latest/etc/profile.d/conda.sh
$ source /cvmfs/ligo-containers.opensciencegrid.org/lscsoft/conda/latest/etc/profile.d/conda.sh
$ conda activate ligo-py37
......@@ -167,7 +179,8 @@ When requesting data, we first look for local frame-files, then use the `NDS2
library to fetch proprietary data remotely, finally we search the open data.
To best utilise this tool, you should ensure your python installation has
access to `LDAS-tools <https://anaconda.org/conda-forge/ldas-tools-framecpp>`_
access to `LDAStools-frameCPP
<https://anaconda.org/conda-forge/python-ldas-tools-framecpp>`_
for local frame-file lookup and `NDS2
<https://anaconda.org/conda-forge/python-nds2-client>`_ for proprietary remote
data look up. These libraries are typically part of most LIGO data stacks and
......
#!/usr/bin/env python
import os
import subprocess
import sys
from pathlib import Path
from setuptools import setup
import subprocess
# check that python version is 3.5 or above
python_version = sys.version_info
print("Running Python version %s.%s.%s" % python_version[:3])
if python_version < (3, 5):
sys.exit("Python < 3.5 is not supported, aborting setup")
else:
print("Confirmed Python version 3.5.0 or above")
print("Confirmed Python version 3.5.0 or above")
def write_version_file(version):
......@@ -25,9 +26,11 @@ def write_version_file(version):
Returns
-------
version_file: str
A path to the version file
A path to the version file (relative to the bilby_pipe
package directory)
"""
version_file = Path("bilby_pipe") / ".version"
try:
git_log = subprocess.check_output(
["git", "log", "-1", "--pretty=%h %ai"]
......@@ -36,20 +39,27 @@ def write_version_file(version):
subprocess.check_output(["git", "diff", "."])
+ subprocess.check_output(["git", "diff", "--cached", "."])
).decode("utf-8")
if git_diff == "":
git_status = "(CLEAN) " + git_log
else:
git_status = "(UNCLEAN) " + git_log
except Exception as e:
print("Unable to obtain git version information, exception: {}".format(e))
git_status = ""
except subprocess.CalledProcessError as exc: # git calls failed
# we already have a version file, let's use it
if version_file.is_file():
return version_file.name
# otherwise error out
exc.args = (
"unable to obtain git version information, and {} doesn't "
"exist, cannot continue ({})".format(version_file, str(exc)),
)
raise
else:
git_version = "{}: ({}) {}".format(
version, "UNCLEAN" if git_diff else "CLEAN", git_log.rstrip()
)
print("parsed git version info as: {!r}".format(git_version))
version_file = ".version"
if os.path.isfile(version_file) is False:
with open("bilby_pipe/" + version_file, "w+") as f:
f.write("{}: {}".format(version, git_status))
with open(version_file, "w") as f:
print(git_version, file=f)
print("created {}".format(version_file))
return version_file
return version_file.name
def get_long_description():
......@@ -80,12 +90,14 @@ setup(
"pycondor>=0.5",
"configargparse",
"ligo-gracedb",
"bilby>=0.4.1",
"bilby>=0.5.1",
"scipy>=1.2.0",
"gwpy",
"matplotlib",
"numpy",
"tqdm",
"dynesty>=0.9.7",
"pesummary",
],
entry_points={
"console_scripts": [
......
......@@ -6,7 +6,7 @@ import bilby_pipe
class TestDagCommandLine(unittest.TestCase):
def setUp(self):
self.default_args = ["tests/test_dag_ini_file.ini"]
self.parser = bilby_pipe.main.create_main_parser()
self.parser = bilby_pipe.main.create_parser()
def tearDown(self):
pass
......