Commit e4647db5 authored by Gregory Ashton

Merge branch 'python36' into 'master'

Bump minimum Python version to 3.6

See merge request !353
parents 9a35107f 9fdea0df
Pipeline #150665 passed with stages
in 4 minutes and 34 seconds
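
(Context: f-strings were introduced in Python 3.6 by PEP 498, so raising the minimum supported version is what allows the str.format() calls in the diff below to be rewritten as f-strings.)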
@@ -24,3 +24,13 @@ repos:
     hooks:
       - id: isort # sort imports alphabetically and separates import into sections
         args: [-w=88, -m=3, -tc, -sp=setup.cfg ]
+  - repo: local
+    hooks:
+      - id: flynt
+        name: flynt
+        entry: flynt
+        args: [--fail-on-change]
+        types: [python]
+        language: python
+        additional_dependencies:
+          - flynt
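
The new hook runs flynt, a codemod that rewrites str.format()-style formatting into f-strings; --fail-on-change makes the hook fail whenever a file had to be rewritten, so unconverted format calls cannot be committed. A minimal sketch of the transformation it enforces (illustrative variable names, not from this repository):

    # before: str.format style, flagged by flynt --fail-on-change
    message = "File {} already exists".format(filename)
    # after flynt rewrites it
    message = f"File {filename} already exists"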
@@ -147,29 +147,25 @@ class BilbyArgParser(configargparse.ArgParser):
         comment=None,
     ):
         if os.path.isfile(filename) and not overwrite:
-            logger.warning(
-                "File {} already exists, not writing to file.".format(filename)
-            )
+            logger.warning(f"File {filename} already exists, not writing to file.")
         with open(filename, "w") as ff:
             __version__ = get_version_information()
             if include_description:
                 print(
-                    "## This file was written with bilby_pipe version {}\n".format(
-                        __version__
-                    ),
+                    f"## This file was written with bilby_pipe version {__version__}\n",
                     file=ff,
                 )
             if isinstance(comment, str):
                 print("#" + comment + "\n", file=ff)
             for group in self._action_groups[2:]:
                 print("#" * 80, file=ff)
-                print("## {}".format(group.title), file=ff)
+                print(f"## {group.title}", file=ff)
                 if include_description:
-                    print("# {}".format(group.description), file=ff)
+                    print(f"# {group.description}", file=ff)
                 print("#" * 80 + "\n", file=ff)
                 for action in group._group_actions:
                     if include_description:
-                        print("# {}".format(action.help), file=ff)
+                        print(f"# {action.help}", file=ff)
                     dest = action.dest
                     hyphen_dest = HyphenStr(dest)
                     if isinstance(args, dict):
@@ -200,7 +196,7 @@ class BilbyArgParser(configargparse.ArgParser):
                         comment = self.inline_comments.get(self.numbers[hyphen_dest], "")
                     else:
                         comment = ""
-                    print("{}={}{}".format(hyphen_dest, value, comment), file=ff)
+                    print(f"{hyphen_dest}={value}{comment}", file=ff)


 class BilbyConfigFileParser(configargparse.DefaultConfigFileParser):
@@ -247,9 +243,7 @@ class BilbyConfigFileParser(configargparse.DefaultConfigFileParser):
                     continue
                 raise configargparse.ConfigFileParserException(
-                    "Unexpected line {} in {}: {}".format(
-                        ii, getattr(stream, "name", "stream"), line
-                    )
+                    f"Unexpected line {ii} in {getattr(stream, 'name', 'stream')}: {line}"
                 )
         items = self.reconstruct_multiline_dictionary(items)
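
A general Python note on conversions like the one above: before Python 3.12 (PEP 701), the replacement fields inside an f-string cannot reuse the string's own quote character, which is why the double quotes in getattr(stream, "name", "stream") become single quotes once the call moves inside the f-string. A small illustration:

    # outside an f-string, double quotes are fine
    name = getattr(stream, "name", "stream")
    # inside an f-string delimited by double quotes, inner quotes must differ
    msg = f"Unexpected line in {getattr(stream, 'name', 'stream')}"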
@@ -265,7 +259,7 @@ class BilbyConfigFileParser(configargparse.DefaultConfigFileParser):
                 if val != "{":
                     sub_dict_vals.append(val.rstrip("{"))
                 while True:
-                    next_line = "{}: {}".format(keys[ii + sub_ii], vals[ii + sub_ii])
+                    next_line = f"{keys[ii + sub_ii]}: {vals[ii + sub_ii]}"
                     items.pop(keys[ii + sub_ii])
                     if "}" not in next_line:
                         if "{" in next_line:
@@ -196,7 +196,7 @@ class InjectionCreator(Input):
         if isinstance(n_injection, int) is False or n_injection < 1:
             raise BilbyPipeCreateInjectionsError(
-                "n_injection={}, but must be a positive integer".format(n_injection)
+                f"n_injection={n_injection}, but must be a positive integer"
             )
         self._n_injection = n_injection
@@ -230,9 +230,9 @@ class InjectionCreator(Input):
             dataframe.to_csv(path, index=False, header=True, sep=" ")
         else:
             raise BilbyPipeCreateInjectionsError(
-                "Extension {} not implemented".format(extension)
+                f"Extension {extension} not implemented"
             )
-        logger.info("Created injection file {}".format(path))
+        logger.info(f"Created injection file {path}")

     def generate_injection_file(self, filepath, extension):
         """Sets the generation seed and randomly generates parameters to create inj"""
@@ -251,7 +251,7 @@ def get_full_path(filename, extension):
     """Makes filename and ext consistent amongst user input"""
     ext_in_filename = os.path.splitext(filename)[1].lstrip(".")
     if ext_in_filename == "":
-        path = "{}.{}".format(filename, extension)
+        path = f"{filename}.{extension}"
     elif ext_in_filename == extension:
         path = filename
     else:
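
For reference, os.path.splitext splits on the final dot, so the three branches above handle: no extension in the filename (append one), a matching extension (keep the path as-is), and a conflicting extension. A quick sketch with hypothetical filenames:

    import os.path
    os.path.splitext("injections")[1].lstrip(".")       # "" -> path = f"{filename}.{extension}"
    os.path.splitext("injections.json")[1].lstrip(".")  # "json" -> compared against `extension`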
@@ -42,7 +42,7 @@ class DataAnalysisInput(Input):
     """

     def __init__(self, args, unknown_args, test=False):
-        logger.info("Command line arguments: {}".format(args))
+        logger.info(f"Command line arguments: {args}")

         # Generic initialisation
         self.meta_data = dict()
@@ -120,7 +120,7 @@ class DataAnalysisInput(Input):
             sampling_seed = np.random.randint(1, 1e6)
         self._sampling_seed = sampling_seed
         np.random.seed(sampling_seed)
-        logger.info("Sampling seed set to {}".format(sampling_seed))
+        logger.info(f"Sampling seed set to {sampling_seed}")

         if self.sampler == "cpnest":
             self.sampler_kwargs["seed"] = self.sampler_kwargs.get(
@@ -134,10 +134,10 @@ class DataAnalysisInput(Input):
             except AttributeError:
                 ifos = self.data_dump.interferometers
             names = [ifo.name for ifo in ifos]
-            logger.info("Found data for detectors = {}".format(names))
+            logger.info(f"Found data for detectors = {names}")
             ifos_to_use = [ifo for ifo in ifos if ifo.name in self.detectors]
             names_to_use = [ifo.name for ifo in ifos_to_use]
-            logger.info("Using data for detectors = {}".format(names_to_use))
+            logger.info(f"Using data for detectors = {names_to_use}")
             self._interferometers = bilby.gw.detector.InterferometerList(ifos_to_use)
             self.print_detector_information(self._interferometers)
         return self._interferometers
@@ -51,10 +51,8 @@ for ii, filename in enumerate(filenames):
         waveform_arguments=waveform_arguments,
     )
-    print("Generating lookup table for prior-file {}".format(filename))
-    dest = "{}_distance_marginalization_lookup.npz".format(
-        os.path.splitext(filename)[0]
-    )
+    print(f"Generating lookup table for prior-file {filename}")
+    dest = f"{os.path.splitext(filename)[0]}_distance_marginalization_lookup.npz"

     priors = bilby.gw.prior.BBHPriorDict(filename)
     if "plot" in sys.argv:
@@ -64,8 +64,8 @@ class DataGenerationInput(Input):

     def __init__(self, args, unknown_args, create_data=True):
-        logger.info("Command line arguments: {}".format(args))
-        logger.info("Unknown command line arguments: {}".format(unknown_args))
+        logger.info(f"Command line arguments: {args}")
+        logger.info(f"Unknown command line arguments: {unknown_args}")

         # Generic initialisation
         self.meta_data = dict(
@@ -109,9 +109,7 @@ class DataGenerationInput(Input):
         if args.timeslide_dict is not None:
             self.timeslide_dict = convert_string_to_dict(args.timeslide_dict)
-            logger.info(
-                "Read-in timeslide dict directly: {}".format(self.timeslide_dict)
-            )
+            logger.info(f"Read-in timeslide dict directly: {self.timeslide_dict}")
         elif args.timeslide_file is not None:
             self.gps_file = args.gps_file
             self.timeslide_file = args.timeslide_file
@@ -252,7 +250,7 @@ class DataGenerationInput(Input):
             generation_seed = generation_seed + self.idx
         self._generation_seed = generation_seed
         np.random.seed(generation_seed)
-        logger.info("Generation seed set to {}".format(generation_seed))
+        logger.info(f"Generation seed set to {generation_seed}")

     @property
     def injection_parameters(self):
@@ -282,7 +280,7 @@ class DataGenerationInput(Input):
             self.psd_duration = psd_length * self.duration
         else:
-            raise BilbyPipeError("Unable to set psd_length={}".format(psd_length))
+            raise BilbyPipeError(f"Unable to set psd_length={psd_length}")

     @property
     def psd_duration(self):
@@ -319,9 +317,7 @@ class DataGenerationInput(Input):
         elif self.trigger_time is not None:
             psd_start_time = -self.psd_duration
             logger.info(
-                "Using default PSD start time {} relative to start time".format(
-                    psd_start_time
-                )
+                f"Using default PSD start time {psd_start_time} relative to start time"
             )
             return psd_start_time
         else:
@@ -362,7 +358,7 @@ class DataGenerationInput(Input):
         elif isinstance(data_dict, dict):
             self._data_dict = data_dict
         else:
-            raise BilbyPipeError("Input data-dict={} not understood".format(data_dict))
+            raise BilbyPipeError(f"Input data-dict={data_dict} not understood")

     @property
     def channel_dict(self):
@@ -383,9 +379,7 @@ class DataGenerationInput(Input):
         if det in self.channel_dict:
             return self.channel_dict[det]
         else:
-            raise BilbyPipeError(
-                "Detector {} not given in the channel-dict".format(det)
-            )
+            raise BilbyPipeError(f"Detector {det} not given in the channel-dict")

     @property
     def sampling_frequency(self):
@@ -438,12 +432,12 @@ class DataGenerationInput(Input):
             "start_time",
             "duration",
         ]:
-            logger.info("{} = {}".format(prop, getattr(self, prop)))
+            logger.info(f"{prop} = {getattr(self, prop)}")

         self._set_interferometers_from_gaussian_noise()

         waveform_arguments = self.get_injection_waveform_arguments()
-        logger.info("Using waveform arguments: {}".format(waveform_arguments))
+        logger.info(f"Using waveform arguments: {waveform_arguments}")

         waveform_generator = self.waveform_generator_class(
             duration=self.duration,
             start_time=self.start_time,
@@ -510,7 +504,7 @@ class DataGenerationInput(Input):
             outdir = None
             label = None

-        logger.info("Injecting with {}".format(self.injection_waveform_approximant))
+        logger.info(f"Injecting with {self.injection_waveform_approximant}")
         (
             signal_and_data,
             meta_data,
@@ -558,7 +552,7 @@ class DataGenerationInput(Input):

     def _set_psd_from_file(self, ifo):
         psd_file = self.psd_dict[ifo.name]
-        logger.info("Setting {} PSD from file {}".format(ifo.name, psd_file))
+        logger.info(f"Setting {ifo.name} PSD from file {psd_file}")
         ifo.power_spectral_density = PowerSpectralDensity.from_power_spectral_density_file(
             psd_file=psd_file
         )
@@ -583,14 +577,14 @@ class DataGenerationInput(Input):
                 psd_data = None
                 self._set_psd_from_file(ifo)
             else:
-                logger.info("Setting PSD for {} from data".format(det))
+                logger.info(f"Setting PSD for {det} from data")
                 psd_data = self.__get_psd_data(det)
                 psd = self.__generate_psd(psd_data, roll_off)
                 ifo.power_spectral_density = PowerSpectralDensity(
                     frequency_array=psd.frequencies.value, psd_array=psd.value
                 )

-            logger.info("Getting analysis-segment data for {}".format(det))
+            logger.info(f"Getting analysis-segment data for {det}")
             data = self._get_data(
                 det, self.get_channel_type(det), self.start_time, end_time
             )
@@ -610,7 +604,7 @@ class DataGenerationInput(Input):
         # so here we calculate the actual start time
         actual_psd_start_time = self.start_time + self.psd_start_time
         actual_psd_end_time = actual_psd_start_time + self.psd_duration
-        logger.info("Getting psd-segment data for {}".format(det))
+        logger.info(f"Getting psd-segment data for {det}")
         psd_data = self._get_data(
             det, self.get_channel_type(det), actual_psd_start_time, actual_psd_end_time
         )
@@ -676,16 +670,14 @@ class DataGenerationInput(Input):
         if plot_psd:
             strain_spectogram_plot(
                 data=psd_strain_data,
-                extra_label="D{}".format(int(psd_time[1] - psd_time[0])),
+                extra_label=f"D{int(psd_time[1] - psd_time[0])}",
                 **plot_kwargs,
             )

             # plot psd_strain_data+strain_data and zoom into strain_data segment
             data_with_psd = psd_strain_data.append(strain_data, inplace=False)
             strain_spectogram_plot(
-                data=data_with_psd,
-                extra_label="D{}".format(int(time[1] - time[0])),
-                **plot_kwargs,
+                data=data_with_psd, extra_label=f"D{int(time[1] - time[0])}", **plot_kwargs
             )

     def _get_data(self, det, channel_type, start_time, end_time, resample=True):
@@ -739,7 +731,7 @@ class DataGenerationInput(Input):
         if data is None and channel_type == "GWOSC":
             data = self._gwpy_fetch_open_data(det, start_time, end_time)

-        channel = "{}:{}".format(det, channel_type)
+        channel = f"{det}:{channel_type}"
         if data is None and self.data_dict is not None:
             data = self._gwpy_read(det, channel, start_time, end_time)
         if data is None:
@@ -821,10 +813,7 @@ class DataGenerationInput(Input):
         # Create data quality flag
         channel_num = 1
         quality_flag = f"{det}:DMT-SCIENCE:{channel_num}"
-        logger.info(
-            "Checking data quality {} {}-{}"
-            "".format(quality_flag, start_time, end_time)
-        )
+        logger.info(f"Checking data quality {quality_flag} {start_time}-{end_time}")
         try:
             flag = gwpy.segments.DataQualityFlag.query(
                 quality_flag, gwpy.time.to_gps(start_time), gwpy.time.to_gps(end_time)
@@ -851,7 +840,7 @@ class DataGenerationInput(Input):
                 data_is_good = True
                 logger.info("Data quality check: PASSED.")
         except Exception as e:
-            logger.warning("Error in Data Quality Check: {}.".format(e))
+            logger.warning(f"Error in Data Quality Check: {e}.")
             data_is_good = None

         return data_is_good
@@ -880,7 +869,7 @@ class DataGenerationInput(Input):
         logger.debug("data-dict provided, attempt read of data")

         if det not in self.data_dict:
-            logger.info("Detector {} not found in data-dict".format(det))
+            logger.info(f"Detector {det} not found in data-dict")
             return None
         else:
             source = self.data_dict[det]
@@ -888,9 +877,9 @@ class DataGenerationInput(Input):

         # If the source contains a glob-path, e.g. *gwf, glob it first
         if "*" in source:
-            logger.info("Globbing {}".format(source))
+            logger.info(f"Globbing {source}")
             source = glob.glob(source)
-            logger.info("Setting source={}".format(source))
+            logger.info(f"Setting source={source}")

         if "gwf" in format_ext:
             kwargs = dict(
@@ -913,11 +902,9 @@ class DataGenerationInput(Input):
         kwargs_string = ""
         for key, val in kwargs.items():
             if isinstance(val, str):
-                val = "'{}'".format(val)
-            kwargs_string += "{}={}, ".format(key, val)
-        logger.info(
-            "Running: gwpy.timeseries.TimeSeries.read({})".format(kwargs_string)
-        )
+                val = f"'{val}'"
+            kwargs_string += f"{key}={val}, "
+        logger.info(f"Running: gwpy.timeseries.TimeSeries.read({kwargs_string})")
         data = gwpy.timeseries.TimeSeries.read(**kwargs)
         data = data.crop(start=start_time, end=end_time)
@@ -941,7 +928,7 @@ class DataGenerationInput(Input):
             return data
         except ValueError as e:
-            logger.info("Reading of data failed with error {}".format(e))
+            logger.info(f"Reading of data failed with error {e}")
             return None

     def _gwpy_get(self, channel, start_time, end_time, dtype="float64"):
@@ -970,7 +957,7 @@ class DataGenerationInput(Input):
         )
         if self.data_format:
             kwargs = dict(format=self.data_format)
-            logger.info("Extra kwargs passed to get(): {}".format(kwargs))
+            logger.info(f"Extra kwargs passed to get(): {kwargs}")
         else:
             kwargs = dict()
         try:
@@ -979,8 +966,8 @@ class DataGenerationInput(Input):
             )
             return data
         except RuntimeError as e:
-            logger.info("Unable to read data for channel {}".format(channel))
-            logger.debug("Error message {}".format(e))
+            logger.info(f"Unable to read data for channel {channel}")
+            logger.debug(f"Error message {e}")
         except ImportError:
             logger.info("Unable to read data as NDS2 is not installed")
         except TypeError:
@@ -1029,21 +1016,21 @@ class DataGenerationInput(Input):
     def add_calibration_model_to_interferometers(self, ifo):
         if self.calibration_model == "CubicSpline":
             ifo.calibration_model = bilby.gw.calibration.CubicSpline(
-                prefix="recalib_{}_".format(ifo.name),
+                prefix=f"recalib_{ifo.name}_",
                 minimum_frequency=ifo.minimum_frequency,
                 maximum_frequency=ifo.maximum_frequency,
                 n_points=self.spline_calibration_nodes,
             )
         else:
             raise BilbyPipeError(
-                "calibration model {} not implemented".format(self.calibration_model)
+                f"calibration model {self.calibration_model} not implemented"
             )

     @interferometers.setter
     def interferometers(self, interferometers):
         for ifo in interferometers:
             if isinstance(ifo, bilby.gw.detector.Interferometer) is False:
-                raise BilbyPipeError("ifo={} is not a bilby Interferometer".format(ifo))
+                raise BilbyPipeError(f"ifo={ifo} is not a bilby Interferometer")
             if self.minimum_frequency is not None:
                 ifo.minimum_frequency = self.minimum_frequency_dict[ifo.name]
             if self.maximum_frequency is not None:
@@ -126,7 +126,7 @@ def read_from_json(json_file):
     """
     if os.path.isfile(json_file) is False:
-        raise FileNotFoundError("File {} not found".format(json_file))
+        raise FileNotFoundError(f"File {json_file} not found")

     try:
         with open(json_file, "r") as file:
@@ -165,7 +165,7 @@ def calibration_lookup(trigger_time, detector):
     )
     if os.path.isdir(base) is False:
-        raise BilbyPipeError("Unable to read from calibration folder {}".format(base))
+        raise BilbyPipeError(f"Unable to read from calibration folder {base}")

     calenv = CALENVS_LOOKUP[detector]
     times = list()
@@ -185,7 +185,7 @@ def calibration_lookup(trigger_time, detector):
     for time in times:
         if trigger_time > time:
             directory = os.path.dirname(calenv)
-            calib_file = "{}/{}".format(directory, files[time])
+            calib_file = f"{directory}/{files[time]}"
     return os.path.abspath(calib_file)
@@ -230,9 +230,7 @@ def read_candidate(candidate):
 def _read_cbc_candidate(candidate):
     if "mchirp" not in candidate["extra_attributes"]["CoincInspiral"]:
         raise BilbyPipeError(
-            "Unable to determine chirp mass for {} from GraceDB".format(
-                candidate["graceid"]
-            )
+            f"Unable to determine chirp mass for {candidate['graceid']} from GraceDB"
         )
     chirp_mass = candidate["extra_attributes"]["CoincInspiral"]["mchirp"]
     superevent = candidate["superevent"]
@@ -270,10 +268,10 @@ def prior_lookup(duration, scale_factor, outdir, template=None):
     """

-    roq_folder = "/home/cbc/ROQ_data/IMRPhenomPv2/{}s".format(duration)
+    roq_folder = f"/home/cbc/ROQ_data/IMRPhenomPv2/{duration}s"
     if os.path.isdir(roq_folder) is False:
         logger.warning("Requested ROQ folder does not exist")
-        return "{}s".format(duration), None, duration, 20, 1024
+        return f"{duration}s", None, duration, 20, 1024

     roq_params = np.genfromtxt(os.path.join(roq_folder, "params.dat"), names=True)
@@ -345,7 +343,7 @@ def create_config_file(
         distance_marginalization_lookup_table = os.path.join(
             os.path.dirname(os.path.realpath(__file__)),
             "data_files",
-            "{}s_distance_marginalization_lookup.npz".format(duration),
+            f"{duration}s_distance_marginalization_lookup.npz",
         )

     if sampler_kwargs == "FastTest":
@@ -417,7 +415,7 @@ def create_config_file(
         )
     else:
         raise BilbyPipeError(
-            "search_type should be either 'cbc' or 'burst', not {}".format(search_type)
+            f"search_type should be either 'cbc' or 'burst', not {search_type}"
         )

     config_dict = dict(
@@ -460,7 +458,7 @@ def create_config_file(
         "# Configuration ini file generated from GraceDB "
         "for event id {} superevent id {}".format(gracedb, superevent)
     )
-    filename = "{}/bilby_config.ini".format(outdir)
+    filename = f"{outdir}/bilby_config.ini"
     _parser = parser.create_parser()
     _parser.write_to_file(
         filename=filename,
@@ -690,7 +688,7 @@ def main(args=None, unknown_args=None):
     if len(unknown_args) > 1 and args.output == "ini":
         msg = [
             tcolors.WARNING,
-            "Unrecognized arguments {}, these will be ignored".format(unknown_args),
+            f"Unrecognized arguments {unknown_args}, these will be ignored",
             tcolors.END,
         ]
         logger.warning(" ".join(msg))
@@ -702,13 +700,13 @@ def main(args=None, unknown_args=None):
         candidate = read_from_json(json)
         gracedb = candidate["graceid"]
         if outdir is None:
-            outdir = "outdir_{}".format(gracedb)
+            outdir = f"outdir_{gracedb}"
         check_directory_exists_and_if_not_mkdir(outdir)
     elif args.gracedb:
         gracedb = args.gracedb
         gracedb_url = args.gracedb_url
         if outdir is None:
-            outdir = "outdir_{}".format(gracedb)
+            outdir = f"outdir_{gracedb}"
         check_directory_exists_and_if_not_mkdir(outdir)
         candidate = read_from_gracedb(gracedb, gracedb_url, outdir)
     else:
@@ -730,7 +728,7 @@ def main(args=None, unknown_args=None):
         convert_to_flat_in_component_mass = False
     else:
         raise BilbyPipeError(
-            "Candidate pipeline {} not recognised.".format(candidate["pipeline"])
+            f"Candidate pipeline {candidate['pipeline']} not recognised."
         )

     filename = create_config_file(
@@ -32,7 +32,7 @@ class Input(object):

     @property
     def complete_ini_file(self):
-        return "{}/{}_config_complete.ini".format(self.outdir, self.label)
+        return f"{self.outdir}/{self.label}_config_complete.ini"

     @property
     def idx(self):
@@ -183,22 +183,20 @@ class Input(object):
         elif os.path.isfile(gps_file):
             self._gps_file = os.path.relpath(gps_file)
         else:
-            raise FileNotFoundError(
-                "Input file gps_file={} not understood".format(gps_file)
-            )
+            raise FileNotFoundError(f"Input file gps_file={gps_file} not understood")

         self._parse_gps_file()

     def _parse_gps_file(self):
         gpstimes = self.read_gps_file()
         n = len(gpstimes)
-        logger.info("{} start times found in gps_file={}".format(n, self.gps_file))
+        logger.info(f"{n} start times found in gps_file={self.gps_file}")

         self.gpstimes = gpstimes

     def read_gps_file(self):
         gpstimes = np.loadtxt(self.gps_file, ndmin=2, delimiter=",")
         if gpstimes.ndim > 1:
-            logger.info("Reading column 0 from gps_file={}".format(self.gps_file))
+            logger.info(f"Reading column 0 from gps_file={self.gps_file}")
             gpstimes = gpstimes[:, 0]
         return gpstimes
@@ -225,7 +223,7 @@ class Input(object):
             self._timeslide_file = os.path.relpath(timeslide_file)
         else:
             raise FileNotFoundError(
-                "Input file timeslide_file={} not understood".format(timeslide_file)
+                f"Input file timeslide_file={timeslide_file} not understood"
             )

         if hasattr(self, "_timeslide_file"):
@@ -270,9 +268,7 @@ class Input(object):
         for i in range(len(self.detectors)):
             self.timeslides.update({self.detectors[i]: times[i].flatten()})
         logger.info(
-            "{} timeslides found in timeslide_file={}".format(
-                number_rows, self.timeslide_file
-            )
+            f"{number_rows} timeslides found in timeslide_file={self.timeslide_file}"
         )

     def get_timeslide_dict(self, idx):
@@ -285,12 +281,12 @@ class Input(object):
             raise BilbyPipeError("Timeslide file must be provided.")
         if any(len(t) <= idx for t in self.timeslides.values()):
             raise BilbyPipeError(
-                "Timeslide index={} > number of timeslides available.".format(idx)
+                f"Timeslide index={idx} > number of timeslides available."
            )
         timeslide_val = {
             det: timeslide[idx] for det, timeslide in self.timeslides.items()
         }
-        logger.info("Timeslide value: {}".format(timeslide_val))
+        logger.info(f"Timeslide value: {timeslide_val}")
         return timeslide_val

     @property
@@ -302,7 +298,7 @@ class Input(object):