diff --git a/tupak/core/prior.py b/tupak/core/prior.py
index 02cab1ba53d97aaaab917df1e3e33b4443f6b4b4..2bb3534dc1e40f087e1b25015936580fea92de7c 100644
--- a/tupak/core/prior.py
+++ b/tupak/core/prior.py
@@ -6,9 +6,10 @@ from scipy.interpolate import interp1d
 from scipy.integrate import cumtrapz
 from scipy.special import erf, erfinv
 import scipy.stats
-import logging
 import os
 
+from tupak.core.utils import logger
+
 
 class PriorSet(dict):
     def __init__(self, dictionary=None, filename=None):
@@ -39,7 +40,7 @@ class PriorSet(dict):
         """
 
         prior_file = os.path.join(outdir, "{}_prior.txt".format(label))
-        logging.debug("Writing priors to {}".format(prior_file))
+        logger.debug("Writing priors to {}".format(prior_file))
         with open(prior_file, "w") as outfile:
             for key in self.keys():
                 outfile.write(
@@ -72,10 +73,10 @@ class PriorSet(dict):
                 continue
             elif isinstance(self[key], float) or isinstance(self[key], int):
                 self[key] = DeltaFunction(self[key])
-                logging.debug(
+                logger.debug(
                     "{} converted to delta function prior.".format(key))
             else:
-                logging.debug(
+                logger.debug(
                     "{} cannot be converted to delta function prior."
                     .format(key))
 
@@ -120,7 +121,7 @@ class PriorSet(dict):
                 default_prior = create_default_prior(missing_key, default_priors_file)
                 if default_prior is None:
                     set_val = likelihood.parameters[missing_key]
-                    logging.warning(
+                    logger.warning(
                         "Parameter {} has no default prior and is set to {}, this"
                         " will not be sampled and may cause an error."
                         .format(missing_key, set_val))
@@ -168,7 +169,7 @@ class PriorSet(dict):
             if isinstance(self[key], Prior):
                 samples[key] = self[key].sample(size=size)
             else:
-                logging.debug('{} not a known prior.'.format(key))
+                logger.debug('{} not a known prior.'.format(key))
         return samples
 
     def prob(self, sample):
@@ -241,7 +242,7 @@ def create_default_prior(name, default_priors_file=None):
     """
 
     if default_priors_file is None:
-        logging.debug(
+        logger.debug(
             "No prior file given.")
         prior = None
     else:
@@ -249,7 +250,7 @@ def create_default_prior(name, default_priors_file=None):
         if name in default_priors.keys():
             prior = default_priors[name]
         else:
-            logging.debug(
+            logger.debug(
                 "No default prior found for variable {}.".format(name))
             prior = None
     return prior
@@ -668,7 +669,7 @@ class LogUniform(PowerLaw):
         """
         PowerLaw.__init__(self, name=name, latex_label=latex_label, minimum=minimum, maximum=maximum, alpha=-1)
         if self.minimum <= 0:
-            logging.warning('You specified a uniform-in-log prior with minimum={}'.format(self.minimum))
+            logger.warning('You specified a uniform-in-log prior with minimum={}; this prior is ill-defined for minimum <= 0'.format(self.minimum))
 
 
 class Cosine(Prior):
@@ -989,7 +990,7 @@ class Interped(Prior):
 
     def __initialize_attributes(self):
         if np.trapz(self.yy, self.xx) != 1:
-            logging.debug('Supplied PDF for {} is not normalised, normalising.'.format(self.name))
+            logger.debug('Supplied PDF for {} is not normalised, normalising.'.format(self.name))
         self.yy /= np.trapz(self.yy, self.xx)
         self.YY = cumtrapz(self.yy, self.xx, initial=0)
         # Need last element of cumulative distribution to be exactly one.
@@ -1028,9 +1029,9 @@ class FromFile(Interped):
             xx, yy = np.genfromtxt(self.id).T
             Interped.__init__(self, xx=xx, yy=yy, minimum=minimum, maximum=maximum, name=name, latex_label=latex_label)
         except IOError:
-            logging.warning("Can't load {}.".format(self.id))
-            logging.warning("Format should be:")
-            logging.warning(r"x\tp(x)")
+            logger.warning("Can't load {}.".format(self.id))
+            logger.warning("Format should be:")
+            logger.warning(r"x\tp(x)")
 
     def __repr__(self):
         """Call to helper method in the super class."""
diff --git a/tupak/core/result.py b/tupak/core/result.py
index c941d6b1f997194aa87d9ce3a3d92ac61d6b1b50..aed6eea8c1151349cbb25dd23850932975bfba80 100644
--- a/tupak/core/result.py
+++ b/tupak/core/result.py
@@ -1,4 +1,3 @@
-import logging
 import os
 import numpy as np
 import deepdish
@@ -8,6 +7,7 @@ import matplotlib
 import matplotlib.pyplot as plt
 
 from tupak.core import utils
+from tupak.core.utils import logger
 
 
 def result_file_name(outdir, label):
@@ -163,16 +163,16 @@ class Result(dict):
         if os.path.isdir(self.outdir) is False:
             os.makedirs(self.outdir)
         if os.path.isfile(file_name):
-            logging.debug(
+            logger.debug(
                 'Renaming existing file {} to {}.old'.format(file_name,
                                                              file_name))
             os.rename(file_name, file_name + '.old')
 
-        logging.debug("Saving result to {}".format(file_name))
+        logger.debug("Saving result to {}".format(file_name))
         try:
             deepdish.io.save(file_name, dict(self))
         except Exception as e:
-            logging.error("\n\n Saving the data has failed with the "
+            logger.error("\n\n Saving the data has failed with the "
                           "following message:\n {} \n\n".format(e))
 
     def save_posterior_samples(self):
@@ -301,7 +301,7 @@ class Result(dict):
 
         if save:
             filename = '{}/{}_corner.png'.format(self.outdir, self.label)
-            logging.debug('Saving corner plot to {}'.format(filename))
+            logger.debug('Saving corner plot to {}'.format(filename))
             fig.savefig(filename, dpi=dpi)
 
         return fig
@@ -309,7 +309,7 @@ class Result(dict):
     def plot_walkers(self, save=True, **kwargs):
         """ Method to plot the trace of the walkers in an ensmble MCMC plot """
         if hasattr(self, 'walkers') is False:
-            logging.warning("Cannot plot_walkers as no walkers are saved")
+            logger.warning("Cannot plot_walkers as no walkers are saved")
             return
 
         if utils.command_line_args.test:
@@ -331,16 +331,16 @@ class Result(dict):
 
         fig.tight_layout()
         filename = '{}/{}_walkers.png'.format(self.outdir, self.label)
-        logging.debug('Saving walkers plot to {}'.format('filename'))
+        logger.debug('Saving walkers plot to {}'.format(filename))
         fig.savefig(filename)
 
     def plot_walks(self, save=True, **kwargs):
         """DEPRECATED"""
-        logging.warning("plot_walks deprecated")
+        logger.warning("plot_walks deprecated")
 
     def plot_distributions(self, save=True, **kwargs):
         """DEPRECATED"""
-        logging.warning("plot_distributions deprecated")
+        logger.warning("plot_distributions deprecated")
 
     def samples_to_posterior(self, likelihood=None, priors=None,
                              conversion_function=None):
@@ -396,7 +396,7 @@ class Result(dict):
         """
         A = getattr(self, name, False)
         B = getattr(other_object, name, False)
-        logging.debug('Checking {} value: {}=={}'.format(name, A, B))
+        logger.debug('Checking {} value: {}=={}'.format(name, A, B))
         if (A is not False) and (B is not False):
             typeA = type(A)
             typeB = type(B)
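
Aside from the rename, the save path in this file is a small save-with-backup pattern: rename any existing result to `<file>.old`, then write, logging failures instead of raising. A minimal sketch of the same logic, where `writer` is a hypothetical stand-in for `deepdish.io.save`:

```python
import os
import logging

logger = logging.getLogger('tupak')

def save_with_backup(file_name, writer):
    # Keep the previous result instead of silently overwriting it.
    if os.path.isfile(file_name):
        logger.debug('Renaming existing file {} to {}.old'.format(
            file_name, file_name))
        os.rename(file_name, file_name + '.old')
    logger.debug("Saving result to {}".format(file_name))
    try:
        writer(file_name)
    except Exception as e:
        logger.error("Saving the data has failed with the "
                     "following message:\n {}".format(e))
```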
diff --git a/tupak/core/sampler.py b/tupak/core/sampler.py
index 1885ead29537f8f5769954be1131aa1c2d46a57d..31e5aa3679d8c5005727fad4ce29d0a28574db37 100644
--- a/tupak/core/sampler.py
+++ b/tupak/core/sampler.py
@@ -1,7 +1,6 @@
 from __future__ import print_function, division, absolute_import
 
 import inspect
-import logging
 import os
 import sys
 import numpy as np
@@ -9,6 +8,7 @@ import datetime
 import deepdish
 import pandas as pd
 
+from tupak.core.utils import logger
 from tupak.core.result import Result, read_in_result
 from tupak.core.prior import Prior
 from tupak.core import utils
@@ -151,7 +151,7 @@ class Sampler(object):
         bad_keys = []
         for user_input in self.kwargs.keys():
             if user_input not in args:
-                logging.warning(
+                logger.warning(
                     "Supplied argument '{}' not an argument of '{}', removing."
                     .format(user_input, self.external_sampler_function))
                 bad_keys.append(user_input)
@@ -174,11 +174,11 @@ class Sampler(object):
                     self.priors[key].sample()
                 self.__fixed_parameter_keys.append(key)
 
-        logging.info("Search parameters:")
+        logger.info("Search parameters:")
         for key in self.__search_parameter_keys:
-            logging.info('  {} = {}'.format(key, self.priors[key]))
+            logger.info('  {} = {}'.format(key, self.priors[key]))
         for key in self.__fixed_parameter_keys:
-            logging.info('  {} = {}'.format(key, self.priors[key].peak))
+            logger.info('  {} = {}'.format(key, self.priors[key].peak))
 
     def _initialise_result(self):
         """
@@ -205,7 +205,7 @@ class Sampler(object):
             try:
                 self.likelihood.parameters[key] = self.priors[key].sample()
             except AttributeError as e:
-                logging.warning('Cannot sample from {}, {}'.format(key, e))
+                logger.warning('Cannot sample from {}, {}'.format(key, e))
 
     def _verify_parameters(self):
         """ Sets initial values for likelihood.parameters. Raises TypeError if likelihood can't be evaluated."""
@@ -214,7 +214,7 @@ class Sampler(object):
             t1 = datetime.datetime.now()
             self.likelihood.log_likelihood()
             self._sample_log_likelihood_eval = (datetime.datetime.now() - t1).total_seconds()
-            logging.info("Single likelihood evaluation took {:.3e} s".format(self._sample_log_likelihood_eval))
+            logger.info("Single likelihood evaluation took {:.3e} s".format(self._sample_log_likelihood_eval))
         except TypeError as e:
             raise TypeError(
                 "Likelihood evaluation failed with message: \n'{}'\n"
@@ -225,17 +225,17 @@ class Sampler(object):
         """Checks if use_ratio is set. Prints a warning if use_ratio is set but not properly implemented."""
         self._check_if_priors_can_be_sampled()
         if self.use_ratio is False:
-            logging.debug("use_ratio set to False")
+            logger.debug("use_ratio set to False")
             return
 
         ratio_is_nan = np.isnan(self.likelihood.log_likelihood_ratio())
 
         if self.use_ratio is True and ratio_is_nan:
-            logging.warning(
+            logger.warning(
                 "You have requested to use the loglikelihood_ratio, but it "
                 " returns a NaN")
         elif self.use_ratio is None and not ratio_is_nan:
-            logging.debug(
+            logger.debug(
                 "use_ratio not spec. but gives valid answer, setting True")
             self.use_ratio = True
 
@@ -304,9 +304,9 @@ class Sampler(object):
     def check_draw(self, draw):
         """ Checks if the draw will generate an infinite prior or likelihood """
         if np.isinf(self.log_likelihood(draw)):
-            logging.warning('Prior draw {} has inf likelihood'.format(draw))
+            logger.warning('Prior draw {} has inf likelihood'.format(draw))
         if np.isinf(self.log_prior(draw)):
-            logging.warning('Prior draw {} has inf prior'.format(draw))
+            logger.warning('Prior draw {} has inf prior'.format(draw))
 
     def _run_external_sampler(self):
         """A template method to run in subclasses"""
@@ -325,7 +325,7 @@ class Sampler(object):
         """ Check if the cached data file exists and can be used """
 
         if utils.command_line_args.clean:
-            logging.debug("Command line argument clean given, forcing rerun")
+            logger.debug("Command line argument clean given, forcing rerun")
             self.cached_result = None
             return
 
@@ -335,10 +335,10 @@ class Sampler(object):
             self.cached_result = None
 
         if utils.command_line_args.use_cached:
-            logging.debug("Command line argument cached given, no cache check performed")
+            logger.debug("Command line argument cached given, no cache check performed")
             return
 
-        logging.debug("Checking cached data")
+        logger.debug("Checking cached data")
         if self.cached_result:
             check_keys = ['search_parameter_keys', 'fixed_parameter_keys',
                           'kwargs']
@@ -346,7 +346,7 @@ class Sampler(object):
             for key in check_keys:
                 if self.cached_result.check_attribute_match_to_other_object(
                         key, self) is False:
-                    logging.debug("Cached value {} is unmatched".format(key))
+                    logger.debug("Cached value {} is unmatched".format(key))
                     use_cache = False
             if use_cache is False:
                 self.cached_result = None
@@ -364,7 +364,7 @@ class Sampler(object):
                 elif type(kwargs_print[k]) == pd.core.frame.DataFrame:
                     kwargs_print[k] = ('DataFrame, shape={}'
                                        .format(kwargs_print[k].shape))
-            logging.info("Using sampler {} with kwargs {}".format(
+            logger.info("Using sampler {} with kwargs {}".format(
                 self.__class__.__name__, kwargs_print))
 
 
@@ -512,7 +512,7 @@ class Dynesty(Sampler):
             if self.kwargs['resume']:
                 resume = self.read_saved_state(nested_sampler, continuing=True)
                 if resume:
-                    logging.info('Resuming from previous run.')
+                    logger.info('Resuming from previous run.')
 
             old_ncall = nested_sampler.ncall
             maxcall = self.kwargs['n_check_point']
@@ -701,7 +701,7 @@ class Dynesty(Sampler):
 
     def generate_trace_plots(self, dynesty_results):
         filename = '{}/{}_trace.png'.format(self.outdir, self.label)
-        logging.debug("Writing trace plot to {}".format(filename))
+        logger.debug("Writing trace plot to {}".format(filename))
         from dynesty import plotting as dyplot
         fig, axes = dyplot.traceplot(dynesty_results,
                                      labels=self.result.parameter_labels)
@@ -782,7 +782,7 @@ class Emcee(Sampler):
             a=a)
 
         if 'pos0' in self.kwargs:
-            logging.debug("Using given initial positions for walkers")
+            logger.debug("Using given initial positions for walkers")
             pos0 = self.kwargs['pos0']
             if type(pos0) == pd.core.frame.DataFrame:
                 pos0 = pos0[self.search_parameter_keys].values
@@ -792,11 +792,11 @@ class Emcee(Sampler):
             if pos0.shape != (self.nwalkers, self.ndim):
                 raise ValueError(
-                    'Input pos0 should be of shape ndim, nwalkers')
+                    'Input pos0 should be of shape nwalkers, ndim')
-            logging.debug("Checking input pos0")
+            logger.debug("Checking input pos0")
             for draw in pos0:
                 self.check_draw(draw)
         else:
-            logging.debug("Generating initial walker positions from prior")
+            logger.debug("Generating initial walker positions from prior")
             pos0 = [self.get_random_draw_from_prior()
                     for i in range(self.nwalkers)]
 
@@ -813,10 +813,10 @@ class Emcee(Sampler):
         self.result.log_evidence_err = np.nan
 
         try:
-            logging.info("Max autocorr time = {}".format(
+            logger.info("Max autocorr time = {}".format(
-                         np.max(sampler.get_autocorr_time())))
+                        np.max(sampler.get_autocorr_time())))
         except emcee.autocorr.AutocorrError as e:
-            logging.info("Unable to calculate autocorr time: {}".format(e))
+            logger.info("Unable to calculate autocorr time: {}".format(e))
         return self.result
 
     def lnpostfn(self, theta):
@@ -858,9 +858,9 @@ class Ptemcee(Emcee):
         self.result.log_evidence = np.nan
         self.result.log_evidence_err = np.nan
 
-        logging.info("Max autocorr time = {}"
+        logger.info("Max autocorr time = {}"
-                     .format(np.max(sampler.get_autocorr_time())))
+                    .format(np.max(sampler.get_autocorr_time())))
-        logging.info("Tswap frac = {}"
+        logger.info("Tswap frac = {}"
-                     .format(sampler.tswap_acceptance_fraction))
+                    .format(sampler.tswap_acceptance_fraction))
         return self.result
 
@@ -942,7 +942,7 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
                                 **kwargs)
 
         if sampler.cached_result:
-            logging.warning("Using cached result")
+            logger.warning("Using cached result")
             return sampler.cached_result
 
         start_time = datetime.datetime.now()
@@ -957,7 +957,7 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
 
         end_time = datetime.datetime.now()
         result.sampling_time = (end_time - start_time).total_seconds()
-        logging.info('Sampling time: {}'.format(end_time - start_time))
+        logger.info('Sampling time: {}'.format(end_time - start_time))
 
         if sampler.use_ratio:
             result.log_noise_evidence = likelihood.noise_log_likelihood()
@@ -979,8 +979,8 @@ def run_sampler(likelihood, priors=None, label='label', outdir='outdir',
         result.save_to_file()
         if plot:
             result.plot_corner()
-        logging.info("Sampling finished, results saved to {}/".format(outdir))
-        logging.info("Summary of results:\n{}".format(result))
+        logger.info("Sampling finished, results saved to {}/".format(outdir))
+        logger.info("Summary of results:\n{}".format(result))
         return result
     else:
         raise ValueError(
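
One technique worth calling out in this file: `check_draw` validates prior draws before sampling starts, warning when a draw yields an infinite likelihood or prior (a common cause of stuck walkers). A compact restatement with hypothetical stand-ins for the Sampler's `log_likelihood` and `log_prior` methods:

```python
import logging
import numpy as np

logger = logging.getLogger('tupak')

def check_draws(draws, log_likelihood, log_prior):
    # Mirror Sampler.check_draw above: flag draws a sampler cannot start from.
    for draw in draws:
        if np.isinf(log_likelihood(draw)):
            logger.warning('Prior draw {} has inf likelihood'.format(draw))
        if np.isinf(log_prior(draw)):
            logger.warning('Prior draw {} has inf prior'.format(draw))
```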
diff --git a/tupak/core/utils.py b/tupak/core/utils.py
index 74704d931327e7fc5a95b14cd20e5d8376d46168..711e50383da36a9f99787001dce1e65612be6745 100644
--- a/tupak/core/utils.py
+++ b/tupak/core/utils.py
@@ -6,6 +6,8 @@ from math import fmod
 import argparse
 import traceback
 
+logger = logging.getLogger('tupak')
+
 # Constants
 
 speed_of_light = 299792458.0  # speed of light in m/s
@@ -308,13 +310,14 @@ def setup_logger(outdir=None, label=None, log_level=None, print_version=False):
     else:
         LEVEL = int(log_level)
 
-    logger = logging.getLogger()
+    logger = logging.getLogger('tupak')
+    logger.propagate = False
     logger.setLevel(LEVEL)
 
     if any([type(h) == logging.StreamHandler for h in logger.handlers]) is False:
         stream_handler = logging.StreamHandler()
         stream_handler.setFormatter(logging.Formatter(
-            '%(asctime)s %(levelname)-8s: %(message)s', datefmt='%H:%M'))
+            '%(asctime)s %(name)s %(levelname)-8s: %(message)s', datefmt='%H:%M'))
         stream_handler.setLevel(LEVEL)
         logger.addHandler(stream_handler)
 
@@ -339,7 +342,9 @@ def setup_logger(outdir=None, label=None, log_level=None, print_version=False):
         os.path.dirname(os.path.dirname(__file__)), '.version')
     with open(version_file, 'r') as f:
         version = f.readline().rstrip()
-    logging.info('Running tupak version: {}'.format(version))
+
+    if print_version:
+        logger.info('Running tupak version: {}'.format(version))
 
 
 def get_progress_bar(module='tqdm'):
@@ -393,9 +398,9 @@ def check_directory_exists_and_if_not_mkdir(directory):
     """
     if not os.path.exists(directory):
         os.makedirs(directory)
-        logging.debug('Making directory {}'.format(directory))
+        logger.debug('Making directory {}'.format(directory))
     else:
-        logging.debug('Directory {} exists'.format(directory))
+        logger.debug('Directory {} exists'.format(directory))
 
 
 def set_up_command_line_arguments():
@@ -441,16 +446,16 @@ command_line_args = set_up_command_line_arguments()
 setup_logger(print_version=True)
 
 if 'DISPLAY' in os.environ:
-    logging.debug("DISPLAY={} environment found".format(os.environ['DISPLAY']))
+    logger.debug("DISPLAY={} environment found".format(os.environ['DISPLAY']))
     pass
 else:
-    logging.debug('No $DISPLAY environment variable found, so importing \
-                   matplotlib.pyplot with non-interactive "Agg" backend.')
+    logger.debug('No $DISPLAY environment variable found, so importing '
+                 'matplotlib.pyplot with non-interactive "Agg" backend.')
     import matplotlib
     non_gui_backends = matplotlib.rcsetup.non_interactive_bk
     for backend in non_gui_backends:
         try:
-            logging.debug("Trying backend {}".format(backend))
+            logger.debug("Trying backend {}".format(backend))
             matplotlib.use(backend, warn=False)
             matplotlib.pyplot.switch_backend(backend)
             break
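
The hunk above is the heart of the change: `setup_logger` now configures a named `'tupak'` logger, sets `propagate = False` so records are not duplicated through the root logger, attaches a `StreamHandler` only if one is not already present, and prints the version banner only when `print_version` is set. A condensed, self-contained sketch of that pattern (not the tupak API itself):

```python
import logging

def setup_named_logger(name='tupak', level=logging.INFO):
    logger = logging.getLogger(name)
    logger.propagate = False  # keep records out of the root logger
    logger.setLevel(level)
    # Attach a console handler only once, mirroring the `any([...])` guard
    # above, so repeated setup calls do not multiply console output.
    if not any(type(h) == logging.StreamHandler for h in logger.handlers):
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(name)s %(levelname)-8s: %(message)s',
            datefmt='%H:%M'))
        handler.setLevel(level)
        logger.addHandler(handler)
    return logger
```

Because the guard compares `type(h)` exactly, a `FileHandler` (a `StreamHandler` subclass) attached later does not trip it, so file logging can coexist with the console handler.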
diff --git a/tupak/gw/conversion.py b/tupak/gw/conversion.py
index 39fcaec6cf5bc49044b031f6898c9b01307d32af..dfae235d1d5ed2bda0d5b5e806ec752192a3ac6d 100644
--- a/tupak/gw/conversion.py
+++ b/tupak/gw/conversion.py
@@ -2,14 +2,15 @@ from __future__ import division
 import tupak
 import numpy as np
 import pandas as pd
-import logging
 from astropy.cosmology import z_at_value, Planck15
 import astropy.units as u
 
+from tupak.core.utils import logger
+
 try:
     import lalsimulation as lalsim
 except ImportError:
-    logging.warning("You do not have lalsuite installed currently. You will "
+    logger.warning("You do not have lalsuite installed currently. You will "
                     " not be able to use some of the prebuilt functions.")
 
 
@@ -468,7 +469,7 @@ def generate_component_spins(sample):
         output_sample['phi_2'] = np.arctan(output_sample['spin_2y'] / output_sample['spin_2x'])
 
     elif all(key in output_sample.keys() for key in spin_conversion_parameters) and isinstance(output_sample, pd.DataFrame):
-        logging.debug('Extracting component spins.')
+        logger.debug('Extracting component spins.')
         new_spin_parameters = ['spin_1x', 'spin_1y', 'spin_1z', 'spin_2x', 'spin_2y', 'spin_2z']
         new_spins = {name: np.zeros(len(output_sample)) for name in new_spin_parameters}
 
@@ -488,7 +489,7 @@ def generate_component_spins(sample):
         output_sample['phi_2'] = np.arctan(output_sample['spin_2y'] / output_sample['spin_2x'])
 
     else:
-        logging.warning("Component spin extraction failed.")
+        logger.warning("Component spin extraction failed.")
 
     return output_sample
 
@@ -519,7 +520,7 @@ def compute_snrs(sample, likelihood):
                 sample['{}_optimal_snr'.format(interferometer.name)] = tupak.gw.utils.optimal_snr_squared(
                     signal, interferometer, likelihood.waveform_generator.duration) ** 0.5
         else:
-            logging.info('Computing SNRs for every sample, this may take some time.')
+            logger.info('Computing SNRs for every sample, this may take some time.')
             all_interferometers = likelihood.interferometers
             matched_filter_snrs = {interferometer.name: [] for interferometer in all_interferometers}
             optimal_snrs = {interferometer.name: [] for interferometer in all_interferometers}
@@ -545,4 +546,4 @@ def compute_snrs(sample, likelihood):
             print([interferometer.name for interferometer in likelihood.interferometers])
 
     else:
-        logging.debug('Not computing SNRs.')
+        logger.debug('Not computing SNRs.')
diff --git a/tupak/gw/detector.py b/tupak/gw/detector.py
index 947b5d27738e4cbe64d1a4cd066243d0baca83d5..faed80f40677f23aa4f3e278cdab8e2c2b56897a 100644
--- a/tupak/gw/detector.py
+++ b/tupak/gw/detector.py
@@ -1,6 +1,5 @@
 from __future__ import division, print_function, absolute_import
 
-import logging
 import os
 
 import matplotlib.pyplot as plt
@@ -10,12 +9,13 @@ from scipy.interpolate import interp1d
 
 import tupak.gw.utils
 from tupak.core import utils
+from tupak.core.utils import logger
 
 try:
     import gwpy
 except ImportError:
-    logging.warning("You do not have gwpy installed currently. You will "
-                    " not be able to use some of the prebuilt functions.")
+    logger.warning("You do not have gwpy installed currently. You will "
+                   "not be able to use some of the prebuilt functions.")
 
 
 class InterferometerSet(list):
@@ -173,10 +173,10 @@ class InterferometerStrainData(object):
 
         """
         if time < self.start_time:
-            logging.debug("Time is before the start_time")
+            logger.debug("Time is before the start_time")
             return False
         elif time > self.start_time + self.duration:
-            logging.debug("Time is after the start_time + duration")
+            logger.debug("Time is after the start_time + duration")
             return False
         else:
             return True
@@ -233,8 +233,8 @@ class InterferometerStrainData(object):
         if self._frequency_domain_strain is not None:
             return self._frequency_domain_strain * self.frequency_mask
         elif self._time_domain_strain is not None:
-            logging.info("Generating frequency domain strain from given time "
-                         "domain strain.")
+            logger.info("Generating frequency domain strain from given time "
+                        "domain strain.")
             self.low_pass_filter()
             self.apply_tukey_window()
             frequency_domain_strain, _ = utils.nfft(
@@ -251,18 +251,18 @@ class InterferometerStrainData(object):
         """ Low pass filter the data """
 
         if filter_freq is None:
-            logging.debug(
+            logger.debug(
                 "Setting low pass filter_freq using given maximum frequency")
             filter_freq = self.maximum_frequency
 
         if 2 * filter_freq >= self.sampling_frequency:
-            logging.info(
+            logger.info(
                 "Low pass filter frequency of {}Hz requested, this is equal"
                 " or greater than the Nyquist frequency so no filter applied"
                 .format(filter_freq))
             return
 
-        logging.debug("Applying low pass filter with filter frequency {}"
+        logger.debug("Applying low pass filter with filter frequency {}"
-                      .format(filter_freq))
+                     .format(filter_freq))
         bp = gwpy.signal.filter_design.lowpass(
             filter_freq, self.sampling_frequency)
@@ -274,11 +274,11 @@ class InterferometerStrainData(object):
     def get_tukey_window(self, N, duration):
         alpha = 2 * self.roll_off / duration
         window = scipy.signal.windows.tukey(N, alpha=alpha)
-        logging.debug("Generated Tukey window with alpha = {}".format(alpha))
+        logger.debug("Generated Tukey window with alpha = {}".format(alpha))
         return window
 
     def apply_tukey_window(self):
-        logging.debug("Applying Tukey window with roll_off {}"
+        logger.debug("Applying Tukey window with roll_off {}"
-                      .format(self.roll_off))
+                     .format(self.roll_off))
         N = len(self.time_domain_strain)
         window = self.get_tukey_window(N, duration=self.duration)
@@ -382,7 +382,7 @@ class InterferometerStrainData(object):
             sampling_frequency=sampling_frequency, duration=duration,
             time_array=time_array)
 
-        logging.debug('Setting data using provided time_domain_strain')
+        logger.debug('Setting data using provided time_domain_strain')
         if np.shape(time_domain_strain) == np.shape(self.time_array):
             self._time_domain_strain = time_domain_strain
         else:
@@ -401,7 +401,7 @@ class InterferometerStrainData(object):
         timeseries: gwpy.timeseries.timeseries.TimeSeries
 
         """
-        logging.debug('Setting data using provided gwpy TimeSeries object')
+        logger.debug('Setting data using provided gwpy TimeSeries object')
         if type(timeseries) != gwpy.timeseries.timeseries.TimeSeries:
             raise ValueError("Input timeseries is not a gwpy TimeSeries")
         self.start_time = timeseries.epoch.value
@@ -508,7 +508,7 @@ class InterferometerStrainData(object):
             sampling_frequency=sampling_frequency, duration=duration,
             frequency_array=frequency_array)
 
-        logging.debug('Setting data using provided frequency_domain_strain')
+        logger.debug('Setting data using provided frequency_domain_strain')
         if np.shape(frequency_domain_strain) == np.shape(self.frequency_array):
             self._frequency_domain_strain = frequency_domain_strain
         else:
@@ -536,7 +536,7 @@ class InterferometerStrainData(object):
         self.duration = duration
         self.start_time = start_time
 
-        logging.debug(
+        logger.debug(
-            'Setting data using noise realization from provided'
-            'power_spectal_density')
+            'Setting data using noise realization from provided '
+            'power_spectral_density')
         frequency_domain_strain, frequency_array = \
@@ -566,7 +566,7 @@ class InterferometerStrainData(object):
         self.duration = duration
         self.start_time = start_time
 
-        logging.debug('Setting zero noise data')
+        logger.debug('Setting zero noise data')
         self._frequency_domain_strain = np.zeros_like(self.frequency_array,
                                                       dtype=np.complex)
 
@@ -597,7 +597,7 @@ class InterferometerStrainData(object):
         self.duration = duration
         self.start_time = start_time
 
-        logging.info('Reading data from frame')
+        logger.info('Reading data from frame')
         strain = tupak.gw.utils.read_frame_file(
             frame_file, t1=start_time, t2=start_time+duration,
             buffer_time=buffer_time, channel=channel_name,
@@ -1091,7 +1091,7 @@ class Interferometer(object):
                 ' None. This can be caused if, e.g., mass_2 > mass_1.')
 
         if not self.strain_data.time_within_data(parameters['geocent_time']):
-            logging.warning(
+            logger.warning(
                 'Injecting signal outside segment, start_time={}, merger time={}.'
                 .format(self.strain_data.start_time, parameters['geocent_time']))
 
@@ -1099,7 +1099,7 @@ class Interferometer(object):
         if np.shape(self.frequency_domain_strain).__eq__(np.shape(signal_ifo)):
             self.strain_data.add_to_frequency_domain_strain(signal_ifo)
         else:
-            logging.info('Injecting into zero noise.')
+            logger.info('Injecting into zero noise.')
             self.set_strain_data_from_frequency_domain_strain(
                 signal_ifo,
                 sampling_frequency=self.strain_data.sampling_frequency,
@@ -1112,11 +1112,11 @@ class Interferometer(object):
             signal=signal_ifo, interferometer=self,
             duration=self.strain_data.duration).real)
 
-        logging.info("Injected signal in {}:".format(self.name))
-        logging.info("  optimal SNR = {:.2f}".format(opt_snr))
-        logging.info("  matched filter SNR = {:.2f}".format(mf_snr))
+        logger.info("Injected signal in {}:".format(self.name))
+        logger.info("  optimal SNR = {:.2f}".format(opt_snr))
+        logger.info("  matched filter SNR = {:.2f}".format(mf_snr))
         for key in parameters:
-            logging.info('  {} = {}'.format(key, parameters[key]))
+            logger.info('  {} = {}'.format(key, parameters[key]))
 
         return injection_polarizations
 
@@ -1343,7 +1343,7 @@ class PowerSpectralDensity(object):
                 m = getattr(self, 'set_from_{}'.format(expanded_key))
                 m(**kwargs)
             except AttributeError:
-                logging.info("Tried setting PSD from init kwarg {} and failed"
+                logger.info("Tried setting PSD from init kwarg {} and failed"
-                             .format(key))
+                            .format(key))
 
     def set_from_amplitude_spectral_density_file(self, asd_file):
@@ -1359,11 +1359,11 @@ class PowerSpectralDensity(object):
         self.amplitude_spectral_density_file = asd_file
         self.import_amplitude_spectral_density()
         if min(self.amplitude_spectral_density) < 1e-30:
-            logging.warning("You specified an amplitude spectral density file.")
-            logging.warning("{} WARNING {}".format("*" * 30, "*" * 30))
-            logging.warning("The minimum of the provided curve is {:.2e}.".format(
+            logger.warning("You specified an amplitude spectral density file.")
+            logger.warning("{} WARNING {}".format("*" * 30, "*" * 30))
+            logger.warning("The minimum of the provided curve is {:.2e}.".format(
                 min(self.amplitude_spectral_density)))
-            logging.warning(
+            logger.warning(
                 "You may have intended to provide this as a power spectral density.")
 
     def set_from_power_spectral_density_file(self, psd_file):
@@ -1379,11 +1379,11 @@ class PowerSpectralDensity(object):
         self.power_spectral_density_file = psd_file
         self.import_power_spectral_density()
         if min(self.power_spectral_density) > 1e-30:
-            logging.warning("You specified a power spectral density file.")
-            logging.warning("{} WARNING {}".format("*" * 30, "*" * 30))
-            logging.warning("The minimum of the provided curve is {:.2e}.".format(
+            logger.warning("You specified a power spectral density file.")
+            logger.warning("{} WARNING {}".format("*" * 30, "*" * 30))
+            logger.warning("The minimum of the provided curve is {:.2e}.".format(
                 min(self.power_spectral_density)))
-            logging.warning(
+            logger.warning(
                 "You may have intended to provide this as an amplitude spectral density.")
 
     def set_from_frame_file(self, frame_file, psd_start_time, psd_duration,
@@ -1431,8 +1431,8 @@ class PowerSpectralDensity(object):
 
     def set_from_aLIGO(self):
         psd_file = 'aLIGO_ZERO_DET_high_P_psd.txt'
-        logging.info("No power spectral density provided, using aLIGO,"
-                     "zero detuning, high power.")
+        logger.info("No power spectral density provided, using aLIGO,"
+                    " zero detuning, high power.")
         self.set_from_power_spectral_density_file(psd_file)
 
     @property
@@ -1568,7 +1568,7 @@ def get_empty_interferometer(name):
         interferometer = load_interferometer(filename)
         return interferometer
     except FileNotFoundError:
-        logging.warning('Interferometer {} not implemented'.format(name))
+        logger.warning('Interferometer {} not implemented'.format(name))
 
 
 def load_interferometer(filename):
@@ -1634,7 +1634,7 @@ def get_interferometer_with_open_data(
 
     """
 
-    logging.warning(
+    logger.warning(
         "Parameter estimation for real interferometer data in tupak is in "
         "alpha testing at the moment: the routines for windowing and filtering"
         " have not been reviewed.")
@@ -1813,7 +1813,7 @@ def get_event_data(
                 outdir=outdir, label=label, plot=plot, filter_freq=filter_freq,
                 raw_data_file=raw_data_file, **kwargs))
         except ValueError as e:
-            logging.debug("Error raised {}".format(e))
-            logging.warning('No data found for {}.'.format(name))
+            logger.debug("Error raised {}".format(e))
+            logger.warning('No data found for {}.'.format(name))
 
     return InterferometerSet(interferometers)
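
The ASD/PSD warnings above rest on a magnitude heuristic: ground-based detector-noise amplitude spectral densities sit around 1e-23, while power spectral densities (the ASD squared) sit around 1e-46, so 1e-30 cleanly separates the two. An illustrative restatement of that check (not a tupak function):

```python
import numpy as np

def guess_spectral_density_kind(values, threshold=1e-30):
    # Mirrors the 1e-30 threshold used in the warnings above: a curve whose
    # minimum falls below it is almost certainly a PSD, not an ASD.
    return 'PSD' if np.min(values) < threshold else 'ASD'
```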
diff --git a/tupak/gw/likelihood.py b/tupak/gw/likelihood.py
index 3b197d9f17ae001a908c6bc7b53564f32d3c15c6..dfc09b27551ccc3d6f8f68a2a0f4bdef362dbc83 100644
--- a/tupak/gw/likelihood.py
+++ b/tupak/gw/likelihood.py
@@ -1,5 +1,3 @@
-import logging
-
 import numpy as np
 from scipy.interpolate import interp2d, interp1d
 
@@ -13,6 +11,7 @@ from scipy.special._ufuncs import i0e
 
 import tupak
 from tupak.core import likelihood as likelihood
+from tupak.core.utils import logger
 
 
 class GravitationalWaveTransient(likelihood.Likelihood):
@@ -92,12 +91,12 @@ class GravitationalWaveTransient(likelihood.Likelihood):
             wfg_attr = getattr(self.waveform_generator, attr)
             ifo_attr = getattr(self.interferometers, attr)
             if wfg_attr is None:
-                logging.debug(
+                logger.debug(
                     "The waveform_generator {} is None. Setting from the "
                     "provided interferometers.".format(attr))
                 setattr(self.waveform_generator, attr, ifo_attr)
             elif wfg_attr != ifo_attr:
-                logging.warning(
+                logger.warning(
                     "The waveform_generator {} is not equal to that of the "
                     "provided interferometers. Overwriting the "
                     "waveform_generator.".format(attr))
@@ -226,7 +225,7 @@ class GravitationalWaveTransient(likelihood.Likelihood):
 
     def setup_distance_marginalization(self):
         if 'luminosity_distance' not in self.prior.keys():
-            logging.info('No prior provided for distance, using default prior.')
+            logger.info('No prior provided for distance, using default prior.')
             self.prior['luminosity_distance'] = tupak.core.prior.create_default_prior('luminosity_distance')
         self.distance_array = np.linspace(self.prior['luminosity_distance'].minimum,
                                           self.prior['luminosity_distance'].maximum, int(1e4))
@@ -267,7 +266,7 @@ class GravitationalWaveTransient(likelihood.Likelihood):
 
     def setup_phase_marginalization(self):
         if 'phase' not in self.prior.keys() or not isinstance(self.prior['phase'], tupak.core.prior.Prior):
-            logging.info('No prior provided for phase at coalescence, using default prior.')
+            logger.info('No prior provided for phase at coalescence, using default prior.')
             self.prior['phase'] = tupak.core.prior.create_default_prior('phase')
         self.bessel_function_interped = interp1d(np.linspace(0, 1e6, int(1e5)),
                                                  np.log([i0e(snr) for snr in np.linspace(0, 1e6, int(1e5))])
diff --git a/tupak/gw/prior.py b/tupak/gw/prior.py
index e9b55cb8ddb385d58b9054b7056a6db553124e4a..864618e5e16a4d0195146b9f2f86faf181293f97 100644
--- a/tupak/gw/prior.py
+++ b/tupak/gw/prior.py
@@ -1,5 +1,7 @@
 from tupak.core.prior import *
 
+from tupak.core.utils import logger
+
 
 class UniformComovingVolume(FromFile):
 
@@ -35,7 +37,7 @@ class BBHPriorSet(PriorSet):
         """
         if dictionary is None and filename is None:
             filename = os.path.join(os.path.dirname(__file__), 'prior_files', 'binary_black_holes.prior')
-            logging.info('No prior given, using default BBH priors in {}.'.format(filename))
+            logger.info('No prior given, using default BBH priors in {}.'.format(filename))
         elif not os.path.isfile(filename):
             filename = os.path.join(os.path.dirname(__file__), 'prior_files', filename)
         PriorSet.__init__(self, dictionary=dictionary, filename=filename)
@@ -67,7 +69,7 @@ class BBHPriorSet(PriorSet):
             if key in parameter_set:
                 if len(parameter_set.intersection(self)) > 2:
                     redundant = True
-                    logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
+                    logger.warning('{} in prior. This may lead to unexpected behaviour.'.format(
                         parameter_set.intersection(self)))
                     break
             elif len(parameter_set.intersection(self)) == 2:
@@ -78,7 +80,7 @@ class BBHPriorSet(PriorSet):
             if key in parameter_set:
                 if len(parameter_set.intersection(self)) > 1:
                     redundant = True
-                    logging.warning('{} in prior. This may lead to unexpected behaviour.'.format(
+                    logger.warning('{} in prior. This may lead to unexpected behaviour.'.format(
                         parameter_set.intersection(self)))
                     break
                 elif len(parameter_set.intersection(self)) == 1:
diff --git a/tupak/gw/source.py b/tupak/gw/source.py
index afe9d0cd0f21cdeeea152c88b613c85773815c88..86b26a157ef81deece15b313a99efd33be60bbb2 100644
--- a/tupak/gw/source.py
+++ b/tupak/gw/source.py
@@ -1,15 +1,15 @@
 from __future__ import division, print_function
 
-import logging
 import numpy as np
 
+from tupak.core.utils import logger
+from tupak.core import utils
+
 try:
     import lalsimulation as lalsim
 except ImportError:
-    logging.warning("You do not have lalsuite installed currently. You will "
-                    " not be able to use some of the prebuilt functions.")
-
-from tupak.core import utils
+    logger.warning("You do not have lalsuite installed currently. You will "
+                   "not be able to use some of the prebuilt functions.")
 
 
 def lal_binary_black_hole(
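
Besides the rename, this hunk reorders the imports so `logger` exists before the guarded `lalsimulation` import runs; previously the warning went through (and implicitly configured) the root logger. A sketch of the guarded optional-dependency pattern, where the `lalsim = None` fallback and the helper are illustrative additions rather than tupak code:

```python
import logging

logger = logging.getLogger('tupak')

try:
    import lalsimulation as lalsim
except ImportError:
    lalsim = None  # assumed fallback for this sketch; tupak only logs here
    logger.warning("You do not have lalsuite installed currently. You will "
                   "not be able to use some of the prebuilt functions.")

def require_lalsim():
    # Illustrative guard: fail clearly at call time if the optional
    # dependency is missing, instead of with a NameError deep in a waveform.
    if lalsim is None:
        raise ImportError("lalsimulation is required for this function")
    return lalsim
```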
diff --git a/tupak/gw/utils.py b/tupak/gw/utils.py
index 0fda7255e5108f411fd1c719559e3ca31e64b90e..e64a0614a4ce4f0223ccbd37a22f3469ca6b009e 100644
--- a/tupak/gw/utils.py
+++ b/tupak/gw/utils.py
@@ -1,17 +1,16 @@
-import logging
 import os
 
 import numpy as np
 from scipy import signal
 
-from tupak.core.utils import gps_time_to_gmst, ra_dec_to_theta_phi, speed_of_light, nfft
+from tupak.core.utils import gps_time_to_gmst, ra_dec_to_theta_phi, speed_of_light, nfft, logger
 
 try:
     from gwpy.signal import filter_design
     from gwpy.timeseries import TimeSeries
 except ImportError:
-    logging.warning("You do not have gwpy installed currently. You will "
-                    " not be able to use some of the prebuilt functions.")
+    logger.warning("You do not have gwpy installed currently. You will "
+                   "not be able to use some of the prebuilt functions.")
 
 
 def asd_from_freq_series(freq_data, df):
@@ -131,7 +130,7 @@ def get_polarization_tensor(ra, dec, time, psi, mode):
     elif mode.lower() == 'y':
         return np.einsum('i,j->ij', n, omega) + np.einsum('i,j->ij', omega, n)
     else:
-        logging.warning("{} not a polarization mode!".format(mode))
+        logger.warning("{} not a polarization mode!".format(mode))
         return None
 
 
@@ -330,13 +329,13 @@ def get_open_strain_data(
     t2 = t2 + buffer_time
 
     if os.path.isfile(filename) and cache:
-        logging.info('Using cached data from {}'.format(filename))
+        logger.info('Using cached data from {}'.format(filename))
         strain = TimeSeries.read(filename)
     else:
-        logging.info('Fetching open data from {} to {} with buffer time {}'
+        logger.info('Fetching open data from {} to {} with buffer time {}'
-                     .format(t1, t2, buffer_time))
+                    .format(t1, t2, buffer_time))
         strain = TimeSeries.fetch_open_data(name, t1, t2, **kwargs)
-        logging.info('Saving data to {}'.format(filename))
+        logger.info('Saving data to {}'.format(filename))
         strain.write(filename)
     return strain
 
@@ -372,9 +371,9 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
         try:
             strain = TimeSeries.read(source=file_name, channel=channel, start=t1, end=t2, **kwargs)
             loaded = True
-            logging.info('Successfully loaded {}.'.format(channel))
+            logger.info('Successfully loaded {}.'.format(channel))
         except RuntimeError:
-            logging.warning('Channel {} not found. Trying preset channel names'.format(channel))
+            logger.warning('Channel {} not found. Trying preset channel names'.format(channel))
     for channel_type in ['GDS-CALIB_STRAIN', 'DCS-CALIB_STRAIN_C01', 'DCS-CALIB_STRAIN_C02']:
         for ifo_name in ['H1', 'L1']:
             channel = '{}:{}'.format(ifo_name, channel_type)
@@ -383,14 +382,14 @@ def read_frame_file(file_name, t1, t2, channel=None, buffer_time=1, **kwargs):
             try:
                 strain = TimeSeries.read(source=file_name, channel=channel, start=t1-buffer_time, end=t2+buffer_time, **kwargs)
                 loaded = True
-                logging.info('Successfully loaded {}.'.format(channel))
+                logger.info('Successfully loaded {}.'.format(channel))
             except RuntimeError:
                 pass
 
     if loaded:
         return strain
     else:
-        logging.warning('No data loaded.')
+        logger.warning('No data loaded.')
         return None
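
Taken together, these changes mean a downstream script controls tupak's verbosity through the single named logger, and `propagate = False` guarantees the same records do not also surface through whatever root-logger configuration the application uses. A small usage sketch (the log file name is illustrative):

```python
import logging

tupak_logger = logging.getLogger('tupak')

# Quieten tupak: only warnings and above reach its handlers.
tupak_logger.setLevel(logging.WARNING)

# Additionally mirror tupak's output to a file. The handler stays at the
# default NOTSET level, so it records whatever passes the logger's filter.
file_handler = logging.FileHandler('tupak_run.log')
file_handler.setFormatter(logging.Formatter(
    '%(asctime)s %(name)s %(levelname)-8s: %(message)s'))
tupak_logger.addHandler(file_handler)
```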