diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 89b737fa51b9ef2c170873662c08cb45e5bbe0e8..aad2ce4270789a59577aa585a73fb861f626a80e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -34,6 +34,7 @@ exitcode-jessie:
     - coverage run --include=/opt/conda/lib/python3.6/site-packages/tupak* -a test/prior_tests.py
     - coverage run --include=/opt/conda/lib/python3.6/site-packages/tupak* -a test/sampler_tests.py
     - coverage run --include=/opt/conda/lib/python3.6/site-packages/tupak* -a test/waveform_generator_tests.py
+    - coverage run --include=/opt/conda/lib/python3.6/site-packages/tupak* -a test/likelihood_tests.py
     - coverage html
     - coverage-badge -o coverage_badge.svg -f
     # Run all other tests (no coverage data collected)
diff --git a/test/likelihood_tests.py b/test/likelihood_tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..80f6989391deab646bcd015f528dfb587c06f549
--- /dev/null
+++ b/test/likelihood_tests.py
@@ -0,0 +1,75 @@
+from __future__ import absolute_import
+import tupak
+from tupak.core import prior
+from tupak.core.result import Result
+import unittest
+from mock import MagicMock
+import numpy as np
+import inspect
+import os
+import copy
+
+
+class TestGaussianLikelihood(unittest.TestCase):
+
+    def setUp(self):
+        self.N = 100
+        self.sigma = 0.1
+        self.x = np.linspace(0, 1, self.N)
+        self.y = 2 * self.x + 1 + np.random.normal(0, self.sigma, self.N)
+        def function(x, m, c):
+            return m * x + c
+        self.function = function
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_known_sigma(self):
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, self.sigma)
+        likelihood.parameters['m'] = 2
+        likelihood.parameters['c'] = 0
+        likelihood.log_likelihood()
+        self.assertTrue(likelihood.sigma == self.sigma)
+
+    def test_known_array_sigma(self):
+        sigma_array = np.ones(self.N) * self.sigma
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, sigma_array)
+        likelihood.parameters['m'] = 2
+        likelihood.parameters['c'] = 0
+        likelihood.log_likelihood()
+        self.assertTrue(type(likelihood.sigma) == type(sigma_array))
+        self.assertTrue(all(likelihood.sigma == sigma_array))
+
+    def test_unknown_float_sigma(self):
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, sigma=None)
+        likelihood.parameters['m'] = 2
+        likelihood.parameters['c'] = 0
+        self.assertTrue(likelihood.sigma is None)
+        with self.assertRaises(TypeError):
+            likelihood.log_likelihood()
+        likelihood.parameters['sigma'] = 1
+        likelihood.log_likelihood()
+        self.assertTrue(likelihood.sigma is None)
+
+    def test_y(self):
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, self.sigma)
+        self.assertTrue(all(likelihood.y == self.y))
+
+    def test_x(self):
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, self.sigma)
+        self.assertTrue(all(likelihood.x == self.x))
+
+    def test_N(self):
+        likelihood = tupak.core.likelihood.GaussianLikelihood(
+            self.x, self.y, self.function, self.sigma)
+        self.assertTrue(likelihood.N == len(self.x))
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tupak/core/likelihood.py b/tupak/core/likelihood.py
index 95efae97d5299ff119875db0691db7a62992ff15..a854f6e59ad643afa76c5891dda50c276f35f5f6 100644
--- a/tupak/core/likelihood.py
+++ b/tupak/core/likelihood.py
@@ -61,35 +61,53 @@ class GaussianLikelihood(Likelihood):
         sigma: None, float, array_like
             If None, the standard deviation of the noise is unknown and will be
             estimated (note: this requires a prior to be given for sigma). If
-            not None, this defined the standard-deviation of the data points.
+            not None, this defines the standard-deviation of the data points.
             This can either be a single float, or an array with length equal
             to that for `x` and `y`.
         """
-        parameters = self._infer_parameters_from_model(function)
-        Likelihood.__init__(self, dict.fromkeys(parameters))
+
+        parameters = self._infer_parameters_from_function(function)
+        self.parameters = dict.fromkeys(parameters)
+
+        Likelihood.__init__(self, self.parameters)
 
         self.x = x
         self.y = y
         self.sigma = sigma
         self.function = function
 
-        self.parameters = dict.fromkeys(parameters)
+        # Check if sigma was provided, if not it is a parameter
-        self.function_keys = self.parameters.keys()
+        self.function_keys = list(self.parameters.keys())
         if self.sigma is None:
             self.parameters['sigma'] = None
 
     @staticmethod
-    def _infer_parameters_from_model(model):
-        parameters = inspect.getargspec(model).args
+    def _infer_parameters_from_function(function):
+        """ Infers the arguments of function (except the first arg, which is
+            assumed to be the dependent variable)
+        """
+        parameters = inspect.getfullargspec(function).args
         parameters.pop(0)
         return parameters
 
     @property
     def N(self):
+        """ The number of data points """
         return len(self.x)
 
     def log_likelihood(self):
+        # This checks if sigma has been set in parameters. If so, that value
+        # will be used. Otherwise, the attribute sigma is used. The logic is
+        # that if sigma is not in parameters the attribute is used which was
+        # given at init (i.e. the known sigma as either a float or array).
+        sigma = self.parameters.get('sigma', self.sigma)
+
+        # This sets up the function only parameters (i.e. not sigma)
         model_parameters = {k: self.parameters[k] for k in self.function_keys}
+
+        # Calculate the residual
         res = self.y - self.function(self.x, **model_parameters)
-        return -0.5 * (np.sum((res / self.sigma)**2)
-                       + self.N*np.log(2*np.pi*self.sigma**2))
+
+        # Return the summed log likelihood
+        return -0.5 * (np.sum((res / sigma)**2)
+                       + self.N * np.log(2 * np.pi * sigma**2))