Commit d339a288 authored by Gregory Ashton

More flake8 fixes

parent 91648bee
@@ -1498,9 +1498,9 @@ class Pymc3(Sampler):
         """
         try:
-            import theano
+            import theano  # noqa
             import theano.tensor as tt
-            from theano.compile.ops import as_op
+            from theano.compile.ops import as_op  # noqa
         except ImportError:
             raise ImportError("Could not import theano")
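
Note: the `# noqa` markers added above are the standard way to tell flake8 to skip its checks (here the F401 "imported but unused" warning) on a specific line; the imports are kept only to confirm that theano is installed. A minimal sketch of the same pattern for an arbitrary optional dependency (the module name is illustrative, not from this commit):

try:
    # imported only to verify the dependency is installed, hence the noqa
    import some_optional_module  # noqa
except ImportError:
    raise ImportError("Could not import some_optional_module")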
@@ -1534,7 +1534,7 @@ class Pymc3(Sampler):
            def grad(self, inputs, g):
                theta, = inputs
-                return [g[0]*self.logpgrad(theta)]
+                return [g[0] * self.logpgrad(theta)]

        # create theano Op for calculating the gradient of the log likelihood
        class LogLikeGrad(tt.Op):
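
For context, the `grad` method touched above belongs to the usual "black box likelihood" pattern for theano/PyMC3: a `tt.Op` wraps a plain-Python log-likelihood, and its `grad` returns the upstream gradient `g[0]` times a second Op that evaluates the likelihood gradient. The sketch below shows the general shape of that pattern, assuming `loglike` and `gradlike` are ordinary callables taking a parameter vector; the class and argument names are illustrative, not the ones in this repository.

import numpy as np
import theano.tensor as tt


class LogLikeGradOp(tt.Op):
    """Op that evaluates the gradient vector of the log-likelihood."""

    itypes = [tt.dvector]  # parameter vector in
    otypes = [tt.dvector]  # gradient vector out

    def __init__(self, gradlike):
        self.gradlike = gradlike

    def perform(self, node, inputs, outputs):
        theta, = inputs
        outputs[0][0] = np.asarray(self.gradlike(theta))


class LogLikeOp(tt.Op):
    """Op that evaluates the log-likelihood and exposes its gradient."""

    itypes = [tt.dvector]  # parameter vector in
    otypes = [tt.dscalar]  # scalar log-likelihood out

    def __init__(self, loglike, gradlike):
        self.likelihood = loglike
        self.logpgrad = LogLikeGradOp(gradlike)

    def perform(self, node, inputs, outputs):
        theta, = inputs
        outputs[0][0] = np.array(self.likelihood(theta))

    def grad(self, inputs, g):
        theta, = inputs
        # chain rule: upstream gradient times the likelihood gradient
        return [g[0] * self.logpgrad(theta)]

An instance such as `LogLikeOp(loglike, gradlike)` can then be applied to a symbolic parameter vector inside a PyMC3 model (for example via `pm.DensityDist`), which is roughly how an external likelihood of this kind is handed to the sampler.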
@@ -479,7 +479,7 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
        An array of indices in `vals` that are _not_ fixed values and therefore
        can have derivatives taken. If `None` then derivatives of all values
        are calculated.

    Returns
    -------
    grads: array_like
@@ -488,10 +488,10 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
    if nonfixedidx is None:
        nonfixedidx = range(len(vals))

    if len(nonfixedidx) > len(vals):
        raise ValueError("Too many non-fixed values")
    if max(nonfixedidx) >= len(vals) or min(nonfixedidx) < 0:
        raise ValueError("Non-fixed indexes contain non-existent indices")
@@ -503,9 +503,9 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
    # set steps
    if abseps is None:
        if isinstance(releps, float):
-            eps = np.abs(vals)*releps
-            eps[eps == 0.] = releps # if any values are zero set eps to releps
-            teps = releps*np.ones(len(vals))
+            eps = np.abs(vals) * releps
+            eps[eps == 0.] = releps  # if any values are zero set eps to releps
+            teps = releps * np.ones(len(vals))
        elif isinstance(releps, (list, np.ndarray)):
            if len(releps) != len(vals):
                raise ValueError("Problem with input relative step sizes")
@@ -516,7 +516,7 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
            raise RuntimeError("Relative step sizes are not a recognised type!")
    else:
        if isinstance(abseps, float):
-            eps = abseps*np.ones(len(vals))
+            eps = abseps * np.ones(len(vals))
        elif isinstance(abseps, (list, np.ndarray)):
            if len(abseps) != len(vals):
                raise ValueError("Problem with input absolute step sizes")
@@ -539,13 +539,13 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
        bvals = np.copy(vals)

        # central difference
-        fvals[i] += 0.5*leps  # change forwards distance to half eps
-        bvals[i] -= 0.5*leps  # change backwards distance to half eps
-        cdiff = (func(fvals)-func(bvals))/leps
+        fvals[i] += 0.5 * leps  # change forwards distance to half eps
+        bvals[i] -= 0.5 * leps  # change backwards distance to half eps
+        cdiff = (func(fvals) - func(bvals)) / leps

        while 1:
-            fvals[i] -= 0.5*leps  # remove old step
-            bvals[i] += 0.5*leps
+            fvals[i] -= 0.5 * leps  # remove old step
+            bvals[i] += 0.5 * leps

            # change the difference by a factor of two
            cureps *= epsscale
@@ -557,19 +557,19 @@ def derivatives(vals, func, releps=1e-3, abseps=None, mineps=1e-9, reltol=1e-3,
            leps *= epsscale

            # central difference
-            fvals[i] += 0.5*leps  # change forwards distance to half eps
-            bvals[i] -= 0.5*leps  # change backwards distance to half eps
-            cdiffnew = (func(fvals)-func(bvals))/leps
+            fvals[i] += 0.5 * leps  # change forwards distance to half eps
+            bvals[i] -= 0.5 * leps  # change backwards distance to half eps
+            cdiffnew = (func(fvals) - func(bvals)) / leps

            if cdiffnew == cdiff:
                grads[count] = cdiff
                break

            # check whether previous diff and current diff are the same within reltol
-            rat = (cdiff/cdiffnew)
+            rat = (cdiff / cdiffnew)
            if np.isfinite(rat) and rat > 0.:
                # gradient has not changed sign
-                if np.abs(1.-rat) < reltol:
+                if np.abs(1. - rat) < reltol:
                    grads[count] = cdiffnew
                    break
            else:
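
Taken together, the hunks above implement an adaptive central difference: each estimate uses (f(x + eps/2) - f(x - eps/2)) / eps, the step is repeatedly shrunk by `epsscale`, and iteration stops once two successive estimates agree to within `reltol`. A simplified, self-contained sketch of that idea (not the project's actual `derivatives()` signature) follows:

import numpy as np


def central_diff(func, x, i, releps=1e-3, epsscale=0.5, reltol=1e-3, mineps=1e-9):
    """Adaptive central-difference estimate of d func / d x[i] (simplified sketch)."""
    x = np.asarray(x, dtype=float)
    leps = np.abs(x[i]) * releps if x[i] != 0. else releps  # initial step size

    def cdiff(step):
        fvals, bvals = x.copy(), x.copy()
        fvals[i] += 0.5 * step  # forward point at +step/2
        bvals[i] -= 0.5 * step  # backward point at -step/2
        return (func(fvals) - func(bvals)) / step

    grad = cdiff(leps)
    while leps > mineps:
        leps *= epsscale  # shrink the step and re-estimate
        gradnew = cdiff(leps)
        if gradnew == grad:
            return grad
        rat = grad / gradnew
        # accept once successive estimates agree to reltol without a sign change
        if np.isfinite(rat) and rat > 0. and np.abs(1. - rat) < reltol:
            return gradnew
        grad = gradnew
    return grad


# e.g. central_diff(lambda v: np.sin(v[0]), [0.3], 0) is close to np.cos(0.3)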