Improved using pyflakes

parent 638b85b0
@@ -8,11 +8,11 @@ functions. It also provides a GUI that can use pre-defined fitting methods.
from __future__ import absolute_import, print_function
__all__ = ['bootstrap', 'plot_fit',
'curve_fitting', 'kernel_smoothing',
'curve_fitting', 'nonparam_regression',
'functions', 'residuals', 'CurveFitting']
from . import functions
from . import residuals
from . import functions,residuals
from . import bootstrap, plot_fit, curve_fitting, nonparam_regression
from .curve_fitting import CurveFitting
from path import path
......
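The `__all__` hunk replaces the stale `kernel_smoothing` entry with `nonparam_regression`, matching the module imported a few lines below. This is exactly the kind of mismatch pyflakes surfaces. A minimal sketch of what it reports on a file like this (hypothetical module, for illustration only):

```python
# demo.py -- hypothetical module showing two typical pyflakes findings
from path import path           # flagged: 'path.path' imported but unused

__all__ = ['kernel_smoothing']  # newer pyflakes versions also flag names
                                # listed in __all__ that the module never
                                # defines or imports
```

Running `pyflakes demo.py` prints one finding per line, with the file name and line number.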
@@ -325,6 +325,7 @@ class CurveFitting(object):
use_derivs = (Dres is not None) and (Dfun is not None)
df = None
f = None
if fix_params:
p_save = np.array(p0, dtype=float)
@@ -338,13 +339,14 @@ class CurveFitting(object):
"out of range.")
p0 = p_save[change_params]
def f(p):
def f_fixed(p):
p1 = np.array(p_save)
p1[change_params] = p
y0 = fct(p1, xdata)
return residuals(ydata, y0)
f = f_fixed
if use_derivs:
def df(p):
def df_fixed(p):
p1 = np.array(p_save)
p1[change_params] = p
y0 = fct(p1, xdata)
@@ -353,18 +355,21 @@ class CurveFitting(object):
if col_deriv:
return dfct[change_params]*dr
return dfct[:,change_params]*dr[:, np.newaxis]
df = df_fixed
else:
def f(p):
def f_free(p):
y0 = fct(p, xdata)
return residuals(ydata, y0)
f = f_free
if use_derivs:
def df(p):
def df_free(p):
dfct = Dfun(p, xdata)
y0 = fct(p, xdata)
dr = np.atleast_1d(Dres(ydata, y0))
if col_deriv:
return dfct*dr
return dfct*dr[:, np.newaxis]
df = df_free
if use_derivs:
self.df = df
......
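The closure renames above (`f` to `f_fixed`/`f_free`, `df` to `df_fixed`/`df_free`) silence pyflakes redefinition warnings without changing behavior: each variant now has a unique name, and the chosen one is bound to `f`/`df` afterwards. A condensed sketch of the pattern, assuming plain difference residuals (hypothetical helper, not the library API):

```python
import numpy as np

def make_residual_fn(fct, xdata, ydata, p_save=None, change_params=None):
    """Pick the fixed- or free-parameter residual closure without
    defining the same name twice in two branches."""
    if p_save is not None:
        def f_fixed(p):
            p1 = np.array(p_save)          # start from the saved parameters
            p1[change_params] = p          # overwrite only the free ones
            return ydata - fct(p1, xdata)
        f = f_fixed
    else:
        def f_free(p):
            return ydata - fct(p, xdata)
        f = f_free
    return f
```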
@@ -53,7 +53,6 @@ import numpy as np
from .kernels import normal_kernel1d
from . import kde_methods
from .kde_bandwidth import variance_bandwidth, silverman_covariance, scotts_covariance, botev_bandwidth
from scipy import stats, optimize
from .utils import numpy_method_idx
class KDE1D(object):
......
@@ -973,7 +973,7 @@ ExpTransform = Transform(np.exp, np.log, _inverse)
def transform_distribution(xs, ys, Dinv, out):
"""
r"""
Transform a distribution into another one by a change of variable.
:param ndarray xs: Evaluation points of the distribution
@@ -991,11 +991,10 @@ def transform_distribution(xs, ys, Dinv, out):
f_Y(y) = \left| \frac{1}{g'(g^{-1}(y))} \right| \cdot f_X(g^{-1}(y))
"""
sel = ys == 0
Dinv(xs, out=out)
np.abs(out, out=out)
_inverse(out, out=out)
np.multiply(out, ys, out = out)
np.multiply(out, ys, out=out)
return out
......
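Two small fixes in `transform_distribution`: the docstring becomes a raw string (`r"""`), which keeps the embedded LaTeX (`\left|`, `\frac`) from being read as escape sequences, and the `out = out` keyword spacing is normalized. The formula itself is the standard change-of-variables rule; a quick numeric sanity check, assuming the exponential transform y = exp(x) applied to a standard normal (which should yield the lognormal density):

```python
# f_Y(y) = |1/g'(g^{-1}(y))| * f_X(g^{-1}(y)) with g = exp:
# g^{-1}(y) = log(y) and g'(g^{-1}(y)) = y, so f_Y(y) = f_X(log y) / y.
import numpy as np
from scipy import stats

ys = np.linspace(0.1, 5.0, 50)
fY = stats.norm.pdf(np.log(ys)) / ys
assert np.allclose(fY, stats.lognorm.pdf(ys, 1))  # lognormal with s=1
```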
@@ -37,12 +37,11 @@ elif python_version.major == 3 and python_version.minor >= 3:
if ext in ilm.SOURCE_SUFFIXES:
return ilm.SourceFileLoader(pack_name, str(filepath))
if ext in ilm.BYTECODE_SUFFIXES:
return ilm.SourcelessFileLoader(pack_name, str(filename))
return ilm.SourcelessFileLoader(pack_name, str(filepath))
if ext in ilm.EXTENSION_SUFFIXES:
return ilm.ExtensionFileLoader(pack_name, str(filename))
return ilm.ExtensionFileLoader(pack_name, str(filepath))
if python_version.minor == 3:
from importlib import find_loader
def create_module(loader):
" Version for Python 3.3 "
......
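Both corrected branches had referenced `filename`, a name that does not exist in that scope (pyflakes reports it as an undefined name); the argument actually in scope is `filepath`. A condensed sketch of the corrected dispatch, assuming `filepath` is path-like and `ext` is its suffix (hypothetical standalone wrapper):

```python
import importlib.machinery as ilm

def pick_loader(pack_name, filepath, ext):
    """Map a file suffix to the matching importlib loader (Python 3.3+)."""
    if ext in ilm.SOURCE_SUFFIXES:
        return ilm.SourceFileLoader(pack_name, str(filepath))
    if ext in ilm.BYTECODE_SUFFIXES:
        return ilm.SourcelessFileLoader(pack_name, str(filepath))
    if ext in ilm.EXTENSION_SUFFIXES:
        return ilm.ExtensionFileLoader(pack_name, str(filepath))
    return None
```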
@@ -5,13 +5,8 @@ Module implementing non-parametric regressions using kernel methods.
"""
from __future__ import division, absolute_import, print_function
from scipy import stats
from scipy.linalg import sqrtm, solve
import scipy
import numpy as np
from .compat import irange
from . import npr_methods, kernels, kde_bandwidth
from .utils import numpy_method_idx
class NonParamRegression(object):
r"""
@@ -284,7 +279,7 @@ class NonParamRegression(object):
self._fitted = True
def evaluate(self, points, out=None):
if not self.fitter:
if not self.fitted:
self.fit()
points = np.asanyarray(points)
real_shape = points.shape
......
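The guard in `evaluate()` now tests `fitted` rather than `fitter`, restoring the intended lazy fit on first evaluation. A minimal sketch of that pattern (hypothetical class, not the library API):

```python
class LazyFitExample(object):
    """evaluate() triggers fit() once, then reuses the fitted state."""
    def __init__(self):
        self._fitted = False

    def fit(self):
        # expensive precomputation would happen here
        self._fitted = True

    @property
    def fitted(self):
        return self._fitted

    def evaluate(self, points):
        if not self.fitted:   # the buggy version tested another attribute
            self.fit()
        return points         # placeholder for the real evaluation
```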
@@ -5,12 +5,13 @@ Module implementing non-parametric regressions using kernel methods.
"""
from __future__ import division, absolute_import, print_function
from scipy import stats, linalg
from scipy import linalg
import scipy
import numpy as np
from .compat import irange
from .cyth import HAS_CYTHON
from . import kde
from . import py_local_linear
local_linear = None
@@ -29,7 +30,6 @@ def usePython():
Switch to using the python implementation of the methods
"""
global local_linear
from . import py_local_linear
local_linear = py_local_linear
if HAS_CYTHON:
@@ -291,7 +291,6 @@ class LocalPolynomialKernel1D(RegressionKernelMethod):
xdata = reg.xdata[0, :, np.newaxis] # make it a column vector
ydata = reg.ydata[:, np.newaxis] # make it a column vector
points = points[0] # make it a line vector
q = self.q
bw = reg.bandwidth
kernel = reg.kernel
designMatrix = self.designMatrix
@@ -305,23 +304,13 @@ class LocalPolynomialKernel1D(RegressionKernelMethod):
out[i] = np.dot(Lx, ydata)
return out
@property
def q(self):
"""
Degree of the fitted polynomial
"""
return 1
class PolynomialDesignMatrix(object):
"""
Class used to create a design matrix for polynomial regression
"""
def __init__(self, dim, deg):
self.dim = dim
if out is None:
out = np.empty(points.shape, dtype=float)
self.deg = deg
self._designMatrixSize()
def _designMatrixSize(self):
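For context, the loop ending in `out[i] = np.dot(Lx, ydata)` evaluates, at each point, a kernel-weighted polynomial fit and keeps its value there. A standalone sketch of the same idea, assuming a Gaussian kernel (hypothetical helper, not the library API):

```python
import numpy as np
from scipy import stats

def local_poly_1d(x0, xdata, ydata, bw, q=1):
    """Kernel-weighted least-squares fit of a degree-q polynomial
    centered at x0; the constant coefficient is the fit value at x0."""
    X = np.vander(xdata - x0, q + 1)        # columns (x-x0)^q .. (x-x0)^0
    w = np.sqrt(stats.norm.pdf((xdata - x0) / bw))
    coef = np.linalg.lstsq(X * w[:, np.newaxis], ydata * w, rcond=None)[0]
    return coef[-1]
```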
@@ -473,7 +462,10 @@ class LocalPolynomialKernel(RegressionKernelMethod):
self._q = int(val)
def fit(self, reg):
if reg.dim == 1:
if self.q == 0:
obj = SpatialAverage()
return obj.fit(reg)
elif reg.dim == 1:
obj = LocalPolynomialKernel1D(self.q)
return obj.fit(reg)
self = super(LocalPolynomialKernel, self).fit(reg)
......
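The new `q == 0` branch is a natural special case: a degree-0 local polynomial fit reduces to the kernel-weighted local mean, i.e. the Nadaraya-Watson estimator, which is what a spatial average computes. A minimal sketch, assuming a Gaussian kernel (hypothetical helper):

```python
import numpy as np
from scipy import stats

def nadaraya_watson(x0, xdata, ydata, bw):
    """Degree-0 local polynomial == kernel-weighted average at x0."""
    w = stats.norm.pdf((xdata - x0) / bw)
    return np.sum(w * ydata) / np.sum(w)
```

This agrees with `local_poly_1d(x0, xdata, ydata, bw, q=0)` from the sketch above.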
@@ -13,7 +13,6 @@ from scipy.special import erfinv, gamma
from scipy import stats
#from .kernel_smoothing import LocalLinearKernel1D
from .nonparam_regression import NonParamRegression
from . import npr_methods
from .compat import unicode_csv_writer as csv_writer
from collections import namedtuple
......
from __future__ import division
from numpy import log, exp, newaxis
from numpy import log, exp
class Standard(object):
......
@@ -2,7 +2,7 @@ import numpy as np
from .. import kde_methods
from ..utils import namedtuple
from ..compat import irange
from scipy import stats, special
from scipy import stats
from .. import kernels
from .. import kde
......
from __future__ import division, absolute_import, print_function
from .. import kde
from .. import kde_methods
import numpy as np
from numpy import newaxis
from numpy.random import randn
from scipy import stats, integrate, interpolate
from ..compat import irange
from ..utils import make_ufunc
from . import kde_utils
class TestCDF(kde_utils.KDETester):
......
@@ -2,12 +2,9 @@ from __future__ import division, absolute_import, print_function
from .. import kde
from .. import kde_methods
from .. import kernels
import numpy as np
from numpy import newaxis
from numpy.random import randn
from scipy import integrate
from ..compat import irange
from . import kde_utils
class TestBandwidth(object):
@@ -15,7 +12,7 @@ class TestBandwidth(object):
def setUpClass(cls):
cls.ratios = np.array([1., 2., 5.])
d = randn(500)
cls.vs = cls.ratios[:, newaxis]*np.array([d, d, d])
cls.vs = cls.ratios[:, np.newaxis]*np.array([d, d, d])
cls.ss = np.var(cls.vs, axis=1)
def variance_methods(self, m):
......
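Aside from the `np.newaxis` fix (needed once the bare `newaxis` import is gone), the setup builds three copies of the same sample at different scales, so the per-row variances should scale with the squared ratios, which is what the bandwidth tests rely on. A quick check of that identity:

```python
import numpy as np
from numpy.random import randn

ratios = np.array([1., 2., 5.])
d = randn(500)
vs = ratios[:, np.newaxis] * d        # shape (3, 500), rows are scaled copies
ss = np.var(vs, axis=1)
assert np.allclose(ss, ratios ** 2 * ss[0])   # Var(r*X) = r**2 * Var(X)
```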
@@ -4,10 +4,8 @@ from .. import kernels
from .. import _kernels
from scipy import stats
from scipy.fftpack import fft, dct
from scipy.integrate import quad
import numpy as np
import numpy.testing
import numpy as np
from . import kde_utils
class RefKernel1D(kernels.Kernel1D):
......
@@ -4,8 +4,6 @@ from .. import nonparam_regression, npr_methods
import numpy as np
from ..compat import irange, izip
from . import kde_utils
def fct(xs):
"""
@@ -32,7 +30,6 @@ class TestConvergence1D(object):
sizes = [2 ** i for i in range(5, 8)]
cls.sizes = sizes
xs = [ np.tile(np.linspace(0.01, 3, s), cls.nb_samples) for s in sizes ]
noise = cls.yy.max() / 10
ys = [fct(x) for x in xs]
cls.xs = [x.reshape((cls.nb_samples, s)) for x, s in izip(xs, sizes)]
cls.ys = [y.reshape((cls.nb_samples, s)) for y, s in izip(ys, sizes)]
......