Removes normalize attributes (deprecated) #113

Merged

merged 3 commits on Nov 29, 2022
6 changes: 3 additions & 3 deletions _unittests/ut_mlmodel/test_quantile_regression.py
@@ -61,7 +61,7 @@ def test_quantile_regression_intercept(self):
self.assertNotEqual(clr.intercept_, 0)
self.assertNotEqual(clq.intercept_, 0)
self.assertEqualArray(clr.intercept_, clq.intercept_)
- self.assertEqualArray(clr.coef_, clq.coef_)
+ self.assertEqualArray(clr.coef_, clq.coef_, atol=1e-10)

@unittest.skipIf(
compare_module_version(sklver, "0.24") == -1,
@@ -77,7 +77,7 @@ def test_quantile_regression_intercept_positive(self):
self.assertNotEqual(clr.intercept_, 0)
self.assertNotEqual(clq.intercept_, 0)
self.assertEqualArray(clr.intercept_, clq.intercept_)
- self.assertEqualArray(clr.coef_, clq.coef_)
+ self.assertEqualArray(clr.coef_, clq.coef_, atol=1e-10)
self.assertGreater(clr.coef_.min(), 0)
self.assertGreater(clq.coef_.min(), 0)

@@ -92,7 +92,7 @@ def test_quantile_regression_intercept_weights(self):
self.assertNotEqual(clr.intercept_, 0)
self.assertNotEqual(clq.intercept_, 0)
self.assertEqualArray(clr.intercept_, clq.intercept_)
- self.assertEqualArray(clr.coef_, clq.coef_)
+ self.assertEqualArray(clr.coef_, clq.coef_, atol=1e-10)

def test_quantile_regression_diff(self):
X = numpy.array([[0.1], [0.2], [0.3], [0.4], [0.5]])
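The three test updates compare coefficients with an absolute tolerance instead of exact equality, since the two estimators agree only up to floating-point noise. A minimal sketch of the same check in plain NumPy, assuming `assertEqualArray` behaves like an allclose-style assertion (the arrays below are made-up stand-ins for `clr.coef_` and `clq.coef_`):

```python
import numpy

# Made-up coefficient vectors standing in for clr.coef_ and clq.coef_.
coef_linear = numpy.array([0.5, -1.2, 3.0])
coef_quantile = coef_linear + 1e-12  # differs only by numerical noise

# atol=1e-10 tolerates tiny round-off differences but still catches real mismatches.
numpy.testing.assert_allclose(coef_quantile, coef_linear, rtol=0, atol=1e-10)
```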
2 changes: 1 addition & 1 deletion appveyor.yml
@@ -12,7 +12,7 @@ init:
install:
- "%PYTHON%\\python -m pip install --upgrade pip"
# for many packages
- - pip install llvmlite numba
+ - "%PYTHON%\\Scripts\\pip install llvmlite numba"
- "%PYTHON%\\Scripts\\pip install -r requirements-win.txt"
# install precompiled versions not available on pypi
- "%PYTHON%\\Scripts\\pip install torch torchvision torchaudio"
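The AppVeyor tweak calls the pip that ships with the selected `%PYTHON%` instead of whichever `pip` happens to be first on the PATH, so llvmlite and numba land in the interpreter the build actually uses. A hedged Python illustration of the same idea, not part of the PR:

```python
import subprocess
import sys

# Installing through a specific interpreter ("python -m pip") guarantees the
# packages go into that interpreter's site-packages; calling
# %PYTHON%\Scripts\pip on the CI image has the same effect.
subprocess.run([sys.executable, "-m", "pip", "--version"], check=True)
```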
17 changes: 5 additions & 12 deletions mlinsights/mlmodel/quantile_regression.py
@@ -29,21 +29,14 @@ class QuantileLinearRegression(LinearRegression):
value.
"""

- def __init__(self, fit_intercept=True, normalize=False, copy_X=True,
+ def __init__(self, fit_intercept=True, copy_X=True,
n_jobs=1, delta=0.0001, max_iter=10, quantile=0.5,
positive=False, verbose=False):
"""
:param fit_intercept: boolean, optional, default True
whether to calculate the intercept for this model. If set
to False, no intercept will be used in calculations
(e.g. data is expected to be already centered).
- :param normalize: boolean, optional, default False
- This parameter is ignored when ``fit_intercept`` is set to False.
- If True, the regressors X will be normalized before regression by
- subtracting the mean and dividing by the l2-norm.
- If you wish to standardize, please use
- :class:`sklearn.preprocessing.StandardScaler` before calling ``fit`` on
- an estimator with ``normalize=False``.
:param copy_X: boolean, optional, default True
If True, X will be copied; else, it may be overwritten.
:param n_jobs: int, optional, default 1
@@ -65,12 +58,12 @@ def __init__(self, fit_intercept=True, normalize=False, copy_X=True,
"""
try:
LinearRegression.__init__(
- self, fit_intercept=fit_intercept, normalize=normalize,
+ self, fit_intercept=fit_intercept,
copy_X=copy_X, n_jobs=n_jobs, positive=positive)
except TypeError:
# scikit-learn<0.24
LinearRegression.__init__(
- self, fit_intercept=fit_intercept, normalize=normalize,
+ self, fit_intercept=fit_intercept,
copy_X=copy_X, n_jobs=n_jobs)
self.max_iter = max_iter
self.verbose = verbose
@@ -140,12 +133,12 @@ def compute_z(Xm, beta, Y, W, delta=0.0001):

try:
clr = LinearRegression(fit_intercept=False, copy_X=self.copy_X,
- n_jobs=self.n_jobs, normalize=self.normalize,
+ n_jobs=self.n_jobs,
positive=self.positive)
except AttributeError:
# scikit-learn<0.24
clr = LinearRegression(fit_intercept=False, copy_X=self.copy_X,
- n_jobs=self.n_jobs, normalize=self.normalize)
+ n_jobs=self.n_jobs)

W = numpy.ones(X.shape[0]) if sample_weight is None else sample_weight
self.n_iter_ = 0
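With the deprecated `normalize` argument removed from both the constructor and the internal `LinearRegression` calls, callers who relied on `normalize=True` are expected to scale the inputs themselves, as the removed docstring already recommended. A short sketch under that assumption; the import path, data, and pipeline are illustrative, not taken from the PR:

```python
import numpy
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

from mlinsights.mlmodel import QuantileLinearRegression

# Made-up toy data for illustration.
X = numpy.array([[0.1], [0.2], [0.3], [0.4], [0.5]])
y = numpy.array([1.0, 1.1, 1.2, 10.0, 1.4])

# New signature: no normalize parameter; standardization is done explicitly
# in a pipeline step instead.
model = make_pipeline(
    StandardScaler(),
    QuantileLinearRegression(fit_intercept=True, max_iter=10, quantile=0.5),
)
model.fit(X, y)
print(model.predict(X))
```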