Switched to `StandardScaler` from deprecated `normalize` in `NaturalGradient` (#8299)

* switched to `StandardScaler`

* Don't mention private methods in reno

* Fix typo

Co-authored-by: Julien Gacon <gaconju@gmail.com>
Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com>
This commit is contained in:
Prakhar Bhatnagar 2022-07-13 14:18:23 -04:00 committed by GitHub
parent 400fadf70e
commit 56501cc673
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 18 additions and 6 deletions

View File

@@ -354,7 +354,7 @@ class NaturalGradient(GradientBase):
lambda4: right starting point for L-curve corner search
tol_search: termination threshold for regularization parameter search
fit_intercept: if True calculate intercept
normalize: deprecated if fit_intercept=False, if True normalize A for regression
normalize: ignored if fit_intercept=False, if True normalize A for regression
copy_a: if True A is copied, else overwritten
max_iter: max. number of iterations if solver is CG
tol: precision of the regression solution
@@ -369,11 +369,11 @@ class NaturalGradient(GradientBase):
"""
from sklearn.linear_model import Ridge
from sklearn.preprocessing import StandardScaler
reg = Ridge(
alpha=lambda_,
fit_intercept=fit_intercept,
normalize=normalize,
copy_X=copy_a,
max_iter=max_iter,
tol=tol,
@@ -383,6 +383,9 @@ class NaturalGradient(GradientBase):
def reg_method(a, c, alpha):
reg.set_params(alpha=alpha)
if normalize:
reg.fit(StandardScaler().fit_transform(a), c)
else:
reg.fit(a, c)
return reg.coef_
@@ -425,7 +428,7 @@ class NaturalGradient(GradientBase):
lambda4: right starting point for L-curve corner search
tol_search: termination threshold for regularization parameter search
fit_intercept: if True calculate intercept
normalize: deprecated if fit_intercept=False, if True normalize A for regression
normalize: ignored if fit_intercept=False, if True normalize A for regression
precompute: If True compute and use Gram matrix to speed up calculations.
Gram matrix can also be given explicitly
copy_a: if True A is copied, else overwritten
@@ -444,11 +447,11 @@ class NaturalGradient(GradientBase):
"""
from sklearn.linear_model import Lasso
from sklearn.preprocessing import StandardScaler
reg = Lasso(
alpha=lambda_,
fit_intercept=fit_intercept,
normalize=normalize,
precompute=precompute,
copy_X=copy_a,
max_iter=max_iter,
@@ -461,6 +464,9 @@ class NaturalGradient(GradientBase):
def reg_method(a, c, alpha):
reg.set_params(alpha=alpha)
if normalize:
reg.fit(StandardScaler().fit_transform(a), c)
else:
reg.fit(a, c)
return reg.coef_

View File

@@ -0,0 +1,6 @@
---
fixes:
- |
Fix deprecation warnings in :class:`.NaturalGradient`, which now uses the
:class:`~sklearn.preprocessing.StandardScaler` to scale the data
before fitting the model if the ``normalize`` parameter is set to ``True``.