@@ -884,7 +884,7 @@ class MLPClassifier(ClassifierMixin, BaseMultilayerPerceptron):
884
884
layer i + 1.
885
885
886
886
n_iter_ : int
887
- The number of iterations the solver has ran .
887
+ The number of iterations the solver has run.
888
888
889
889
n_layers_ : int
890
890
Number of layers.
@@ -1292,10 +1292,13 @@ class MLPRegressor(RegressorMixin, BaseMultilayerPerceptron):
1292
1292
The minimum loss reached by the solver throughout fitting.
1293
1293
1294
1294
loss_curve_ : list of shape (`n_iter_`,)
1295
+ Loss value evaluated at the end of each training step.
1295
1296
The ith element in the list represents the loss at the ith iteration.
1296
1297
1297
1298
t_ : int
1298
1299
The number of training samples seen by the solver during fitting.
1300
+ Mathematically equals `n_iters * X.shape[0]`; it corresponds to
1301
+ `time_step`, and it is used by the optimizer's learning rate scheduler.
1299
1302
1300
1303
coefs_ : list of shape (n_layers - 1,)
1301
1304
The ith element in the list represents the weight matrix corresponding
@@ -1306,7 +1309,7 @@ class MLPRegressor(RegressorMixin, BaseMultilayerPerceptron):
1306
1309
layer i + 1.
1307
1310
1308
1311
n_iter_ : int
1309
- The number of iterations the solver has ran .
1312
+ The number of iterations the solver has run.
1310
1313
1311
1314
n_layers_ : int
1312
1315
Number of layers.
@@ -1317,13 +1320,6 @@ class MLPRegressor(RegressorMixin, BaseMultilayerPerceptron):
1317
1320
out_activation_ : str
1318
1321
Name of the output activation function.
1319
1322
1320
- loss_curve_ : list of shape (n_iters,)
1321
- Loss value evaluated at the end of each training step.
1322
-
1323
- t_ : int
1324
- Mathematically equals `n_iters * X.shape[0]`, it means
1325
- `time_step` and it is used by optimizer's learning rate scheduler.
1326
-
1327
1323
Examples
1328
1324
--------
1329
1325
>>> from sklearn.neural_network import MLPRegressor
0 commit comments