Replace lr with learning_rate in Keras optimizers, fixes #456

This commit is contained in:
Aurélien Geron
2021-08-31 20:54:35 +12:00
parent 1568ac3b94
commit 108fe1fa53
10 changed files with 94 additions and 94 deletions

View File

@@ -2717,7 +2717,7 @@
"n_epochs = 5\n",
"batch_size = 32\n",
"n_steps = len(X_train) // batch_size\n",
-"optimizer = keras.optimizers.Nadam(lr=0.01)\n",
+"optimizer = keras.optimizers.Nadam(learning_rate=0.01)\n",
"loss_fn = keras.losses.mean_squared_error\n",
"mean_loss = keras.metrics.Mean()\n",
"metrics = [keras.metrics.MeanAbsoluteError()]"
@@ -3828,7 +3828,7 @@
"n_epochs = 5\n",
"batch_size = 32\n",
"n_steps = len(X_train) // batch_size\n",
-"optimizer = keras.optimizers.Nadam(lr=0.01)\n",
+"optimizer = keras.optimizers.Nadam(learning_rate=0.01)\n",
"loss_fn = keras.losses.sparse_categorical_crossentropy\n",
"mean_loss = keras.metrics.Mean()\n",
"metrics = [keras.metrics.SparseCategoricalAccuracy()]"
@@ -3913,8 +3913,8 @@
"metadata": {},
"outputs": [],
"source": [
-"lower_optimizer = keras.optimizers.SGD(lr=1e-4)\n",
-"upper_optimizer = keras.optimizers.Nadam(lr=1e-3)"
+"lower_optimizer = keras.optimizers.SGD(learning_rate=1e-4)\n",
+"upper_optimizer = keras.optimizers.Nadam(learning_rate=1e-3)"
]
},
{