Replace lr with learning_rate in Keras optimizers, fixes #456

Author: Aurélien Geron
Date:   2021-08-31 20:54:35 +12:00
Parent: 1568ac3b94
Commit: 108fe1fa53
10 changed files with 94 additions and 94 deletions

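For context, a minimal sketch of the rename this commit applies, assuming a standard TensorFlow 2.x install: learning_rate is the canonical argument name for Keras optimizers, while the older lr spelling is deprecated (it logs a warning in TF 2.x and is rejected outright by later Keras releases).

from tensorflow import keras

# Deprecated spelling (what this commit removes); in TF 2.x it still runs
# but logs a deprecation warning:
#   optimizer = keras.optimizers.Adam(lr=0.01)

# Canonical spelling (what this commit switches to):
optimizer = keras.optimizers.Adam(learning_rate=0.01)

# The value round-trips through get_config() under the "learning_rate" key,
# a quick way to confirm which name the optimizer actually stores:
print(optimizer.get_config()["learning_rate"])  # 0.01 (up to float32 rounding)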

@@ -871,7 +871,7 @@
"metadata": {},
"outputs": [],
"source": [
"optimizer = keras.optimizers.Adam(lr=0.01)\n",
"optimizer = keras.optimizers.Adam(learning_rate=0.01)\n",
"loss_fn = keras.losses.binary_crossentropy"
]
},
@@ -1389,7 +1389,7 @@
"source": [
"batch_size = 32\n",
"discount_rate = 0.95\n",
"optimizer = keras.optimizers.Adam(lr=1e-2)\n",
"optimizer = keras.optimizers.Adam(learning_rate=1e-2)\n",
"loss_fn = keras.losses.mean_squared_error\n",
"\n",
"def training_step(batch_size):\n",
@@ -1534,7 +1534,7 @@
"source": [
"batch_size = 32\n",
"discount_rate = 0.95\n",
"optimizer = keras.optimizers.Adam(lr=6e-3)\n",
"optimizer = keras.optimizers.Adam(learning_rate=6e-3)\n",
"loss_fn = keras.losses.Huber()\n",
"\n",
"def training_step(batch_size):\n",
@@ -1682,7 +1682,7 @@
"source": [
"batch_size = 32\n",
"discount_rate = 0.95\n",
"optimizer = keras.optimizers.Adam(lr=7.5e-3)\n",
"optimizer = keras.optimizers.Adam(learning_rate=7.5e-3)\n",
"loss_fn = keras.losses.Huber()\n",
"\n",
"def training_step(batch_size):\n",
@@ -2207,7 +2207,7 @@
"\n",
"train_step = tf.Variable(0)\n",
"update_period = 4 # run a training step every 4 collect steps\n",
"optimizer = keras.optimizers.RMSprop(lr=2.5e-4, rho=0.95, momentum=0.0,\n",
"optimizer = keras.optimizers.RMSprop(learning_rate=2.5e-4, rho=0.95, momentum=0.0,\n",
" epsilon=0.00001, centered=True)\n",
"epsilon_fn = keras.optimizers.schedules.PolynomialDecay(\n",
" initial_learning_rate=1.0, # initial ε\n",
@@ -3032,7 +3032,7 @@
"metadata": {},
"outputs": [],
"source": [
"optimizer = keras.optimizers.Nadam(lr=0.005)\n",
"optimizer = keras.optimizers.Nadam(learning_rate=0.005)\n",
"loss_fn = keras.losses.sparse_categorical_crossentropy"
]
},
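The same two-token rename repeats across all 10 files, so a change like this is easy to script. A hypothetical helper, not part of the commit, that applies the substitution to every notebook in the repository root:

import pathlib
import re

# Matches lr= only as a whole keyword-argument name, so learning_rate=
# and identifiers such as flr= are left alone.
pattern = re.compile(r"\blr=")

for path in pathlib.Path(".").glob("*.ipynb"):
    text = path.read_text(encoding="utf-8")
    if pattern.search(text):
        path.write_text(pattern.sub("learning_rate=", text), encoding="utf-8")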