Change Embedding's input_dim argument

The decoder's embedding layer was built with the wrong input_dim argument: it used len(INPUT_CHARS) + 2 instead of len(OUTPUT_CHARS) + 2.
Ian Beauregard
2020-10-19 17:17:35 -04:00
parent e0cae0c7be
commit 2c700450b5


@@ -2063,7 +2063,7 @@
" len(INPUT_CHARS) + 1, encoder_embedding_size)(encoder_inputs)\n",
"\n",
"decoder_embedding_layer = keras.layers.Embedding(\n",
" len(INPUT_CHARS) + 2, decoder_embedding_size)\n",
" len(OUTPUT_CHARS) + 2, decoder_embedding_size)\n",
"decoder_embeddings = decoder_embedding_layer(decoder_inputs)\n",
"\n",
"encoder = keras.layers.LSTM(units, return_state=True)\n",
@@ -2260,7 +2260,7 @@
" len(INPUT_CHARS) + 1, encoder_embedding_size)(encoder_inputs)\n",
"\n",
"decoder_embedding_layer = keras.layers.Embedding(\n",
" len(INPUT_CHARS) + 2, decoder_embedding_size)\n",
" len(OUTPUT_CHARS) + 2, decoder_embedding_size)\n",
"decoder_embeddings = decoder_embedding_layer(decoder_inputs)\n",
"\n",
"encoder = keras.layers.LSTM(units, return_state=True)\n",