Mirror of https://github.com/ArthurDanjou/ArtStudies.git (synced 2026-02-12 12:07:33 +01:00)
Refactor code for improved readability and consistency across multiple Jupyter notebooks
- Added missing trailing commas in various print statements and function calls for consistent syntax (see the sketch below).
- Reformatted code to enhance clarity, including breaking long lines and aligning parameters.
- Updated function signatures to use float type for sigma parameters instead of int for better precision.
- Cleaned up comments and documentation strings for clarity and consistency.
- Ensured consistent formatting in plotting functions and data handling.
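The change that accounts for most of the diff below is mechanical and identical in every cell: argument lists that previously sat on one line are split so that each keyword argument gets its own line and a trailing comma. A minimal before/after sketch of the pattern, using the notebook's compile call (the one-layer model here is only illustrative):

import tensorflow as tf

model = tf.keras.Sequential([tf.keras.layers.Dense(10, activation="softmax")])

# Before: every keyword argument on a single line, no trailing comma
model.compile(
    loss="sparse_categorical_crossentropy", optimizer="adam", metrics=["accuracy"]
)

# After: one argument per line, each ending in a trailing comma,
# so a later change to any single argument shows up as a one-line diff
model.compile(
    loss="sparse_categorical_crossentropy",
    optimizer="adam",
    metrics=["accuracy"],
)

The sigma annotation change mentioned above does not appear in the hunks shown here; a purely hypothetical signature illustrating what such a change looks like:

import numpy as np

def add_gaussian_noise(x: np.ndarray, sigma: float = 1.0) -> np.ndarray:  # was: sigma: int = 1
    # Scale standard normal noise by sigma and add it to the input
    return x + sigma * np.random.randn(*x.shape)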
@@ -92,6 +92,7 @@
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"import tensorflow as tf\n",
"\n",
"tf.keras.utils.set_random_seed(42)\n",
@@ -346,7 +347,7 @@
"        tf.keras.layers.Dense(300, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")"
]
},
@@ -691,7 +692,9 @@
"outputs": [],
"source": [
"model.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")"
]
},
@@ -1101,11 +1104,13 @@
"        tf.keras.layers.Dense(300, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"\n",
"model_10.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"\n",
"model_10.fit(X_train01, y_train, epochs=10, validation_data=(X_val01, y_val))"
@@ -1270,7 +1275,8 @@
],
"source": [
"early_stopping_cb = tf.keras.callbacks.EarlyStopping(\n",
"    patience=5, restore_best_weights=True\n",
"    patience=5,\n",
"    restore_best_weights=True,\n",
")\n",
"\n",
"model = tf.keras.Sequential(\n",
@@ -1280,11 +1286,13 @@
"        tf.keras.layers.Dense(300, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"\n",
"model.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"\n",
"history2 = model.fit(\n",
@@ -1598,10 +1606,12 @@
"        tf.keras.layers.Input(shape=[28, 28]),\n",
"        tf.keras.layers.Flatten(),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"reg_log.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"reg_log.fit(X_train01, y_train, epochs=90, validation_data=(X_val01, y_val))"
]
@@ -1709,10 +1719,12 @@
"        tf.keras.layers.Dense(300, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"model_ter.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"model_ter.fit(X_train, y_train, epochs=30, validation_data=(X_val, y_val))"
]
@@ -1820,10 +1832,12 @@
"        tf.keras.layers.Dense(300, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\", kernel_initializer=\"he_normal\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"model_5.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"\n",
"X_train_far_too_small, X_val_far_too_small = X_train / 25500.0, X_val / 25500.0\n",
@@ -1938,16 +1952,22 @@
"        tf.keras.layers.Input(shape=[28, 28]),\n",
"        tf.keras.layers.Flatten(),\n",
"        tf.keras.layers.Dense(\n",
"            300, activation=\"sigmoid\", kernel_initializer=\"he_normal\"\n",
"            300,\n",
"            activation=\"sigmoid\",\n",
"            kernel_initializer=\"he_normal\",\n",
"        ),\n",
"        tf.keras.layers.Dense(\n",
"            100, activation=\"sigmoid\", kernel_initializer=\"he_normal\"\n",
"            100,\n",
"            activation=\"sigmoid\",\n",
"            kernel_initializer=\"he_normal\",\n",
"        ),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"model_sig_norm.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"model_sig_norm.fit(X_train01, y_train, epochs=30, validation_data=(X_val, y_val))"
]
@@ -2043,16 +2063,22 @@
"        tf.keras.layers.Input(shape=[28, 28]),\n",
"        tf.keras.layers.Flatten(),\n",
"        tf.keras.layers.Dense(\n",
"            300, activation=\"sigmoid\", kernel_initializer=\"he_normal\"\n",
"            300,\n",
"            activation=\"sigmoid\",\n",
"            kernel_initializer=\"he_normal\",\n",
"        ),\n",
"        tf.keras.layers.Dense(\n",
"            100, activation=\"sigmoid\", kernel_initializer=\"he_normal\"\n",
"            100,\n",
"            activation=\"sigmoid\",\n",
"            kernel_initializer=\"he_normal\",\n",
"        ),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"model_sig_un_norm.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"model_sig_un_norm.fit(X_train, y_train, epochs=30, validation_data=(X_val, y_val))"
]
@@ -2220,17 +2246,19 @@
"        tf.keras.layers.Dense(300, activation=\"relu\"),\n",
"        tf.keras.layers.Dense(100, activation=\"relu\"),\n",
"        tf.keras.layers.Dense(10, activation=\"softmax\"),\n",
"    ]\n",
"    ],\n",
")\n",
"model_high_variance.layers[1].set_weights(\n",
"    [200 * np.random.randn(28 * 28, 300) / 100, np.zeros(300)]\n",
"    [200 * np.random.randn(28 * 28, 300) / 100, np.zeros(300)],\n",
")\n",
"model_high_variance.layers[2].set_weights(\n",
"    [200 * np.random.randn(300, 100) / 100, np.zeros(100)]\n",
"    [200 * np.random.randn(300, 100) / 100, np.zeros(100)],\n",
")\n",
"\n",
"model_high_variance.compile(\n",
"    loss=\"sparse_categorical_crossentropy\", optimizer=\"adam\", metrics=[\"accuracy\"]\n",
"    loss=\"sparse_categorical_crossentropy\",\n",
"    optimizer=\"adam\",\n",
"    metrics=[\"accuracy\"],\n",
")\n",
"\n",
"model_high_variance.fit(X_train01, y_train, epochs=60, validation_data=(X_val01, y_val))"