Refactor code for improved readability and consistency across multiple Jupyter notebooks

- Added trailing commas in multi-line print statements and function calls for consistent formatting.
- Reformatted code to enhance clarity, including breaking long lines and aligning parameters.
- Updated function signatures to use float type for sigma parameters instead of int for better precision.
- Cleaned up comments and documentation strings for clarity and consistency.
- Ensured consistent formatting in plotting functions and data handling.
This commit is contained in:
2025-12-13 23:38:17 +01:00
parent f89ff4a016
commit d5a6bfd339
50 changed files with 779 additions and 449 deletions

View File

@@ -27,25 +27,32 @@
"\n",
"sns.set(style=\"whitegrid\")\n",
"\n",
"import tensorflow as tf\n",
"from sklearn.model_selection import train_test_split\n",
"from sklearn.preprocessing import StandardScaler\n",
"from tensorflow import keras\n",
"\n",
"(X_train_full, y_train_full), (X_test, y_test) = keras.datasets.mnist.load_data()\n",
"X_train, X_valid, y_train, y_valid = train_test_split(\n",
" X_train_full, y_train_full, train_size=0.8\n",
" X_train_full,\n",
" y_train_full,\n",
" train_size=0.8,\n",
")\n",
"\n",
"scaler = StandardScaler()\n",
"X_train = scaler.fit_transform(X_train.astype(np.float32).reshape(-1, 28 * 28)).reshape(\n",
" -1, 28, 28\n",
" -1,\n",
" 28,\n",
" 28,\n",
")\n",
"X_valid = scaler.transform(X_valid.astype(np.float32).reshape(-1, 28 * 28)).reshape(\n",
" -1, 28, 28\n",
" -1,\n",
" 28,\n",
" 28,\n",
")\n",
"X_test = scaler.transform(X_test.astype(np.float32).reshape(-1, 28 * 28)).reshape(\n",
" -1, 28, 28\n",
" -1,\n",
" 28,\n",
" 28,\n",
")"
]
},
@@ -79,13 +86,17 @@
" keras.layers.Input(shape=[28, 28]),\n",
" keras.layers.Flatten(),\n",
" keras.layers.Dense(\n",
" 256, activation=\"relu\", kernel_regularizer=keras.regularizers.l2(0.001)\n",
" 256,\n",
" activation=\"relu\",\n",
" kernel_regularizer=keras.regularizers.l2(0.001),\n",
" ),\n",
" keras.layers.Dense(\n",
" 128, activation=\"relu\", kernel_regularizer=keras.regularizers.l2(0.001)\n",
" 128,\n",
" activation=\"relu\",\n",
" kernel_regularizer=keras.regularizers.l2(0.001),\n",
" ),\n",
" keras.layers.Dense(10, activation=\"softmax\"),\n",
" ]\n",
" ],\n",
")"
]
},
@@ -174,7 +185,7 @@
" kernel_regularizer=keras.regularizers.l2(lambda_l2),\n",
" ),\n",
" keras.layers.Dense(10, activation=\"softmax\"),\n",
" ]\n",
" ],\n",
" )\n",
" model.compile(\n",
" loss=\"sparse_categorical_crossentropy\",\n",
@@ -220,7 +231,7 @@
" \"lambda_l2\": lambda_l2,\n",
" \"history\": pd.DataFrame(history.history),\n",
" \"n_epochs\": n_epochs,\n",
" }\n",
" },\n",
" )"
]
},

View File

@@ -58,7 +58,10 @@
"from sklearn.model_selection import train_test_split\n",
"\n",
"X_train, X_valid, y_train, y_valid = train_test_split(\n",
" X_train_full, y_train_full, test_size=0.2, random_state=42\n",
" X_train_full,\n",
" y_train_full,\n",
" test_size=0.2,\n",
" random_state=42,\n",
")\n",
"print(X_train.shape, y_train.shape)\n",
"print(X_valid.shape, y_valid.shape)"
@@ -181,7 +184,7 @@
" keras.layers.Dense(256, activation=\"relu\"),\n",
" keras.layers.Dense(128, activation=\"relu\"),\n",
" keras.layers.Dense(10, activation=\"softmax\"),\n",
" ]\n",
" ],\n",
")"
]
},
@@ -563,7 +566,7 @@
" keras.layers.Dense(256, activation=\"relu\"),\n",
" keras.layers.Dense(128, activation=\"relu\"),\n",
" keras.layers.Dense(10, activation=\"softmax\"),\n",
" ]\n",
" ],\n",
" )\n",
" model.compile(\n",
" loss=\"sparse_categorical_crossentropy\",\n",
@@ -673,7 +676,10 @@
" plt.subplot(1, 2, 1)\n",
" plt.plot(history_df[\"val_loss\"], linestyle=\"--\", color=colors[_])\n",
" plt.plot(\n",
" history_df[\"loss\"], label=f\"LR={learning_rate}\", alpha=0.5, color=colors[_]\n",
" history_df[\"loss\"],\n",
" label=f\"LR={learning_rate}\",\n",
" alpha=0.5,\n",
" color=colors[_],\n",
" )\n",
" plt.xlabel(\"Epochs\")\n",
" plt.ylabel(\"Loss\")\n",