Large change: replace os.path with pathlib, move to Python 3.7

Author: Aurélien Geron
Date: 2021-10-15 21:46:27 +13:00
parent 1b16a81fe5
commit fa1ae51184

19 changed files with 969 additions and 1066 deletions
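
The substance of the change is a one-for-one swap of os.path calls for pathlib.Path operations throughout the notebooks. A minimal sketch of the main correspondence used below (paths are illustrative only):

    import os
    from pathlib import Path

    # Old style: string paths glued together with os.path
    images_dir_old = os.path.join(".", "images", "deploy")
    os.makedirs(images_dir_old, exist_ok=True)

    # New style: Path objects composed with "/"
    images_dir_new = Path() / "images" / "deploy"
    images_dir_new.mkdir(parents=True, exist_ok=True)

    # Both spell the same directory
    assert Path(images_dir_old) == images_dir_new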


@@ -11,7 +11,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"_This notebook contains all the sample code in chapter 19._"
"_This notebook contains all the sample code and solutions to the exercises in chapter 19._"
]
},
{
@@ -32,8 +32,14 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"# Setup\n",
"First, let's import a few common modules, ensure MatplotLib plots figures inline and prepare a function to save the figures. We also check that Python 3.5 or later is installed (although Python 2.x may work, it is deprecated so we strongly recommend you use Python 3 instead), as well as Scikit-Learn ≥0.20 and TensorFlow ≥2.0.\n"
"# Setup"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"First, let's import a few common modules, ensure MatplotLib plots figures inline and prepare a function to save the figures."
]
},
{
@@ -42,9 +48,9 @@
"metadata": {},
"outputs": [],
"source": [
"# Python ≥3.5 is required\n",
"# Python ≥3.7 is required\n",
"import sys\n",
"assert sys.version_info >= (3, 5)\n",
"assert sys.version_info >= (3, 7)\n",
"\n",
"# Is this notebook running on Colab or Kaggle?\n",
"IS_COLAB = \"google.colab\" in sys.modules\n",
@@ -56,30 +62,31 @@
" !apt update && apt-get install -y tensorflow-model-server\n",
" %pip install -q -U tensorflow-serving-api\n",
"\n",
"# Scikit-Learn ≥0.20 is required\n",
"import sklearn\n",
"assert sklearn.__version__ >= \"0.20\"\n",
"# Common imports\n",
"import os\n",
"import numpy as np\n",
"from pathlib import Path\n",
"\n",
"# TensorFlow2.0 is required\n",
"# Scikit-Learn1.0 is required\n",
"import sklearn\n",
"assert sklearn.__version__ >= \"1.0\"\n",
"\n",
"# TensorFlow ≥2.6 is required\n",
"import tensorflow as tf\n",
"from tensorflow import keras\n",
"assert tf.__version__ >= \"2.0\"\n",
"\n",
"if not tf.config.list_physical_devices('GPU'):\n",
" print(\"No GPU was detected. CNNs can be very slow without a GPU.\")\n",
" if IS_COLAB:\n",
" print(\"Go to Runtime > Change runtime and select a GPU hardware accelerator.\")\n",
" if IS_KAGGLE:\n",
" print(\"Go to Settings > Accelerator and select GPU.\")\n",
"\n",
"# Common imports\n",
"import numpy as np\n",
"import os\n",
"assert tf.__version__ >= \"2.6\"\n",
"\n",
"# to make this notebook's output stable across runs\n",
"np.random.seed(42)\n",
"tf.random.set_seed(42)\n",
"\n",
"if not tf.config.list_physical_devices('GPU'):\n",
" print(\"No GPU was detected. Neural nets can be very slow without a GPU.\")\n",
" if IS_COLAB:\n",
" print(\"Go to Runtime > Change runtime and select a GPU hardware accelerator.\")\n",
" if IS_KAGGLE:\n",
" print(\"Go to Settings > Accelerator and select GPU.\")\n",
"\n",
"# To plot pretty figures\n",
"%matplotlib inline\n",
"import matplotlib as mpl\n",
@@ -89,14 +96,11 @@
"mpl.rc('ytick', labelsize=12)\n",
"\n",
"# Where to save the figures\n",
"PROJECT_ROOT_DIR = \".\"\n",
"CHAPTER_ID = \"deploy\"\n",
"IMAGES_PATH = os.path.join(PROJECT_ROOT_DIR, \"images\", CHAPTER_ID)\n",
"os.makedirs(IMAGES_PATH, exist_ok=True)\n",
"IMAGES_PATH = Path() / \"images\" / \"deploy\"\n",
"IMAGES_PATH.mkdir(parents=True, exist_ok=True)\n",
"\n",
"def save_fig(fig_id, tight_layout=True, fig_extension=\"png\", resolution=300):\n",
" path = os.path.join(IMAGES_PATH, fig_id + \".\" + fig_extension)\n",
" print(\"Saving figure\", fig_id)\n",
" path = IMAGES_PATH / f\"{fig_id}.{fig_extension}\"\n",
" if tight_layout:\n",
" plt.tight_layout()\n",
" plt.savefig(path, format=fig_extension, dpi=resolution)"
@@ -168,7 +172,7 @@
"source": [
"model_version = \"0001\"\n",
"model_name = \"my_mnist_model\"\n",
"model_path = os.path.join(model_name, model_version)\n",
"model_path = Path() / model_name / model_version\n",
"model_path"
]
},
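
Since model_path is now a Path rather than a str, the commit wraps it in str() wherever the callee may expect a plain string (see tf.saved_model.save in the next hunk). A small sketch of what the object holds:

    from pathlib import Path

    model_path = Path() / "my_mnist_model" / "0001"
    print(model_path)        # my_mnist_model/0001 (backslashes on Windows)
    print(str(model_path))   # same text, as a plain str for string-only APIs
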
@@ -187,7 +191,14 @@
"metadata": {},
"outputs": [],
"source": [
"tf.saved_model.save(model, model_path)"
"tf.saved_model.save(model, str(model_path))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's define a `tree()` function to view the structure of the `my_mnist_model` directory:"
]
},
{
@@ -196,20 +207,19 @@
"metadata": {},
"outputs": [],
"source": [
"for root, dirs, files in os.walk(model_name):\n",
" indent = ' ' * root.count(os.sep)\n",
" print('{}{}/'.format(indent, os.path.basename(root)))\n",
" for filename in files:\n",
" print('{}{}'.format(indent + ' ', filename))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"!saved_model_cli show --dir {model_path}"
"def tree(path, level=0, indent=4):\n",
" if level == 0:\n",
" print(f\"{path}/\")\n",
" level += 1\n",
" sub_paths = sorted(path.iterdir())\n",
" sub_dirs = [sub_path for sub_path in sub_paths if sub_path.is_dir()]\n",
" filepaths = [sub_path for sub_path in sub_paths if not sub_path in sub_dirs]\n",
" indent_str = \" \" * indent * level\n",
" for sub_dir in sub_dirs:\n",
" print(f\"{indent_str}{sub_dir.name}/\")\n",
" tree(sub_dir, level + 1, indent)\n",
" for filepath in filepaths:\n",
" print(f\"{indent_str}{filepath.name}\")"
]
},
{
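
For comparison, the listing that the removed os.walk() loop produced can also be written with pathlib's rglob(); a rough sketch (not the notebook's tree() helper, and the ordering of entries differs slightly):

    from pathlib import Path

    def walk_tree(root):
        root = Path(root)
        print(f"{root.name}/")
        for sub_path in sorted(root.rglob("*")):
            depth = len(sub_path.relative_to(root).parts)
            suffix = "/" if sub_path.is_dir() else ""
            print(f"{'    ' * depth}{sub_path.name}{suffix}")

    # walk_tree("my_mnist_model")  # expect 0001/, assets/, saved_model.pb, variables/, ...
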
@@ -218,7 +228,7 @@
"metadata": {},
"outputs": [],
"source": [
"!saved_model_cli show --dir {model_path} --tag_set serve"
"tree(model_path.parent)"
]
},
{
@@ -226,6 +236,24 @@
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"!saved_model_cli show --dir {model_path}"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"!saved_model_cli show --dir {model_path} --tag_set serve"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"!saved_model_cli show --dir {model_path} --tag_set serve \\\n",
" --signature_def serving_default"
@@ -233,7 +261,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
@@ -249,7 +277,7 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
@@ -258,7 +286,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
@@ -275,7 +303,7 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 17,
"metadata": {},
"outputs": [],
"source": [
@@ -286,16 +314,20 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": 19,
"metadata": {},
"outputs": [],
"source": [
"np.round([[1.1347984e-04, 1.5187356e-07, 9.7032893e-04, 2.7640699e-03, 3.7826971e-06,\n",
" 7.6876910e-05, 3.9140293e-08, 9.9559116e-01, 5.3502394e-05, 4.2665208e-04],\n",
" [8.2443521e-04, 3.5493889e-05, 9.8826385e-01, 7.0466995e-03, 1.2957400e-07,\n",
" 2.3389691e-04, 2.5639210e-03, 9.5886099e-10, 1.0314899e-03, 8.7952529e-08],\n",
" [4.4693781e-05, 9.7028232e-01, 9.0526715e-03, 2.2641101e-03, 4.8766597e-04,\n",
" 2.8800720e-03, 2.2714981e-03, 8.3753867e-03, 4.0439744e-03, 2.9759688e-04]], 2)"
"np.round(\n",
" [[1.14172166e-04, 1.51857336e-07, 9.79080913e-04, 2.77538411e-03,\n",
" 3.75553282e-06, 7.66718149e-05, 3.91490929e-08, 9.95566308e-01,\n",
" 5.34432293e-05, 4.30987304e-04],\n",
" [8.14584550e-04, 3.54881959e-05, 9.88290966e-01, 7.04165967e-03,\n",
" 1.27466748e-07, 2.31963830e-04, 2.55776616e-03, 9.73469416e-10,\n",
" 1.02734682e-03, 8.74494361e-08],\n",
" [4.42889832e-05, 9.70350444e-01, 9.02883708e-03, 2.26117787e-03,\n",
" 4.85437602e-04, 2.87237833e-03, 2.26676138e-03, 8.35481752e-03,\n",
" 4.03870409e-03, 2.97143910e-04]], 2)"
]
},
{
@@ -332,11 +364,11 @@
},
{
"cell_type": "code",
"execution_count": 17,
"execution_count": 30,
"metadata": {},
"outputs": [],
"source": [
"os.environ[\"MODEL_DIR\"] = os.path.split(os.path.abspath(model_path))[0]"
"os.environ[\"MODEL_DIR\"] = str(model_path.absolute().parent)"
]
},
{
@@ -519,7 +551,7 @@
},
{
"cell_type": "code",
"execution_count": 30,
"execution_count": 32,
"metadata": {
"scrolled": true
},
@@ -542,36 +574,32 @@
},
{
"cell_type": "code",
"execution_count": 31,
"execution_count": 33,
"metadata": {},
"outputs": [],
"source": [
"model_version = \"0002\"\n",
"model_name = \"my_mnist_model\"\n",
"model_path = os.path.join(model_name, model_version)\n",
"model_path = Path() / model_name / model_version\n",
"model_path"
]
},
{
"cell_type": "code",
"execution_count": 32,
"execution_count": 35,
"metadata": {},
"outputs": [],
"source": [
"tf.saved_model.save(model, model_path)"
"tf.saved_model.save(model, str(model_path))"
]
},
{
"cell_type": "code",
"execution_count": 33,
"execution_count": 36,
"metadata": {},
"outputs": [],
"source": [
"for root, dirs, files in os.walk(model_name):\n",
" indent = ' ' * root.count(os.sep)\n",
" print('{}{}/'.format(indent, os.path.basename(root)))\n",
" for filename in files:\n",
" print('{}{}'.format(indent + ' ', filename))"
"tree(model_path.parent)"
]
},
{
@@ -955,7 +983,6 @@
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import json\n",
"\n",
"os.environ[\"TF_CONFIG\"] = json.dumps({\n",
@@ -1028,7 +1055,6 @@
"source": [
"%%writefile my_mnist_multiworker_task.py\n",
"\n",
"import os\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"from tensorflow import keras\n",
@@ -1042,9 +1068,9 @@
"\n",
"# Only worker #0 will write checkpoints and log to TensorBoard\n",
"if resolver.task_id == 0:\n",
" root_logdir = os.path.join(os.curdir, \"my_mnist_multiworker_logs\")\n",
" root_logdir = Path() / \"my_mnist_multiworker_logs\"\n",
" run_id = time.strftime(\"run_%Y_%m_%d-%H_%M_%S\")\n",
" run_dir = os.path.join(root_logdir, run_id)\n",
" run_dir = root_logdir / run_id\n",
" callbacks = [\n",
" keras.callbacks.TensorBoard(run_dir),\n",
" keras.callbacks.ModelCheckpoint(\"my_mnist_multiworker_model.h5\",\n",
@@ -1240,7 +1266,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},