site/en/tutorials/images/segmentation.ipynb (29 changes: 17 additions & 12 deletions)
@@ -97,7 +97,10 @@
 },
 "outputs": [],
 "source": [
-"!pip install git+https://github.com/tensorflow/examples.git"
+"!pip install git+https://github.com/tensorflow/examples.git\n",
+"!pip install -U keras\n",
+"!pip install -q tensorflow_datasets\n",
+"!pip install -q -U tensorflow-text tensorflow"
 ]
 },
 {
@@ -108,8 +111,9 @@
 },
 "outputs": [],
 "source": [
-"import tensorflow as tf\n",
 "import numpy as np\n",
+"\n",
+"import tensorflow as tf\n",
 "import tensorflow_datasets as tfds"
 ]
 },
@@ -252,7 +256,7 @@
 " # both use the same seed, so they'll make the same random changes.\n",
 " self.augment_inputs = tf.keras.layers.RandomFlip(mode=\"horizontal\", seed=seed)\n",
 " self.augment_labels = tf.keras.layers.RandomFlip(mode=\"horizontal\", seed=seed)\n",
-" \n",
+"\n",
 " def call(self, inputs, labels):\n",
 " inputs = self.augment_inputs(inputs)\n",
 " labels = self.augment_labels(labels)\n",
@@ -450,7 +454,7 @@
 "source": [
 "## Train the model\n",
 "\n",
-"Now, all that is left to do is to compile and train the model. \n",
+"Now, all that is left to do is to compile and train the model.\n",
 "\n",
 "Since this is a multiclass classification problem, use the `tf.keras.losses.SparseCategoricalCrossentropy` loss function with the `from_logits` argument set to `True`, since the labels are scalar integers instead of vectors of scores for each pixel of every class.\n",
 "\n",
@@ -490,7 +494,7 @@
 },
 "outputs": [],
 "source": [
-"tf.keras.utils.plot_model(model, show_shapes=True)"
+"tf.keras.utils.plot_model(model, show_shapes=True, expand_nested=True, dpi=64)"
 ]
 },
 {
@@ -695,12 +699,14 @@
 },
 "outputs": [],
 "source": [
-"label = [0,0]\n",
-"prediction = [[-3., 0], [-3, 0]] \n",
-"sample_weight = [1, 10] \n",
+"label = np.array([0,0])\n",
+"prediction = np.array([[-3., 0], [-3, 0]])\n",
+"sample_weight = [1, 10]\n",
 "\n",
-"loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True,\n",
-"                                                     reduction=tf.keras.losses.Reduction.NONE)\n",
+"loss = tf.keras.losses.SparseCategoricalCrossentropy(\n",
+"    from_logits=True,\n",
+"    reduction=tf.keras.losses.Reduction.NONE\n",
+")\n",
 "loss(label, prediction, sample_weight).numpy()"
 ]
 },
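For reference, the output of that cell can be checked by hand; this is not part of the notebook, just arithmetic on the values above:

```python
import numpy as np

# Both rows have logits [-3, 0] and true class 0, so each unweighted loss is
#   -log(softmax([-3, 0])[0]) = log(1 + e**3) = ~3.0486
unweighted = np.log(1 + np.exp(3.0))
print(unweighted * np.array([1, 10]))  # ~[ 3.0486  30.486 ]
```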
@@ -729,7 +735,7 @@
 " class_weights = tf.constant([2.0, 2.0, 1.0])\n",
 " class_weights = class_weights/tf.reduce_sum(class_weights)\n",
 "\n",
-" # Create an image of `sample_weights` by using the label at each pixel as an \n",
+" # Create an image of `sample_weights` by using the label at each pixel as an\n",
 " # index into the `class weights` .\n",
 " sample_weights = tf.gather(class_weights, indices=tf.cast(label, tf.int32))\n",
 "\n",
@@ -811,7 +817,6 @@
 "metadata": {
 "accelerator": "GPU",
 "colab": {
-"collapsed_sections": [],
 "name": "segmentation.ipynb",
 "toc_visible": true
 },