
Commit

r1.2 initial commit
Latest version (r1.2)
lemonadegt committed Jun 26, 2017
1 parent 80f9205 commit 78a6f19
Showing 17 changed files with 22 additions and 22 deletions.
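
Every notebook change below applies the same TensorFlow r0.x-to-r1.2 API renames. A minimal sketch of the r1.2 calls, for reference only; the variable, summary name, and log directory here are placeholders, not taken from the notebooks:

import tensorflow as tf

loss = tf.Variable(0.0, name='loss')                      # placeholder variable to summarize
tf.summary.scalar("loss", loss)                           # was: tf.scalar_summary("loss", loss)
# (tf.histogram_summary is likewise now tf.summary.histogram)

sess = tf.InteractiveSession()
sess.run(tf.global_variables_initializer())              # was: tf.initialize_all_variables()
summary = tf.summary.merge_all()                          # was: tf.merge_all_summaries()
writer = tf.summary.FileWriter("/tmp/logs", sess.graph)  # was: tf.train.SummaryWriter(...)
writer.add_summary(sess.run(summary), 0)                  # write a single summary record
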
2 changes: 1 addition & 1 deletion Chapter01/Least squares example.ipynb
@@ -144,7 +144,7 @@
"outputs": [],
"source": [
"sess = tf.Session()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
@@ -124,7 +124,7 @@
"outputs": [],
"source": [
"sess = tf.Session()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter02/MNIST softmax estimation.ipynb
@@ -129,7 +129,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter02/Maximum likelihood estimation example.ipynb
@@ -440,7 +440,7 @@
"outputs": [],
"source": [
"sess = tf.Session()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter03/Double layer network example.ipynb
@@ -146,7 +146,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
@@ -113,7 +113,7 @@
" \n",
" def prepare_session(self):\n",
" sess = tf.InteractiveSession()\n",
" sess.run(tf.initialize_all_variables())\n",
" sess.run(tf.global_variables_initializer())\n",
" summary = tf.summary.merge_all()\n",
" writer = tf.summary.FileWriter(\"/tmp/mnist_sl_logs\", sess.graph)\n",
" \n",
2 changes: 1 addition & 1 deletion Chapter03/MNIST single layer network.ipynb
@@ -119,7 +119,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter03/Single layer network example.ipynb
@@ -142,7 +142,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
@@ -119,9 +119,9 @@
" accuracy = tf.reduce_mean(tf.cast(correct_prediction,\n",
" tf.float32), name='accuracy')\n",
" \n",
" tf.scalar_summary(\"loss\", loss)\n",
" tf.scalar_summary(\"accuracy\", accuracy)\n",
" tf.histogram_summary(\"convolution_filters\", W_conv)\n",
" tf.summary.scalar(\"loss\", loss)\n",
" tf.summary.scalar(\"accuracy\", accuracy)\n",
" tf.summary.histogram(\"convolution_filters\", W_conv)\n",
" \n",
" self.x, self.t, self.p = x, t, p\n",
" self.train_step = train_step\n",
@@ -130,9 +130,9 @@
" \n",
" def prepare_session(self):\n",
" sess = tf.InteractiveSession()\n",
" sess.run(tf.initialize_all_variables())\n",
" summary = tf.merge_all_summaries()\n",
" writer = tf.train.SummaryWriter(\"/tmp/mnist_df_logs\", sess.graph)\n",
" sess.run(tf.global_variables_initializer())\n",
" summary = tf.summary.merge_all()\n",
" writer = tf.summary.FileWriter(\"/tmp/mnist_df_logs\", sess.graph)\n",
" \n",
" self.sess = sess\n",
" self.summary = summary\n",
2 changes: 1 addition & 1 deletion Chapter04/MNIST dynamic filter classification.ipynb
@@ -149,7 +149,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())\n",
"sess.run(tf.global_variables_initializer())\n",
"saver = tf.train.Saver()"
]
},
2 changes: 1 addition & 1 deletion Chapter04/MNIST dynamic filter result.ipynb
@@ -145,7 +145,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())\n",
"sess.run(tf.global_variables_initializer())\n",
"saver = tf.train.Saver()\n",
"saver.restore(sess, 'mdc_session-4000')"
]
2 changes: 1 addition & 1 deletion Chapter04/ORENIST classification example.ipynb
@@ -174,7 +174,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter04/ORENIST dynamic filter example.ipynb
@@ -138,7 +138,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter04/ORENIST filter example.ipynb
@@ -156,7 +156,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())"
"sess.run(tf.global_variables_initializer())"
]
},
{
2 changes: 1 addition & 1 deletion Chapter05/Handwriting recognizer.ipynb
@@ -131,7 +131,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())\n",
"sess.run(tf.global_variables_initializer())\n",
"saver = tf.train.Saver()\n",
"saver.restore(sess, 'cnn_session-20000')"
]
2 changes: 1 addition & 1 deletion Chapter05/MNIST double layer CNN classification.ipynb
@@ -185,7 +185,7 @@
"outputs": [],
"source": [
"sess = tf.InteractiveSession()\n",
"sess.run(tf.initialize_all_variables())\n",
"sess.run(tf.global_variables_initializer())\n",
"saver = tf.train.Saver()"
]
},
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
# jupyter_tfbook

Jupyter Notebooks for TensorFlow Book
-(Suitable for tensorflow r0.x)
+(Suitable for tensorflow r1.2)

Disclaimer: This is not an official Google product
