Commit 38fba9b
fix bug

perhapszzy authored and committed
1 parent ac44fb4

4 files changed: +138 -94 lines changed


Deep_Learning_with_TensorFlow/1.0.0/Chapter03/.ipynb_checkpoints/3. 完整神经网络样例程序-checkpoint.ipynb (+35 -26)
@@ -23,7 +23,7 @@
    "cell_type": "code",
    "execution_count": 2,
    "metadata": {
-    "collapsed": false
+    "collapsed": true
    },
    "outputs": [],
    "source": [
@@ -51,7 +51,9 @@
    "source": [
     "a = tf.matmul(x, w1)\n",
     "y = tf.matmul(a, w2)\n",
-    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))) \n",
+    "y = tf.sigmoid(y)\n",
+    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))\n",
+    "                                + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))\n",
     "train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)"
    ]
   },
@@ -85,33 +87,31 @@
   {
    "cell_type": "code",
    "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "w1: [[-0.81131822  1.48459876  0.06532937]\n",
-      " [-2.44270396  0.0992484   0.59122431]]\n",
-      "w2: [[-0.81131822]\n",
+      "[[-0.81131822  1.48459876  0.06532937]\n",
+      " [-2.4427042   0.0992484   0.59122431]]\n",
+      "[[-0.81131822]\n",
       " [ 1.48459876]\n",
       " [ 0.06532937]]\n",
       "\n",
       "\n",
-      "After 0 training step(s), cross entropy on all data is 0.0674925\n",
-      "After 1000 training step(s), cross entropy on all data is 0.0163385\n",
-      "After 2000 training step(s), cross entropy on all data is 0.00907547\n",
-      "After 3000 training step(s), cross entropy on all data is 0.00714436\n",
-      "After 4000 training step(s), cross entropy on all data is 0.00578471\n",
+      "After 0 training step(s), cross entropy on all data is 1.89805\n",
+      "After 1000 training step(s), cross entropy on all data is 0.655075\n",
+      "After 2000 training step(s), cross entropy on all data is 0.626172\n",
+      "After 3000 training step(s), cross entropy on all data is 0.615096\n",
+      "After 4000 training step(s), cross entropy on all data is 0.610309\n",
       "\n",
       "\n",
-      "w1: [[-1.9618274   2.58235407  1.68203783]\n",
-      " [-3.4681716   1.06982327  2.11788988]]\n",
-      "w2: [[-1.8247149 ]\n",
-      " [ 2.68546653]\n",
-      " [ 1.41819501]]\n"
+      "[[ 0.02476984  0.5694868   1.69219422]\n",
+      " [-2.19773483 -0.23668921  1.11438966]]\n",
+      "[[-0.45544702]\n",
+      " [ 0.49110931]\n",
+      " [-0.9811033 ]]\n"
      ]
     }
    ],
@@ -121,25 +121,34 @@
     "    sess.run(init_op)\n",
     "    \n",
     "    # Print the current (untrained) parameter values.\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)\n",
-    "    print \"\\n\"\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))\n",
+    "    print(\"\\n\")\n",
     "    \n",
     "    # Train the model.\n",
     "    STEPS = 5000\n",
     "    for i in range(STEPS):\n",
     "        start = (i*batch_size) % 128\n",
     "        end = (i*batch_size) % 128 + batch_size\n",
-    "        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})\n",
+    "        sess.run([train_step, y, y_], feed_dict={x: X[start:end], y_: Y[start:end]})\n",
     "        if i % 1000 == 0:\n",
     "            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})\n",
     "            print(\"After %d training step(s), cross entropy on all data is %g\" % (i, total_cross_entropy))\n",
     "    \n",
     "    # Print the parameter values after training.\n",
-    "    print \"\\n\"\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)"
+    "    print(\"\\n\")\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
@@ -158,7 +167,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
-  "version": "2.7.10"
+  "version": "2.7.13"
  }
 },
 "nbformat": 4,

Deep_Learning_with_TensorFlow/1.0.0/Chapter03/3. 完整神经网络样例程序.ipynb (+35 -26)
@@ -23,7 +23,7 @@
    "cell_type": "code",
    "execution_count": 2,
    "metadata": {
-    "collapsed": false
+    "collapsed": true
    },
    "outputs": [],
    "source": [
@@ -51,7 +51,9 @@
    "source": [
     "a = tf.matmul(x, w1)\n",
     "y = tf.matmul(a, w2)\n",
-    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))) \n",
+    "y = tf.sigmoid(y)\n",
+    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))\n",
+    "                                + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))\n",
     "train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)"
    ]
   },
@@ -85,33 +87,31 @@
   {
    "cell_type": "code",
    "execution_count": 5,
-   "metadata": {
-    "collapsed": false
-   },
+   "metadata": {},
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "w1: [[-0.81131822  1.48459876  0.06532937]\n",
-      " [-2.44270396  0.0992484   0.59122431]]\n",
-      "w2: [[-0.81131822]\n",
+      "[[-0.81131822  1.48459876  0.06532937]\n",
+      " [-2.4427042   0.0992484   0.59122431]]\n",
+      "[[-0.81131822]\n",
       " [ 1.48459876]\n",
       " [ 0.06532937]]\n",
       "\n",
       "\n",
-      "After 0 training step(s), cross entropy on all data is 0.0674925\n",
-      "After 1000 training step(s), cross entropy on all data is 0.0163385\n",
-      "After 2000 training step(s), cross entropy on all data is 0.00907547\n",
-      "After 3000 training step(s), cross entropy on all data is 0.00714436\n",
-      "After 4000 training step(s), cross entropy on all data is 0.00578471\n",
+      "After 0 training step(s), cross entropy on all data is 1.89805\n",
+      "After 1000 training step(s), cross entropy on all data is 0.655075\n",
+      "After 2000 training step(s), cross entropy on all data is 0.626172\n",
+      "After 3000 training step(s), cross entropy on all data is 0.615096\n",
+      "After 4000 training step(s), cross entropy on all data is 0.610309\n",
       "\n",
       "\n",
-      "w1: [[-1.9618274   2.58235407  1.68203783]\n",
-      " [-3.4681716   1.06982327  2.11788988]]\n",
-      "w2: [[-1.8247149 ]\n",
-      " [ 2.68546653]\n",
-      " [ 1.41819501]]\n"
+      "[[ 0.02476984  0.5694868   1.69219422]\n",
+      " [-2.19773483 -0.23668921  1.11438966]]\n",
+      "[[-0.45544702]\n",
+      " [ 0.49110931]\n",
+      " [-0.9811033 ]]\n"
      ]
     }
    ],
@@ -121,25 +121,34 @@
     "    sess.run(init_op)\n",
     "    \n",
     "    # Print the current (untrained) parameter values.\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)\n",
-    "    print \"\\n\"\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))\n",
+    "    print(\"\\n\")\n",
     "    \n",
     "    # Train the model.\n",
     "    STEPS = 5000\n",
     "    for i in range(STEPS):\n",
     "        start = (i*batch_size) % 128\n",
     "        end = (i*batch_size) % 128 + batch_size\n",
-    "        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})\n",
+    "        sess.run([train_step, y, y_], feed_dict={x: X[start:end], y_: Y[start:end]})\n",
     "        if i % 1000 == 0:\n",
     "            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})\n",
     "            print(\"After %d training step(s), cross entropy on all data is %g\" % (i, total_cross_entropy))\n",
     "    \n",
     "    # Print the parameter values after training.\n",
-    "    print \"\\n\"\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)"
+    "    print(\"\\n\")\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
@@ -158,7 +167,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
-  "version": "2.7.10"
+  "version": "2.7.13"
  }
 },
 "nbformat": 4,

Deep_Learning_with_TensorFlow/1.4.0/Chapter03/.ipynb_checkpoints/3. 完整神经网络样例程序-checkpoint.ipynb (+34 -21)
@@ -51,7 +51,9 @@
    "source": [
     "a = tf.matmul(x, w1)\n",
     "y = tf.matmul(a, w2)\n",
-    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))) \n",
+    "y = tf.sigmoid(y)\n",
+    "cross_entropy = -tf.reduce_mean(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))\n",
+    "                                + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))\n",
     "train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)"
    ]
   },
@@ -85,31 +87,33 @@
   {
    "cell_type": "code",
    "execution_count": 5,
-   "metadata": {},
+   "metadata": {
+    "scrolled": true
+   },
    "outputs": [
     {
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "w1: [[-0.81131822  1.48459876  0.06532937]\n",
+      "[[-0.81131822  1.48459876  0.06532937]\n",
       " [-2.4427042   0.0992484   0.59122431]]\n",
-      "w2: [[-0.81131822]\n",
+      "[[-0.81131822]\n",
       " [ 1.48459876]\n",
       " [ 0.06532937]]\n",
       "\n",
       "\n",
-      "After 0 training step(s), cross entropy on all data is 0.0674925\n",
-      "After 1000 training step(s), cross entropy on all data is 0.0163385\n",
-      "After 2000 training step(s), cross entropy on all data is 0.00907547\n",
-      "After 3000 training step(s), cross entropy on all data is 0.00714436\n",
-      "After 4000 training step(s), cross entropy on all data is 0.00578471\n",
+      "After 0 training step(s), cross entropy on all data is 1.89805\n",
+      "After 1000 training step(s), cross entropy on all data is 0.655075\n",
+      "After 2000 training step(s), cross entropy on all data is 0.626172\n",
+      "After 3000 training step(s), cross entropy on all data is 0.615096\n",
+      "After 4000 training step(s), cross entropy on all data is 0.610309\n",
       "\n",
       "\n",
-      "w1: [[-1.96182752  2.58235407  1.68203771]\n",
-      " [-3.46817183  1.06982315  2.11788988]]\n",
-      "w2: [[-1.82471502]\n",
-      " [ 2.68546653]\n",
-      " [ 1.41819501]]\n"
+      "[[ 0.02476984  0.5694868   1.69219422]\n",
+      " [-2.19773483 -0.23668921  1.11438966]]\n",
+      "[[-0.45544702]\n",
+      " [ 0.49110931]\n",
+      " [-0.9811033 ]]\n"
      ]
     }
    ],
@@ -119,25 +123,34 @@
     "    sess.run(init_op)\n",
     "    \n",
     "    # Print the current (untrained) parameter values.\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)\n",
-    "    print \"\\n\"\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))\n",
+    "    print(\"\\n\")\n",
     "    \n",
     "    # Train the model.\n",
     "    STEPS = 5000\n",
     "    for i in range(STEPS):\n",
     "        start = (i*batch_size) % 128\n",
     "        end = (i*batch_size) % 128 + batch_size\n",
-    "        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})\n",
+    "        sess.run([train_step, y, y_], feed_dict={x: X[start:end], y_: Y[start:end]})\n",
     "        if i % 1000 == 0:\n",
     "            total_cross_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})\n",
     "            print(\"After %d training step(s), cross entropy on all data is %g\" % (i, total_cross_entropy))\n",
     "    \n",
     "    # Print the parameter values after training.\n",
-    "    print \"\\n\"\n",
-    "    print \"w1:\", sess.run(w1)\n",
-    "    print \"w2:\", sess.run(w2)"
+    "    print(\"\\n\")\n",
+    "    print(sess.run(w1))\n",
+    "    print(sess.run(w2))"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
