Skip to content

Commit d627f7e

Browse files
committed
rename target operation
1 parent e90b26e commit d627f7e

File tree

4 files changed

+121
-9
lines changed

4 files changed

+121
-9
lines changed

examples/mnist.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,10 +15,6 @@ use tensorflow::Tensor;
1515
use image::io::Reader as ImageReader;
1616
use image::GenericImageView;
1717

18-
#[cfg_attr(feature = "examples_system_alloc", global_allocator)]
19-
#[cfg(feature = "examples_system_alloc")]
20-
static ALLOCATOR: std::alloc::System = std::alloc::System;
21-
2218
fn main() -> Result<(), Box<dyn Error>> {
2319
let filename = "examples/mnist/model.pb";
2420
if !Path::new(filename).exists() {
@@ -52,7 +48,7 @@ fn main() -> Result<(), Box<dyn Error>> {
5248
// Run the graph.
5349
let mut args = SessionRunArgs::new();
5450
args.add_feed(&graph.operation_by_name_required("x")?, 0, &x);
55-
let output = args.request_fetch(&graph.operation_by_name_required("Identity")?, 0);
51+
let output = args.request_fetch(&graph.operation_by_name_required("output/Softmax")?, 0);
5652
session.run(&mut args)?;
5753

5854
// Check our results.

examples/mnist/expected_values.txt

Lines changed: 113 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,113 @@
1-
[4.6020253e-07 1.1954968e-07 3.3753083e-04 7.1495533e-04 5.3654858e-12
2-
2.3896009e-06 3.2634282e-14 9.9892515e-01 8.8958734e-07 1.8540532e-05]
1+
[0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
2+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
3+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
4+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
5+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
6+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
7+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
8+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
9+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
10+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
11+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
12+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
13+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
14+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
15+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
16+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
17+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
18+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
19+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
20+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
21+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
22+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
23+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
24+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
25+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
26+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
27+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
28+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
29+
0.03571429 0.03571429 0.03571429 0.03571429], [0.03169955 0.03169955 0.03169955 0.03169955 0.03169955 0.03169955
30+
0.04406714 0.06548315 0.05913553 0.0573091 0.04010884 0.03650609
31+
0.03169955 0.03169955 0.03169955 0.03169955 0.03169955 0.03169955
32+
0.03169955 0.03169955 0.03169955 0.03169955 0.03169955 0.03169955
33+
0.03169955 0.03169955 0.03169955 0.03169955], [0.02067988 0.02067988 0.02067988 0.02067988 0.02067988 0.02067988
34+
0.04939008 0.05599373 0.05599373 0.05599373 0.05599373 0.05321069
35+
0.04495365 0.04495365 0.04495365 0.04495365 0.04495365 0.04495365
36+
0.04495365 0.04495365 0.04027891 0.02535771 0.02067988 0.02067988
37+
0.02067988 0.02067988 0.02067988 0.02067988], [0.02100883 0.02100883 0.02100883 0.02100883 0.02100883 0.02100883
38+
0.02732188 0.03285164 0.02786289 0.03285164 0.03981161 0.05116926
39+
0.05688441 0.0507695 0.05688441 0.05688441 0.05688441 0.05599906
40+
0.05157216 0.05688441 0.05688441 0.03637794 0.02100883 0.02100883
41+
0.02100883 0.02100883 0.02100883 0.02100883], [0.02992705 0.02992705 0.02992705 0.02992705 0.02992705 0.02992705
42+
0.02992705 0.02992705 0.02992705 0.02992705 0.02992705 0.03199019
43+
0.03876765 0.03161604 0.03891998 0.03891998 0.03891998 0.03771792
44+
0.03249595 0.07550908 0.08103175 0.04535176 0.02992705 0.02992705
45+
0.02992705 0.02992705 0.02992705 0.02992705], [0.0318223 0.0318223 0.0318223 0.0318223 0.0318223 0.0318223
46+
0.0318223 0.0318223 0.0318223 0.0318223 0.0318223 0.0318223
47+
0.0318223 0.0318223 0.0318223 0.0318223 0.0318223 0.0318223
48+
0.04406463 0.08582619 0.0722242 0.03414976 0.0318223 0.0318223
49+
0.0318223 0.0318223 0.0318223 0.0318223 ], [0.03155899 0.03155899 0.03155899 0.03155899 0.03155899 0.03155899
50+
0.03155899 0.03155899 0.03155899 0.03155899 0.03155899 0.03155899
51+
0.03155899 0.03155899 0.03155899 0.03155899 0.03155899 0.03440263
52+
0.07869533 0.08578622 0.04370002 0.03155899 0.03155899 0.03155899
53+
0.03155899 0.03155899 0.03155899 0.03155899], [0.03115518 0.03115518 0.03115518 0.03115518 0.03115518 0.03115518
54+
0.03115518 0.03115518 0.03115518 0.03115518 0.03115518 0.03115518
55+
0.03115518 0.03115518 0.03115518 0.03115518 0.03115518 0.05166925
56+
0.0843571 0.07922674 0.03702264 0.03115518 0.03115518 0.03115518
57+
0.03115518 0.03115518 0.03115518 0.03115518], [0.03134965 0.03134965 0.03134965 0.03134965 0.03134965 0.03134965
58+
0.03134965 0.03134965 0.03134965 0.03134965 0.03134965 0.03134965
59+
0.03134965 0.03134965 0.03134965 0.03134965 0.03951086 0.08323548
60+
0.08488365 0.03997844 0.03134965 0.03134965 0.03134965 0.03134965
61+
0.03134965 0.03134965 0.03134965 0.03134965], [0.03175196 0.03175196 0.03175196 0.03175196 0.03175196 0.03175196
62+
0.03175196 0.03175196 0.03175196 0.03175196 0.03175196 0.03175196
63+
0.03175196 0.03175196 0.03175196 0.03175196 0.05349152 0.08597296
64+
0.06610787 0.03238069 0.03175196 0.03175196 0.03175196 0.03175196
65+
0.03175196 0.03175196 0.03175196 0.03175196], [0.03208184 0.03208184 0.03208184 0.03208184 0.03208184 0.03208184
66+
0.03208184 0.03208184 0.03208184 0.03208184 0.03208184 0.03208184
67+
0.03208184 0.03208184 0.03208184 0.03323436 0.07167999 0.08484611
68+
0.04027541 0.03208184 0.03208184 0.03208184 0.03208184 0.03208184
69+
0.03208184 0.03208184 0.03208184 0.03208184], [0.03185903 0.03185903 0.03185903 0.03185903 0.03185903 0.03185903
70+
0.03185903 0.03185903 0.03185903 0.03185903 0.03185903 0.03185903
71+
0.03185903 0.03185903 0.03185903 0.05221859 0.08626287 0.06504285
72+
0.03185903 0.03185903 0.03185903 0.03185903 0.03185903 0.03185903
73+
0.03185903 0.03185903 0.03185903 0.03185903], [0.03141553 0.03141553 0.03141553 0.03141553 0.03141553 0.03141553
74+
0.03141553 0.03141553 0.03141553 0.03141553 0.03141553 0.03141553
75+
0.03141553 0.03141553 0.04215781 0.08406715 0.08051783 0.03928456
76+
0.03141553 0.03141553 0.03141553 0.03141553 0.03141553 0.03141553
77+
0.03141553 0.03141553 0.03141553 0.03141553], [0.03117072 0.03117072 0.03117072 0.03117072 0.03117072 0.03117072
78+
0.03117072 0.03117072 0.03117072 0.03117072 0.03117072 0.03117072
79+
0.03117072 0.03358196 0.07415415 0.08439919 0.05976736 0.03117072
80+
0.03117072 0.03117072 0.03117072 0.03117072 0.03117072 0.03117072
81+
0.03117072 0.03117072 0.03117072 0.03117072], [0.03082252 0.03082252 0.03082252 0.03082252 0.03082252 0.03082252
82+
0.03082252 0.03082252 0.03082252 0.03082252 0.03082252 0.03082252
83+
0.03118728 0.06832831 0.08345638 0.07275293 0.03535715 0.03082252
84+
0.03082252 0.03082252 0.03082252 0.03082252 0.03082252 0.03082252
85+
0.03082252 0.03082252 0.03082252 0.03082252], [0.03131999 0.03131999 0.03131999 0.03131999 0.03131999 0.03131999
86+
0.03131999 0.03131999 0.03131999 0.03131999 0.03131999 0.03131999
87+
0.03635298 0.08480335 0.08480335 0.04236054 0.03131999 0.03131999
88+
0.03131999 0.03131999 0.03131999 0.03131999 0.03131999 0.03131999
89+
0.03131999 0.03131999 0.03131999 0.03131999], [0.03142894 0.03142894 0.03142894 0.03142894 0.03142894 0.03142894
90+
0.03142894 0.03142894 0.03142894 0.03142894 0.03142894 0.03549166
91+
0.07565325 0.08509834 0.04933874 0.03155243 0.03142894 0.03142894
92+
0.03142894 0.03142894 0.03142894 0.03142894 0.03142894 0.03142894
93+
0.03142894 0.03142894 0.03142894 0.03142894], [0.0309347 0.0309347 0.0309347 0.0309347 0.0309347 0.0309347
94+
0.0309347 0.0309347 0.0309347 0.0309347 0.0309347 0.05211471
95+
0.08376013 0.08376013 0.03793219 0.0309347 0.0309347 0.0309347
96+
0.0309347 0.0309347 0.0309347 0.0309347 0.0309347 0.0309347
97+
0.0309347 0.0309347 0.0309347 0.0309347 ], [0.02985528 0.02985528 0.02985528 0.02985528 0.02985528 0.02985528
98+
0.02985528 0.02985528 0.02985528 0.02985528 0.03792374 0.07712144
99+
0.08083743 0.08083743 0.0366086 0.02985528 0.02985528 0.02985528
100+
0.02985528 0.02985528 0.02985528 0.02985528 0.02985528 0.02985528
101+
0.02985528 0.02985528 0.02985528 0.02985528], [0.02980383 0.02980383 0.02980383 0.02980383 0.02980383 0.02980383
102+
0.02980383 0.02980383 0.02980383 0.02980383 0.0479015 0.08069814
103+
0.08069814 0.07034844 0.03486557 0.02980383 0.02980383 0.02980383
104+
0.02980383 0.02980383 0.02980383 0.02980383 0.02980383 0.02980383
105+
0.02980383 0.02980383 0.02980383 0.02980383], [0.03160568 0.03160568 0.03160568 0.03160568 0.03160568 0.03160568
106+
0.03160568 0.03160568 0.03160568 0.03160568 0.05079747 0.08557688
107+
0.07117215 0.03391729 0.03160568 0.03160568 0.03160568 0.03160568
108+
0.03160568 0.03160568 0.03160568 0.03160568 0.03160568 0.03160568
109+
0.03160568 0.03160568 0.03160568 0.03160568], [0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
110+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
111+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
112+
0.03571429 0.03571429 0.03571429 0.03571429 0.03571429 0.03571429
113+
0.03571429 0.03571429 0.03571429 0.03571429]

examples/mnist/mnist.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,11 +36,16 @@
3636
model.fit(x_train, y_train, epochs=1)
3737

3838
# convert output type through softmax so that it can be interpreted as probability
39-
probability_model = tf.keras.Sequential([model, tf.keras.layers.Softmax()])
39+
@tf.function
40+
def probability_model(x):
41+
output = model(x)
42+
probability = tf.keras.layers.Softmax(name="output")(x)
43+
return probability
44+
4045

4146
# convert keras model to TF2 function to get a computation graph
4247
x = tf.TensorSpec((None, 28, 28), tf.float32)
43-
tf_model = tf.function(lambda x: probability_model(x)).get_concrete_function(x=x)
48+
tf_model = probability_model.get_concrete_function(x=x)
4449

4550
# now all variables are converted to constants.
4651
# if this step is omitted, dumped graph does not include trained weights

examples/mnist/model.pb

-1.39 KB
Binary file not shown.

0 commit comments

Comments
 (0)