# Fix TF1's global graph-level seed so runs are reproducible.
tf.set_random_seed(777)  # for reproducibility
def min_max_scaler(data):
    """Rescale each column of *data* into the [0, 1] range.

    Column-wise min-max normalization: 0 maps to the column minimum and
    1 to the column maximum. A tiny epsilon in the denominator prevents
    division by zero for constant columns.
    """
    col_min = np.min(data, 0)
    col_span = np.max(data, 0) - col_min
    # noise term prevents the zero division
    return (data - col_min) / (col_span + 1e-7)
# Raw stock-style data: open, high, volume, low, close per row.
xy = np.array(
    [
        [828.659973, 833.450012, 908100, 828.349976, 831.659973],
        [823.02002, 828.070007, 1828100, 821.655029, 828.070007],
        [819.929993, 824.400024, 1438100, 818.97998, 824.159973],
        [816, 820.958984, 1008100, 815.48999, 819.23999],
        [819.359985, 823, 1188100, 818.469971, 818.97998],
        [819, 823, 1198100, 816, 820.450012],
        [811.700012, 815.25, 1098100, 809.780029, 813.669983],
        [809.51001, 816.659973, 1398100, 804.539978, 809.559998],
    ]
)

# very important. It does not work without it.
# (The raw features span wildly different scales — e.g. volume ~1e6 vs
# price ~8e2 — so gradient descent diverges without normalization.)
xy = min_max_scaler(xy)
print(xy)

'''
[[0.99999999 0.99999999 0.         1.         1.        ]
 [0.70548491 0.70439552 1.         0.71881782 0.83755791]
 [0.54412549 0.50274824 0.57608696 0.606468   0.6606331 ]
 [0.33890353 0.31368023 0.10869565 0.45989134 0.43800918]
 [0.51436    0.42582389 0.30434783 0.58504805 0.42624401]
 [0.49556179 0.42582389 0.31521739 0.48131134 0.49276137]
 [0.11436064 0.         0.20652174 0.22007776 0.18597238]
 [0.         0.07747099 0.5326087  0.         0.        ]]
'''

# First four columns are features; last column is the label.
x_data = xy[:, 0:-1]
y_data = xy[:, [-1]]
# Mean-squared-error loss between the model output and the labels.
cost = tf.reduce_mean(tf.square(hypothesis - Y))

# Minimize the loss with plain gradient descent (TF1 graph-mode API).
train = tf.train.GradientDescentOptimizer(learning_rate=1e-5).minimize(cost)

# Launch the graph in a session; the context manager closes it automatically.
with tf.Session() as sess:
    # Initializes global variables in the graph.
    sess.run(tf.global_variables_initializer())

    # Run 101 training steps, fetching loss and predictions each step.
    for step in range(101):
        _, cost_val, hy_val = sess.run(
            [train, cost, hypothesis], feed_dict={X: x_data, Y: y_data}
        )
        print(step, "Cost: ", cost_val, "\nPrediction:\n", hy_val)
# Sample console output from a previous run (kept as a reference transcript).
'''
0 Cost: 0.15230925
 Prediction:
 [[ 1.6346191 ]
 [ 0.06613699]
 [ 0.3500818 ]
 [ 0.6707252 ]
 [ 0.61130744]
 [ 0.61464405]
 [ 0.23171967]
 [-0.1372836 ]]
1 Cost: 0.15230872
 Prediction:
 [[ 1.634618  ]
 [ 0.06613836]
 [ 0.35008252]
 [ 0.670725  ]
 [ 0.6113076 ]
 [ 0.6146443 ]
 [ 0.23172   ]
 [-0.13728246]]
...
99 Cost: 0.1522546
 Prediction:
 [[ 1.6345041 ]
 [ 0.06627947]
 [ 0.35014683]
 [ 0.670706  ]
 [ 0.6113161 ]
 [ 0.61466044]
 [ 0.23175153]
 [-0.13716647]]
100 Cost: 0.15225402
 Prediction:
 [[ 1.6345029 ]
 [ 0.06628093]
 [ 0.35014752]
 [ 0.67070574]
 [ 0.61131614]
 [ 0.6146606 ]
 [ 0.23175186]
 [-0.13716528]]
'''