Skip to content

Commit 662772e

Browse files
qoocrabkkweon
authored and committed
Update lab-07-3-linear_regression_min_max.py
1. Changed the function name because there are many inquiries that it is confused with 'MinMaxScaler' of 'sklearn.preprocessing'. 2. Added normalized output.
1 parent be4ea88 commit 662772e

File tree

1 file changed

+70
-25
lines changed

1 file changed

+70
-25
lines changed

lab-07-3-linear_regression_min_max.py

+70-25
Original file line number | Diff line number | Diff line change
@@ -3,26 +3,41 @@
33
tf.set_random_seed(777) # for reproducibility
44

55

6-
def MinMaxScaler(data):
6+
def min_max_scaler(data):
    """Scale each column of *data* to the [0, 1] range (min-max normalization).

    Renamed from ``MinMaxScaler`` to avoid confusion with
    ``sklearn.preprocessing.MinMaxScaler``.

    Args:
        data: 2-D array-like of numbers; scaling is applied per column (axis 0).

    Returns:
        np.ndarray of the same shape, each column mapped so its minimum is 0
        and its maximum is (approximately) 1.
    """
    # Compute the per-column minimum once and reuse it (the original
    # evaluated np.min(data, 0) twice).
    col_min = np.min(data, 0)
    numerator = data - col_min
    denominator = np.max(data, 0) - col_min
    # noise term prevents the zero division
    return numerator / (denominator + 1e-7)
1111

1212

13-
xy = np.array([[828.659973, 833.450012, 908100, 828.349976, 831.659973],
14-
[823.02002, 828.070007, 1828100, 821.655029, 828.070007],
15-
[819.929993, 824.400024, 1438100, 818.97998, 824.159973],
16-
[816, 820.958984, 1008100, 815.48999, 819.23999],
17-
[819.359985, 823, 1188100, 818.469971, 818.97998],
18-
[819, 823, 1198100, 816, 820.450012],
19-
[811.700012, 815.25, 1098100, 809.780029, 813.669983],
20-
[809.51001, 816.659973, 1398100, 804.539978, 809.559998]])
13+
xy = np.array(
14+
[
15+
[828.659973, 833.450012, 908100, 828.349976, 831.659973],
16+
[823.02002, 828.070007, 1828100, 821.655029, 828.070007],
17+
[819.929993, 824.400024, 1438100, 818.97998, 824.159973],
18+
[816, 820.958984, 1008100, 815.48999, 819.23999],
19+
[819.359985, 823, 1188100, 818.469971, 818.97998],
20+
[819, 823, 1198100, 816, 820.450012],
21+
[811.700012, 815.25, 1098100, 809.780029, 813.669983],
22+
[809.51001, 816.659973, 1398100, 804.539978, 809.559998],
23+
]
24+
)
2125

2226
# very important. It does not work without it.
23-
xy = MinMaxScaler(xy)
27+
xy = min_max_scaler(xy)
2428
print(xy)
2529

30+
'''
31+
[[0.99999999 0.99999999 0. 1. 1. ]
32+
[0.70548491 0.70439552 1. 0.71881782 0.83755791]
33+
[0.54412549 0.50274824 0.57608696 0.606468 0.6606331 ]
34+
[0.33890353 0.31368023 0.10869565 0.45989134 0.43800918]
35+
[0.51436 0.42582389 0.30434783 0.58504805 0.42624401]
36+
[0.49556179 0.42582389 0.31521739 0.48131134 0.49276137]
37+
[0.11436064 0. 0.20652174 0.22007776 0.18597238]
38+
[0. 0.07747099 0.5326087 0. 0. ]]
39+
'''
40+
2641
x_data = xy[:, 0:-1]
2742
y_data = xy[:, [-1]]
2843

@@ -40,29 +55,59 @@ def MinMaxScaler(data):
4055
cost = tf.reduce_mean(tf.square(hypothesis - Y))
4156

4257
# Minimize
43-
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1e-5)
44-
train = optimizer.minimize(cost)
58+
train = tf.train.GradientDescentOptimizer(learning_rate=1e-5).minimize(cost)
4559

4660
# Launch the graph in a session.
47-
sess = tf.Session()
48-
# Initializes global variables in the graph.
49-
sess.run(tf.global_variables_initializer())
61+
with tf.Session() as sess:
62+
# Initializes global variables in the graph.
63+
sess.run(tf.global_variables_initializer())
5064

51-
for step in range(101):
52-
cost_val, hy_val, _ = sess.run(
53-
[cost, hypothesis, train], feed_dict={X: x_data, Y: y_data})
54-
print(step, "Cost: ", cost_val, "\nPrediction:\n", hy_val)
65+
for step in range(101):
66+
_, cost_val, hy_val = sess.run(
67+
[train, cost, hypothesis], feed_dict={X: x_data, Y: y_data}
68+
)
69+
print(step, "Cost: ", cost_val, "\nPrediction:\n", hy_val)
5570

5671
'''
57-
100 Cost: 0.152254
72+
0 Cost: 0.15230925
73+
Prediction:
74+
[[ 1.6346191 ]
75+
[ 0.06613699]
76+
[ 0.3500818 ]
77+
[ 0.6707252 ]
78+
[ 0.61130744]
79+
[ 0.61464405]
80+
[ 0.23171967]
81+
[-0.1372836 ]]
82+
1 Cost: 0.15230872
5883
Prediction:
59-
[[ 1.63450289]
60-
[ 0.06628087]
84+
[[ 1.634618 ]
85+
[ 0.06613836]
86+
[ 0.35008252]
87+
[ 0.670725 ]
88+
[ 0.6113076 ]
89+
[ 0.6146443 ]
90+
[ 0.23172 ]
91+
[-0.13728246]]
92+
...
93+
99 Cost: 0.1522546
94+
Prediction:
95+
[[ 1.6345041 ]
96+
[ 0.06627947]
97+
[ 0.35014683]
98+
[ 0.670706 ]
99+
[ 0.6113161 ]
100+
[ 0.61466044]
101+
[ 0.23175153]
102+
[-0.13716647]]
103+
100 Cost: 0.15225402
104+
Prediction:
105+
[[ 1.6345029 ]
106+
[ 0.06628093]
61107
[ 0.35014752]
62108
[ 0.67070574]
63-
[ 0.61131608]
64-
[ 0.61466062]
109+
[ 0.61131614]
110+
[ 0.6146606 ]
65111
[ 0.23175186]
66112
[-0.13716528]]
67-
68113
'''

0 commit comments

Comments
 (0)