 class VariationalAutoencoder(object):
 
-    def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
-                 gaussian_sample_size = 128):
+    def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer()):
         self.n_input = n_input
         self.n_hidden = n_hidden
-        self.gaussian_sample_size = gaussian_sample_size
 
         network_weights = self._initialize_weights()
         self.weights = network_weights
@@ -18,14 +16,12 @@ def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
         self.z_mean = tf.add(tf.matmul(self.x, self.weights['w1']), self.weights['b1'])
         self.z_log_sigma_sq = tf.add(tf.matmul(self.x, self.weights['log_sigma_w1']), self.weights['log_sigma_b1'])
 
-
         # sample from gaussian distribution
-        eps = tf.random_normal((self.gaussian_sample_size, n_hidden), 0, 1, dtype = tf.float32)
+        eps = tf.random_normal(tf.pack([tf.shape(self.x)[0], self.n_hidden]), 0, 1, dtype = tf.float32)
         self.z = tf.add(self.z_mean, tf.mul(tf.sqrt(tf.exp(self.z_log_sigma_sq)), eps))
 
         self.reconstruction = tf.add(tf.matmul(self.z, self.weights['w2']), self.weights['b2'])
 
-
         # cost
         reconstr_loss = 0.5 * tf.reduce_sum(tf.pow(tf.sub(self.reconstruction, self.x), 2.0))
         latent_loss = -0.5 * tf.reduce_sum(1 + self.z_log_sigma_sq
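
The hunk above is the substance of the commit: the noise used for the reparameterization trick is now shaped from the batch actually fed in (tf.shape(self.x)[0]) instead of a fixed gaussian_sample_size, so sampling no longer assumes a batch of 128. Below is a minimal NumPy sketch of that sampling step, not code from the repository; the names sample_z, mu, and log_sigma_sq are illustrative.

import numpy as np

def sample_z(mu, log_sigma_sq):
    # Draw eps with the same shape as the encoder outputs, so the noise
    # automatically follows the batch size, mirroring
    # tf.random_normal(tf.pack([tf.shape(self.x)[0], self.n_hidden]), 0, 1).
    eps = np.random.randn(*mu.shape).astype(np.float32)
    # Reparameterization: z = mu + sigma * eps, with sigma = sqrt(exp(log sigma^2)).
    return mu + np.sqrt(np.exp(log_sigma_sq)) * eps

# Any batch size works without declaring it up front.
mu = np.zeros((32, 20), dtype=np.float32)            # batch of 32, 20 latent units
log_sigma_sq = np.zeros((32, 20), dtype=np.float32)
print(sample_z(mu, log_sigma_sq).shape)              # (32, 20)
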
@@ -38,7 +34,6 @@ def __init__(self, n_input, n_hidden, optimizer = tf.train.AdamOptimizer(),
         self.sess = tf.Session()
         self.sess.run(init)
 
-
     def _initialize_weights(self):
         all_weights = dict()
         all_weights['w1'] = tf.Variable(autoencoder.Utils.xavier_init(self.n_input, self.n_hidden))
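
For reference, the cost assembled in the second hunk is a squared-error reconstruction term plus the closed-form KL divergence between the diagonal Gaussian q(z|x) and a standard normal prior. The latent_loss line is cut off by the hunk boundary, so the NumPy sketch below uses the standard VAE expression rather than the file's exact continuation.

import numpy as np

def vae_cost(x, reconstruction, z_mean, z_log_sigma_sq):
    # 0.5 * sum of squared reconstruction error, as in the diff above.
    reconstr_loss = 0.5 * np.sum((reconstruction - x) ** 2)
    # Standard closed-form KL(q(z|x) || N(0, I)) for a diagonal Gaussian.
    latent_loss = -0.5 * np.sum(1 + z_log_sigma_sq
                                - np.square(z_mean)
                                - np.exp(z_log_sigma_sq))
    return reconstr_loss + latent_loss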