@@ -42,7 +42,8 @@ def __init__(self, layers, optimizer, loss, max_epochs=10, batch_size=64, random
42
42
self .training = False
43
43
self ._initialized = False
44
44
45
- def _setup_layers (self , x_shape , ):
45
+ def _setup_layers (self , x_shape ):
46
+ """Initialize model's layers."""
46
47
x_shape = list (x_shape )
47
48
x_shape [0 ] = self .batch_size
48
49
@@ -55,6 +56,8 @@ def _setup_layers(self, x_shape, ):
55
56
logging .info ('Total parameters: %s' % self .n_params )
56
57
57
58
def _find_bprop_entry (self ):
59
+ """Find entry layer for back propagation."""
60
+
58
61
if len (self .layers ) > 0 and not hasattr (self .layers [- 1 ], 'parameters' ):
59
62
return - 1
60
63
return len (self .layers )
@@ -73,7 +76,10 @@ def fit(self, X, y=None):
73
76
self .is_training = False
74
77
75
78
def update(self, X, y):
    """Run a forward and a backward pass over a single batch.

    Parameters
    ----------
    X : batch of input examples fed to the network.
    y : target values for the batch.
    """
    # Forward pass
    y_pred = self.fprop(X)

    # Backward pass
    grad = self.loss_grad(y, y_pred)
    # bprop_entry excludes a trailing non-parametric layer (see
    # _find_bprop_entry), so the gradient only flows through layers
    # that can use it.
    for layer in reversed(self.layers[:self.bprop_entry]):
        grad = layer.backward_pass(grad)
@@ -100,14 +106,18 @@ def parametric_layers(self):
100
106
101
107
@property
def parameters(self):
    """Return the parameters of every parametric layer as a list."""
    return [layer.parameters for layer in self.parametric_layers]
107
114
108
115
def error (self , X = None , y = None ):
116
+ """Calculate an error for given examples."""
109
117
training_phase = self .is_training
110
118
if training_phase :
119
+ # Temporally disable training.
120
+ # Some layers work differently while training (e.g. Dropout).
111
121
self .is_training = False
112
122
if X is None and y is None :
113
123
y_pred = self ._predict (self .X )
@@ -131,6 +141,7 @@ def is_training(self, train):
131
141
layer .is_training = train
132
142
133
143
def shuffle_dataset (self ):
144
+ """Shuffle rows in the dataset."""
134
145
n_samples = self .X .shape [0 ]
135
146
indices = np .arange (n_samples )
136
147
np .random .shuffle (indices )
@@ -139,10 +150,12 @@ def shuffle_dataset(self):
139
150
140
151
@property
def n_layers(self):
    """Return how many layers the network holds."""
    layer_count = self._n_layers
    return layer_count
143
155
144
156
@property
def n_params(self):
    """Return the total number of trainable parameters.

    Sums ``n_params`` over every parametric layer; 0 for a model
    with no parametric layers.
    """
    # Generator expression: no need to materialize an intermediate
    # list just to feed sum() (flake8-comprehensions C401).
    return sum(layer.parameters.n_params for layer in self.parametric_layers)
147
160
148
161
def reset (self ):
0 commit comments