
Commit 5eb54b4

fix bug

1 parent 000aec2

1 file changed: +5 -4 lines changed


detection/grad_cam_retinanet.py (+5 -4)
@@ -19,7 +19,7 @@ def __init__(self, net, layer_name):
         self.net = net
         self.layer_name = layer_name
         self.feature = []
-        self.gradient = None
+        self.gradient = []
         self.net.eval()
         self.handlers = []
         self._register_hook()
@@ -37,7 +37,8 @@ def _get_grads_hook(self, module, input_grad, output_grad):
         :param output_grad: tuple of length 1
         :return:
         """
-        self.gradient = output_grad[0]
+        self.gradient.insert(0, output_grad[0])  # the gradients arrive in reverse order
+        print("gradient shape:{}".format(output_grad[0].size()))
 
     def _register_hook(self):
         for (name, module) in self.net.named_modules():
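
The insert(0, ...) above exists because a backward hook is registered on every FPN output level, and autograd fires those hooks in reverse order of the forward pass: the deepest level reports its gradient first. A minimal sketch demonstrating the ordering, assuming a hypothetical TinyFPN module and the register_full_backward_hook API (not code from this repository):

import torch
import torch.nn as nn

class TinyFPN(nn.Module):
    """Two stacked convs standing in for two FPN levels (hypothetical)."""
    def __init__(self):
        super().__init__()
        self.p3 = nn.Conv2d(1, 1, 1)
        self.p4 = nn.Conv2d(1, 1, 1)

    def forward(self, x):
        a = self.p3(x)
        b = self.p4(a)
        return a.sum() + b.sum()

net = TinyFPN()
firing_order = []
for name, module in net.named_modules():
    if isinstance(module, nn.Conv2d):
        # record which layer's backward hook fires first
        module.register_full_backward_hook(
            lambda m, grad_in, grad_out, name=name: firing_order.append(name))

x = torch.randn(1, 1, 4, 4, requires_grad=True)
net(x).backward()
print(firing_order)  # expected ['p4', 'p3']: deepest level first, hence insert(0, ...)

Because the hooks fire deepest-first, insert(0, grad) rebuilds self.gradient in forward (ascending) level order, so self.gradient[feature_level] later picks the correct pyramid level.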
@@ -63,7 +64,7 @@ def __call__(self, inputs, index=0):
         feature_level = output[0]['instances'].feature_levels[index]  # which feature-map level the box comes from
         score.backward()
 
-        gradient = self.gradient[0].cpu().data.numpy()  # [C,H,W]
+        gradient = self.gradient[feature_level][0].cpu().data.numpy()  # [C,H,W]
         weight = np.mean(gradient, axis=(1, 2))  # [C]
 
         # feature_level selects the level of the feature map; [0] drops the batch dimension
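
With the gradient of the box's own pyramid level selected, this first __call__ variant applies plain Grad-CAM weighting: the channel-wise mean of the gradient weights the corresponding feature map. A hedged sketch of that computation; cam_from_level is a hypothetical helper, and it assumes the features are kept per level as [1, C, H, W] tensors, mirroring self.gradient:

import numpy as np

def cam_from_level(features, gradients, feature_level):
    # features / gradients: lists indexed by FPN level, each entry [1, C, H, W]
    gradient = gradients[feature_level][0].cpu().data.numpy()  # [C, H, W]
    feature = features[feature_level][0].cpu().data.numpy()    # [C, H, W]

    weight = np.mean(gradient, axis=(1, 2))              # Grad-CAM weights, [C]
    cam = (weight[:, None, None] * feature).sum(axis=0)  # weighted channel sum, [H, W]
    cam = np.maximum(cam, 0)                             # ReLU on the map
    return cam / (cam.max() + 1e-12)                     # scale to [0, 1]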
@@ -103,7 +104,7 @@ def __call__(self, inputs, index=0):
         feature_level = output[0]['instances'].feature_levels[index]  # which feature-map level the box comes from
         score.backward()
 
-        gradient = self.gradient[0].cpu().data.numpy()  # [C,H,W]
+        gradient = self.gradient[feature_level][0].cpu().data.numpy()  # [C,H,W]
         gradient = np.maximum(gradient, 0.)  # ReLU
         indicate = np.where(gradient > 0, 1., 0.)  # indicator function
         norm_factor = np.sum(gradient, axis=(1, 2))  # [C] normalization factor
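
The second __call__ variant starts a Grad-CAM++-style weighting: ReLU the gradient, build an indicator map, and compute a per-channel normalization factor. The lines that follow norm_factor are not part of this diff; the sketch below shows one common way such a weighting is completed and is an illustration under that assumption, not the repository's exact code:

import numpy as np

def gradcam_pp_weights(gradient):
    # gradient: [C, H, W], already selected for the box's feature level
    gradient = np.maximum(gradient, 0.)                 # ReLU
    indicate = np.where(gradient > 0, 1., 0.)           # indicator function
    norm_factor = np.sum(gradient, axis=(1, 2))         # [C] per-channel normalization
    safe = np.where(norm_factor > 0., norm_factor, 1.)  # guard against division by zero
    alpha = indicate / safe[:, None, None]              # [C, H, W] pixel-wise coefficients
    return np.sum(gradient * alpha, axis=(1, 2))        # [C] channel weights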
