Commit 6c4dcca ("pylint fix")
Parent: e03e56a

File tree: 7 files changed (+28, -30 lines)

.pylintrc (+1, -1)

@@ -29,7 +29,7 @@ disable = arguments-differ,
 max-line-length = 120
 ignore-docstrings = yes
 ignored-modules = numpy,torch,cv2,openvino
-extension-pkg-whitelist = torch,cv2
+extension-pkg-whitelist = torch,cv2,scipy

 [SIMILARITIES]
 ignore-imports = yes
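Note: extension-pkg-whitelist lists C-extension packages that pylint is allowed to import for introspection; without it, attribute access on compiled modules such as scipy can trigger false no-member (E1101) warnings. A minimal illustration of the kind of code this whitelist entry is meant to keep clean, assuming scipy is installed (the snippet itself is not part of the commit):

    # scipy.spatial.cKDTree is a compiled extension class, so its members are
    # not visible to pylint's static analysis unless scipy is whitelisted, and
    # calls like tree.query() could otherwise be flagged as no-member.
    import scipy.spatial

    tree = scipy.spatial.cKDTree([[0.0, 0.0], [1.0, 1.0]])
    dist, idx = tree.query([0.2, 0.1])
    print(dist, idx)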

demo/demo.py (-6)

@@ -1,10 +1,4 @@
 import argparse
-import inspect
-import os.path as osp
-from threading import Thread
-import queue
-import os
-import sys

 import cv2 as cv
 import glog as log

scripts/optuna_optim.py (+1, -6)

@@ -5,20 +5,15 @@
 import time

 import torch
-from torchvision import datasets
-from torchvision import transforms
 import optuna
 from optuna.trial import TrialState
 from functools import partial
-from icecream import ic

 from torchdet3d.builders import (build_loader, build_model, build_loss,
                                  build_optimizer, build_scheduler)
-from torchdet3d.evaluation import Evaluator, compute_average_distance, compute_accuracy
+from torchdet3d.evaluation import compute_average_distance, compute_accuracy
 from torchdet3d.losses import LossManager
-from torchdet3d.trainer import Trainer
 from torchdet3d.utils import AverageMeter, read_py_config, Logger, set_random_seed
-import numpy as np

 def put_on_device(items, device):
     for i, item in enumerate(items):

torchdet3d/dataloaders/objectron_main.py (+12, -7)

@@ -33,7 +33,8 @@ def __init__(self, root_folder, mode='train', transform=None, debug_mode=False,

         # filter categories
         if category_list != 'all':
-            self.annotations = list(filter(lambda x: OBJECTRON_CLASSES[x['category_id'] - 1] in category_list, self.ann['annotations']))
+            self.annotations = list(filter(lambda x: OBJECTRON_CLASSES[x['category_id'] - 1] in
+                                           category_list, self.ann['annotations']))
         images_id = {ann_obj['image_id'] for ann_obj in self.annotations}
         # create dict since ordering now different
         self.images = {img_obj['id']: img_obj
@@ -52,7 +53,7 @@ def __getitem__(self, indx):
         img_id = self.annotations[indx]['image_id']
         cat_id = int(self.annotations[indx]['category_id']) - 1
         # in case when classes are not equal to 9 choose closest
-        category = min([i for i in range(self.num_classes)], key=lambda x:abs(x-cat_id))
+        category = min(range(self.num_classes), key=lambda x:abs(x-cat_id))
         # get raw key points for bb from annotations
         img_path = self.root_folder + '/' + (self.images[img_id]['file_name'])
         # read image
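The min(range(self.num_classes), key=...) rewrite above drops the intermediate list but keeps the same "closest category" fallback: when the model uses fewer than the full 9 classes, an out-of-range category id is clamped to the nearest valid index. A small sketch of that behaviour (not part of the commit; num_classes is an arbitrary example value):

    # Hypothetical values showing how raw category ids map onto a reduced label space.
    num_classes = 5
    for cat_id in (0, 3, 7):
        category = min(range(num_classes), key=lambda x: abs(x - cat_id))
        print(cat_id, '->', category)  # 0 -> 0, 3 -> 3, 7 -> 4 (clamped to the last class)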
@@ -162,10 +163,14 @@ def dataset_test(root, mode='val', transform=None, batch_size=5):
     def cat_filter_test(root, mode='val', transform=None, category_list=['book']):
         ds = Objectron(root, mode=mode, transform=transform, category_list=category_list)
         dataloader = DataLoader(ds, batch_size=128, shuffle=True)
-        for _, _, category in dataloader:
-            for cat in category:
-                assert OBJECTRON_CLASSES[cat] in category_list
-
+        ic(len(dataloader))
+        def in_func(cat):
+            assert OBJECTRON_CLASSES[cat] in category_list
+        for i, (_, _, category) in enumerate(dataloader):
+            ic(i)
+            if i == len(dataloader) // 10:
+                break
+            map(in_func, category)

     root = './data'
     normalization = A.augmentations.transforms.Normalize(**dict(mean=[0.5931, 0.4690, 0.4229],
@@ -183,7 +188,7 @@ def cat_filter_test(root, mode='val', transform=None, category_list=['book']):
     super_vision_test(root, mode='val', transform=transform, index=index)
     dataset_test(root, mode='val', transform=transform, batch_size=256)
     dataset_test(root, mode='train', transform=transform, batch_size=256)
-    # cat_filter_test(root, mode='val', transform=transform, category_list=['shoe', 'camera', 'bottle', 'bike'])
+    cat_filter_test(root, mode='val', transform=transform, category_list=['shoe', 'camera', 'bottle', 'bike'])

 if __name__ == '__main__':
     test()
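An observation on the re-enabled cat_filter_test above: map() is lazy in Python 3, so map(in_func, category) only builds an iterator, and in_func (with its assertion) never actually runs unless that iterator is consumed. A self-contained sketch of the pitfall, with made-up data standing in for a batch (the class tuple below assumes the standard nine Objectron categories):

    # Stand-in data; in the test these come from the dataloader.
    OBJECTRON_CLASSES = ('bike', 'book', 'bottle', 'camera', 'cereal_box',
                         'chair', 'cup', 'laptop', 'shoe')
    category_list = ['shoe', 'camera', 'bottle', 'bike']
    category = [8, 3, 2, 0]  # one batch of category labels

    def in_func(cat):
        assert OBJECTRON_CLASSES[cat] in category_list

    map(in_func, category)        # lazy: in_func is never called here
    list(map(in_func, category))  # consumed: the assertions actually execute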

torchdet3d/evaluation/evaluate.py (+12, -8)

@@ -84,7 +84,7 @@ def val(self, epoch=None):
         IOU_meter = AverageMeter()
         ADD_cls_meter = [AverageMeter() for cl in range(self.num_classes)]
         SADD_cls_meter = [AverageMeter() for cl in range(self.num_classes)]
-        acc_cls_meter = [AverageMeter() for cl in range(self.num_classes)]
+        ACC_cls_meter = [AverageMeter() for cl in range(self.num_classes)]
         IOU__cls_meter = [AverageMeter() for cl in range(self.num_classes)]

         # switch to eval mode
@@ -98,23 +98,24 @@ def val(self, epoch=None):
                 # measure metrics
                 ADD, SADD = compute_average_distance(pred_kp, gt_kp)
                 IOU = compute_2d_based_iou(pred_kp, gt_kp)
-                acc = compute_accuracy(pred_cats, gt_cats)
+                ACC = compute_accuracy(pred_cats, gt_cats)

-                for cl, ADD_cls, SADD_cls, acc_cls in compute_metrics_per_cls(pred_kp, gt_kp, pred_cats, gt_cats):
+                for cl, ADD_cls, SADD_cls, ACC_cls in compute_metrics_per_cls(pred_kp, gt_kp, pred_cats, gt_cats):
                     ADD_cls_meter[cl].update(ADD_cls, imgs.size(0))
                     SADD_cls_meter[cl].update(SADD_cls, imgs.size(0))
-                    acc_cls_meter[cl].update(acc_cls, imgs.size(0))
+                    ACC_cls_meter[cl].update(ACC_cls, imgs.size(0))
+                    IOU__cls_meter[cl].update(IOU, imgs.size(0))

                 # record loss
                 ADD_meter.update(ADD, imgs.size(0))
                 SADD_meter.update(SADD, imgs.size(0))
-                ACC_meter.update(acc, imgs.size(0))
+                ACC_meter.update(ACC, imgs.size(0))
                 IOU_meter.update(IOU)
                 if epoch is not None:
                     # update progress bar
                     loop.set_description(f'Val Epoch [{epoch}/{self.max_epoch}]')
                     loop.set_postfix(ADD=ADD, avr_ADD=ADD_meter.avg, SADD=SADD,
-                                     avr_SADD=SADD_meter.avg, acc=acc, acc_avg = ACC_meter.avg)
+                                     avr_SADD=SADD_meter.avg, acc=ACC, acc_avg = ACC_meter.avg)

                 if self.debug and it == self.debug_steps:
                     break
@@ -125,16 +126,19 @@ def val(self, epoch=None):
            self.writer.add_scalar('Val/ADD', ADD_meter.avg, global_step=self.val_step)
            self.writer.add_scalar('Val/SADD', SADD_meter.avg, global_step=self.val_step)
            self.writer.add_scalar('Val/ACC', ACC_meter.avg, global_step=self.val_step)
+           self.writer.add_scalar('Val/IOU', IOU_meter.avg, global_step=self.val_step)
        for cls_ in range(self.num_classes):
            cl_str = OBJECTRON_CLASSES[cls_]
            if epoch is not None:
                self.writer.add_scalar(f'Val/ADD_{cl_str}', ADD_cls_meter[cls_].avg, global_step=self.val_step)
                self.writer.add_scalar(f'Val/SADD_{cl_str}', SADD_cls_meter[cls_].avg, global_step=self.val_step)
-               self.writer.add_scalar(f'Val/ACC_{cl_str}', acc_cls_meter[cls_].avg, global_step=self.val_step)
+               self.writer.add_scalar(f'Val/ACC_{cl_str}', ACC_cls_meter[cls_].avg, global_step=self.val_step)
+               self.writer.add_scalar(f'Val/IOU_{cl_str}', IOU__cls_meter[cls_].avg, global_step=self.val_step)
                self.val_step += 1
            per_class_metr_message += (f"\n***{cl_str}***:\nADD: {ADD_cls_meter[cls_].avg}\n"
                                       f"SADD: {SADD_cls_meter[cls_].avg}\n"
-                                      f"accuracy: {acc_cls_meter[cls_].avg}\n")
+                                      f"IOU: {IOU__cls_meter[cls_].avg}\n"
+                                      f"accuracy: {ACC_cls_meter[cls_].avg}\n")

        ep_mess = f"epoch : {epoch}\n" if epoch is not None else ""
        print("\nComputed val metrics:\n"

torchdet3d/losses/regression_losses.py (+2, -1)

@@ -101,7 +101,8 @@ def parse_losses(self, pred_kp, gt_kp,
         cls_std = torch.std(torch.stack(self.s_cls))
         reg_mean = torch.mean(torch.stack(self.s_reg))
         reg_std = torch.std(torch.stack(self.s_reg))
-        self.s_cls.clear(); self.s_reg.clear()
+        self.s_cls.clear()
+        self.s_reg.clear()
         if self.alwa_version == 'ver_1':
             cls = cls_mean + cls_std
             reg = reg_mean + reg_std

torchdet3d/trainer/train.py (-1)

@@ -3,7 +3,6 @@

 from tqdm import tqdm
 from dataclasses import dataclass
-import torch

 from torchdet3d.evaluation import compute_average_distance, compute_accuracy
 from torchdet3d.utils import AverageMeter, save_snap
