
Commit 7457947

Update model with new pretrained weights
The model is now updated with new pretrained ImageNet weights. These weights were trained with no padding on the 1x1 convolutions, resulting in faster inference and improved accuracy.
1 parent 0f5ebc0 commit 7457947
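For context, here is a minimal standalone sketch (not part of this commit) of why zero padding on a 1x1 convolution matters: with padding=1 a 1x1 convolution enlarges its feature map by a one-pixel zero border on every side, so all downstream layers process extra activations that carry no image content; with padding=0 the spatial size is preserved. Output sizes follow out = (in + 2*padding - kernel) / stride + 1.

import torch
import torch.nn as nn

x = torch.randn(1, 64, 32, 32)  # dummy NCHW feature map

conv_padded = nn.Conv2d(64, 128, kernel_size=1, stride=1, padding=1)
conv_unpadded = nn.Conv2d(64, 128, kernel_size=1, stride=1, padding=0)

# (32 + 2*1 - 1) / 1 + 1 = 34 -> the map grows, and later layers do extra work
print(conv_padded(x).shape)    # torch.Size([1, 128, 34, 34])
# (32 + 2*0 - 1) / 1 + 1 = 32 -> spatial size unchanged
print(conv_unpadded(x).shape)  # torch.Size([1, 128, 32, 32])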

File tree

3 files changed, +46 -35 lines changed

  Cifar/models/simplenet.py
  ImageNet/simplenet.py
  ImageNet/training_scripts/imagenet_training/timm/models/simplenet.py

Cifar/models/simplenet.py

Lines changed: 15 additions & 12 deletions
@@ -63,28 +63,28 @@ def _cfg(url="", **kwargs):
 
 default_cfgs: Dict[str, Dict[str, Any]] = {
     "simplenetv1_small_m1_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-a7ec600b.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-be804903.pth"
     ),
     "simplenetv1_small_m2_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-62617ea1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-ca4b3e2b.pth"
     ),
     "simplenetv1_small_m1_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-8427bf60.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-098acbff.pth"
     ),
     "simplenetv1_small_m2_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-da714eb5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-609ff4da.pth"
     ),
     "simplenetv1_5m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-cc6b3ad1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-36c4ca4d.pth"
     ),
     "simplenetv1_5m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-c35297bf.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-9bd6bb36.pth"
     ),
     "simplenetv1_9m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-8c98a0a5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-524f9972.pth"
     ),
     "simplenetv1_9m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-6b01be1e.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-59e8733b.pth"
     ),
 }

@@ -241,7 +241,7 @@ def __init__(
     ],
 }
 # make sure values are in proper form!
-self.dropout_rates = {int(key):float(value) for key,value in drop_rates.items()}
+self.dropout_rates = {int(key): float(value) for key, value in drop_rates.items()}
 # 15 is the last layer of the network(including two previous pooling layers)
 # basically specifying the dropout rate for the very last layer to be used after the pooling
 self.last_dropout_rate = self.dropout_rates.get(15, 0.0)

@@ -296,7 +296,10 @@ def _make_layers(self, scale: float):
 # due to using 0 as dropout value(this applies up to 1.13.1) so here is an explicit
 # check to convert any possible integer value to its decimal counterpart.
 custom_dropout = None if custom_dropout is None else float(custom_dropout)
-kernel_size = 3 if layer_type == [] else 1
+kernel_size = 3
+if layer_type == ['k1']:
+    kernel_size = 1
+    padding = 0

 if layer == "p":
     layers += [

@@ -307,13 +310,13 @@ def _make_layers(self, scale: float):
 filters = round(layer * scale)
 if custom_dropout is None:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
     ]
 else:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
         nn.Dropout2d(p=custom_dropout, inplace=False),

ImageNet/simplenet.py

Lines changed: 16 additions & 12 deletions
@@ -63,28 +63,28 @@ def _cfg(url="", **kwargs):
 
 default_cfgs: Dict[str, Dict[str, Any]] = {
     "simplenetv1_small_m1_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-a7ec600b.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-be804903.pth"
     ),
     "simplenetv1_small_m2_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-62617ea1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-ca4b3e2b.pth"
     ),
     "simplenetv1_small_m1_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-8427bf60.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-098acbff.pth"
     ),
     "simplenetv1_small_m2_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-da714eb5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-609ff4da.pth"
     ),
     "simplenetv1_5m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-cc6b3ad1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-36c4ca4d.pth"
     ),
     "simplenetv1_5m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-c35297bf.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-9bd6bb36.pth"
     ),
     "simplenetv1_9m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-8c98a0a5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-524f9972.pth"
     ),
     "simplenetv1_9m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-6b01be1e.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-59e8733b.pth"
     ),
 }

@@ -241,7 +241,7 @@ def __init__(
     ],
 }
 # make sure values are in proper form!
-self.dropout_rates = {int(key):float(value) for key,value in drop_rates.items()}
+self.dropout_rates = {int(key): float(value) for key, value in drop_rates.items()}
 # 15 is the last layer of the network(including two previous pooling layers)
 # basically specifying the dropout rate for the very last layer to be used after the pooling
 self.last_dropout_rate = self.dropout_rates.get(15, 0.0)

@@ -296,7 +296,11 @@ def _make_layers(self, scale: float):
 # due to using 0 as dropout value(this applies up to 1.13.1) so here is an explicit
 # check to convert any possible integer value to its decimal counterpart.
 custom_dropout = None if custom_dropout is None else float(custom_dropout)
-kernel_size = 3 if layer_type == [] else 1
+kernel_size = 3
+padding = 1
+if layer_type == ['k1']:
+    kernel_size = 1
+    padding = 0

 if layer == "p":
     layers += [

@@ -307,13 +311,13 @@ def _make_layers(self, scale: float):
 filters = round(layer * scale)
 if custom_dropout is None:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
     ]
 else:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
         nn.Dropout2d(p=custom_dropout, inplace=False),
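Taken in isolation, the kernel/padding selection introduced above behaves as follows. This is a standalone sketch of the ImageNet variant of the logic (which also sets padding = 1 for the default 3x3 case); pick_conv_params is a hypothetical helper name used only for illustration, not a function in the repo.

def pick_conv_params(layer_type):
    # Mirrors the lines added in the hunks above.
    kernel_size = 3
    padding = 1
    if layer_type == ['k1']:
        kernel_size = 1
        padding = 0  # 1x1 convolutions no longer zero-pad their input
    return kernel_size, padding

print(pick_conv_params([]))      # (3, 1) -> 3x3 conv, output keeps the input size
print(pick_conv_params(['k1']))  # (1, 0) -> 1x1 conv, output keeps the input size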

ImageNet/training_scripts/imagenet_training/timm/models/simplenet.py

Lines changed: 15 additions & 11 deletions
@@ -59,28 +59,28 @@ def _cfg(url="", **kwargs):
 
 default_cfgs: Dict[str, Dict[str, Any]] = {
     "simplenetv1_small_m1_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-a7ec600b.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_05-be804903.pth"
     ),
     "simplenetv1_small_m2_05": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-62617ea1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_05-ca4b3e2b.pth"
     ),
     "simplenetv1_small_m1_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-8427bf60.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m1_075-098acbff.pth"
     ),
     "simplenetv1_small_m2_075": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-da714eb5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_small_m2_075-609ff4da.pth"
     ),
     "simplenetv1_5m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-cc6b3ad1.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m1-36c4ca4d.pth"
     ),
     "simplenetv1_5m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-c35297bf.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_5m_m2-9bd6bb36.pth"
     ),
     "simplenetv1_9m_m1": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-8c98a0a5.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m1-524f9972.pth"
     ),
     "simplenetv1_9m_m2": _cfg(
-        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-6b01be1e.pth"
+        url="https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/v1.0.0/simplenetv1_9m_m2-59e8733b.pth"
     ),
 }

@@ -292,7 +292,11 @@ def _make_layers(self, scale: float):
 # due to using 0 as dropout value(this applies up to 1.13.1) so here is an explicit
 # check to convert any possible integer value to its decimal counterpart.
 custom_dropout = None if custom_dropout is None else float(custom_dropout)
-kernel_size = 3 if layer_type == [] else 1
+kernel_size = 3
+padding = 1
+if layer_type == ['k1']:
+    kernel_size = 1
+    padding = 0

 if layer == "p":
     layers += [

@@ -303,13 +307,13 @@ def _make_layers(self, scale: float):
 filters = round(layer * scale)
 if custom_dropout is None:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
     ]
 else:
     layers += [
-        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=1),
+        nn.Conv2d(input_channel, filters, kernel_size=kernel_size, stride=stride, padding=padding),
         nn.BatchNorm2d(filters, eps=1e-05, momentum=0.05, affine=True),
         nn.ReLU(inplace=True),
         nn.Dropout2d(p=custom_dropout, inplace=False),
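Since only the checkpoint URLs changed, a quick way to verify the new files is to download one directly. The URL below is copied from the updated default_cfgs; how the repo's factory functions consume the checkpoint is not shown here, so the final model-building step is only indicated as a hypothetical comment.

import torch
from torch.hub import load_state_dict_from_url

# URL taken from the updated default_cfgs above (simplenetv1_5m_m1).
url = ("https://github.com/Coderx7/SimpleNet_Pytorch/releases/download/"
       "v1.0.0/simplenetv1_5m_m1-36c4ca4d.pth")

# check_hash=True verifies the -36c4ca4d suffix in the file name against
# the SHA256 prefix of the downloaded file.
state_dict = load_state_dict_from_url(url, map_location="cpu", check_hash=True)
print(len(state_dict), "entries downloaded")

# Building the network itself is left to the repo's own model constructors
# registered alongside default_cfgs; e.g. (hypothetical, not a confirmed API):
# model = simplenetv1_5m_m1(pretrained=True)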
