Skip to content

Commit 84d9873

Browse files
authored
Add files via upload
1 parent 9774138 commit 84d9873

File tree

4 files changed

+2712
-0
lines changed

4 files changed

+2712
-0
lines changed

age_deploy.prototxt

Lines changed: 175 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,175 @@
# Deploy (inference-only) definition of a CaffeNet-style CNN for age
# classification: 3 conv blocks -> 2 fully-connected layers -> 8-way softmax.
# NOTE(review): written in the deprecated Caffe V1 prototxt syntax (`layers`
# blocks with enum layer types such as CONVOLUTION); current Caffe versions
# auto-upgrade it on load, or it can be migrated with upgrade_net_proto_text.
name: "CaffeNet"

# Input blob "data": batch of 1, 3 channels, 227x227 pixels.
# (presumably BGR, mean-subtracted crops per Caffe convention — confirm
# against the preprocessing code that feeds this net)
input: "data"
input_dim: 1
input_dim: 3
input_dim: 227
input_dim: 227

# --- Block 1: conv 96 x 7x7 / stride 4 -> ReLU -> 3x3 max-pool / stride 2 -> LRN ---
layers {
  name: "conv1"
  type: CONVOLUTION
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 7
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"
  top: "conv1"   # same top as bottom: in-place activation
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN   # local response normalization (AlexNet-era design)
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Block 2: conv 256 x 5x5 / pad 2 -> ReLU -> 3x3 max-pool / stride 2 -> LRN ---
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Block 3: conv 384 x 3x3 / pad 1 -> ReLU -> 3x3 max-pool / stride 2 ---
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  # NOTE(review): named "pool5" although it follows conv3 — the name must be
  # kept as-is so it matches the layer names stored in the trained .caffemodel.
  name: "pool5"
  type: POOLING
  bottom: "conv3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# --- Classifier head: fc6 (512) -> fc7 (512) -> fc8 (8 classes) -> softmax ---
# Dropout layers are no-ops at inference time but are kept so the layer list
# matches the network the weights were trained with.
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 8   # presumably 8 age buckets — confirm against the training label list
  }
}
layers {
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"   # per-class probabilities
}

gender_deploy.prototxt

Lines changed: 175 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,175 @@
# Deploy (inference-only) definition of a CaffeNet-style CNN for gender
# classification: 3 conv blocks -> 2 fully-connected layers -> 2-way softmax.
# Same architecture as the companion age network except for batch size and
# the number of output classes.
# NOTE(review): written in the deprecated Caffe V1 prototxt syntax (`layers`
# blocks with enum layer types such as CONVOLUTION); current Caffe versions
# auto-upgrade it on load, or it can be migrated with upgrade_net_proto_text.
name: "CaffeNet"

# Input blob "data": batch of 10, 3 channels, 227x227 pixels.
# NOTE(review): batch size is 10 here but 1 in the age deploy file — confirm
# this difference is intentional (callers can also reshape the input blob).
# (presumably BGR, mean-subtracted crops per Caffe convention — confirm
# against the preprocessing code that feeds this net)
input: "data"
input_dim: 10
input_dim: 3
input_dim: 227
input_dim: 227

# --- Block 1: conv 96 x 7x7 / stride 4 -> ReLU -> 3x3 max-pool / stride 2 -> LRN ---
layers {
  name: "conv1"
  type: CONVOLUTION
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 7
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"
  top: "conv1"   # same top as bottom: in-place activation
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN   # local response normalization (AlexNet-era design)
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Block 2: conv 256 x 5x5 / pad 2 -> ReLU -> 3x3 max-pool / stride 2 -> LRN ---
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Block 3: conv 384 x 3x3 / pad 1 -> ReLU -> 3x3 max-pool / stride 2 ---
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  # NOTE(review): named "pool5" although it follows conv3 — the name must be
  # kept as-is so it matches the layer names stored in the trained .caffemodel.
  name: "pool5"
  type: POOLING
  bottom: "conv3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# --- Classifier head: fc6 (512) -> fc7 (512) -> fc8 (2 classes) -> softmax ---
# Dropout layers are no-ops at inference time but are kept so the layer list
# matches the network the weights were trained with.
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 2   # presumably {male, female} — confirm class-index order against the inference code
  }
}
layers {
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"   # per-class probabilities
}

0 commit comments

Comments
 (0)