
Commit 0b916ee

milancurcic and Richard Weed authored
Add linear activation function (#119)
Co-authored-by: Richard Weed <[email protected]>
1 parent e51a746 commit 0b916ee

4 files changed (+46 -6 lines)
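The new activation is the identity map, linear(x) = x with derivative 1, typically used on the output layer of regression models, and it is selected by name like the existing activations. A minimal usage sketch, assuming the string-based `activation` argument of the `dense` constructor and the `network`/`input` constructors exported by the `nf` module at this commit:

program linear_activation_example
  ! Hypothetical sketch: a small regression network whose output layer
  ! uses the new 'linear' (identity) activation selected by name.
  use nf, only: network, input, dense
  implicit none
  type(network) :: net

  net = network([ &
    input(3), &
    dense(8, activation='relu'), &
    dense(1, activation='linear') &
  ])

  call net % print_info()
end program linear_activation_example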

src/nf/nf_activation_1d.f90 (+15)
@@ -10,6 +10,7 @@ module nf_activation_1d
   public :: elu, elu_prime
   public :: exponential
   public :: gaussian, gaussian_prime
+  public :: linear, linear_prime
   public :: relu, relu_prime
   public :: sigmoid, sigmoid_prime
   public :: softmax, softmax_prime
@@ -72,6 +73,20 @@ pure function gaussian_prime(x) result(res)
     res = -2 * x * gaussian(x)
   end function gaussian_prime
 
+  pure function linear(x) result(res)
+    ! Linear activation function.
+    real, intent(in) :: x(:)
+    real :: res(size(x))
+    res = x
+  end function linear
+
+  pure function linear_prime(x) result(res)
+    ! First derivative of the linear activation function.
+    real, intent(in) :: x(:)
+    real :: res(size(x))
+    res = 1
+  end function linear_prime
+
   pure function relu(x) result(res)
     !! Rectified Linear Unit (ReLU) activation function.
     real, intent(in) :: x(:)
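A quick sanity check of the two new 1-d functions; a sketch only, assuming nf_activation_1d is directly accessible as a module from user code:

program check_linear_1d
  ! Sketch: verify that linear is the identity and its derivative is 1.
  use nf_activation_1d, only: linear, linear_prime
  implicit none
  real :: x(4) = [-2., -0.5, 0., 3.]

  print *, all(linear(x) == x)         ! T: linear(x) returns x unchanged
  print *, all(linear_prime(x) == 1.)  ! T: derivative is 1 everywhere
end program check_linear_1d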

src/nf/nf_activation_3d.f90 (+15)
@@ -10,6 +10,7 @@ module nf_activation_3d
   public :: elu, elu_prime
   public :: exponential
   public :: gaussian, gaussian_prime
+  public :: linear, linear_prime
   public :: relu, relu_prime
   public :: sigmoid, sigmoid_prime
   public :: softmax, softmax_prime
@@ -72,6 +73,20 @@ pure function gaussian_prime(x) result(res)
     res = -2 * x * gaussian(x)
   end function gaussian_prime
 
+  pure function linear(x) result(res)
+    ! Linear activation function.
+    real, intent(in) :: x(:,:,:)
+    real :: res(size(x,1),size(x,2),size(x,3))
+    res = x
+  end function linear
+
+  pure function linear_prime(x) result(res)
+    ! First derivative of the linear activation function.
+    real, intent(in) :: x(:,:,:)
+    real :: res(size(x,1),size(x,2),size(x,3))
+    res = 1
+  end function linear_prime
+
   pure function relu(x) result(res)
     !! Rectified Linear Unit (ReLU) activation function.
     real, intent(in) :: x(:,:,:)
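The 3-d variant mirrors the 1-d one so that rank-3 conv2d outputs can pass through unchanged. A similar sketch, again assuming the module is directly usable from user code:

program check_linear_3d
  ! Sketch: the 3-d linear activation preserves both values and shape.
  use nf_activation_3d, only: linear, linear_prime
  implicit none
  real :: x(2, 3, 4)

  call random_number(x)
  print *, all(linear(x) == x)                      ! T: identity on the array
  print *, all(shape(linear_prime(x)) == shape(x))  ! T: derivative has same shape
end program check_linear_3d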

src/nf/nf_conv2d_layer_submodule.f90 (+8 -3)
@@ -4,6 +4,7 @@
     elu, elu_prime, &
     exponential, &
     gaussian, gaussian_prime, &
+    linear, linear_prime, &
     relu, relu_prime, &
     sigmoid, sigmoid_prime, &
     softmax, softmax_prime, &
@@ -254,6 +255,11 @@ elemental module subroutine set_activation(self, activation)
         self % activation_prime => gaussian_prime
         self % activation_name = 'gaussian'
 
+      case('linear')
+        self % activation => linear
+        self % activation_prime => linear_prime
+        self % activation_name = 'linear'
+
       case('relu')
         self % activation => relu
         self % activation_prime => relu_prime
@@ -286,9 +292,8 @@ elemental module subroutine set_activation(self, activation)
 
       case default
         error stop 'Activation must be one of: ' // &
-          '"elu", "exponential", "gaussian", "relu", ' // &
-          '"sigmoid", "softmax", "softplus", "step", ' // &
-          'or "tanh".'
+          '"elu", "exponential", "gaussian", "linear", "relu", "sigmoid", ' // &
+          '"softmax", "softplus", "step", or "tanh".'
 
     end select
 
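With the dispatch case in place, a convolutional layer can request the linear activation by name. A hedged sketch, assuming the conv2d(filters, kernel_size, activation) and 3-d input(dim1, dim2, dim3) constructors exported by nf at this commit:

program conv2d_linear_example
  ! Hypothetical sketch: a conv2d layer with the new 'linear' (identity)
  ! activation, routed through set_activation when the network is built.
  use nf, only: network, input, conv2d
  implicit none
  type(network) :: net

  net = network([ &
    input(3, 32, 32), &
    conv2d(filters=16, kernel_size=3, activation='linear') &
  ])
end program conv2d_linear_example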

src/nf/nf_dense_layer_submodule.f90 (+8 -3)
@@ -4,6 +4,7 @@
     elu, elu_prime, &
     exponential, &
     gaussian, gaussian_prime, &
+    linear, linear_prime, &
     relu, relu_prime, &
     sigmoid, sigmoid_prime, &
     softmax, softmax_prime, &
@@ -157,6 +158,11 @@ elemental module subroutine set_activation(self, activation)
         self % activation_prime => gaussian_prime
         self % activation_name = 'gaussian'
 
+      case('linear')
+        self % activation => linear
+        self % activation_prime => linear_prime
+        self % activation_name = 'linear'
+
       case('relu')
         self % activation => relu
         self % activation_prime => relu_prime
@@ -189,9 +195,8 @@ elemental module subroutine set_activation(self, activation)
 
      case default
        error stop 'Activation must be one of: ' // &
-          '"elu", "exponential", "gaussian", "relu", ' // &
-          '"sigmoid", "softmax", "softplus", "step", ' // &
-          'or "tanh".'
+          '"elu", "exponential", "gaussian", "linear", "relu", "sigmoid", ' // &
+          '"softmax", "softplus", "step", or "tanh".'
 
    end select
 
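Both submodules also extend the case default error message so that "linear" is listed among the accepted names. An unrecognized name still halts; a sketch of that path, under the same constructor assumptions as above:

program bad_activation_example
  ! Sketch of the failure path: 'gelu' is not handled by set_activation,
  ! so the case default branch stops with the updated message.
  use nf, only: network, input, dense
  implicit none
  type(network) :: net

  net = network([input(3), dense(2, activation='gelu')])
  ! error stop: Activation must be one of: "elu", "exponential", "gaussian",
  ! "linear", "relu", "sigmoid", "softmax", "softplus", "step", or "tanh".
end program bad_activation_example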
