Skip to content

Commit d6575cf

Browse files
committed
Merge branch 'main' into concat
2 parents f676780 + 3049393 commit d6575cf

8 files changed

+55
-20
lines changed

src/nf/nf_activation.f90

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -733,7 +733,7 @@ pure function eval_3d_celu_prime(self, x) result(res)
733733
end function eval_3d_celu_prime
734734

735735
! Utility Functions
736-
function get_activation_by_name(activation_name) result(res)
736+
pure function get_activation_by_name(activation_name) result(res)
737737
character(len=*), intent(in) :: activation_name
738738
class(activation_function), allocatable :: res
739739

@@ -815,4 +815,4 @@ pure function get_name(self) result(name)
815815
end select
816816
end function get_name
817817

818-
end module nf_activation
818+
end module nf_activation

src/nf/nf_dense_layer.f90

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -98,15 +98,6 @@ module subroutine get_gradients_ptr(self, dw_ptr, db_ptr)
9898
real, pointer, intent(out) :: db_ptr(:)
9999
end subroutine get_gradients_ptr
100100

101-
module subroutine set_params(self, params)
102-
!! Set the parameters of this layer.
103-
!! The parameters are ordered as weights first, biases second.
104-
class(dense_layer), intent(in out) :: self
105-
!! Dense layer instance
106-
real, intent(in), target :: params(:)
107-
!! Parameters of this layer
108-
end subroutine set_params
109-
110101
module subroutine init(self, input_shape)
111102
!! Initialize the layer data structures.
112103
!!

src/nf/nf_dense_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
submodule(nf_dense_layer) nf_dense_layer_submodule
22

33
use nf_activation, only: activation_function
4-
use nf_base_layer, only: base_layer
54
use nf_random, only: random_normal
65

76
implicit none

src/nf/nf_embedding_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
#define ABSOLUTE 2
44

55
submodule(nf_embedding_layer) nf_embedding_layer_submodule
6-
use nf_base_layer, only: base_layer
76
implicit none
87
contains
98
module function embedding_layer_cons(vocab_size, model_dimension, positional) result(res)

src/nf/nf_flatten_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
!! It is used internally by the layer type.
55
!! It is not intended to be used directly by the user.
66

7-
use nf_base_layer, only: base_layer
87

98
implicit none
109

src/nf/nf_layer_constructors_submodule.f90

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
submodule(nf_layer_constructors) nf_layer_constructors_submodule
22

3-
use nf_layer, only: layer
43
use nf_conv1d_layer, only: conv1d_layer
54
use nf_conv2d_layer, only: conv2d_layer
65
use nf_dense_layer, only: dense_layer
@@ -18,7 +17,7 @@
1817
use nf_self_attention_layer, only: self_attention_layer
1918
use nf_embedding_layer, only: embedding_layer
2019
use nf_layernorm_layer, only: layernorm_layer
21-
use nf_activation, only: activation_function, relu, sigmoid
20+
use nf_activation, only: relu, sigmoid
2221

2322
implicit none
2423

src/nf/nf_multihead_attention_layer_submodule.f90

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
submodule(nf_multihead_attention_layer) nf_multihead_attention_layer_submodule
22
use nf_activation, only: softmax
3-
use nf_base_layer, only: base_layer
43
use nf_linear2d_layer, only: linear2d_layer
54

65
implicit none
@@ -288,7 +287,7 @@ module subroutine init_base(self, input_shape)
288287
self % model_dimension = input_shape(2)
289288

290289
if (mod(self % model_dimension, self % n_heads) /= 0) then
291-
write(stderr, '(a)'), 'Number of heads must be divisible by model dimension'
290+
write(stderr, '(a)') 'Number of heads must be divisible by model dimension'
292291
error stop
293292
end if
294293
self % head_size = self % model_dimension / self % n_heads
@@ -335,4 +334,4 @@ module subroutine init_base(self, input_shape)
335334
allocate(self % jacobian, mold=self % d_sdpa)
336335
allocate(self % d_normalize, mold=self % attention_matrix)
337336
end subroutine init_base
338-
end submodule nf_multihead_attention_layer_submodule
337+
end submodule nf_multihead_attention_layer_submodule

src/nf/nf_optimizers.f90

Lines changed: 50 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ module nf_optimizers
1818
type, abstract :: optimizer_base_type
1919
real :: learning_rate = 0.01
2020
contains
21+
procedure :: get_name
2122
procedure(init), deferred :: init
2223
procedure(minimize), deferred :: minimize
2324
end type optimizer_base_type
@@ -312,4 +313,52 @@ pure subroutine minimize_adagrad(self, param, gradient)
312313

313314
end subroutine minimize_adagrad
314315

315-
end module nf_optimizers
316+
317+
! Utility Functions
318+
!! Returns the default optimizer corresponding to the provided name
319+
pure function get_optimizer_by_name(optimizer_name) result(res)
320+
character(len=*), intent(in) :: optimizer_name
321+
class(optimizer_base_type), allocatable :: res
322+
323+
select case(trim(optimizer_name))
324+
case('adagrad')
325+
allocate ( res, source = adagrad() )
326+
327+
case('adam')
328+
allocate ( res, source = adam() )
329+
330+
case('rmsprop')
331+
allocate ( res, source = rmsprop() )
332+
333+
case('sgd')
334+
allocate ( res, source = sgd() )
335+
336+
case default
337+
error stop 'optimizer_name must be one of: ' // &
338+
'"adagrad", "adam", "rmsprop", "sgd".'
339+
end select
340+
341+
end function get_optimizer_by_name
342+
343+
344+
!! Returns the name of the optimizer
345+
pure function get_name(self) result(name)
346+
class(optimizer_base_type), intent(in) :: self
347+
character(:), allocatable :: name
348+
349+
select type (self)
350+
class is (adagrad)
351+
name = 'adagrad'
352+
class is (adam)
353+
name = 'adam'
354+
class is (rmsprop)
355+
name = 'rmsprop'
356+
class is (sgd)
357+
name = 'sgd'
358+
class default
359+
error stop 'Unknown optimizer type.'
360+
end select
361+
362+
end function get_name
363+
364+
end module nf_optimizers

0 commit comments

Comments (0)