
getting / setting network parameters (third-time lucky with tab space formatting) #107


Closed. The pull request proposed to merge 69 commits; the diff below shows the changes from 1 commit.

Commits (69):
1526b39  modified: CMakeLists.txt  (jvo203, Nov 30, 2022)
d1b544c  new file: example/params.f90  (jvo203, Nov 30, 2022)
c3589ec  modified: example/params.f90  (jvo203, Nov 30, 2022)
b2ea2fd  modified: src/nf/nf_network.f90  (jvo203, Nov 30, 2022)
a1ec5b3  modified: example/params.f90  (jvo203, Nov 30, 2022)
f8ad474  modified: src/nf/nf_network.f90  (jvo203, Nov 30, 2022)
dcd9a43  modified: src/nf/nf_network_submodule.f90  (jvo203, Nov 30, 2022)
cb67eaa  modified: src/nf/nf_layer.f90  (jvo203, Nov 30, 2022)
7030a83  modified: src/nf/nf_dense_layer.f90  (jvo203, Nov 30, 2022)
d5b2968  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Nov 30, 2022)
a815383  modified: src/nf/nf_conv2d_layer_submodule.f90  (jvo203, Nov 30, 2022)
06553da  modified: example/params.f90  (jvo203, Nov 30, 2022)
f919f69  modified: src/nf/nf_network_submodule.f90  (jvo203, Nov 30, 2022)
73bae84  modified: example/params.f90  (jvo203, Nov 30, 2022)
041f365  modified: example/params.f90  (jvo203, Dec 1, 2022)
1ffbec0  modified: example/params.f90  (jvo203, Dec 1, 2022)
35789d8  modified: example/params.f90  (jvo203, Dec 1, 2022)
7fc5e1c  modified: src/nf/nf_network.f90  (jvo203, Dec 1, 2022)
d767e51  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 1, 2022)
0338f2b  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 1, 2022)
77f9ed9  modified: src/nf/nf_layer.f90  (jvo203, Dec 1, 2022)
ae58e0d  modified: src/nf/nf_network.f90  (jvo203, Dec 1, 2022)
7c3f862  modified: src/nf/nf_network.f90  (jvo203, Dec 1, 2022)
48f6d71  modified: src/nf/nf_dense_layer.f90  (jvo203, Dec 1, 2022)
1be2fab  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 1, 2022)
6863113  modified: src/nf/nf_layer_submodule.f90  (jvo203, Dec 1, 2022)
cfe2a42  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 1, 2022)
90acbf2  modified: src/nf/nf_conv2d_layer.f90  (jvo203, Dec 1, 2022)
c48dfab  modified: src/nf/nf_conv2d_layer_submodule.f90  (jvo203, Dec 1, 2022)
a509ae5  modified: example/params.f90  (jvo203, Dec 1, 2022)
75f21e1  modified: src/nf/nf_network.f90  (jvo203, Dec 1, 2022)
18ccb86  modified: src/nf/nf_layer.f90  (jvo203, Dec 1, 2022)
ef2bce9  modified: example/params.f90  (jvo203, Dec 1, 2022)
786c01d  modified: src/nf/nf_network.f90  (jvo203, Dec 1, 2022)
832c672  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 1, 2022)
99ae50b  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 1, 2022)
7072334  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 1, 2022)
7b76159  modified: src/nf/nf_conv2d_layer_submodule.f90  (jvo203, Dec 1, 2022)
8d5959f  get_parameters() is now PURE  (jvo203, Dec 1, 2022)
ff22439  modified: example/params.f90  (jvo203, Dec 1, 2022)
4df9803  modified: example/params.f90  (jvo203, Dec 2, 2022)
4167dab  modified: example/params.f90  (jvo203, Dec 2, 2022)
4677428  modified: example/params.f90  (jvo203, Dec 2, 2022)
75bfa28  modified: example/params.f90  (jvo203, Dec 2, 2022)
1bb05ca  modified: example/params.f90  (jvo203, Dec 2, 2022)
40eb92b  modified: src/nf/nf_network.f90  (jvo203, Dec 2, 2022)
6ea7e77  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 2, 2022)
a750fc9  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 2, 2022)
4d0f2a9  modified: src/nf/nf_layer.f90  (jvo203, Dec 2, 2022)
7243998  modified: src/nf/nf_layer_submodule.f90  (jvo203, Dec 2, 2022)
6d17ccc  modified: src/nf/nf_layer_submodule.f90  (jvo203, Dec 2, 2022)
ba7b69a  modified: src/nf/nf_dense_layer.f90  (jvo203, Dec 2, 2022)
d6e8155  modified: src/nf/nf_dense_layer_submodule.f90  (jvo203, Dec 2, 2022)
9523b6e  modified: src/nf/nf_layer_submodule.f90  (jvo203, Dec 2, 2022)
18f2a05  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 2, 2022)
18caece  modified: src/nf/nf_layer_submodule.f90  (jvo203, Dec 2, 2022)
19fd7c6  modified: src/nf/nf_conv2d_layer.f90  (jvo203, Dec 2, 2022)
4680558  modified: src/nf/nf_conv2d_layer_submodule.f90  (jvo203, Dec 2, 2022)
7fb1cac  modified: src/nf/nf_conv2d_layer_submodule.f90  (jvo203, Dec 2, 2022)
89b0f35  modified: example/params.f90  (jvo203, Dec 3, 2022)
e12ebc2  modified: src/nf/nf_network.f90  (jvo203, Dec 3, 2022)
3dd091c  modified: src/nf/nf_layer.f90  (jvo203, Dec 3, 2022)
6af35bd  modified: src/nf/nf_dense_layer.f90  (jvo203, Dec 3, 2022)
a41f25f  modified: src/nf/nf_conv2d_layer.f90  (jvo203, Dec 3, 2022)
aa88ecf  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 19, 2022)
0a2ac33  modified: src/nf/nf_network_submodule.f90  (jvo203, Dec 19, 2022)
c886038  modified: src/nf/nf_layer.f90  (jvo203, Dec 19, 2022)
10092d7  modified: src/nf/nf_dense_layer.f90  (jvo203, Dec 19, 2022)
d51417f  modified: src/nf/nf_conv2d_layer.f90  (jvo203, Dec 19, 2022)
Changes shown from commit cb67eaa60ef023c44ce496be07a0e68ef4d34002 ("modified: src/nf/nf_layer.f90"), committed by jvo203 on Nov 30, 2022:

src/nf/nf_conv2d_layer.f90 (207 changes: 108 additions, 99 deletions)

@@ -1,103 +1,112 @@

The 99 removed lines are identical to the corresponding added lines except that they lack the new `get_num_params` type-bound procedure and its interface block; the whole-body rewrite apparently reflects the tab/space re-formatting mentioned in the PR title. The file as added by this commit:

module nf_conv2d_layer

  !! This modules provides a 2-d convolutional `conv2d_layer` type.

  use nf_activation_3d, only: activation_function
  use nf_base_layer, only: base_layer
  implicit none

  private
  public :: conv2d_layer

  type, extends(base_layer) :: conv2d_layer

    integer :: width
    integer :: height
    integer :: channels
    integer :: kernel_size
    integer :: filters

    real, allocatable :: biases(:) ! size(filters)
    real, allocatable :: kernel(:,:,:,:) ! filters x channels x window x window
    real, allocatable :: output(:,:,:) ! filters x output_width * output_height
    real, allocatable :: z(:,:,:) ! kernel .dot. input + bias

    real, allocatable :: dw(:,:,:,:) ! weight (kernel) gradients
    real, allocatable :: db(:) ! bias gradients
    real, allocatable :: gradient(:,:,:)

    procedure(activation_function), pointer, nopass :: &
      activation => null()
    procedure(activation_function), pointer, nopass :: &
      activation_prime => null()

  contains

    procedure :: init
    procedure :: forward
    procedure :: get_num_params
    procedure :: backward
    procedure :: set_activation
    procedure :: update

  end type conv2d_layer

  interface conv2d_layer
    pure module function conv2d_layer_cons(filters, kernel_size, activation) &
      result(res)
      !! `conv2d_layer` constructor function
      integer, intent(in) :: filters
      integer, intent(in) :: kernel_size
      character(*), intent(in) :: activation
      type(conv2d_layer) :: res
    end function conv2d_layer_cons
  end interface conv2d_layer

  interface

    module subroutine init(self, input_shape)
      !! Initialize the layer data structures.
      !!
      !! This is a deferred procedure from the `base_layer` abstract type.
      class(conv2d_layer), intent(in out) :: self
        !! A `conv2d_layer` instance
      integer, intent(in) :: input_shape(:)
        !! Input layer dimensions
    end subroutine init

    pure module subroutine forward(self, input)
      !! Apply a forward pass on the `conv2d` layer.
      class(conv2d_layer), intent(in out) :: self
        !! A `conv2d_layer` instance
      real, intent(in) :: input(:,:,:)
        !! Input data
    end subroutine forward

    pure module subroutine backward(self, input, gradient)
      !! Apply a backward pass on the `conv2d` layer.
      class(conv2d_layer), intent(in out) :: self
        !! A `conv2d_layer` instance
      real, intent(in) :: input(:,:,:)
        !! Input data (previous layer)
      real, intent(in) :: gradient(:,:,:)
        !! Gradient (next layer)
    end subroutine backward

    pure module function get_num_params(self) result(res)
      !! Get the number of parameters in the layer.
      class(conv2d_layer), intent(in) :: self
        !! A `conv2d_layer` instance
      integer :: res
        !! Number of parameters
    end function get_num_params

    elemental module subroutine set_activation(self, activation)
      !! Set the activation functions.
      class(conv2d_layer), intent(in out) :: self
        !! Layer instance
      character(*), intent(in) :: activation
        !! String with the activation function name
    end subroutine set_activation

    module subroutine update(self, learning_rate)
      !! Update the weights and biases.
      class(conv2d_layer), intent(in out) :: self
        !! Dense layer instance
      real, intent(in) :: learning_rate
        !! Learning rate (must be > 0)
    end subroutine update

  end interface

end module nf_conv2d_layer
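
This commit only declares the `get_num_params` interface for `conv2d_layer`; the body belongs in src/nf/nf_conv2d_layer_submodule.f90, which is not shown in this diff. A minimal sketch of such an implementation, assuming the count is simply every kernel weight plus every bias (the actual code in the PR may differ):

! Sketch only, not taken from this PR. The real submodule also implements
! init, forward, backward, set_activation and update.
submodule(nf_conv2d_layer) nf_conv2d_layer_submodule

contains

  pure module function get_num_params(self) result(res)
    !! Total number of learnable parameters:
    !! filters * channels * kernel_size**2 kernel weights plus one bias per filter.
    class(conv2d_layer), intent(in) :: self
    integer :: res
    res = product(shape(self % kernel)) + size(self % biases)
  end function get_num_params

end submodule nf_conv2d_layer_submodule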
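
The PR's example/params.f90 (also not part of this commit) is where these per-layer counts get exercised through the network type. Below is a hypothetical, minimal usage sketch; the network-level methods get_num_params, get_parameters and set_parameters, and their signatures, are assumptions inferred from the PR title and commit messages, not confirmed by the diff above:

program params_sketch
  ! Hypothetical usage only; the network-level method names and signatures
  ! below are assumptions, not confirmed by the diff shown above.
  use nf, only: dense, input, network
  implicit none

  type(network) :: net
  real, allocatable :: params(:)
  integer :: n

  ! A small dense network: 3 inputs -> 5 hidden -> 2 outputs.
  net = network([input(3), dense(5), dense(2)])

  ! Assumed to sum the per-layer get_num_params() results.
  n = net % get_num_params()
  print '(a, i0)', 'number of parameters: ', n

  ! Assumed to return a flat copy of all weights and biases ...
  params = net % get_parameters()
  params = 0.5 * params
  ! ... and to write such a flat array back into the layers.
  call net % set_parameters(params)

end program params_sketch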