
Commit c459ab8

Merge pull request #66 from milancurcic/maxpool-layer
Forward pass for a max-pooling layer
2 parents d1b988a + 6898c1d commit c459ab8

9 files changed: +334 −15 lines

CMakeLists.txt

Lines changed: 3 additions & 1 deletion

@@ -86,6 +86,8 @@ add_library(neural
   src/nf_layer_submodule.f90
   src/nf_loss.f90
   src/nf_loss_submodule.f90
+  src/nf_maxpool2d_layer.f90
+  src/nf_maxpool2d_layer_submodule.f90
   src/nf_network.f90
   src/nf_network_submodule.f90
   src/nf_optimizers.f90
@@ -100,7 +102,7 @@ string(REGEX REPLACE "^ | $" "" LIBS "${LIBS}")

 # tests
 enable_testing()
-foreach(execid input1d_layer input3d_layer dense_layer conv2d_layer dense_network conv2d_network)
+foreach(execid input1d_layer input3d_layer dense_layer conv2d_layer maxpool2d_layer dense_network conv2d_network)
   add_executable(test_${execid} test/test_${execid}.f90)
   target_link_libraries(test_${execid} neural ${LIBS})
   add_test(test_${execid} bin/test_${execid})
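The new `foreach` entry expects a test program at `test/test_maxpool2d_layer.f90`, which is not shown in this view. A hypothetical minimal check in the same spirit, assuming the `layer % name` component is accessible as in the other tests, might look like the sketch below; it is not the test added by this commit.

```fortran
program test_maxpool2d_layer_sketch
  ! Hypothetical sketch only; the actual test file added by this commit is not shown here.
  use nf, only: maxpool2d, layer
  implicit none
  type(layer) :: maxpool_layer

  maxpool_layer = maxpool2d(pool_size=2)

  ! The high-level constructor should tag the wrapped layer with its name.
  if (maxpool_layer % name /= 'maxpool2d') then
    print *, 'FAIL: constructor did not set the layer name to maxpool2d'
    stop 1
  end if

  print *, 'maxpool2d constructor sketch: OK'

end program test_maxpool2d_layer_sketch
```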

README.md

Lines changed: 9 additions & 7 deletions

@@ -16,18 +16,20 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).

 ## Features

-* Dense, fully connected neural networks of arbitrary shape and size
-* Backprop with Mean Square Error cost function
+* Dense, fully connected neural layers
+* Convolutional and max-pooling layers (experimental, forward propagation only)
+* Stochastic and mini-batch gradient descent for back-propagation
 * Data-based parallelism
 * Several activation functions

 ### Available layer types

-| Layer type | Constructor name | Rank of output array | Forward pass | Backward pass |
-|------------|------------------|----------------------|--------------|---------------|
-| Input | `input` | 1, 3 | n/a | n/a |
-| Dense (fully-connected) | `dense` | 1 | ✅ | ✅ |
-| Convolutional (2-d) | `conv2d` | 3 | ✅ | ❌ |
+| Layer type | Constructor name | Supported input layers | Rank of output array | Forward pass | Backward pass |
+|------------|------------------|------------------------|----------------------|--------------|---------------|
+| Input (1-d and 3-d) | `input` | n/a | 1, 3 | n/a | n/a |
+| Dense (fully-connected) | `dense` | `input` (1-d) | 1 | ✅ | ✅ |
+| Convolutional (2-d) | `conv2d` | `input` (3-d), `conv2d`, `maxpool2d` | 3 | ✅ | ❌ |
+| Max-pooling (2-d) | `maxpool2d` | `input` (3-d), `conv2d`, `maxpool2d` | 3 | ✅ | ❌ |

 ## Getting started
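For context, a short user-facing sketch of how the new layer slots into a model; the `network` constructor call and the 3-d `input` shape ordering follow the README's existing examples, and the specific channel counts, filter counts, and kernel size are illustrative assumptions only.

```fortran
program cnn_sketch
  ! Illustrative sketch; argument values are assumptions, not taken from this commit.
  use nf, only: network, input, conv2d, maxpool2d
  implicit none
  type(network) :: net

  ! A 3-channel 32 x 32 input, one convolution, then 2 x 2 max-pooling.
  ! Only the forward pass is implemented for conv2d and maxpool2d so far.
  net = network([ &
    input(3, 32, 32), &
    conv2d(filters=16, kernel_size=3, activation='relu'), &
    maxpool2d(pool_size=2) &
  ])

end program cnn_sketch
```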

src/nf.f90

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 module nf
   use nf_datasets_mnist, only: label_digits, load_mnist
   use nf_layer, only: layer
-  use nf_layer_constructors, only: conv2d, dense, input
+  use nf_layer_constructors, only: conv2d, dense, input, maxpool2d
   use nf_network, only: network
 end module nf

src/nf_layer_constructors.f90

Lines changed: 23 additions & 1 deletion

@@ -7,7 +7,7 @@ module nf_layer_constructors
   implicit none

   private
-  public :: conv2d, dense, input
+  public :: conv2d, dense, input, maxpool2d

   interface input

@@ -111,6 +111,28 @@ pure module function conv2d(filters, kernel_size, activation) result(res)
         !! Resulting layer instance
     end function conv2d

+    pure module function maxpool2d(pool_size, stride) result(res)
+      !! 2-d maxpooling layer constructor.
+      !!
+      !! This layer is for downscaling other layers, typically `conv2d`.
+      !!
+      !! Example:
+      !!
+      !! ```
+      !! use nf, only: maxpool2d, layer
+      !! type(layer) :: maxpool2d_layer
+      !! maxpool2d_layer = maxpool2d(pool_size=2)
+      !! maxpool2d_layer = maxpool2d(pool_size=2, stride=3)
+      !! ```
+      integer, intent(in) :: pool_size
+        !! Width of the pooling window, commonly 2
+      integer, intent(in), optional :: stride
+        !! Stride of the pooling window, commonly equal to `pool_size`;
+        !! defaults to `pool_size` if omitted
+      type(layer) :: res
+        !! Resulting layer instance
+    end function maxpool2d
+
   end interface

 end module nf_layer_constructors

src/nf_layer_constructors_submodule.f90

Lines changed: 30 additions & 0 deletions

@@ -5,6 +5,7 @@
   use nf_dense_layer, only: dense_layer
   use nf_input1d_layer, only: input1d_layer
   use nf_input3d_layer, only: input3d_layer
+  use nf_maxpool2d_layer, only: maxpool2d_layer

   implicit none

@@ -72,4 +73,33 @@ pure module function conv2d(filters, kernel_size, activation) result(res)

   end function conv2d

+
+  pure module function maxpool2d(pool_size, stride) result(res)
+    integer, intent(in) :: pool_size
+    integer, intent(in), optional :: stride
+    integer :: stride_
+    type(layer) :: res
+
+    if (pool_size < 2) &
+      error stop 'pool_size must be >= 2 in a maxpool2d layer'
+
+    ! Stride defaults to pool_size if not provided
+    if (present(stride)) then
+      stride_ = stride
+    else
+      stride_ = pool_size
+    end if
+
+    if (stride_ < 1) &
+      error stop 'stride must be >= 1 in a maxpool2d layer'
+
+    res % name = 'maxpool2d'
+
+    allocate( &
+      res % p, &
+      source=maxpool2d_layer(pool_size, stride_) &
+    )
+
+  end function maxpool2d
+
 end submodule nf_layer_constructors_submodule

src/nf_layer_submodule.f90

Lines changed: 26 additions & 5 deletions

@@ -4,6 +4,7 @@
   use nf_dense_layer, only: dense_layer
   use nf_input1d_layer, only: input1d_layer
   use nf_input3d_layer, only: input3d_layer
+  use nf_maxpool2d_layer, only: maxpool2d_layer

   implicit none

@@ -51,12 +52,26 @@ pure module subroutine forward(self, input)

       type is(conv2d_layer)

-        ! Input layers permitted: input3d, conv2d
+        ! Input layers permitted: input3d, conv2d, maxpool2d
         select type(prev_layer => input % p)
           type is(input3d_layer)
             call this_layer % forward(prev_layer % output)
           type is(conv2d_layer)
             call this_layer % forward(prev_layer % output)
+          type is(maxpool2d_layer)
+            call this_layer % forward(prev_layer % output)
+        end select
+
+      type is(maxpool2d_layer)
+
+        ! Input layers permitted: input3d, conv2d, maxpool2d
+        select type(prev_layer => input % p)
+          type is(input3d_layer)
+            call this_layer % forward(prev_layer % output)
+          type is(conv2d_layer)
+            call this_layer % forward(prev_layer % output)
+          type is(maxpool2d_layer)
+            call this_layer % forward(prev_layer % output)
         end select

     end select

@@ -92,8 +107,10 @@ pure module subroutine get_output_3d(self, output)
         allocate(output, source=this_layer % output)
       type is(conv2d_layer)
         allocate(output, source=this_layer % output)
+      type is(maxpool2d_layer)
+        allocate(output, source=this_layer % output)
       class default
-        error stop '3-d output can only be read from an input3d or conv2d layer.'
+        error stop '3-d output can only be read from an input3d, conv2d, or maxpool2d layer.'

     end select

@@ -111,9 +128,13 @@ impure elemental module subroutine init(self, input)
       call this_layer % init(input % layer_shape)
     end select

-    ! The shape of a conv2d layer is not known until we receive an input layer.
-    select type(this_layer => self % p); type is(conv2d_layer)
-      self % layer_shape = shape(this_layer % output)
+    ! The shape of conv2d or maxpool2d layers is not known
+    ! until we receive an input layer.
+    select type(this_layer => self % p)
+      type is(conv2d_layer)
+        self % layer_shape = shape(this_layer % output)
+      type is(maxpool2d_layer)
+        self % layer_shape = shape(this_layer % output)
     end select

     self % input_layer_shape = input % layer_shape
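The forward dispatch above relies on Fortran's `select type` construct to branch on the dynamic type of the previous layer. A minimal standalone sketch of that pattern, with placeholder types that are not part of neural-fortran:

```fortran
program select_type_demo
  ! Placeholder types for illustration only; not part of neural-fortran.
  implicit none

  type :: base
  end type base

  type, extends(base) :: conv_like
  end type conv_like

  type, extends(base) :: pool_like
  end type pool_like

  class(base), allocatable :: p

  ! The branch taken depends on the dynamic type, resolved at run time.
  allocate(pool_like :: p)

  select type(p)
    type is(conv_like)
      print *, 'dispatch to the conv-like branch'
    type is(pool_like)
      print *, 'dispatch to the pool-like branch'
    class default
      print *, 'unsupported type'
  end select

end program select_type_demo
```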

src/nf_maxpool2d_layer.f90

Lines changed: 75 additions & 0 deletions

@@ -0,0 +1,75 @@
+module nf_maxpool2d_layer
+
+  !! This module provides the 2-d maxpooling layer.
+
+  use nf_base_layer, only: base_layer
+  implicit none
+
+  private
+  public :: maxpool2d_layer
+
+  type, extends(base_layer) :: maxpool2d_layer
+
+    integer :: channels
+    integer :: width
+    integer :: height
+    integer :: pool_size
+    integer :: stride
+
+    ! Locations (as input matrix indices) of the maximum values
+    ! in the width (x) and height (y) dimensions
+    integer, allocatable :: maxloc_x(:,:,:)
+    integer, allocatable :: maxloc_y(:,:,:)
+
+    real, allocatable :: output(:,:,:)
+
+  contains
+
+    procedure :: init
+    procedure :: forward
+    procedure :: backward
+
+  end type maxpool2d_layer
+
+  interface maxpool2d_layer
+    pure module function maxpool2d_layer_cons(pool_size, stride) result(res)
+      !! `maxpool2d` constructor function
+      integer, intent(in) :: pool_size
+        !! Width and height of the pooling window
+      integer, intent(in) :: stride
+        !! Stride of the pooling window
+      type(maxpool2d_layer) :: res
+    end function maxpool2d_layer_cons
+  end interface maxpool2d_layer
+
+  interface
+
+    module subroutine init(self, input_shape)
+      !! Initialize the `maxpool2d` layer instance with an input shape.
+      class(maxpool2d_layer), intent(in out) :: self
+        !! `maxpool2d_layer` instance
+      integer, intent(in) :: input_shape(:)
+        !! Array shape of the input layer
+    end subroutine init
+
+    pure module subroutine forward(self, input)
+      !! Run a forward pass of the `maxpool2d` layer.
+      class(maxpool2d_layer), intent(in out) :: self
+        !! `maxpool2d_layer` instance
+      real, intent(in) :: input(:,:,:)
+        !! Input data (output of the previous layer)
+    end subroutine forward
+
+    module subroutine backward(self, input, gradient)
+      !! Run a backward pass of the `maxpool2d` layer.
+      class(maxpool2d_layer), intent(in out) :: self
+        !! `maxpool2d_layer` instance
+      real, intent(in) :: input(:,:,:)
+        !! Input data (output of the previous layer)
+      real, intent(in) :: gradient(:,:,:)
+        !! Gradient from the downstream layer
+    end subroutine backward
+
+  end interface
+
+end module nf_maxpool2d_layer
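A hypothetical direct use of this low-level type (in practice it is reached through the high-level `maxpool2d` constructor and `layer` wrapper); the 3 × 32 × 32 input shape is an illustrative assumption:

```fortran
program maxpool2d_layer_type_sketch
  ! Illustrative only; user code normally goes through the nf `maxpool2d` constructor.
  use nf_maxpool2d_layer, only: maxpool2d_layer
  implicit none
  type(maxpool2d_layer) :: pool

  pool = maxpool2d_layer(pool_size=2, stride=2)

  ! Allocate state for a 3-channel, 32 x 32 input; the pooled output is 3 x 16 x 16.
  call pool % init([3, 32, 32])
  print *, shape(pool % output)

end program maxpool2d_layer_type_sketch
```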

src/nf_maxpool2d_layer_submodule.f90

Lines changed: 89 additions & 0 deletions

@@ -0,0 +1,89 @@
+submodule(nf_maxpool2d_layer) nf_maxpool2d_layer_submodule
+
+  implicit none
+
+contains
+
+  pure module function maxpool2d_layer_cons(pool_size, stride) result(res)
+    implicit none
+    integer, intent(in) :: pool_size
+    integer, intent(in) :: stride
+    type(maxpool2d_layer) :: res
+    res % pool_size = pool_size
+    res % stride = stride
+  end function maxpool2d_layer_cons
+
+
+  module subroutine init(self, input_shape)
+    implicit none
+    class(maxpool2d_layer), intent(in out) :: self
+    integer, intent(in) :: input_shape(:)
+
+    self % channels = input_shape(1)
+    self % width = input_shape(2) / self % stride
+    self % height = input_shape(3) / self % stride
+
+    allocate(self % maxloc_x(self % channels, self % width, self % height))
+    self % maxloc_x = 0
+
+    allocate(self % maxloc_y(self % channels, self % width, self % height))
+    self % maxloc_y = 0
+
+    allocate(self % output(self % channels, self % width, self % height))
+    self % output = 0
+
+  end subroutine init
+
+
+  pure module subroutine forward(self, input)
+    implicit none
+    class(maxpool2d_layer), intent(in out) :: self
+    real, intent(in) :: input(:,:,:)
+    integer :: input_width, input_height
+    integer :: i, j, n
+    integer :: ii, jj
+    integer :: iend, jend
+    integer :: maxloc_xy(2)
+
+    input_width = size(input, dim=2)
+    input_height = size(input, dim=3)
+
+    ! Stride along the width and height of the input image
+    stride_over_input: do concurrent( &
+      i = 1:input_width:self % stride, &
+      j = 1:input_height:self % stride &
+    )
+
+      ! Indices of the pooling layer
+      ii = i / self % stride + 1
+      jj = j / self % stride + 1
+
+      iend = i + self % pool_size - 1
+      jend = j + self % pool_size - 1
+
+      maxpool_for_each_channel: do concurrent(n = 1:self % channels)
+
+        ! Get and store the location of the maximum value
+        maxloc_xy = maxloc(input(n,i:iend,j:jend))
+        self % maxloc_x(n,ii,jj) = maxloc_xy(1) + i - 1
+        self % maxloc_y(n,ii,jj) = maxloc_xy(2) + j - 1
+
+        self % output(n,ii,jj) = &
+          input(n,self % maxloc_x(n,ii,jj),self % maxloc_y(n,ii,jj))
+
+      end do maxpool_for_each_channel
+
+    end do stride_over_input
+
+  end subroutine forward
+
+
+  module subroutine backward(self, input, gradient)
+    implicit none
+    class(maxpool2d_layer), intent(in out) :: self
+    real, intent(in) :: input(:,:,:)
+    real, intent(in) :: gradient(:,:,:)
+    print *, 'Warning: maxpool2d backward pass not implemented'
+  end subroutine backward
+
+end submodule nf_maxpool2d_layer_submodule
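To make the index arithmetic concrete, here is a standalone sketch of the same pooling loop on a hypothetical 1 × 4 × 4 input with `pool_size = 2` and `stride = 2`; it is not part of the commit and omits the `maxloc_x`/`maxloc_y` bookkeeping.

```fortran
program maxpool_forward_sketch
  ! Hypothetical worked example of the ii = i / stride + 1 mapping used above.
  implicit none
  integer, parameter :: pool_size = 2, stride = 2
  real :: input(1,4,4), output(1,2,2)
  integer :: i, j, ii, jj

  ! Single channel filled with the values 1..16 in column-major order.
  input(1,:,:) = reshape([(real(i), i = 1, 16)], [4, 4])

  do j = 1, 4, stride
    do i = 1, 4, stride
      ii = i / stride + 1   ! i = 1 -> ii = 1, i = 3 -> ii = 2 (integer division)
      jj = j / stride + 1
      output(1,ii,jj) = maxval(input(1, i:i+pool_size-1, j:j+pool_size-1))
    end do
  end do

  print '(4f6.1)', output   ! prints 6.0 8.0 14.0 16.0

end program maxpool_forward_sketch
```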
