
Commit 9082db8: Bug fix
1 parent: b64038a

3 files changed: +23 lines, -25 lines

src/nf/nf_layer_constructors.f90

Lines changed: 17 additions & 18 deletions
@@ -208,22 +208,21 @@ module function conv2d(filters, kernel_size, activation) result(res)
     end function conv2d
 
     module function locally_connected_1d(filters, kernel_size, activation) result(res)
-      !! CHANGE THE COMMENTS!!!
-      !! 2-d convolutional layer constructor.
+      !! 1-d locally connected network constructor
       !!
-      !! This layer is for building 2-d convolutional network.
-      !! Although the established convention is to call these layers 2-d,
-      !! the shape of the data is actuall 3-d: image width, image height,
+      !! This layer is for building 1-d locally connected network.
+      !! Although the established convention is to call these layers 1-d,
+      !! the shape of the data is actuall 2-d: image width,
       !! and the number of channels.
-      !! A conv2d layer must not be the first layer in the network.
+      !! A locally connected 1d layer must not be the first layer in the network.
       !!
       !! Example:
       !!
       !! ```
-      !! use nf, only :: conv2d, layer
-      !! type(layer) :: conv2d_layer
-      !! conv2d_layer = dense(filters=32, kernel_size=3)
-      !! conv2d_layer = dense(filters=32, kernel_size=3, activation='relu')
+      !! use nf, only :: locally_connected_1d, layer
+      !! type(layer) :: locally_connected_1d_layer
+      !! locally_connected_1d_layer = dense(filters=32, kernel_size=3)
+      !! locally_connected_1d_layer = dense(filters=32, kernel_size=3, activation='relu')
       !! ```
       integer, intent(in) :: filters
         !! Number of filters in the output of the layer
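
Note that the rewritten docstring example above still assigns the result of dense(), which looks like a leftover from the conv2d docstring it was adapted from. A minimal sketch of the presumably intended usage, based only on the constructor signature shown in this hunk (hypothetical, not part of the commit):

  program locally_connected_1d_example
    ! Hypothetical usage sketch; mirrors the docstring example but calls the
    ! locally_connected_1d constructor itself rather than dense().
    use nf, only: locally_connected_1d, layer
    implicit none
    type(layer) :: locally_connected_1d_layer
    locally_connected_1d_layer = locally_connected_1d(filters=32, kernel_size=3)
    locally_connected_1d_layer = locally_connected_1d(filters=32, kernel_size=3, activation='relu')
  end program locally_connected_1d_example
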
@@ -236,17 +235,17 @@ module function locally_connected_1d(filters, kernel_size, activation) result(re
     end function locally_connected_1d
 
     module function maxpool1d(pool_size, stride) result(res)
-      !! 2-d maxpooling layer constructor.
+      !! 1-d maxpooling layer constructor.
       !!
-      !! This layer is for downscaling other layers, typically `conv2d`.
+      !! This layer is for downscaling other layers, typically `conv1d`.
       !!
       !! Example:
       !!
       !! ```
-      !! use nf, only :: maxpool2d, layer
-      !! type(layer) :: maxpool2d_layer
-      !! maxpool2d_layer = maxpool2d(pool_size=2)
-      !! maxpool2d_layer = maxpool2d(pool_size=2, stride=3)
+      !! use nf, only :: maxpool1d, layer
+      !! type(layer) :: maxpool1d_layer
+      !! maxpool1d_layer = maxpool1d(pool_size=2)
+      !! maxpool1d_layer = maxpool1d(pool_size=2, stride=3)
       !! ```
       integer, intent(in) :: pool_size
         !! Width of the pooling window, commonly 2
@@ -292,9 +291,9 @@ end function reshape
 
     module function reshape2d(output_shape) result(res)
       !! Rank-1 to rank-any reshape layer constructor.
-      !! Currently implemented is only rank-3 for the output of the reshape.
+      !! Currently implemented is only rank-2 for the output of the reshape.
       !!
-      !! This layer is for connecting 1-d inputs to conv2d or similar layers.
+      !! This layer is for connecting 1-d inputs to conv1d or similar layers.
       integer, intent(in) :: output_shape(:)
         !! Shape of the output
       type(layer) :: res
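
Taken together, the corrected docstrings describe a 1-d pipeline: reshape2d connects a rank-1 input to conv1d-style layers, and maxpool1d downscales their output. A minimal composition sketch follows; the constructor names come from this commit, but the network() and input() calls and all layer sizes are illustrative assumptions rather than part of the change:

  program tiny_1d_network_sketch
    ! Assumed composition; sizes are made up for illustration.
    use nf, only: network, input, reshape2d, conv1d, maxpool1d
    implicit none
    type(network) :: net
    net = network([ &
      input(50), &                         ! rank-1 input of 50 values
      reshape2d([1, 50]), &                ! reshape to 1 channel x 50 samples for conv1d
      conv1d(filters=8, kernel_size=3), &  ! 1-d convolution along the width
      maxpool1d(pool_size=2) &             ! downscale the conv1d output
    ])
  end program tiny_1d_network_sketch
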

src/nf/nf_network_submodule.f90

Lines changed: 3 additions & 5 deletions
@@ -92,9 +92,9 @@ module function network_from_layers(layers) result(res)
           type is(conv1d_layer)
             res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
             n = n + 1
-          !type is(reshape2d_layer)
-          !  res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
-          !  n = n + 1
+          type is(reshape2d_layer)
+            res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
+            n = n + 1
           class default
             n = n + 1
         end select
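
With the commented-out branch enabled, network_from_layers now handles reshape2d_layer the same way as conv1d_layer at this point: a flatten() layer is spliced into the layer list, presumably so that a layer expecting flat input (typically dense) can directly follow a rank-2 output layer. A rough sketch of that presumed effect; the 2-argument input() call and the layer sizes are hypothetical and only illustrate the idea:

  program auto_flatten_sketch
    ! Illustrative only: dense() is written directly after conv1d(); per the
    ! hunk above, network_from_layers splices a flatten() layer in between.
    use nf, only: network, input, conv1d, dense
    implicit none
    type(network) :: cnn
    cnn = network([ &
      input(1, 5), &                       ! hypothetical (channels, width) input
      conv1d(filters=2, kernel_size=3), &  ! rank-2 output
      dense(1) &                           ! no explicit flatten() needed before this
    ])
  end program auto_flatten_sketch
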
@@ -163,7 +163,6 @@ module subroutine backward(self, output, loss)
             call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
           type is(conv2d_layer)
             call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
-
           type is(flatten_layer)
             if (size(self % layers(n) % layer_shape) == 2) then
               call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient_2d)
@@ -172,7 +171,6 @@ module subroutine backward(self, output, loss)
             end if
           type is(maxpool2d_layer)
             call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
-
           type is(reshape3d_layer)
             call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
           type is(linear2d_layer)

test/test_conv1d_network.f90

Lines changed: 3 additions & 2 deletions
@@ -41,7 +41,7 @@ program test_conv1d_network
   real :: y(1)
   real :: tolerance = 1e-4
   integer :: n
-  integer, parameter :: num_iterations = 1500
+  integer, parameter :: num_iterations = 1000
 
   ! Test training of a minimal constant mapping
   allocate(sample_input(1, 5))
@@ -60,6 +60,7 @@ program test_conv1d_network
     call cnn % forward(sample_input)
     call cnn % backward(y)
     call cnn % update(optimizer=sgd(learning_rate=1.))
+    print *, cnn % predict(sample_input), y
     if (all(abs(cnn % predict(sample_input) - y) < tolerance)) exit
   end do
 
@@ -79,7 +80,7 @@ program test_conv1d_network
   real :: y(1)
   real :: tolerance = 1e-4
   integer :: n
-  integer, parameter :: num_iterations = 1500
+  integer, parameter :: num_iterations = 1000
 
   call random_number(x)
   y = [0.1234567]
