Commit 10e54d0

embedding_layer: add more comments
1 parent f1b414c

File tree

2 files changed, +9 -0 lines changed


src/nf/nf_embedding_layer.f90

Lines changed: 1 addition & 0 deletions
@@ -58,6 +58,7 @@ pure module subroutine backward(self, input, gradient)
   end subroutine backward

   pure module subroutine positional_encoding(self, pos)
+    !! Sum embedding with positional info (trigonometric, not trainable)
     class(embedding_layer), intent(in out) :: self
     integer, intent(in) :: pos
   end subroutine positional_encoding
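
Note: the "trigonometric" encoding referenced in the new comment is typically the sinusoidal scheme PE(pos, 2i) = sin(pos / 10000^(2i/d)) and PE(pos, 2i+1) = cos(pos / 10000^(2i/d)). A minimal standalone Fortran sketch under that assumption (illustrative names, not the library's internals):

  ! Sketch of a trigonometric (sinusoidal) positional encoding vector
  ! for a single position. Assumes model_dimension is even; all names
  ! here are hypothetical and not part of nf_embedding_layer.f90.
  pure function positional_trig(pos, model_dimension) result(pe)
    implicit none
    integer, intent(in) :: pos, model_dimension
    real :: pe(model_dimension)
    integer :: i
    real :: theta
    do i = 1, model_dimension / 2
      ! Frequency falls off geometrically with the dimension index
      theta = real(pos) / 10000.0 ** (real(2 * (i - 1)) / real(model_dimension))
      pe(2*i - 1) = sin(theta)
      pe(2*i)     = cos(theta)
    end do
  end function positional_trig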

src/nf/nf_layer_constructors.f90

Lines changed: 8 additions & 0 deletions
@@ -234,6 +234,14 @@ module function self_attention(num_heads) result(res)
   end function self_attention

   module function embedding(sequence_length, vocab_size, model_dimension) result(res)
+    !! Embedding layer constructor.
+    !!
+    !! This layer takes token indices from the vocabulary as input to the network.
+    !! It works as a trainable lookup table that converts each index into a vector.
+    !! The embedding layer must be the first layer in a network.
+    !! `sequence_length`: maximum length of the input sequence
+    !! `vocab_size`: number of tokens in the vocabulary
+    !! `model_dimension`: size of the target embedding vectors
     integer, intent(in) :: sequence_length, vocab_size, model_dimension
     type(layer) :: res
   end function embedding
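
A minimal usage sketch for this constructor, assuming the library's top-level nf module exports network, embedding, and the self_attention constructor visible in the hunk header above (the sizes and the layer arrangement are illustrative assumptions, not taken from this commit):

  program embedding_usage
    use nf, only: network, embedding, self_attention
    implicit none
    type(network) :: net

    ! Embedding must be the first layer: sequences of up to 64 token
    ! indices, a 10000-token vocabulary, 512-dimensional embeddings.
    net = network([ &
      embedding(sequence_length=64, vocab_size=10000, model_dimension=512), &
      self_attention(num_heads=8) &
    ])
  end program embedding_usage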
