
Commit 741f200

multihead_attention: remove some of the copypaste comments
1 parent e7911e9 · commit 741f200


1 file changed (+0, -3 lines)


src/nf/nf_multihead_attention.f90

Lines changed: 0 additions & 3 deletions
@@ -60,11 +60,8 @@ module subroutine backward(self, input, gradient)
       !! Self Attention: sum output gradients
       !! Cross Attention: use them separately
       class(multihead_attention_layer), intent(in out) :: self
-      !! Dense layer instance
       real, intent(in) :: input(:, :, :)
-      !! Input from the previous layer
       real, intent(in) :: gradient(:, :, :)
-      !! Gradient from the next layer
     end subroutine backward

     module subroutine forward(self, query, key, value)
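
For reference, the backward interface in src/nf/nf_multihead_attention.f90 reads as follows after this commit. This is reconstructed directly from the hunk above; the enclosing module and interface blocks are omitted:

    module subroutine backward(self, input, gradient)
      !! Self Attention: sum output gradients
      !! Cross Attention: use them separately
      class(multihead_attention_layer), intent(in out) :: self
      real, intent(in) :: input(:, :, :)
      real, intent(in) :: gradient(:, :, :)
    end subroutine backward

The removed lines were leftover docstring comments copied from a dense layer ("Dense layer instance", "Input from the previous layer", "Gradient from the next layer") that did not apply to this attention layer's arguments.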
