submodule(nf_avgpool2d_layer) nf_avgpool2d_layer_submodule
  implicit none

contains

  pure module function avgpool2d_layer_cons(pool_size, stride) result(res)
    implicit none
    integer, intent(in) :: pool_size
    integer, intent(in) :: stride
    type(avgpool2d_layer) :: res

    res % pool_size = pool_size
    res % stride    = stride
  end function avgpool2d_layer_cons


  module subroutine init(self, input_shape)
    implicit none
    class(avgpool2d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
    ! input_shape is expected to be (channels, width, height)

    self % channels = input_shape(1)
    self % width    = input_shape(2) / self % stride
    self % height   = input_shape(3) / self % stride
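    ! For example, input_shape = [3, 32, 32] with stride = 2 yields a
    ! 3 x 16 x 16 output.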

    ! Allocate the gradient array corresponding to the input dimensions.
    allocate(self % gradient(input_shape(1), input_shape(2), input_shape(3)))
    self % gradient = 0

    ! Allocate the output array (after pooling).
    allocate(self % output(self % channels, self % width, self % height))
    self % output = 0
  end subroutine init


  pure module subroutine forward(self, input)
    implicit none
    class(avgpool2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    integer :: input_width, input_height
    integer :: i, j, n
    integer :: ii, jj, iend, jend
    integer :: iextent, jextent

    input_width  = size(input, dim=2)
    input_height = size(input, dim=3)

    ! Ensure we only process complete pooling regions.
    iextent = input_width - mod(input_width, self % stride)
    jextent = input_height - mod(input_height, self % stride)
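    ! For example, input_width = 7 with stride = 2 gives iextent = 6, so the
    ! incomplete trailing column is skipped.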

    ! Loop over the input with a step size equal to the stride and over all channels.
    do concurrent (i = 1:iextent:self % stride, j = 1:jextent:self % stride, n = 1:self % channels)
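      ! (ii, jj) is the output position of the pooling window whose top-left
      ! input corner is (i, j); e.g. with stride = 2, input columns 1, 3, 5
      ! map to output columns 1, 2, 3.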
      ii = (i - 1) / self % stride + 1
      jj = (j - 1) / self % stride + 1

      iend = min(i + self % pool_size - 1, input_width)
      jend = min(j + self % pool_size - 1, input_height)

      ! Compute the average over the pooling region.
      self % output(n, ii, jj) = sum(input(n, i:iend, j:jend)) / ((iend - i + 1) * (jend - j + 1))
    end do
  end subroutine forward


  pure module subroutine backward(self, input, gradient)
    implicit none
    class(avgpool2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:,:)
    real, intent(in) :: gradient(:,:,:)
    integer :: channels, pooled_width, pooled_height
    integer :: i, j, n, x, y, istart, iend, jstart, jend
    real :: scale_factor

    channels      = size(gradient, dim=1)
    pooled_width  = size(gradient, dim=2)
    pooled_height = size(gradient, dim=3)

    ! The gradient for average pooling is distributed evenly over the pooling window.
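    ! For example, with a 2 x 2 window each of the four input cells under the
    ! window receives gradient(n, i, j) / 4.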
    do concurrent (n = 1:channels, i = 1:pooled_width, j = 1:pooled_height)
      istart = (i - 1) * self % stride + 1
      iend   = min(istart + self % pool_size - 1, size(input, dim=2))
      jstart = (j - 1) * self % stride + 1
      jend   = min(jstart + self % pool_size - 1, size(input, dim=3))
      scale_factor = 1.0 / ((iend - istart + 1) * (jend - jstart + 1))

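      ! Note: the plain assignment below assumes non-overlapping windows
      ! (stride >= pool_size); overlapping windows would write the same
      ! gradient elements from several (i, j) iterations and would need to
      ! accumulate contributions instead.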
      do concurrent (x = istart:iend, y = jstart:jend)
        self % gradient(n, x, y) = gradient(n, i, j) * scale_factor
      end do
    end do
  end subroutine backward

end submodule nf_avgpool2d_layer_submodule
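
! A minimal usage sketch (comments only). It assumes the parent module
! nf_avgpool2d_layer makes the constructor public and binds init, forward,
! and backward to the type, which this submodule alone does not show:
!
!   use nf_avgpool2d_layer, only: avgpool2d_layer, avgpool2d_layer_cons
!   type(avgpool2d_layer) :: pool
!   pool = avgpool2d_layer_cons(pool_size=2, stride=2)
!   call pool % init([3, 32, 32])            ! channels, width, height
!   call pool % forward(input)               ! input:    real, shape (3, 32, 32)
!   call pool % backward(input, gradient)    ! gradient: real, shape (3, 16, 16)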