From 1d55c85cd351232cc869851de726a0a629ee2243 Mon Sep 17 00:00:00 2001 From: znb899 <83007857+znb899@users.noreply.github.com> Date: Wed, 12 Jul 2023 12:24:17 +0200 Subject: [PATCH] Use the ratio parameter instead of the hard-coded 16 in channel attention module init --- model/resnet_cbam.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/model/resnet_cbam.py b/model/resnet_cbam.py index f196e34..67f3228 100644 --- a/model/resnet_cbam.py +++ b/model/resnet_cbam.py @@ -28,9 +28,9 @@ def __init__(self, in_planes, ratio=16): self.avg_pool = nn.AdaptiveAvgPool2d(1) self.max_pool = nn.AdaptiveMaxPool2d(1) - self.fc = nn.Sequential(nn.Conv2d(in_planes, in_planes // 16, 1, bias=False), + self.fc = nn.Sequential(nn.Conv2d(in_planes, in_planes // ratio, 1, bias=False), nn.ReLU(), - nn.Conv2d(in_planes // 16, in_planes, 1, bias=False)) + nn.Conv2d(in_planes // ratio, in_planes, 1, bias=False)) self.sigmoid = nn.Sigmoid() def forward(self, x):