From 1c3863733e219f14d4e4cf7828c9e019fe42e345 Mon Sep 17 00:00:00 2001
From: Shuangchi He <34329208+Yulv-git@users.noreply.github.com>
Date: Wed, 11 Jan 2023 22:13:01 +0800
Subject: [PATCH] Update modules.py (#222)

Co-authored-by: Ayush Chaurasia
Co-authored-by: Glenn Jocher
---
 ultralytics/nn/modules.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/ultralytics/nn/modules.py b/ultralytics/nn/modules.py
index 51ca310d40..5782969c66 100644
--- a/ultralytics/nn/modules.py
+++ b/ultralytics/nn/modules.py
@@ -134,7 +134,7 @@ class TransformerBlock(nn.Module):
 
 class Bottleneck(nn.Module):
     # Standard bottleneck
-    def __init__(self, c1, c2, shortcut=True, g=1, k=(3, 3), e=0.5):  # ch_in, ch_out, shortcut, kernels, groups, expand
+    def __init__(self, c1, c2, shortcut=True, g=1, k=(3, 3), e=0.5):  # ch_in, ch_out, shortcut, groups, kernels, expand
         super().__init__()
         c_ = int(c2 * e)  # hidden channels
         self.cv1 = Conv(c1, c_, k[0], 1)
@@ -234,8 +234,8 @@ class SpatialAttention(nn.Module):
 
 
 class CBAM(nn.Module):
-    # CSP Bottleneck with 3 convolutions
-    def __init__(self, c1, ratio=16, kernel_size=7):  # ch_in, ch_out, number, shortcut, groups, expansion
+    # Convolutional Block Attention Module
+    def __init__(self, c1, kernel_size=7):  # ch_in, kernels
         super().__init__()
         self.channel_attention = ChannelAttention(c1)
         self.spatial_attention = SpatialAttention(kernel_size)
@@ -245,8 +245,8 @@ class CBAM(nn.Module):
 
 
 class C1(nn.Module):
-    # CSP Bottleneck with 3 convolutions
-    def __init__(self, c1, c2, n=1):  # ch_in, ch_out, number, shortcut, groups, expansion
+    # CSP Bottleneck with 1 convolution
+    def __init__(self, c1, c2, n=1):  # ch_in, ch_out, number
         super().__init__()
         self.cv1 = Conv(c1, c2, 1, 1)
         self.m = nn.Sequential(*(Conv(c2, c2, 3) for _ in range(n)))
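
Note: the hunk above corrects the CBAM docstring, so a minimal standalone sketch of the module it now describes may help. The ChannelAttention and SpatialAttention bodies below are assumptions following the standard CBAM formulation (channel attention from a global-average-pooled descriptor, spatial attention from channel-wise mean and max maps); they are not part of this patch, and only the CBAM signature matches the patched file.

import torch
import torch.nn as nn


class ChannelAttention(nn.Module):
    # Assumed standard formulation: per-channel weights from a pooled descriptor.
    def __init__(self, channels):
        super().__init__()
        self.pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Conv2d(channels, channels, 1, bias=True)
        self.act = nn.Sigmoid()

    def forward(self, x):
        return x * self.act(self.fc(self.pool(x)))


class SpatialAttention(nn.Module):
    # Assumed standard formulation: a 2D attention map from mean/max channel stats.
    def __init__(self, kernel_size=7):
        super().__init__()
        self.cv1 = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2, bias=False)
        self.act = nn.Sigmoid()

    def forward(self, x):
        stats = torch.cat([x.mean(1, keepdim=True), x.max(1, keepdim=True)[0]], 1)
        return x * self.act(self.cv1(stats))


class CBAM(nn.Module):
    # Convolutional Block Attention Module, matching the patched signature.
    def __init__(self, c1, kernel_size=7):  # ch_in, kernels
        super().__init__()
        self.channel_attention = ChannelAttention(c1)
        self.spatial_attention = SpatialAttention(kernel_size)

    def forward(self, x):
        # Channel attention first, then spatial attention; shape is preserved.
        return self.spatial_attention(self.channel_attention(x))


x = torch.randn(1, 64, 32, 32)
print(CBAM(64)(x).shape)  # torch.Size([1, 64, 32, 32])

Because both attentions only rescale the input, CBAM is a drop-in refinement block: it takes c1 channels in and returns c1 channels out, which is why its __init__ needs no ch_out argument, consistent with the corrected "# ch_in, kernels" comment.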