A Beginner-Friendly ResNet50 Residual Network: Architecture Diagram + Code (Complete Walkthrough)

Most tutorials online are convoluted and hard to follow. Neural networks are hard enough on their own, and those write-ups only make things harder, which is brutal for beginners. So I drew this architecture diagram and wrote this code with newcomers in mind; experienced readers can skip ahead.

ResNet50 Architecture Diagram
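(The diagram image is in the original post; as a plain-text outline, the network that the code below builds is:)

- conv1: 7x7 conv, 64 channels, stride 2
- max pool: 3x3, stride 2
- stage 1: 3 bottleneck blocks, 64 -> 64 -> 256 channels
- stage 2: 4 bottleneck blocks, 128 -> 128 -> 512 channels, first block stride 2
- stage 3: 6 bottleneck blocks, 256 -> 256 -> 1024 channels, first block stride 2
- stage 4: 3 bottleneck blocks, 512 -> 512 -> 2048 channels, first block stride 2
- adaptive average pool + fully connected layer

The first block of each stage uses the "dashed" (projection) shortcut to change channel count and resolution; the remaining blocks use the "solid" (identity) shortcut.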

import torch
from torch import nn

class box(nn.Module):
    """One bottleneck block of ResNet50: 1x1 -> 3x3 -> 1x1 conv plus a shortcut."""
    def __init__(self, in_channels, index=999, stride=1, downsample=False):
        super(box, self).__init__()

        last_stride = 2  # stride of the 1x1 conv on the dashed (projection) shortcut

        if downsample:  # dashed shortcut: the first block of a stage
            f_out_channels = in_channels * 2
            out_channels = int(in_channels / 2)
            if index == 0:  # the very first block, right after the max pool
                in_channels = int(in_channels / 2)  # passed in as 128 only so the later stages share one formula; the real input is 64
                out_channels = in_channels
                f_out_channels = in_channels * 4
                last_stride = 1  # stage 1 keeps the spatial resolution, so no striding

                stride = 1

        else:  # solid shortcut: the identity is added unchanged
            f_out_channels = in_channels * 1
            out_channels = int(in_channels / 4)


        self.downsample = downsample
        self.relu = nn.ReLU(inplace=True)

        self.conv1 = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1, padding=0, bias=False)
        self.bn1 = nn.BatchNorm2d(out_channels)

        self.conv2 = nn.Conv2d(in_channels=out_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(out_channels)

        self.conv3 = nn.Conv2d(in_channels=out_channels, out_channels=f_out_channels, kernel_size=1, stride=1, padding=0, bias=False)
        self.bn3 = nn.BatchNorm2d(f_out_channels)

        # projection branch of the dashed shortcut: a 1x1 conv that matches
        # the channel count and spatial stride of the main path
        self.fe = nn.Sequential(
            nn.Conv2d(in_channels=in_channels, out_channels=f_out_channels, kernel_size=1, stride=last_stride, padding=0, bias=False),
            nn.BatchNorm2d(f_out_channels),
        )



    def forward(self, x):
        identity = x
        if self.downsample:
            identity = self.fe(x)  # project the shortcut so its shape matches the main path
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)

        x = self.conv2(x)
        x = self.bn2(x)
        x = self.relu(x)

        x = self.conv3(x)
        x = self.bn3(x)

        out = x + identity  # the residual addition
        out = self.relu(out)
        return out


class New50(nn.Module):
    def __init__(self, in_out, num_classes=4):
        super(New50, self).__init__()

        # stem: 7x7 stride-2 conv + 3x3 stride-2 max pool, as in the paper
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)

        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)

        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))  # output size = (1, 1)
        self.fc = nn.Linear(512 * 4, num_classes)

        layers = []
        for index, z in enumerate(in_out):
            in_ch = z[0]  # box halves this internally; see the downsample branch above
            layers.append(box(in_channels=in_ch, stride=2, downsample=z[2], index=index))  # first block of the stage (dashed shortcut)

            for i in range(1, z[1]):
                layers.append(box(in_channels=z[3]))  # remaining blocks of the stage (solid shortcut)
        self.fes = nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.fes(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.fc(x)
        return x
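

# Shape trace for a 3x224x224 input (batch dimension omitted):
#   conv1 + maxpool : 64 x 56 x 56
#   stage 1         : 256 x 56 x 56   (no striding, hence last_stride = 1 above)
#   stage 2         : 512 x 28 x 28
#   stage 3         : 1024 x 14 x 14
#   stage 4         : 2048 x 7 x 7
#   avgpool + fc    : num_classes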




# One tuple per stage:
#   (input channels of the stage's first block,
#    number of blocks in the stage,
#    whether the first block uses the dashed shortcut,
#    input channels of the remaining blocks)
in_out = [(128, 3, True, 256), (256, 4, True, 512), (512, 6, True, 1024), (1024, 3, True, 2048)]
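# For example, the first tuple (128, 3, True, 256) expands to:
#   box(in_channels=128, stride=2, downsample=True, index=0)  # dashed shortcut; fixed up to 64 real input channels inside box
#   box(in_channels=256)                                      # solid shortcut
#   box(in_channels=256)                                      # solid shortcut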

def resnet50():
    return New50(in_out=in_out)
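
As a quick sanity check (my addition, not part of the original post), you can push a dummy batch through the network and confirm the output shape; with the default num_classes=4, a batch of two 224x224 RGB images should come out as (2, 4):

model = resnet50()
dummy = torch.randn(2, 3, 224, 224)  # batch of 2 fake RGB images
out = model(dummy)
print(out.shape)                     # expected: torch.Size([2, 4])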

I'll also be uploading architecture diagrams and code for ResNet34, FCN, UNet, and more later on.
