
Implementing the YOLOv3 Backbone Network in Code (Runnable As-Is)


Contents

 1. Backbone network, version 1

 2. Backbone network, version 2

 3. Putting all the stages in one array and slicing it for the outputs

 4. Extracting the stage configuration into a YAML file


 1. Backbone network, version 1

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Conv -> BatchNorm -> LeakyReLU, the basic Darknet conv block."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: a 1x1 conv halves the channels, a 3x3 conv restores them."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the feature-map resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # padding=1 so the stem conv keeps the 416x416 input resolution
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        self.layer1 = nn.Sequential(  # 416 -> 208
            Downsample(32, 64),
            Resnet(64),
        )
        self.layer2 = nn.Sequential(  # 208 -> 104
            Downsample(64, 128),
            Resnet(128),
            Resnet(128),
        )
        self.layer3 = nn.Sequential(  # 104 -> 52
            Downsample(128, 256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
        )
        self.layer4 = nn.Sequential(  # 52 -> 26
            Downsample(256, 512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
        )
        self.layer5 = nn.Sequential(  # 26 -> 13
            Downsample(512, 1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
        )

    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)     # 52x52 feature map
        x_26 = self.layer4(x_52)  # 26x26 feature map
        x_13 = self.layer5(x_26)  # 13x13 feature map
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
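Running the script should print the three feature-map shapes; with a 416×416 input they land on the standard YOLOv3 grids (52×52, 26×26 and 13×13):

torch.Size([1, 256, 52, 52])
torch.Size([1, 512, 26, 26])
torch.Size([1, 1024, 13, 13])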

 

 2. Backbone network, version 2

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Conv -> BatchNorm -> LeakyReLU, the basic Darknet conv block."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: a 1x1 conv halves the channels, a 3x3 conv restores them."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the feature-map resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # each stage: one Downsample followed by block_num residual blocks
        self.layer1 = self.make_layer(32, 64, 1)
        self.layer2 = self.make_layer(64, 128, 2)
        self.layer3 = self.make_layer(128, 256, 8)
        self.layer4 = self.make_layer(256, 512, 8)
        self.layer5 = self.make_layer(512, 1024, 4)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)
        x_26 = self.layer4(x_52)
        x_13 = self.layer5(x_26)
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
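Version 2 should build exactly the same network as version 1, only with less repetition. A quick sanity check is to compare parameter counts; a minimal sketch you could add to the __main__ block of each script:

net = Net()
# both versions should report the same total if they build the same network
print('parameters:', sum(p.numel() for p in net.parameters()))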

 

 

 3. Putting all the stages in one array and slicing it for the outputs

 

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Conv -> BatchNorm -> LeakyReLU, the basic Darknet conv block."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: a 1x1 conv halves the channels, a 3x3 conv restores them."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the feature-map resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


# [in_ch, out_ch, number of residual blocks] for the five stages
cfg = [[32, 64, 1],
       [64, 128, 2],
       [128, 256, 8],
       [256, 512, 8],
       [512, 1024, 4]]


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # build the five stages from cfg instead of spelling out
        # layer1 ... layer5 by hand as in version 2
        layers = []
        for in_ch, out_ch, block_num in cfg:
            layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*layers)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        # slicing an nn.Sequential yields another nn.Sequential,
        # so each slice can be called directly
        x = self.input_layer(x)
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4    -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5    -> 13x13
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
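The slicing trick works because nn.Sequential supports slice indexing and returns another nn.Sequential, which is itself callable. If you prefer not to rely on that, the same forward pass can be written with an nn.ModuleList and explicit indices; a minimal sketch that reuses cfg and the blocks defined above (NetModuleList is a hypothetical name, not part of the original code):

class NetModuleList(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # one entry per stage, same construction as make_layer above
        self.stages = nn.ModuleList(
            self.make_layer(i, o, n) for i, o, n in cfg
        )

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        layers += [Resnet(out_ch) for _ in range(block_num)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        outs = []
        for i, stage in enumerate(self.stages):
            x = stage(x)
            if i >= 2:  # stages 3, 4 and 5 produce the 52/26/13 feature maps
                outs.append(x)
        return tuple(outs)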

 4. Extracting the stage configuration into a YAML file

import torch
import yaml
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Conv -> BatchNorm -> LeakyReLU, the basic Darknet conv block."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: a 1x1 conv halves the channels, a 3x3 conv restores them."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the feature-map resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


# the hard-coded cfg list from section 3 now lives in tt.yaml
with open('tt.yaml', 'r') as file:
    cfg = yaml.safe_load(file)['conf']
print(cfg)


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # build the five stages from the loaded configuration
        layers = []
        for in_ch, out_ch, block_num in cfg:
            layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*layers)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4    -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5    -> 13x13
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)

The stage configuration file, tt.yaml:

#conf:
#  [ [ 32, 64, 1 ],
#    [ 64, 128, 2 ],
#    [ 128, 256, 8 ],
#    [ 256, 512, 8 ],
#    [ 512, 1024, 4 ] ]
conf:
  - [ 32, 64, 1 ]
  - [ 64, 128, 2 ]
  - [ 128, 256, 8 ]
  - [ 256, 512, 8 ]
  - [ 512, 1024, 4 ]
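Once the architecture lives in a config file, a typo can silently break the channel chain, so a quick validation step after loading is cheap insurance. A minimal sketch (check_cfg is a hypothetical helper, assuming the [in_ch, out_ch, block_num] row layout used above):

def check_cfg(cfg, stem_out=32):
    # each stage must consume exactly the channels the previous one produced
    prev = stem_out  # the stem conv outputs 32 channels
    for i, (in_ch, out_ch, block_num) in enumerate(cfg):
        assert in_ch == prev, f'stage {i}: in_ch {in_ch} != previous out_ch {prev}'
        assert block_num > 0, f'stage {i}: block_num must be positive'
        prev = out_ch
    return True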

From: https://blog.csdn.net/m0_53291740/article/details/140865611
