Introduction
The name Xception comes from "Extreme Inception": it extends and improves on the Inception architecture. Inception is a classic convolutional neural network architecture proposed by the Google team to address the growth of computation and parameters in deep convolutional networks.
Unlike Inception, Xception's main innovation is to replace standard convolutions with depthwise separable convolutions, which factor a convolution into two steps: a depthwise convolution and a pointwise convolution.
The depthwise convolution applies a separate kernel to each input channel, which greatly reduces computation and parameter count. The pointwise convolution uses 1x1 kernels to linearly combine the channels, restoring cross-channel interaction and increasing the model's representational capacity. By building on depthwise separable convolutions, Xception learns feature representations more efficiently and achieves better performance at the same computational cost; a rough parameter comparison is sketched below.
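As an illustration (a minimal sketch added here, not part of the original post; the channel counts are arbitrary), the snippet below compares the parameter count of a standard 3x3 convolution with that of a depthwise 3x3 plus pointwise 1x1 pair for the same input and output channels:

import torch.nn as nn

def count_params(module):
    return sum(p.numel() for p in module.parameters())

in_ch, out_ch = 128, 256

# Standard 3x3 convolution: in_ch * out_ch * 3 * 3 weights.
standard = nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1, bias=False)

# Depthwise 3x3 (one kernel per input channel) followed by a pointwise 1x1.
separable = nn.Sequential(
    nn.Conv2d(in_ch, in_ch, kernel_size=3, padding=1, groups=in_ch, bias=False),
    nn.Conv2d(in_ch, out_ch, kernel_size=1, bias=False),
)

print(count_params(standard))   # 294912
print(count_params(separable))  # 33920 (1152 depthwise + 32768 pointwise)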
Xception Network Structure
(Figures: a standard Inception module (Inception V3); the simplified Inception module; an equivalent form of the simplified Inception module; and the "extreme" version, which applies the depthwise-separable idea so that the number of 3x3 convolutions equals the number of output channels of the 1x1 convolution. A small sketch of this extreme case follows.)

The Xception model is divided into three flows: the Entry flow, the Middle flow, and the Exit flow. The Entry and Exit flows share some common structure, while the Middle flow differs from both.
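As a minimal sketch of that extreme case (added here, not from the original post; the channel and spatial sizes are illustrative), a 1x1 convolution mixes the channels and is followed by a 3x3 convolution applied to each of its output channels independently:

import torch
import torch.nn as nn

x = torch.randn(1, 64, 56, 56)

# Cross-channel mixing with a 1x1 convolution...
pointwise = nn.Conv2d(64, 64, kernel_size=1, bias=False)
# ...then one 3x3 kernel per channel (groups == channels), i.e. as many
# 3x3 convolutions as the 1x1 convolution has output channels.
depthwise = nn.Conv2d(64, 64, kernel_size=3, padding=1, groups=64, bias=False)

y = depthwise(pointwise(x))
print(y.shape)  # torch.Size([1, 64, 56, 56])

Note that the SeparableConv2d used below applies the depthwise convolution first and the pointwise convolution second; the Xception paper argues this ordering makes little difference once the blocks are stacked.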
PyTorch Implementation of the Xception Model
1. Depthwise Separable Convolution
import torch.nn as nn

class SeparableConv2d(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=0, dilation=1, bias=False):
        super(SeparableConv2d, self).__init__()
        # Depthwise convolution: one kernel per input channel (groups=in_channels).
        self.conv = nn.Conv2d(in_channels, in_channels, kernel_size, stride, padding,
                              dilation, groups=in_channels, bias=bias)
        # Pointwise convolution: 1x1 convolution that mixes the channels.
        self.pointwise = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0,
                                   dilation=1, groups=1, bias=False)

    def forward(self, x):
        x = self.conv(x)
        x = self.pointwise(x)
        return x
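A quick check (an added sketch, assuming torch is imported and the class above is in scope) confirms that the module behaves as a drop-in replacement for a 3x3 convolution:

import torch

sep = SeparableConv2d(128, 256, kernel_size=3, padding=1)
x = torch.randn(2, 128, 32, 32)
print(sep(x).shape)                              # torch.Size([2, 256, 32, 32])
print(sum(p.numel() for p in sep.parameters()))  # 33920, vs. 294912 for nn.Conv2d(128, 256, 3)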
2. Building the Three Flows
class EntryFlow(nn.Module):
    def __init__(self):
        super(EntryFlow, self).__init__()
        # Stem: two ordinary convolutions, the first with stride 2.
        self.headconv = nn.Sequential(
            nn.Conv2d(3, 32, 3, 2, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.Conv2d(32, 64, 3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
        )
        self.residual_block1 = nn.Sequential(
            SeparableConv2d(64, 128, 3, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            SeparableConv2d(128, 128, 3, padding=1),
            nn.BatchNorm2d(128),
            nn.MaxPool2d(3, stride=2, padding=1),
        )
        self.residual_block2 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(128, 256, 3, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            SeparableConv2d(256, 256, 3, padding=1),
            nn.BatchNorm2d(256),
            nn.MaxPool2d(3, stride=2, padding=1)
        )
        self.residual_block3 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(256, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.MaxPool2d(3, stride=2, padding=1)
        )

    def shortcut(self, inp, oup):
        # 1x1 convolution with stride 2 used as the residual shortcut.
        return nn.Sequential(
            nn.Conv2d(inp, oup, 1, 2, bias=False),
            nn.BatchNorm2d(oup)
        )

    def forward(self, x):
        x = self.headconv(x)
        residual = self.residual_block1(x)
        shortcut_block1 = self.shortcut(64, 128)
        x = residual + shortcut_block1(x)
        residual = self.residual_block2(x)
        shortcut_block2 = self.shortcut(128, 256)
        x = residual + shortcut_block2(x)
        residual = self.residual_block3(x)
        shortcut_block3 = self.shortcut(256, 728)
        x = residual + shortcut_block3(x)
        return x


class MiddleFlow(nn.Module):
    def __init__(self):
        super(MiddleFlow, self).__init__()
        self.shortcut = nn.Sequential()  # identity shortcut
        self.conv1 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728)
        )

    def forward(self, x):
        residual = self.conv1(x)
        input = self.shortcut(x)
        return input + residual


class ExitFlow(nn.Module):
    def __init__(self):
        super(ExitFlow, self).__init__()
        self.residual_with_exit = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 1024, 3, padding=1),
            nn.BatchNorm2d(1024),
            nn.MaxPool2d(3, stride=2, padding=1)
        )
        self.endconv = nn.Sequential(
            SeparableConv2d(1024, 1536, 3, 1, 1),
            nn.BatchNorm2d(1536),
            nn.ReLU(inplace=True),
            SeparableConv2d(1536, 2048, 3, 1, 1),
            nn.BatchNorm2d(2048),
            nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d((1, 1)),
        )

    def shortcut(self, inp, oup):
        return nn.Sequential(
            nn.Conv2d(inp, oup, 1, 2, bias=False),
            nn.BatchNorm2d(oup)
        )

    def forward(self, x):
        residual = self.residual_with_exit(x)
        shortcut_block = self.shortcut(728, 1024)
        output = residual + shortcut_block(x)
        return self.endconv(output)
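To sanity-check the tensor shapes (an added sketch, assuming the classes above are defined; runs on CPU), a 224x224 input moves through the three flows as follows:

import torch

x = torch.randn(1, 3, 224, 224)
x = EntryFlow()(x)
print(x.shape)   # torch.Size([1, 728, 14, 14])
x = MiddleFlow()(x)
print(x.shape)   # torch.Size([1, 728, 14, 14])
x = ExitFlow()(x)
print(x.shape)   # torch.Size([1, 2048, 1, 1])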
3. Complete Network Code

"""
Copyright (c) 2023, Auorui.
All rights reserved.

Xception: Deep Learning with Depthwise Separable Convolutions
    https://arxiv.org/pdf/1610.02357.pdf
"""
import torch
import torch.nn as nn


class SeparableConv2d(nn.Module):
    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, padding=0, dilation=1, bias=False):
        super(SeparableConv2d, self).__init__()
        self.conv = nn.Conv2d(in_channels, in_channels, kernel_size, stride, padding,
                              dilation, groups=in_channels, bias=bias)
        self.pointwise = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0,
                                   dilation=1, groups=1, bias=False)

    def forward(self, x):
        x = self.conv(x)
        x = self.pointwise(x)
        return x


class EntryFlow(nn.Module):
    def __init__(self):
        super(EntryFlow, self).__init__()
        self.headconv = nn.Sequential(
            nn.Conv2d(3, 32, 3, 2, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.Conv2d(32, 64, 3, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
        )
        self.residual_block1 = nn.Sequential(
            SeparableConv2d(64, 128, 3, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            SeparableConv2d(128, 128, 3, padding=1),
            nn.BatchNorm2d(128),
            nn.MaxPool2d(3, stride=2, padding=1),
        )
        self.residual_block2 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(128, 256, 3, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),
            SeparableConv2d(256, 256, 3, padding=1),
            nn.BatchNorm2d(256),
            nn.MaxPool2d(3, stride=2, padding=1)
        )
        self.residual_block3 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(256, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.MaxPool2d(3, stride=2, padding=1)
        )

    def shortcut(self, inp, oup):
        return nn.Sequential(
            nn.Conv2d(inp, oup, 1, 2, bias=False),
            nn.BatchNorm2d(oup)
        )

    def forward(self, x):
        x = self.headconv(x)
        residual = self.residual_block1(x)
        shortcut_block1 = self.shortcut(64, 128)
        x = residual + shortcut_block1(x)
        residual = self.residual_block2(x)
        shortcut_block2 = self.shortcut(128, 256)
        x = residual + shortcut_block2(x)
        residual = self.residual_block3(x)
        shortcut_block3 = self.shortcut(256, 728)
        x = residual + shortcut_block3(x)
        return x


class MiddleFlow(nn.Module):
    def __init__(self):
        super(MiddleFlow, self).__init__()
        self.shortcut = nn.Sequential()
        self.conv1 = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728)
        )

    def forward(self, x):
        residual = self.conv1(x)
        input = self.shortcut(x)
        return input + residual


class ExitFlow(nn.Module):
    def __init__(self):
        super(ExitFlow, self).__init__()
        self.residual_with_exit = nn.Sequential(
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 728, 3, padding=1),
            nn.BatchNorm2d(728),
            nn.ReLU(inplace=True),
            SeparableConv2d(728, 1024, 3, padding=1),
            nn.BatchNorm2d(1024),
            nn.MaxPool2d(3, stride=2, padding=1)
        )
        self.endconv = nn.Sequential(
            SeparableConv2d(1024, 1536, 3, 1, 1),
            nn.BatchNorm2d(1536),
            nn.ReLU(inplace=True),
            SeparableConv2d(1536, 2048, 3, 1, 1),
            nn.BatchNorm2d(2048),
            nn.ReLU(inplace=True),
            nn.AdaptiveAvgPool2d((1, 1)),
        )

    def shortcut(self, inp, oup):
        return nn.Sequential(
            nn.Conv2d(inp, oup, 1, 2, bias=False),
            nn.BatchNorm2d(oup)
        )

    def forward(self, x):
        residual = self.residual_with_exit(x)
        shortcut_block = self.shortcut(728, 1024)
        output = residual + shortcut_block(x)
        return self.endconv(output)


class Xception(nn.Module):
    def __init__(self, num_classes=1000):
        super().__init__()
        self.num_classes = num_classes
        self.entry_flow = EntryFlow()
        self.middle_flow = MiddleFlow()
        self.exit_flow = ExitFlow()
        self.fc = nn.Linear(2048, num_classes)

    def forward(self, x):
        x = self.entry_flow(x)
        for i in range(8):
            x = self.middle_flow(x)
        x = self.exit_flow(x)
        x = x.view(x.size(0), -1)
        out = self.fc(x)
        return out


if __name__ == "__main__":
    import torchsummary
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    input = torch.ones(2, 3, 224, 224).to(device)
    net = Xception(num_classes=4)
    net = net.to(device)
    out = net(input)
    print(out)
    print(out.shape)
    torchsummary.summary(net, input_size=(3, 224, 224))
    # Xception Total params: 19,838,076
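Two details of the listing above are worth flagging (this note and the sketch are additions, not part of the original post). First, EntryFlow and ExitFlow build their shortcut branches inside forward(), so those 1x1 convolutions are re-initialized with random weights on every call, are never registered as parameters (and therefore never trained), and stay on the CPU even after net.to(device), which raises a device-mismatch error when CUDA is available. Second, the same MiddleFlow instance is applied 8 times, so the middle blocks share weights, whereas the paper uses 8 independent middle blocks. One possible fix for the first point, keeping the rest of the structure unchanged (the class name EntryFlowFixed is mine; registering the shortcuts also changes the reported parameter count), is:

class EntryFlowFixed(EntryFlow):
    def __init__(self):
        super().__init__()
        # Register the shortcut branches once, so they are trained and follow .to(device).
        self.shortcut1 = self.shortcut(64, 128)
        self.shortcut2 = self.shortcut(128, 256)
        self.shortcut3 = self.shortcut(256, 728)

    def forward(self, x):
        x = self.headconv(x)
        x = self.residual_block1(x) + self.shortcut1(x)
        x = self.residual_block2(x) + self.shortcut2(x)
        x = self.residual_block3(x) + self.shortcut3(x)
        return x

ExitFlow can be treated the same way, and the middle section can be built as nn.Sequential(*[MiddleFlow() for _ in range(8)]) if independent weights are preferred.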
References
【精读AI论文】Xception（Xception: Deep Learning with Depthwise Separable Convolutions）- CSDN博客
[ 轻量级网络 ] 经典网络模型4——Xception 详解与复现 - CSDN博客
神经网络学习小记录22——Xception模型的复现详解 - CSDN博客
【卷积神经网络系列】十七、Xception - CSDN博客