Knowledge points review:
- ResNet structure analysis
- Where to place the CBAM module
- Training strategies for pretrained models: differentiated learning rates and three-phase fine-tuning

Homework:
- Work through the resnet18 model structure until you understand it well
- Try the fine-tuning strategy on VGG16 + CBAM

import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import models, datasets, transforms
from torch.utils.data import DataLoader
import copy

# Define the CBAM module
class ChannelAttention(nn.Module):
    def __init__(self, in_channels, reduction_ratio=16):
        super(ChannelAttention, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.max_pool = nn.AdaptiveMaxPool2d(1)
        self.fc = nn.Sequential(
            nn.Conv2d(in_channels, in_channels // reduction_ratio, 1, bias=False),
            nn.ReLU(),
            nn.Conv2d(in_channels // reduction_ratio, in_channels, 1, bias=False)
        )
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        avg_out = self.fc(self.avg_pool(x))
        max_out = self.fc(self.max_pool(x))
        out = avg_out + max_out
        return self.sigmoid(out)


class SpatialAttention(nn.Module):
    def __init__(self, kernel_size=7):
        super(SpatialAttention, self).__init__()
        self.conv = nn.Conv2d(2, 1, kernel_size, padding=kernel_size // 2, bias=False)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        avg_out = torch.mean(x, dim=1, keepdim=True)
        max_out, _ = torch.max(x, dim=1, keepdim=True)
        out = torch.cat([avg_out, max_out], dim=1)
        out = self.conv(out)
        return self.sigmoid(out)


class CBAM(nn.Module):
    def __init__(self, in_channels, reduction_ratio=16, kernel_size=7):
        super(CBAM, self).__init__()
        self.channel_att = ChannelAttention(in_channels, reduction_ratio)
        self.spatial_att = SpatialAttention(kernel_size)

    def forward(self, x):
        x = x * self.channel_att(x)   # channel attention first
        x = x * self.spatial_att(x)   # then spatial attention
        return x
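Since attention only rescales activations, a CBAM block leaves the tensor shape unchanged. A minimal sanity check of the module above (the tensor sizes are arbitrary):

# Quick check: CBAM preserves the input shape
x = torch.randn(2, 64, 32, 32)   # arbitrary (batch, channels, height, width)
cbam = CBAM(64)
print(cbam(x).shape)             # torch.Size([2, 64, 32, 32])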
# Modify VGG16: insert CBAM modules
class VGG16_CBAM(nn.Module):
    def __init__(self, num_classes=1000, pretrained=True):
        super(VGG16_CBAM, self).__init__()
        # Load the pretrained VGG16
        vgg16 = models.vgg16(pretrained=pretrained)
        self.features = vgg16.features

        # Insert a CBAM module after each MaxPool2d layer
        new_features = []
        cbam_idx = 0
        in_channels = 3
        for module in self.features:
            new_features.append(module)
            if isinstance(module, nn.Conv2d):
                # Track the channel count from the most recent conv layer;
                # MaxPool2d has no parameters, so it cannot be read there
                in_channels = module.out_channels
            if isinstance(module, nn.MaxPool2d):
                # Do not add CBAM after the first MaxPool
                if cbam_idx > 0:
                    new_features.append(CBAM(in_channels))
                cbam_idx += 1
        self.features = nn.Sequential(*new_features)

        self.avgpool = vgg16.avgpool
        self.classifier = vgg16.classifier
        # Replace the last layer to match the target number of classes
        if num_classes != 1000:
            self.classifier[-1] = nn.Linear(self.classifier[-1].in_features, num_classes)

    def forward(self, x):
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x
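To verify where the CBAM blocks ended up, a short inspection sketch (it assumes the class above; pretrained=False here only avoids downloading weights):

# Print the positions of the inserted CBAM blocks in the feature stack
m = VGG16_CBAM(num_classes=2, pretrained=False)
for i, layer in enumerate(m.features):
    if isinstance(layer, CBAM):
        print(f"CBAM at features[{i}]")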
# Three-phase fine-tuning strategy
def train_model_three_phase(model, dataloaders, criterion, device, num_epochs=25):
    # Phase 1: freeze all layers, train only the classifier
    print("Phase 1: train only the classifier")
    for param in model.parameters():
        param.requires_grad = False
    # Unfreeze the classifier parameters
    for param in model.classifier.parameters():
        param.requires_grad = True

    optimizer = optim.SGD(model.classifier.parameters(), lr=0.001, momentum=0.9)
    model = train_one_phase(model, dataloaders, criterion, optimizer, device, num_epochs=5)

    # Phase 2: unfreeze some layers + classifier, use differentiated learning rates
    print("\nPhase 2: unfreeze part of the network and use differentiated learning rates")
    # Unfreeze the last two feature blocks and the CBAM modules
    for i in range(24, len(model.features)):
        for param in model.features[i].parameters():
            param.requires_grad = True

    # Set different learning rates for different parts of the network
    params_to_update = []
    # Feature extractor: low learning rate
    params_to_update.append({
        'params': [param for param in model.features.parameters() if param.requires_grad],
        'lr': 0.0001
    })
    # Classifier: higher learning rate
    params_to_update.append({
        'params': model.classifier.parameters(),
        'lr': 0.001
    })

    optimizer = optim.SGD(params_to_update, momentum=0.9)
    model = train_one_phase(model, dataloaders, criterion, optimizer, device, num_epochs=10)

    # Phase 3: unfreeze all layers, fine-tune the whole network with a low learning rate
    print("\nPhase 3: fine-tune the whole network")
    for param in model.parameters():
        param.requires_grad = True

    optimizer = optim.SGD(model.parameters(), lr=0.00001, momentum=0.9)
    model = train_one_phase(model, dataloaders, criterion, optimizer, device, num_epochs=10)

    return model
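A small helper, not part of the original script, makes the effect of each phase visible by counting trainable parameters; called after the freeze/unfreeze step of each phase, the count should grow from phase to phase:

# Hypothetical helper: count parameters that will receive gradients
def count_trainable(model):
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

# e.g. print(f"trainable params: {count_trainable(model):,}")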
# Helper function: run one phase of training
def train_one_phase(model, dataloaders, criterion, optimizer, device, num_epochs=5):
    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0
    model.to(device)

    for epoch in range(num_epochs):
        print(f"Epoch {epoch}/{num_epochs - 1}")
        print('-' * 10)

        for phase in ['train', 'val']:
            if phase == 'train':
                model.train()
            else:
                model.eval()

            running_loss = 0.0
            running_corrects = 0

            for inputs, labels in dataloaders[phase]:
                inputs = inputs.to(device)
                labels = labels.to(device)

                optimizer.zero_grad()

                # Only track gradients during the training phase
                with torch.set_grad_enabled(phase == 'train'):
                    outputs = model(inputs)
                    _, preds = torch.max(outputs, 1)
                    loss = criterion(outputs, labels)

                    if phase == 'train':
                        loss.backward()
                        optimizer.step()

                running_loss += loss.item() * inputs.size(0)
                running_corrects += torch.sum(preds == labels.data)

            epoch_loss = running_loss / len(dataloaders[phase].dataset)
            epoch_acc = running_corrects.double() / len(dataloaders[phase].dataset)

            print(f"{phase} Loss: {epoch_loss:.4f} Acc: {epoch_acc:.4f}")

            # Keep the weights with the best validation accuracy
            if phase == 'val' and epoch_acc > best_acc:
                best_acc = epoch_acc
                best_model_wts = copy.deepcopy(model.state_dict())

        print()

    print(f"Best val Acc: {best_acc:.4f}")
    model.load_state_dict(best_model_wts)
    return model
# Data loading and preprocessing
def load_data(data_dir):
    data_transforms = {
        'train': transforms.Compose([
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ]),
        'val': transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ]),
    }

    image_datasets = {x: datasets.ImageFolder(data_dir + x, data_transforms[x])
                      for x in ['train', 'val']}
    dataloaders = {x: DataLoader(image_datasets[x], batch_size=32, shuffle=True, num_workers=4)
                   for x in ['train', 'val']}
    dataset_sizes = {x: len(image_datasets[x]) for x in ['train', 'val']}
    class_names = image_datasets['train'].classes
    return dataloaders
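load_data expects the standard ImageFolder layout, with one subdirectory per class under each split (the class names below are hypothetical):

data/
    train/
        class_a/
        class_b/
    val/
        class_a/
        class_b/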
# Main function
def main():
    # Assumed data directory layout: data/train/ and data/val/
    data_dir = 'data/'
    dataloaders = load_data(data_dir)

    # Create the model
    model = VGG16_CBAM(num_classes=2, pretrained=True)

    # Define the loss function
    criterion = nn.CrossEntropyLoss()

    # Device configuration
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')

    # Run the three-phase fine-tuning
    model_ft = train_model_three_phase(model, dataloaders, criterion, device)

    # Save the fine-tuned model
    torch.save(model_ft.state_dict(), 'vgg16_cbam_finetuned.pth')

if __name__ == '__main__':
    main()

浙大疏锦行
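For the homework on resnet18, a minimal sketch to inspect its structure: a stem (conv1, bn1, relu, maxpool), four stages layer1..layer4 of BasicBlocks with residual shortcuts, then global average pooling and a fully connected head:

# Walk the top-level children of resnet18 to see its stage layout
from torchvision import models
resnet18 = models.resnet18()
for name, module in resnet18.named_children():
    print(name, type(module).__name__)
# print(resnet18) shows the full module tree, including the residual BasicBlocks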