# Many 2-D matrices serve as training samples; the data are continuous and
# spatially coherent, so a convolutional network (DCGAN) is used to generate
# 2-D matrices. Because the values are continuous, nn.MSELoss() is used as
# the adversarial criterion (LSGAN-style objective).
import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
from DemDataset import create_netCDF_Dem_trainLoader
import torchvision
from torch.utils.tensorboard import SummaryWriter

batch_size = 16

# Load data (project-local loader; presumably yields batches of 2-D DEM
# matrices shaped (B, 1, 128, 128) to match the networks below — TODO confirm).
dataloader = create_netCDF_Dem_trainLoader(batch_size)
class Generator(nn.Module):
    """DCGAN generator for continuous 2-D fields.

    Maps a latent tensor of shape (N, 100, 1, 1) through seven stride-2
    transposed convolutions (1 -> 2 -> 4 -> 8 -> 16 -> 32 -> 64 -> 128)
    to a single-channel map of shape (N, 1, 128, 128) in [-1, 1] (Tanh).
    """

    def __init__(self):
        super(Generator, self).__init__()
        self.model = nn.Sequential(
            nn.ConvTranspose2d(100, 512, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.ConvTranspose2d(512, 512, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(),
            nn.ConvTranspose2d(512, 256, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.ConvTranspose2d(256, 128, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.ConvTranspose2d(128, 64, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(),
            nn.ConvTranspose2d(64, 32, kernel_size=4, stride=2, padding=1),
            nn.BatchNorm2d(32),
            nn.ReLU(),
            # Final layer: single output channel, Tanh squashes to [-1, 1].
            nn.ConvTranspose2d(32, 1, kernel_size=4, stride=2, padding=1),
            nn.Tanh(),
        )

    def forward(self, z):
        """Generate a batch of 2-D maps from latent codes z (N, 100, 1, 1)."""
        img = self.model(z)
        return img
class Discriminator(nn.Module):
    """DCGAN discriminator (critic) for 2-D fields.

    Takes a single-channel map of shape (N, 1, 128, 128) and downsamples
    through seven stride-2 convolutions (128 -> 64 -> ... -> 1) to a raw
    validity score of shape (N, 1, 1, 1). No final sigmoid: the score is
    regressed against 0/1 targets with MSELoss (LSGAN-style).
    """

    def __init__(self):
        super(Discriminator, self).__init__()
        self.model = nn.Sequential(
            nn.Conv2d(1, 32, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(32, 64, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(64, 128, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(128, 256, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(256, 512, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            nn.Conv2d(512, 512, kernel_size=4, stride=2, padding=1),
            nn.LeakyReLU(0.2),
            # Collapse to a single scalar score per sample.
            nn.Conv2d(512, 1, kernel_size=4, stride=2, padding=1),
        )

    def forward(self, img):
        """Score a batch of maps; returns raw validity of shape (N, 1, 1, 1)."""
        validity = self.model(img)
        return validity
# Initialize GAN components.
generator = Generator()
discriminator = Discriminator()

# Loss and optimizers. MSELoss on the raw discriminator score gives the
# least-squares GAN objective, suited to the continuous-valued data here.
criterion = nn.MSELoss()
optimizer_G = optim.Adam(generator.parameters(), lr=0.0002, betas=(0.5, 0.999))
optimizer_D = optim.Adam(discriminator.parameters(), lr=0.0002, betas=(0.5, 0.999))

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
generator.to(device)
discriminator.to(device)

# TensorBoard writers: one log stream for real samples, one for fakes.
writer_real = SummaryWriter("logs/real")
writer_fake = SummaryWriter("logs/fake")
step = 0  # global step shared by both writers

# Training loop length.
num_epochs = 200
# Training loop.
for epoch in range(num_epochs):
    for batch_idx, real_data in enumerate(dataloader):
        real_data = real_data.to(device)

        # ---- Train Discriminator ----
        optimizer_D.zero_grad()
        z = torch.randn(real_data.size(0), 100, 1, 1).to(device)
        fake_data = generator(z)
        real_pred = discriminator(real_data)
        # Detach so the generator receives no gradient from the D step.
        fake_pred = discriminator(fake_data.detach())
        # BUG FIX: original labels had shape (B, 1) while the discriminator
        # outputs (B, 1, 1, 1); MSELoss would silently broadcast them to a
        # wrong shape. ones_like/zeros_like match the prediction exactly.
        d_loss_real = criterion(real_pred, torch.ones_like(real_pred))
        d_loss_fake = criterion(fake_pred, torch.zeros_like(fake_pred))
        d_loss = d_loss_real + d_loss_fake
        d_loss.backward()
        optimizer_D.step()

        # ---- Train Generator ----
        optimizer_G.zero_grad()
        z = torch.randn(real_data.size(0), 100, 1, 1).to(device)
        fake_data = generator(z)
        fake_pred = discriminator(fake_data)
        # Generator tries to make the discriminator output "real" (ones).
        g_loss = criterion(fake_pred, torch.ones_like(fake_pred))
        g_loss.backward()
        optimizer_G.step()

        # ---- Progress / TensorBoard logging ----
        if batch_idx % 100 == 0:
            print(
                f"[Epoch {epoch}/{num_epochs}] "
                f"[Batch {batch_idx}/{len(dataloader)}] "
                f"[D loss: {d_loss.item():.4f}] [G loss: {g_loss.item():.4f}]"
            )
            with torch.no_grad():
                # BUG FIX: original built the "real" grid from fake_data and
                # the "fake" grid from real_data (swapped tensors).
                img_grid_real = torchvision.utils.make_grid(real_data)  # , normalize=True
                img_grid_fake = torchvision.utils.make_grid(fake_data)  # , normalize=True
                writer_fake.add_image("fake_img", img_grid_fake, global_step=step)
                writer_real.add_image("real_img", img_grid_real, global_step=step)
                step += 1
# After training, generate a single 2-D array by sampling the generator.
z = torch.randn(1, 100, 1, 1).to(device)
with torch.no_grad():  # inference only — no gradients needed
    generated_array = generator(z)