import torch.nn as nn


def weights_init(m):
    # Standard DCGAN initialisation: Conv weights ~ N(0, 0.02),
    # BatchNorm weights ~ N(1, 0.02) with zero bias.
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
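
# Usage sketch (assumption: models are initialised right after construction,
# as in the reference DCGAN recipe): Module.apply walks every submodule
# recursively, so a single call re-initialises all Conv/BatchNorm layers:
#
#   netD = Discriminator()
#   netD.apply(weights_init)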
class Discriminator(nn.Module):
    def __init__(self):
        super().__init__()
        self.main = nn.Sequential(
            # input: (N, 1, 1824)
            nn.Conv1d(1, 64, kernel_size=4, stride=2, padding=1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: (N, 64, 912)
            nn.Conv1d(64, 128, kernel_size=4, stride=2, padding=1, bias=False),
            nn.BatchNorm1d(128),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: (N, 128, 456)
            nn.Conv1d(128, 256, kernel_size=4, stride=2, padding=1, bias=False),
            nn.BatchNorm1d(256),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: (N, 256, 228)
            nn.Conv1d(256, 512, kernel_size=4, stride=2, padding=1, bias=False),
            nn.BatchNorm1d(512),
            nn.LeakyReLU(0.2, inplace=True),
            # state size: (N, 512, 114) -> one raw logit per sample
            nn.Conv1d(512, 1, kernel_size=114, stride=1, padding=0, bias=False),
        )

    def forward(self, x, y=None):
        # y is accepted but unused here; output shape is (N, 1, 1).
        x = self.main(x)
        return x
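
# Shape check for the Discriminator (a sketch, assuming inputs of length
# 1824): each stride-2 conv maps L -> floor((L + 2*1 - 4) / 2) + 1 = L // 2,
# so 1824 -> 912 -> 456 -> 228 -> 114, and the final kernel_size=114 conv
# collapses the 114-long feature map to a single value per sample, e.g.
#
#   netD = Discriminator()
#   netD(torch.randn(4, 1, 1824)).shape  # torch.Size([4, 1, 1])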
class Generator(nn.Module):
    def __init__(self, nz):
        super().__init__()
        self.main = nn.Sequential(
            # input: latent vector of shape (N, nz, 1)
            nn.ConvTranspose1d(nz, 512, 114, 1, 0, bias=False),
            nn.BatchNorm1d(512),
            nn.ReLU(True),
            # state size: (N, 512, 114)
            nn.ConvTranspose1d(512, 256, 4, 2, 1, bias=False),
            nn.BatchNorm1d(256),
            nn.ReLU(True),
            # state size: (N, 256, 228)
            nn.ConvTranspose1d(256, 128, 4, 2, 1, bias=False),
            nn.BatchNorm1d(128),
            nn.ReLU(True),
            # state size: (N, 128, 456)
            nn.ConvTranspose1d(128, 64, 4, 2, 1, bias=False),
            nn.BatchNorm1d(64),
            nn.ReLU(True),
            # state size: (N, 64, 912)
            nn.ConvTranspose1d(64, 1, 4, 2, 1, bias=False),
            nn.Tanh()
            # output: (N, 1, 1824), squashed to [-1, 1]
        )

    def forward(self, x):
        x = self.main(x)
        return x
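
# Smoke test (a minimal sketch; the latent size nz=100 and batch size 4 are
# assumptions -- nz is a constructor argument here, and BatchNorm1d needs a
# batch of at least 2 in training mode). The Discriminator emits raw logits
# (no Sigmoid), so a loss such as BCEWithLogitsLoss would be the natural fit.
if __name__ == "__main__":
    import torch

    nz = 100
    netG = Generator(nz)
    netD = Discriminator()
    netG.apply(weights_init)
    netD.apply(weights_init)

    z = torch.randn(4, nz, 1)   # latent codes: (N, nz, 1)
    fake = netG(z)              # -> (4, 1, 1824), values in [-1, 1]
    logits = netD(fake)         # -> (4, 1, 1)
    print(fake.shape, logits.shape)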