PyTorch weight initialization problem for a DCGAN



I am trying to create a generator for a DCGAN and initialize custom weights. In the PyTorch tutorial, the code looks like this:

# Generator Code
import torch.nn as nn

# nz, ngf, nc are hyperparameters defined earlier in the tutorial
class Generator(nn.Module):
    def __init__(self, ngpu):
        super(Generator, self).__init__()
        self.ngpu = ngpu
        self.main = nn.Sequential(
            # input is Z, going into a convolution
            nn.ConvTranspose2d(nz, ngf * 8, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 8),
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )

    def forward(self, input):
        return self.main(input)

However, I am trying to build the sequential operations out of reusable blocks, and my generator looks like this:

class ConvTr(nn.Module):
    def __init__(self, input_channels, output_channels, k_size, stride, pad, b=False):
        super().__init__()

        self.conv = nn.ConvTranspose2d(in_channels=input_channels,
                                       out_channels=output_channels,
                                       kernel_size=k_size,
                                       stride=stride,
                                       padding=pad,
                                       bias=b)

        # BatchNorm must match the number of channels produced by the conv above
        self.batch_norm = nn.BatchNorm2d(output_channels)
        self.activation = nn.ReLU()

    def forward(self, x):
        x = self.conv(x)
        x = self.activation(x)
        x = self.batch_norm(x)
        return x

class Generator(nn.Module):

    def __init__(self, inputNoise=100):
        super().__init__()
        self.conv1 = ConvTr(input_channels=inputNoise, output_channels=128, k_size=4, stride=2, pad=0)
        self.conv2 = ConvTr(input_channels=128, output_channels=64, k_size=3, stride=2, pad=1)
        self.conv3 = ConvTr(input_channels=64, output_channels=32, k_size=4, stride=2, pad=1)
        self.conv4 = nn.ConvTranspose2d(in_channels=32, out_channels=1, kernel_size=4, stride=2, padding=1)
        self.tanh = nn.Tanh()

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = self.conv4(x)
        output = self.tanh(x)
        return output

The weight initialization function is as follows:

def weights_init(model):
    # get the class name
    classname = model.__class__.__name__
    # check if the class name contains the word "Conv"
    if classname.find("Conv") != -1:
        # initialize the weights from a normal distribution
        nn.init.normal_(model.weight.data, 0.0, 0.02)
    # otherwise, check if the name contains the word "BatchNorm"
    elif classname.find("BatchNorm") != -1:
        # initialize the weights from a normal distribution and set the
        # bias to 0
        nn.init.normal_(model.weight.data, 1.0, 0.02)
        nn.init.constant_(model.bias.data, 0)

Now, when I try to initialize the weights, it gives me the following error:

raise AttributeError("'{}' object has no attribute '{}'".format(
AttributeError: 'ConvTr' object has no attribute 'weight'
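
The error comes from the name-based check: model.apply() calls weights_init on every submodule, including the custom ConvTr wrapper. Its class name contains "Conv", so the first branch fires, but the wrapper itself has no weight parameter (the actual weights live on the nn.ConvTranspose2d it wraps). A minimal sketch that makes this visible, assuming the block-based Generator defined above:

# Sketch: .apply()/.modules() visit every submodule, so the name check also
# matches the ConvTr wrapper, which has no .weight attribute of its own.
g = Generator()
for module in g.modules():
    name = module.__class__.__name__
    print(name, "| matches 'Conv':", name.find("Conv") != -1,
          "| has weight:", hasattr(module, "weight"))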

Three issues:

  1. Use model.apply for module-level operations (like weight init)
  2. Use isinstance to check which layer a module actually is
  3. Don't use .data; it has been deprecated for a long time and should be avoided whenever possible

To initialize the weights, do the following:

def init_weight(model):
    def _init_weight(module):
        if isinstance(module, nn.Conv2d) or isinstance(module, nn.ConvTranspose2d):
            nn.init.normal_(module.weight, 0., 0.02)
        elif isinstance(module, nn.BatchNorm2d):
            nn.init.normal_(module.weight, 1.0, 0.02)
            nn.init.constant_(module.bias, 0)
    model.apply(_init_weight)

# m is the model you want to initialize
init_weight(m)

Edit: added ConvTranspose2d to the condition.
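
A quick sanity check, assuming the block-based Generator and the init_weight function above (the exact printed values will vary from run to run):

# Apply the initializer to the block-based Generator and inspect one layer.
g = Generator()
init_weight(g)

w = g.conv1.conv.weight  # the ConvTranspose2d inside the first ConvTr block
print("mean: %.4f  std: %.4f" % (w.mean().item(), w.std().item()))
# expected: mean near 0.0 and std near 0.02, per nn.init.normal_(..., 0., 0.02)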
