Fixed bias

This commit is contained in:
Hoel Bagard 2021-01-22 12:48:33 +09:00
commit ce6314bf5e
No known key found for this signature in database
GPG key ID: 7182AC568D3A6DEF

View file

@@ -56,7 +56,7 @@ class Conv1d(Layer):
         super().__init__(activation, use_batch_norm)
         self.conv = nn.Conv1d(in_channels, out_channels, kernel_size, stride=stride,
-                              bias=not Layer.USE_BATCH_NORM, **kwargs)
+                              bias=not self.use_batch_norm, **kwargs)
         self.batch_norm = nn.BatchNorm1d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
@@ -72,7 +72,7 @@ class Conv2d(Layer):
         super().__init__(activation, use_batch_norm)
         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride=stride,
-                              bias=not Layer.USE_BATCH_NORM, **kwargs)
+                              bias=not self.use_batch_norm, **kwargs)
         self.batch_norm = nn.BatchNorm2d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
@@ -88,7 +88,7 @@ class Conv3d(Layer):
         super().__init__(activation, use_batch_norm)
         self.conv = nn.Conv3d(in_channels, out_channels, kernel_size, stride=stride,
-                              bias=not Layer.USE_BATCH_NORM, **kwargs)
+                              bias=not self.use_batch_norm, **kwargs)
         self.batch_norm = nn.BatchNorm3d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
@@ -105,7 +105,7 @@ class Deconv2d(Layer):
         self.deconv = nn.ConvTranspose2d(
             in_channels, out_channels, kernel_size, stride=stride,
-            bias=not Layer.USE_BATCH_NORM, **kwargs)
+            bias=not self.use_batch_norm, **kwargs)
         self.batch_norm = nn.BatchNorm2d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,