Fixed issues: layers now use self.use_batch_norm instead of default value, fixed Layer's forward

Hoel Bagard 2021-01-22 12:38:07 +09:00
commit a4280a1b78
No known key found for this signature in database
GPG key ID: 7182AC568D3A6DEF


@@ -21,9 +21,8 @@ class Layer(nn.Module):
     def __init__(self, activation, use_batch_norm):
         super().__init__()
-        self.name = 'Layer'
         # Preload default
+        self.batch_norm: torch.nn._BatchNorm = None
         self.activation = Layer.ACTIVATION if activation == 0 else activation
         self.use_batch_norm = Layer.USE_BATCH_NORM if use_batch_norm is None else use_batch_norm
@@ -31,7 +30,8 @@ class Layer(nn.Module):
         output = input_data
         if self.activation is not None:
             output = self.activation(output)
-        if self.use_batch_norm is not None:
+        if self.use_batch_norm:
+            # It is assumed here that if using batch norm, then self.batch_norm has been instanciated.
             output = self.batch_norm(output)
         return output
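For reference, here is a minimal, self-contained sketch of how the fixed base class behaves after this commit. The class-level defaults (the ACTIVATION, USE_BATCH_NORM, BATCH_NORM_MOMENTUM and BATCH_NORM_TRAINING values) and the None preload of self.batch_norm are assumptions of the sketch, not the project's actual settings:

import torch
import torch.nn as nn

class Layer(nn.Module):
    # Placeholder defaults for the sketch; the real values live elsewhere in the project.
    ACTIVATION = nn.ReLU()
    USE_BATCH_NORM = True
    BATCH_NORM_MOMENTUM = 0.01
    BATCH_NORM_TRAINING = True

    def __init__(self, activation, use_batch_norm):
        super().__init__()
        # Preload default; subclasses overwrite this with a real BatchNorm module when enabled.
        self.batch_norm = None
        self.activation = Layer.ACTIVATION if activation == 0 else activation
        self.use_batch_norm = Layer.USE_BATCH_NORM if use_batch_norm is None else use_batch_norm

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        output = input_data
        if self.activation is not None:
            output = self.activation(output)
        # The old check was `if self.use_batch_norm is not None:`, which is always true for a
        # bool, so batch norm ran even when it was disabled; testing the flag itself fixes that.
        if self.use_batch_norm:
            output = self.batch_norm(output)
        return output

The point of the forward fix: self.use_batch_norm is always a bool after __init__, so the old `is not None` test could never skip the batch-norm branch.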
@@ -44,7 +44,7 @@ class Linear(Layer):
         self.batch_norm = nn.BatchNorm1d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=Layer.BATCH_NORM_TRAINING if Layer.USE_BATCH_NORM else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None
 
     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.fc(input_data))
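The subclass change moves the conditional from the track_running_stats argument onto the whole module, and switches from the Layer.USE_BATCH_NORM class default to the per-instance flag. A sketch of how Linear might look after this change; the constructor signature (in_features and the keyword defaults) is an assumption, since the diff does not show it:

class Linear(Layer):
    def __init__(self, in_features: int, out_channels: int, activation=0, use_batch_norm=None):
        super().__init__(activation, use_batch_norm)
        self.fc = nn.Linear(in_features, out_channels)
        # The whole BatchNorm1d is now gated on the instance flag, so no batch-norm
        # parameters get registered at all when batch norm is disabled.
        self.batch_norm = nn.BatchNorm1d(
            out_channels,
            momentum=Layer.BATCH_NORM_MOMENTUM,
            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        return super().forward(self.fc(input_data))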
@@ -60,7 +60,7 @@ class Conv1d(Layer):
         self.batch_norm = nn.BatchNorm1d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=Layer.BATCH_NORM_TRAINING if Layer.USE_BATCH_NORM else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None
 
     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.conv(input_data))
@@ -92,7 +92,7 @@ class Conv3d(Layer):
         self.batch_norm = nn.BatchNorm3d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=Layer.BATCH_NORM_TRAINING if Layer.USE_BATCH_NORM else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None
 
     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.conv(input_data))
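Conv1d and Conv3d follow the same pattern, with nn.BatchNorm1d and nn.BatchNorm3d respectively gated on self.use_batch_norm. A quick sanity check of the new behaviour, using the hypothetical Linear sketch above (sizes and values are illustrative only):

# With batch norm enabled, forward runs fc -> activation -> batch norm.
with_bn = Linear(8, 16, use_batch_norm=True)
# With it disabled, self.batch_norm is None and the branch is skipped entirely.
without_bn = Linear(8, 16, use_batch_norm=False)

x = torch.randn(4, 8)
print(with_bn(x).shape)        # torch.Size([4, 16])
print(without_bn.batch_norm)   # None
print(without_bn(x).shape)     # torch.Size([4, 16])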