Merge branch 'master' into 'BatchNormModifications'

# Conflicts:
#   layers.py
Corentin 2021-05-21 06:53:31 +00:00
commit fe11f3e6d5
11 changed files with 753 additions and 159 deletions

layers.py

@@ -22,8 +22,13 @@ class Layer(nn.Module):
    def __init__(self, activation, use_batch_norm):
        super().__init__()
        # Preload default
        if activation == 0:
            activation = Layer.ACTIVATION
        if isinstance(activation, type):
            self.activation = activation()
        else:
            self.activation = activation
        self.batch_norm: torch.nn._BatchNorm = None
        self.activation = Layer.ACTIVATION if activation == 0 else activation
        self.use_batch_norm = Layer.USE_BATCH_NORM if use_batch_norm is None else use_batch_norm
    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
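Only the constructor changes in this hunk; the body of the shared forward is not shown. For orientation, a minimal sketch of what that base forward presumably does, namely run the optional batch norm and then the activation on whatever tensor the subclass passes up (the body below is an assumption, not code from this commit):

    import torch
    import torch.nn as nn

    class Layer(nn.Module):
        def __init__(self, activation=None, use_batch_norm=False):
            super().__init__()
            self.activation = activation if activation is not None else nn.ReLU()
            self.batch_norm = None           # subclasses assign BatchNorm1d/2d here
            self.use_batch_norm = use_batch_norm

        def forward(self, input_data: torch.Tensor) -> torch.Tensor:
            # Assumed ordering: subclass op -> batch norm (if configured) -> activation
            if self.batch_norm is not None:
                input_data = self.batch_norm(input_data)
            return self.activation(input_data)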
@@ -40,7 +45,7 @@ class Linear(Layer):
     def __init__(self, in_channels: int, out_channels: int, activation=0, use_batch_norm: bool = None, **kwargs):
         super().__init__(activation, use_batch_norm)
-        self.fc = nn.Linear(in_channels, out_channels, **kwargs)
+        self.fc = nn.Linear(in_channels, out_channels, bias=not self.batch_norm, **kwargs)
         self.batch_norm = nn.BatchNorm1d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
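The new bias argument follows the common practice of dropping the Linear layer's own bias when a BatchNorm1d follows it, since the norm layer already learns a per-feature shift. A standalone sketch of that pattern in plain PyTorch (sizes and the use_batch_norm flag are illustrative, not taken from the diff):

    import torch
    import torch.nn as nn

    use_batch_norm = True
    fc = nn.Linear(128, 64, bias=not use_batch_norm)    # bias is redundant under batch norm
    bn = nn.BatchNorm1d(64, momentum=0.1) if use_batch_norm else None

    x = torch.randn(32, 128)
    out = bn(fc(x)) if bn is not None else fc(x)        # shape (32, 64) either way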
@@ -76,7 +81,7 @@ class Conv2d(Layer):
         self.batch_norm = nn.BatchNorm2d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=not Layer.BATCH_NORM_TRAINING if Layer.USE_BATCH_NORM else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None

     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.conv(input_data))
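This hunk moves the condition off the track_running_stats argument and onto the whole assignment, so self.batch_norm becomes either a configured nn.BatchNorm2d or None. A minimal sketch of that construction in isolation (channel counts and the two constants stand in for the class-level settings and are assumptions):

    import torch
    import torch.nn as nn

    use_batch_norm = True
    BATCH_NORM_MOMENTUM = 0.1        # stand-in for Layer.BATCH_NORM_MOMENTUM
    BATCH_NORM_TRAINING = True       # stand-in for Layer.BATCH_NORM_TRAINING

    conv = nn.Conv2d(3, 16, kernel_size=3, padding=1)
    batch_norm = nn.BatchNorm2d(
        16,
        momentum=BATCH_NORM_MOMENTUM,
        track_running_stats=BATCH_NORM_TRAINING) if use_batch_norm else None

    x = torch.randn(4, 3, 32, 32)
    y = conv(x)
    y = batch_norm(y) if batch_norm is not None else y   # shape (4, 16, 32, 32)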
@@ -109,7 +114,7 @@ class Deconv2d(Layer):
         self.batch_norm = nn.BatchNorm2d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=not Layer.BATCH_NORM_TRAINING if Layer.USE_BATCH_NORM else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.use_batch_norm else None

     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.deconv(input_data))
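Deconv2d receives the identical change on the transposed-convolution path; when use_batch_norm is off, self.batch_norm stays None and the forward simply skips normalization. A plain-PyTorch sketch of that disabled path (shapes are illustrative only):

    import torch
    import torch.nn as nn

    use_batch_norm = False                               # normalization disabled
    deconv = nn.ConvTranspose2d(16, 3, kernel_size=2, stride=2)
    batch_norm = nn.BatchNorm2d(
        3, momentum=0.1, track_running_stats=True) if use_batch_norm else None

    x = torch.randn(4, 16, 16, 16)
    y = deconv(x)                                        # upsamples 16x16 -> 32x32
    if batch_norm is not None:                           # skipped here: batch_norm is None
        y = batch_norm(y)
    # y.shape == torch.Size([4, 3, 32, 32])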