Fix batch norm tracking

Corentin Risselin 2020-07-07 11:04:57 +09:00
commit 7f4a162033


@@ -68,7 +68,7 @@ class Linear(Layer):
         self.batch_norm = nn.BatchNorm1d(
             out_channels,
             momentum=Layer.BATCH_NORM_MOMENTUM,
-            track_running_stats=not Layer.BATCH_NORM_TRAINING) if self.batch_norm else None
+            track_running_stats=Layer.BATCH_NORM_TRAINING) if self.batch_norm else None

     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.fc(input_data))
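
For context, a minimal sketch of what the changed flag controls in PyTorch, independent of this repo. It assumes Layer.BATCH_NORM_TRAINING is a plain boolean class attribute; the feature count, batch size, and momentum value below are placeholders, not values from the repo. With track_running_stats=True, BatchNorm1d updates running_mean/running_var during training and uses them in eval mode; with False, it keeps no running buffers and normalizes with per-batch statistics even in eval mode. The fix makes the layer track running statistics exactly when the flag is set, rather than the inverse.

    import torch
    import torch.nn as nn

    # Stand-in for Layer.BATCH_NORM_TRAINING; assumed to be a boolean.
    BATCH_NORM_TRAINING = True

    bn = nn.BatchNorm1d(
        4,                 # placeholder feature count
        momentum=0.1,      # stand-in for Layer.BATCH_NORM_MOMENTUM
        track_running_stats=BATCH_NORM_TRAINING)

    bn.train()
    _ = bn(torch.randn(8, 4))    # training pass updates running_mean/running_var

    bn.eval()
    out = bn(torch.randn(8, 4))  # eval pass normalizes with the tracked statistics
    print(bn.running_mean)       # a populated tensor; it would be None with tracking off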