Clean mnist comment, add Linear layer

parent 12afc7cc93
commit ced13a4351

2 changed files with 20 additions and 11 deletions
layers.py (23 changed lines)
@@ -18,11 +18,7 @@ class Layer(nn.Module):
     BATCH_NORM = True
-    BATCH_NORM_TRAINING = False
-    BATCH_NORM_DECAY = 0.95
-
-    REGULARIZER = None
 
     PADDING = 'SAME'
     BATCH_NORM_MOMENTUM = 0.01
 
     IS_TRAINING = False
     METRICS = False
@@ -50,10 +46,23 @@ class Layer(nn.Module):
 class Conv2d(Layer):
     def __init__(self, in_channels: int, out_channels: int, kernel_size: int = 3, stride: int = 1,
                  activation=0, batch_norm=None, **kwargs):
-        super(Conv2d, self).__init__(activation, batch_norm)
+        super().__init__(activation, batch_norm)
 
         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, **kwargs)
-        self.batch_norm = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.01) if self.batch_norm else None
+        self.batch_norm = nn.BatchNorm2d(
+            out_channels, eps=0.001, momentum=Layer.BATCH_NORM_MOMENTUM) if self.batch_norm else None
 
     def forward(self, input_data: torch.Tensor) -> torch.Tensor:
         return super().forward(self.conv(input_data))
+
+
+class Linear(Layer):
+    def __init__(self, in_channels: int, out_channels: int, activation=0, batch_norm=None, **kwargs):
+        super().__init__(activation, batch_norm)
+
+        self.fc = nn.Linear(in_channels, out_channels, **kwargs)
+        self.batch_norm = nn.BatchNorm1d(
+            out_channels, eps=0.001, momentum=Layer.BATCH_NORM_MOMENTUM) if self.batch_norm else None
+
+    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
+        return super().forward(self.fc(input_data))
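Note on the batch-norm constants: PyTorch's BatchNorm layers update running statistics as running = (1 - momentum) * running + momentum * batch_stat, so the TF-style BATCH_NORM_DECAY = 0.95 removed here corresponds to momentum = 0.05, while the momentum of 0.01 now named Layer.BATCH_NORM_MOMENTUM is equivalent to a decay of 0.99. Hoisting the value out of the hardcoded nn.BatchNorm2d call lets Conv2d and the new Linear share a single knob.

The Layer base class is not shown in this diff. Assuming its forward() applies the optional batch norm and activation to its argument, the new Linear wrapper composes with Conv2d roughly as below; this is a hypothetical usage sketch, not code from the repository, and MnistNet and the layer sizes are illustrative only.

import torch
from layers import Conv2d, Linear  # the wrappers changed in this commit

class MnistNet(torch.nn.Module):
    # Illustrative sizes for 1x28x28 MNIST input: kernel_size=3 (the wrapper
    # default), stride=2, no padding gives 28 -> 13 -> 6 spatial resolution.
    def __init__(self):
        super().__init__()
        self.conv1 = Conv2d(1, 32, stride=2)    # 1x28x28 -> 32x13x13
        self.conv2 = Conv2d(32, 64, stride=2)   # 32x13x13 -> 64x6x6
        self.fc = Linear(64 * 6 * 6, 10)        # 10 class logits

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        x = self.conv2(self.conv1(x))
        return self.fc(torch.flatten(x, start_dim=1))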