# torch_utils/layers.py

from typing import Union, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from .utils.logger import DummyLogger


class LayerInfo:
    """Per-layer bookkeeping: memory, ops, and output counters."""

    def __init__(self):
        self.memory = 0.0
        self.ops = 0.0
        self.output = 0.0


class Layer(nn.Module):
    """Base layer: applies the activation, then batch norm, to its input."""

    # Class-level defaults; these are read at construction time.
    ACTIVATION = F.leaky_relu
    BATCH_NORM = True
    BATCH_NORM_TRAINING = False
    BATCH_NORM_MOMENTUM = 0.01
    IS_TRAINING = False
    METRICS = False
    VERBOSE = 0
    LOGGER = DummyLogger()

    def __init__(self, activation, batch_norm):
        super().__init__()
        self.name = 'Layer'
        self.info = LayerInfo()
        # Preload defaults: 0 is the sentinel for "use the class default",
        # so activation=None can still mean "no activation".
        self.activation = Layer.ACTIVATION if activation == 0 else activation
        self.batch_norm = Layer.BATCH_NORM if batch_norm is None else batch_norm

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        # Subclasses replace self.batch_norm with a BatchNorm module (or None).
        output = input_data
        if self.activation is not None:
            output = self.activation(output)
        if self.batch_norm is not None:
            output = self.batch_norm(output)
        return output
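

# Usage sketch (an assumption, not from the original file): the class
# attributes on Layer act as module-wide defaults and are read at
# construction time, so they can be overridden once before building layers:
#
#     Layer.ACTIVATION = F.relu
#     Layer.BATCH_NORM_MOMENTUM = 0.1
#     conv = Conv2d(3, 16)  # picks up the overridden defaults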


class Conv1d(Layer):
    def __init__(self, in_channels: int, out_channels: int, kernel_size: int = 3,
                 stride: Union[int, Tuple[int]] = 1, activation=0, batch_norm=None, **kwargs):
        super().__init__(activation, batch_norm)
        # Bias is redundant when a batch norm follows the convolution.
        self.conv = nn.Conv1d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                              bias=not self.batch_norm, **kwargs)
        self.batch_norm = nn.BatchNorm1d(
            out_channels,
            momentum=Layer.BATCH_NORM_MOMENTUM,
            track_running_stats=not Layer.BATCH_NORM_TRAINING) if self.batch_norm else None

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        return super().forward(self.conv(input_data))


class Conv2d(Layer):
    def __init__(self, in_channels: int, out_channels: int, kernel_size: int = 3,
                 stride: Union[int, Tuple[int, int]] = 1, activation=0, batch_norm=None, **kwargs):
        super().__init__(activation, batch_norm)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride,
                              bias=not self.batch_norm, **kwargs)
        self.batch_norm = nn.BatchNorm2d(
            out_channels,
            momentum=Layer.BATCH_NORM_MOMENTUM,
            track_running_stats=not Layer.BATCH_NORM_TRAINING) if self.batch_norm else None

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        return super().forward(self.conv(input_data))


class Linear(Layer):
    def __init__(self, in_channels: int, out_channels: int, activation=0, batch_norm=None, **kwargs):
        super().__init__(activation, batch_norm)
        self.fc = nn.Linear(in_channels, out_channels, **kwargs)
        # Track running stats unless the norm is kept in training mode,
        # matching the behavior of the Conv layers above.
        self.batch_norm = nn.BatchNorm1d(
            out_channels,
            momentum=Layer.BATCH_NORM_MOMENTUM,
            track_running_stats=not Layer.BATCH_NORM_TRAINING) if self.batch_norm else None

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        return super().forward(self.fc(input_data))
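

# A minimal smoke test (a sketch, not part of the original file): builds one
# layer of each type and prints output shapes; the input sizes are
# illustrative. Because of the relative import above, run it as a module from
# the package root, e.g. `python -m torch_utils.layers`.
if __name__ == "__main__":
    conv2 = Conv2d(3, 16)
    print(conv2(torch.randn(2, 3, 32, 32)).shape)  # torch.Size([2, 16, 30, 30])

    conv1 = Conv1d(3, 16)
    print(conv1(torch.randn(2, 3, 32)).shape)      # torch.Size([2, 16, 30])

    fc = Linear(16, 4)
    print(fc(torch.randn(2, 16)).shape)            # torch.Size([2, 4])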