Layers, batch generator, memory
commit 268429fa1a (parent 9ab6adce7a)
5 changed files with 277 additions and 0 deletions
layers.py (new file, 59 lines)

@@ -0,0 +1,59 @@
import torch
import torch.nn as nn
import torch.nn.functional as F

from .utils.logger import DummyLogger


class LayerInfo():
    def __init__(self):
        self.memory = 0.0
        self.ops = 0.0
        self.output = 0.0


class Layer(nn.Module):
    # Default layer arguments
    ACTIVATION = F.leaky_relu

    BATCH_NORM = True
    BATCH_NORM_TRAINING = False
    BATCH_NORM_DECAY = 0.95

    REGULARIZER = None

    PADDING = 'SAME'

    IS_TRAINING = False
    METRICS = False
    VERBOSE = 0
    LOGGER = DummyLogger()

    def __init__(self, activation, batch_norm):
        super(Layer, self).__init__()
        self.name = 'Layer'
        self.info = LayerInfo()

        # Preload defaults; subclasses replace the boolean batch_norm flag with a module
        self.activation = Layer.ACTIVATION if activation == 0 else activation
        self.batch_norm = Layer.BATCH_NORM if batch_norm is None else batch_norm

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        output = input_data
        if self.activation is not None:
            output = self.activation(output)
        if self.batch_norm is not None:
            output = self.batch_norm(output)
        return output


class Conv2d(Layer):
    def __init__(self, in_channels: int, out_channels: int, kernel_size: int, stride: int = 1,
                 activation=0, batch_norm=None, **kwargs):
        super(Conv2d, self).__init__(activation, batch_norm)

        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, stride=stride, **kwargs)
        self.batch_norm = nn.BatchNorm2d(out_channels, eps=0.001, momentum=0.01) if self.batch_norm else None

    def forward(self, input_data: torch.Tensor) -> torch.Tensor:
        return super().forward(self.conv(input_data))
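Usage sketch (illustrative, not part of the commit): building one of the new Conv2d layers and pushing a batch through it. The module path `layers` and all shapes below are assumptions; the file's relative import of DummyLogger means it must be loaded as part of its package.

    import torch
    from layers import Conv2d  # hypothetical import path; the file lives inside a package

    # 3 -> 16 channel conv; padding=1 is forwarded to nn.Conv2d via **kwargs.
    # With activation=0 and batch_norm=None the class defaults apply:
    # F.leaky_relu plus a BatchNorm2d(16) module.
    layer = Conv2d(in_channels=3, out_channels=16, kernel_size=3, stride=1, padding=1)

    x = torch.randn(8, 3, 32, 32)  # (batch, channels, height, width)
    y = layer(x)                   # conv -> leaky_relu -> batch norm
    print(y.shape)                 # torch.Size([8, 16, 32, 32])

Note that the base class applies the activation before batch normalization, so a Conv2d forward pass runs conv, then leaky_relu, then BatchNorm2d.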