import torch
import torch.nn as nn


class batch_norm(nn.Module):
    """BatchNorm2d followed by ReLU, applied to a 4D (N, C, H, W) tensor."""

    def __init__(self, inp):
        super().__init__()
        # `inp` is the number of input channels expected by BatchNorm2d.
        self.batch = nn.BatchNorm2d(inp)
        self.relu = nn.ReLU()

    def forward(self, x):
        b = self.batch(x)
        op = self.relu(b)
        return op
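

# Minimal usage sketch (assumption: a dummy batch of 4 images with 3 channels, 8x8 spatial size).
if __name__ == "__main__":
    layer = batch_norm(3)        # 3 input channels -> BatchNorm2d(3) + ReLU
    x = torch.randn(4, 3, 8, 8)  # (N, C, H, W)
    out = layer(x)
    print(out.shape)             # torch.Size([4, 3, 8, 8]); ReLU clamps negatives to zero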