Notes for DNN lab 5: implementing batch normalization (BatchNorm) by hand

    from torch.nn.parameter import Parameter

# Pseudocode sketch of the BatchNorm forward pass — NOT runnable as-is.
def forward(x: Tensor) -> Tensor:
# Training mode: normalize with the statistics of the *current batch*,
# and fold those statistics into the running averages for later use at eval time.
if self.training:
# "/batch/" is placeholder notation for the batch (and, for conv inputs,
# spatial) axes — e.g. axis=0 for a (N, C) input. TODO: pick the real axes.
mean, var = x.mean(axis=/batch/), x.var(axis=/batch/)
# NOTE(review): the update rule is not spelled out here; in torch.nn.BatchNorm1d
# it is an exponential moving average controlled by `momentum` — confirm.
update running_mean, running_var
# Eval mode: use the accumulated running statistics instead of batch statistics.
else:
mean, var = running_mean, running_var

# Missing step: normalize and apply the learnable affine transform, i.e.
# y = gamma * (x - mean) / sqrt(var + eps) + beta
return ...

def __init__(self):
    """Create the learnable affine parameters and the running-statistics buffers.

    NOTE(review): `num_features` is not defined anywhere in this snippet —
    it was presumably meant to be an ``__init__`` argument; confirm against
    the lab skeleton before use.
    """
    # torch.ones / torch.zeros default to requires_grad=False, but Parameter
    # has its own requires_grad argument that defaults to True and overrides it,
    # so gamma and beta are trainable.
    self.gamma = Parameter(torch.ones(num_features))   # per-feature scale
    # Bug fix: torch.zeroes -> torch.zeros (there is no torch.zeroes).
    self.beta = Parameter(torch.zeros(num_features))   # per-feature shift
    # running_mean / running_var are state, not parameters: register them as
    # "buffers" so they are saved, copied, and restored with the module's
    # state_dict but never updated by the optimizer.
    self.running_mean: Tensor
    self.running_var: Tensor
    self.register_buffer('running_mean', torch.zeros(num_features))
    self.register_buffer('running_var', torch.ones(num_features))