import os
import torch
import numpy as np
from torch.nn import functional as F

# CIFAR-10 class names, indexed by integer label
cifar10_classes = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                   'dog', 'frog', 'horse', 'ship', 'truck']
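
# Example (a sketch, not part of the original file): the list maps integer
# targets to names, e.g. cifar10_classes[3] == 'cat' and
# cifar10_classes[9] == 'truck'.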


def logmeanexp(x, dim=None, keepdim=False):
    """Stable computation of log(mean(exp(x))).

    Subtracts the max along `dim` before exponentiating so exp() cannot
    overflow; with dim=None the input is flattened first.
    """
    if dim is None:
        x, dim = x.view(-1), 0
    x_max, _ = torch.max(x, dim, keepdim=True)
    x = x_max + torch.log(torch.mean(torch.exp(x - x_max), dim, keepdim=True))
    return x if keepdim else x.squeeze(dim)
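
# Sanity check (a sketch, not part of the original file): the naive form
# overflows where the shifted form stays finite, and logmeanexp(x) equals
# torch.logsumexp(x, dim) - log(n) for n elements along `dim`.
# >>> x = torch.full((4,), 1000.0)
# >>> torch.log(torch.exp(x).mean())  # tensor(inf): exp() overflows
# >>> logmeanexp(x)                   # tensor(1000.)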


# check if dimension is correct
# def dimension_check(x, dim=None, keepdim=False):
#     if dim is None:
#         x, dim = x.view(-1), 0
#
#     return x if keepdim else x.squeeze(dim)


def adjust_learning_rate(optimizer, lr):
    """Set the learning rate of every parameter group to `lr`."""
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
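
# Example usage (a sketch; `optimizer`, `initial_lr`, and the step schedule
# are assumptions): the caller owns the schedule, so the original docstring's
# "decayed by 10 every 30 epochs" would be implemented as
# >>> for epoch in range(90):
# ...     adjust_learning_rate(optimizer, initial_lr * (0.1 ** (epoch // 30)))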


def save_array_to_file(numpy_array, filename):
    """Append `numpy_array` to `filename` as one line of %.3f values.

    Only the flattened values are written; the array's shape is not recorded.
    """
    with open(filename, 'a') as file:
        np.savetxt(file, numpy_array.flatten(), newline=" ", fmt="%.3f")
        file.write("\n")
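
# Example usage (a sketch; the filename is hypothetical). Each call appends
# one space-separated line, so a (2, 3) float array yields six values:
# >>> save_array_to_file(np.arange(6, dtype=float).reshape(2, 3), 'probs.txt')
# appends "0.000 1.000 2.000 3.000 4.000 5.000" and a newline to probs.txt.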