# Compute per-epoch mean von Neumann entropy of network weights for
# Bayesian-CNN (BCNN) and LeNet runs trained on noisy CIFAR / MNIST data,
# then pickle the aggregated curves.
# Project-local helper module: pickle I/O plus entropy utilities
# (load_pickle, save_pickle, square_matrix, neumann_entropy).
import functions as aux
import statistics as st

# Scale factor: every mean-entropy value is divided by this before being
# stored, keeping the saved curves in a small numeric range.
alpha = 10000
# Pickled weight snapshots from the noisy-training runs.
# Indexing below implies the layout: models[noise_level][epoch][layer_key]
# -> weight tensor (presumably torch tensors, given `.item()` — confirm).
models_bayes_cifar = aux.load_pickle("bayes_data_cifar_noisy.pkl")
models_bayes_mnist = aux.load_pickle("bayes_data_mnist_noisy.pkl")
models_lenet_cifar = aux.load_pickle("lenet_data_cifar_noisy.pkl")
models_lenet_mnist = aux.load_pickle("lenet_data_mnist_noisy.pkl")
# Result container: dataset -> architecture -> noise condition -> per-epoch
# mean-entropy series (None placeholders until filled below).
# Built with comprehensions instead of the original hand-written literal:
# the same ten-key inner dict was copy-pasted four times and could silently
# drift out of sync.
_noise_keys = (0.1, 0.25, 0.5, 0.75, 0.99,
               'raleigh', 'erlang', 'exponential', 'uniform', 'impulse')
entropy_data = {
    dataset: {
        model: {key: None for key in _noise_keys}
        for model in ('BCNN', 'LeNet')
    }
    for dataset in ('CIFAR', 'MNIST')
}
"""
|
|
bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv1.bias_mu', 'conv1.bias_rho',
|
|
'conv2.W_mu', 'conv2.W_rho', 'conv2.bias_mu', 'conv2.bias_rho',
|
|
'fc1.W_mu', 'fc1.W_rho', 'fc1.bias_mu', 'fc1.bias_rho',
|
|
'fc2.W_mu', 'fc2.W_rho', 'fc2.bias_mu', 'fc2.bias_rho',
|
|
'fc3.W_mu', 'fc3.W_rho', 'fc3.bias_mu', 'fc3.bias_rho']
|
|
|
|
lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias',
|
|
'fc1.weight', 'fc1.bias', 'fc2.weight', 'fc2.bias', 'fc3.weight',
|
|
'fc3.bias']
|
|
|
|
bayes_keys = ['conv1.W_mu', 'conv1.W_rho',
|
|
'conv2.W_mu', 'conv2.W_rho',
|
|
'fc1.W_mu', 'fc1.W_rho',
|
|
'fc2.W_mu', 'fc2.W_rho',
|
|
'fc3.W_mu', 'fc3.W_rho']
|
|
|
|
"""
|
|
|
|
# Noise conditions used as top-level keys in the pickled model dicts:
# numeric entries are presumably Gaussian noise intensities (TODO confirm
# against the data-generation script); string entries name other noise
# distributions.
noise_levels = [0.1, 0.25, 0.5, 0.75, 0.99, 'raleigh', 'erlang', 'exponential', 'uniform', 'impulse']
# Bayesian-CNN layers to analyse: weight-mean ('W_mu') tensors only —
# rho (variance) and bias parameters are deliberately excluded.
bayes_keys = ['conv1.W_mu',
              'conv2.W_mu',
              'fc1.W_mu',
              'fc2.W_mu',
              'fc3.W_mu']
# LeNet layers to analyse: weight tensors only — biases excluded, mirroring
# the bayes_keys selection above.
lenet_keys = ['conv1.weight', 'conv2.weight',
              'fc1.weight', 'fc2.weight', 'fc3.weight']
N_EPOCHS = 30  # epochs recorded per noise level in the pickled runs


def _mean_entropy_series(models, layer_keys):
    """Return ``{noise: [per-epoch mean entropy / alpha]}`` for one run.

    For every noise level and epoch, the von Neumann entropy of each
    selected layer's squared weight matrix is computed, the per-layer
    entropies are averaged with ``st.mean``, and the mean is scaled down
    by the module-level ``alpha``.

    This replaces four copy-pasted pairs of loop nests that first mutated
    the model dicts in place (overwriting weight tensors with entropy
    scalars) and then averaged them in a second pass. Results are
    identical — ``mean(xs) / alpha`` equals the original
    ``[x / alpha for x in temp_epoch]`` applied element-wise — but the
    input dicts are left untouched.
    """
    series = {}
    for noise in noise_levels:
        per_epoch = []
        for epoch in range(N_EPOCHS):
            layer_entropies = [
                aux.neumann_entropy(
                    aux.square_matrix(models[noise][epoch][key])
                ).item()
                for key in layer_keys
            ]
            per_epoch.append(st.mean(layer_entropies) / alpha)
        series[noise] = per_epoch
    return series


# Each model is processed and released immediately to keep peak memory low,
# matching the original's per-model `del`.
# NOTE: the original had commented-out saves of per-layer entropy pickles
# ("*_ne.pkl"); reinstate inside the helper if that output is ever needed.
entropy_data['CIFAR']['BCNN'].update(
    _mean_entropy_series(models_bayes_cifar, bayes_keys))
del models_bayes_cifar

entropy_data['MNIST']['BCNN'].update(
    _mean_entropy_series(models_bayes_mnist, bayes_keys))
del models_bayes_mnist

entropy_data['CIFAR']['LeNet'].update(
    _mean_entropy_series(models_lenet_cifar, lenet_keys))
del models_lenet_cifar

entropy_data['MNIST']['LeNet'].update(
    _mean_entropy_series(models_lenet_mnist, lenet_keys))
del models_lenet_mnist

aux.save_pickle("entropy_data_noisy.pkl", entropy_data)