New processing for noisy datasets
parent 1a35d08f66
commit 03f03d59d9
@@ -2,6 +2,9 @@ import functions as aux
 import statistics as st
 # import pandas as pd
 
+max_epoch = 30
+max_size = 8
+
 data_types = ['mni', 'cif']
 model_types = ['fre', 'bay']
 
@@ -15,41 +18,41 @@ gpu_exp_data = aux.load_pickle("result_gpu.pkl")
 
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
+        for size in range(1, max_size):
             gpu_ene_data[data][model][size] = \
                 list(
-                    aux.split(gpu_ene_data[data][model][size], 100)
+                    aux.split(gpu_ene_data[data][model][size], max_epoch)
                 )
 
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
+        for size in range(1, max_size):
             cpu_ene_data[data][model][size] = \
                 list(
-                    aux.split(cpu_ene_data[data][model][size], 100)
+                    aux.split(cpu_ene_data[data][model][size], max_epoch)
                 )
 
 spl_ene_data = dict(gpu_ene_data)
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
-            for i in range(0, 100):
+        for size in range(1, max_size):
+            for i in range(0, max_epoch):
                 spl_ene_data[data][model][size][i] = \
                     gpu_ene_data[data][model][size][i] +\
                     cpu_ene_data[data][model][size][i]
 
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
-            for i in range(0, 100):
+        for size in range(1, max_size):
+            for i in range(0, max_epoch):
                 spl_ene_data[data][model][size][i] = \
                     sum(spl_ene_data[data][model][size][i])
 
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
+        for size in range(1, max_size):
             temp = []
-            for i in range(0, 100):
+            for i in range(0, max_epoch):
                 temp.append(
                     # st.mean(spl_ene_data[data][model][size][0:i+1])
                     sum(spl_ene_data[data][model][size][0:i+1])
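Note: aux.split, called in the hunk above, lives in functions.py and is not part of this diff. Since its result is wrapped in list(), it presumably yields chunks lazily; a minimal sketch of the assumed behaviour (regrouping a flat list of energy readings into max_epoch consecutive, near-equal chunks; the name split here is hypothetical) would be:

    import numpy as np

    def split(data, n):
        # Assumed equivalent of aux.split: yield n consecutive,
        # roughly equal-length chunks of `data`, one per epoch.
        for chunk in np.array_split(list(data), n):
            yield list(chunk)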
@@ -59,8 +62,8 @@ for data in data_types:
 eff_data = dict(gpu_ene_data)
 for data in data_types:
     for model in model_types:
-        for size in range(1, 8):
-            for i in range(0, 100):
+        for size in range(1, max_size):
+            for i in range(0, max_epoch):
                 eff_data[data][model][size][i] = \
                     (gpu_exp_data[data][model][size]['acc'][i] /
                      spl_ene_data[data][model][size][i]) * 100
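The load_pickle and save_pickle helpers used throughout are also defined outside this diff; they are presumably thin wrappers around the standard pickle module, in the spirit of (a sketch, not the repository's actual code):

    import pickle

    def load_pickle(path):
        # Deserialize one object from `path`.
        with open(path, 'rb') as f:
            return pickle.load(f)

    def save_pickle(path, data):
        # Serialize `data` to `path`, matching the
        # save_pickle(filename, object) argument order used here.
        with open(path, 'wb') as f:
            pickle.dump(data, f)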
@@ -5,6 +5,8 @@ import torch
 
 def square_matrix(tensor):
     tensor_size = tensor.size()
+    if len(tensor_size) == 0:
+        return tensor
     if len(tensor_size) == 1:
         temp = torch.zeros([tensor_size[0],
                             tensor_size[0]-1])
@@ -43,6 +45,8 @@ def square_matrix(tensor):
 
 def neumann_entropy(tensor):
     tensor_size = tensor.size()
+    if len(tensor_size) == 0:
+        return tensor
     if len(tensor_size) == 1:
         return 0
     elif len(tensor_size) == 2:
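Both new guards make the helpers pass 0-dimensional (scalar) tensors through unchanged instead of falling through the dimension checks. For context, the von Neumann entropy of a square matrix with eigenvalues λ_i is S = -sum_i λ_i ln λ_i; a minimal PyTorch sketch of that 2-D step (an illustration of the formula applied directly to a weight matrix, with the hypothetical name neumann_entropy_2d, not the repository's exact implementation) is:

    import torch

    def neumann_entropy_2d(matrix):
        # Eigenvalues of the (already square) matrix; clamp so the
        # logarithm stays defined for zero or negative values.
        eig = torch.linalg.eigvals(matrix).real
        eig = torch.clamp(eig, min=1e-12)
        return -(eig * torch.log(eig)).sum()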
@@ -18,10 +18,10 @@ lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias',
               'fc3.bias']
 
 for size in range(1, 8):
-    # if size != 8:
-    plt.plot(eff_df['MNIST']['BCNN'][size],
+    # if size != 3:
+    plt.plot(eff_df['MNIST']['LeNet'][size],
              label='Efficiency size {}'.format(size))
-    plt.plot(entropy_data['MNIST']['BCNN'][size],
+    plt.plot(entropy_data['MNIST']['LeNet'][size],
              label='Entropy size {}'.format(size))
 
 plt.legend(loc='upper right')
@@ -0,0 +1,36 @@
+import matplotlib.pyplot as plt
+import functions as aux
+
+model_type = 'BCNN'  # BCNN or LeNet
+dataset = 'MNIST'  # MNIST or CIFAR
+
+eff_df = aux.load_pickle("efficiency_data.pkl")
+
+entropy_data_noise = aux.load_pickle("entropy_data_noisy.pkl")
+entropy_data = aux.load_pickle("entropy_data.pkl")
+
+bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv1.bias_mu', 'conv1.bias_rho',
+              'conv2.W_mu', 'conv2.W_rho', 'conv2.bias_mu', 'conv2.bias_rho',
+              'fc1.W_mu', 'fc1.W_rho', 'fc1.bias_mu', 'fc1.bias_rho',
+              'fc2.W_mu', 'fc2.W_rho', 'fc2.bias_mu', 'fc2.bias_rho',
+              'fc3.W_mu', 'fc3.W_rho', 'fc3.bias_mu', 'fc3.bias_rho']
+
+lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias',
+              'fc1.weight', 'fc1.bias', 'fc2.weight', 'fc2.bias', 'fc3.weight',
+              'fc3.bias']
+
+all_noises = [0.1, 0.25, 0.5, 0.75, 0.99]
+
+for size in range(1, 2):
+    plt.plot(eff_df['MNIST']['LeNet'][size],
+             label='Efficiency')
+    plt.plot(entropy_data[dataset][model_type][size],
+             label='Entropy at noise 0.0')
+
+for noise in all_noises:
+    plt.plot(entropy_data_noise[dataset][model_type][noise],
+             label='Entropy at noise {}'.format(noise))
+
+plt.legend(loc='upper right')
+# plt.legend(loc='lower right')
+plt.show()
@@ -1,7 +1,9 @@
 import functions as aux
 import statistics as st
 
-alpha = 100000
+alpha = 10000
+max_epoch = 30
+max_size = 8
 
 models_bayes_cifar = aux.load_pickle("bayes_data_cifar.pkl")
 models_bayes_mnist = aux.load_pickle("bayes_data_mnist.pkl")
@@ -55,8 +57,8 @@ bayes_keys = ['conv1.W_mu',
 lenet_keys = ['conv1.weight', 'conv2.weight',
               'fc1.weight', 'fc2.weight', 'fc3.weight']
 
-for model_size in range(1, 8):
-    for epoch in range(0, 100):
+for model_size in range(1, max_size):
+    for epoch in range(0, max_epoch):
         for k in bayes_keys:
             models_bayes_cifar[model_size][epoch][k] = \
                 aux.neumann_entropy(
@@ -65,9 +67,9 @@ for model_size in range(1, 8):
                     )
                 )
 
-for size in range(1, 8):
+for size in range(1, max_size):
     temp_epoch = []
-    for epoch in range(0, 100):
+    for epoch in range(0, max_epoch):
         temp_mean = []
         for layer in bayes_keys:
             temp_mean.append(
@@ -82,8 +84,8 @@ for size in range(1, 8):
 # aux.save_pickle("bayes_data_cifar_ne.pkl", models_bayes_cifar)
 del models_bayes_cifar
 
-for model_size in range(1, 8):
-    for epoch in range(0, 100):
+for model_size in range(1, max_size):
+    for epoch in range(0, max_epoch):
         for k in bayes_keys:
             models_bayes_mnist[model_size][epoch][k] = \
                 aux.neumann_entropy(
@@ -92,9 +94,9 @@ for model_size in range(1, 8):
                     )
                 )
 
-for size in range(1, 8):
+for size in range(1, max_size):
     temp_epoch = []
-    for epoch in range(0, 100):
+    for epoch in range(0, max_epoch):
         temp_mean = []
         for layer in bayes_keys:
             temp_mean.append(
@@ -109,8 +111,8 @@ for size in range(1, 8):
 # aux.save_pickle("bayes_data_mnist_ne.pkl", models_bayes_mnist)
 del models_bayes_mnist
 
-for model_size in range(1, 8):
-    for epoch in range(0, 100):
+for model_size in range(1, max_size):
+    for epoch in range(0, max_epoch):
         for k in lenet_keys:
             models_lenet_cifar[model_size][epoch][k] = \
                 aux.neumann_entropy(
@@ -119,9 +121,9 @@ for model_size in range(1, 8):
                     )
                 )
 
-for size in range(1, 8):
+for size in range(1, max_size):
     temp_epoch = []
-    for epoch in range(0, 100):
+    for epoch in range(0, max_epoch):
         temp_mean = []
         for layer in lenet_keys:
             temp_mean.append(
@@ -136,8 +138,8 @@ for size in range(1, 8):
 # aux.save_pickle("lenet_data_cifar_ne.pkl", models_lenet_cifar)
 del models_lenet_cifar
 
-for model_size in range(1, 8):
-    for epoch in range(0, 100):
+for model_size in range(1, max_size):
+    for epoch in range(0, max_epoch):
         for k in lenet_keys:
             models_lenet_mnist[model_size][epoch][k] = \
                 aux.neumann_entropy(
@@ -146,9 +148,9 @@ for model_size in range(1, 8):
                     )
                 )
 
-for size in range(1, 8):
+for size in range(1, max_size):
     temp_epoch = []
-    for epoch in range(0, 100):
+    for epoch in range(0, max_epoch):
         temp_mean = []
         for layer in lenet_keys:
             temp_mean.append(
@@ -0,0 +1,167 @@
+import functions as aux
+import statistics as st
+
+alpha = 10000
+
+models_bayes_cifar = aux.load_pickle("bayes_data_cifar_noisy.pkl")
+models_bayes_mnist = aux.load_pickle("bayes_data_mnist_noisy.pkl")
+models_lenet_cifar = aux.load_pickle("lenet_data_cifar_noisy.pkl")
+models_lenet_mnist = aux.load_pickle("lenet_data_mnist_noisy.pkl")
+
+entropy_data = {'CIFAR':
+                {'BCNN':
+                 {0.1: None, 0.25: None,
+                  0.5: None, 0.75: None, 0.99: None},
+                 'LeNet':
+                 {0.1: None, 0.25: None,
+                  0.5: None, 0.75: None, 0.99: None},
+                 },
+                'MNIST':
+                {'BCNN':
+                 {0.1: None, 0.25: None,
+                  0.5: None, 0.75: None, 0.99: None},
+                 'LeNet':
+                 {0.1: None, 0.25: None,
+                  0.5: None, 0.75: None, 0.99: None},
+                 },
+                }
+
+"""
+bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv1.bias_mu', 'conv1.bias_rho',
+              'conv2.W_mu', 'conv2.W_rho', 'conv2.bias_mu', 'conv2.bias_rho',
+              'fc1.W_mu', 'fc1.W_rho', 'fc1.bias_mu', 'fc1.bias_rho',
+              'fc2.W_mu', 'fc2.W_rho', 'fc2.bias_mu', 'fc2.bias_rho',
+              'fc3.W_mu', 'fc3.W_rho', 'fc3.bias_mu', 'fc3.bias_rho']
+
+lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias',
+              'fc1.weight', 'fc1.bias', 'fc2.weight', 'fc2.bias', 'fc3.weight',
+              'fc3.bias']
+
+bayes_keys = ['conv1.W_mu', 'conv1.W_rho',
+              'conv2.W_mu', 'conv2.W_rho',
+              'fc1.W_mu', 'fc1.W_rho',
+              'fc2.W_mu', 'fc2.W_rho',
+              'fc3.W_mu', 'fc3.W_rho']
+
+"""
+
+noise_levels = [0.1, 0.25, 0.5, 0.75, 0.99]
+
+bayes_keys = ['conv1.W_mu',
+              'conv2.W_mu',
+              'fc1.W_mu',
+              'fc2.W_mu',
+              'fc3.W_mu']
+
+
+lenet_keys = ['conv1.weight', 'conv2.weight',
+              'fc1.weight', 'fc2.weight', 'fc3.weight']
+
+for noise in noise_levels:
+    for epoch in range(0, 30):
+        for k in bayes_keys:
+            models_bayes_cifar[noise][epoch][k] = \
+                aux.neumann_entropy(
+                    aux.square_matrix(
+                        models_bayes_cifar[noise][epoch][k]
+                    )
+                )
+
+for noise in noise_levels:
+    temp_epoch = []
+    for epoch in range(0, 30):
+        temp_mean = []
+        for layer in bayes_keys:
+            temp_mean.append(
+                models_bayes_cifar[noise][epoch][layer].item()
+            )
+        temp_mean = st.mean(temp_mean)
+        temp_epoch.append(temp_mean)
+    entropy_data['CIFAR']['BCNN'][noise] = [x / alpha for x in temp_epoch]  # temp_epoch
+
+# aux.save_pickle("bayes_data_cifar_ne.pkl", models_bayes_cifar)
+del models_bayes_cifar
+
+for noise in noise_levels:
+    for epoch in range(0, 30):
+        for k in bayes_keys:
+            models_bayes_mnist[noise][epoch][k] = \
+                aux.neumann_entropy(
+                    aux.square_matrix(
+                        models_bayes_mnist[noise][epoch][k]
+                    )
+                )
+
+for noise in noise_levels:
+    temp_epoch = []
+    for epoch in range(0, 30):
+        temp_mean = []
+        for layer in bayes_keys:
+            temp_mean.append(
+                models_bayes_mnist[noise][epoch][layer].item()
+            )
+        temp_mean = st.mean(temp_mean)
+        temp_epoch.append(
+            temp_mean
+        )
+    entropy_data['MNIST']['BCNN'][noise] = [x / alpha for x in temp_epoch]  # temp_epoch
+
+# aux.save_pickle("bayes_data_mnist_ne.pkl", models_bayes_mnist)
+del models_bayes_mnist
+
+for noise in noise_levels:
+    for epoch in range(0, 30):
+        for k in lenet_keys:
+            models_lenet_cifar[noise][epoch][k] = \
+                aux.neumann_entropy(
+                    aux.square_matrix(
+                        models_lenet_cifar[noise][epoch][k]
+                    )
+                )
+
+for noise in noise_levels:
+    temp_epoch = []
+    for epoch in range(0, 30):
+        temp_mean = []
+        for layer in lenet_keys:
+            temp_mean.append(
+                models_lenet_cifar[noise][epoch][layer].item()
+            )
+        temp_mean = st.mean(temp_mean)
+        temp_epoch.append(
+            temp_mean
+        )
+    entropy_data['CIFAR']['LeNet'][noise] = [x / alpha for x in temp_epoch]  # temp_epoch
+
+# aux.save_pickle("lenet_data_cifar_ne.pkl", models_lenet_cifar)
+del models_lenet_cifar
+
+for noise in noise_levels:
+    for epoch in range(0, 30):
+        for k in lenet_keys:
+            models_lenet_mnist[noise][epoch][k] = \
+                aux.neumann_entropy(
+                    aux.square_matrix(
+                        models_lenet_mnist[noise][epoch][k]
+                    )
+                )
+
+for noise in noise_levels:
+    temp_epoch = []
+    for epoch in range(0, 30):
+        temp_mean = []
+        for layer in lenet_keys:
+            temp_mean.append(
+                models_lenet_mnist[noise][epoch][layer].item()
+            )
+        temp_mean = st.mean(temp_mean)
+        temp_epoch.append(
+            temp_mean
+        )
+    entropy_data['MNIST']['LeNet'][noise] = [x / alpha for x in temp_epoch]  # temp_epoch
+
+
+# aux.save_pickle("lenet_data_mnist_ne.pkl", models_lenet_mnist)
+del models_lenet_mnist
+
+aux.save_pickle("entropy_data_noisy.pkl", entropy_data)
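After this script runs, the written pickle mirrors the entropy_data initialiser above (dataset, then model, then noise level, then one scaled entropy value per epoch). A quick spot-check, assuming the script completed all 30 epochs per noise level:

    import functions as aux

    entropy = aux.load_pickle("entropy_data_noisy.pkl")
    print(len(entropy['MNIST']['BCNN'][0.5]))   # expected: 30 epochs
    print(entropy['CIFAR']['LeNet'][0.1][:3])   # first three epochs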