diff --git a/.gitignore b/.gitignore index 5642422..1a4925e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ __pycache__/ *.pkl +*.png diff --git a/functions.py b/functions.py index 1c1e442..685c000 100644 --- a/functions.py +++ b/functions.py @@ -46,9 +46,11 @@ def neumann_entropy(tensor): if len(tensor_size) == 1: return 0 elif len(tensor_size) == 2: - e = alg.eigvals(tensor) - # temp_abs = torch.abs(e) - temp_abs = e.real + e = alg.eigvals(tensor)#.real + #se = sum(e) + #e = e / se + temp_abs = torch.abs(e) + #temp_abs = e temp = torch.log(temp_abs).real temp = torch.nan_to_num(temp, nan=0.0, posinf=0.0, neginf=0.0) @@ -56,9 +58,11 @@ def neumann_entropy(tensor): elif len(tensor_size) > 2: for i, x in enumerate(tensor): for j, t in enumerate(x): - e = alg.eigvals(t) - # temp_abs = torch.abs(e) - temp_abs = e.real + e = alg.eigvals(t)#.real + #se = sum(e) + #e = e / se + temp_abs = torch.abs(e) + # temp_abs = e temp = torch.log(temp_abs).real temp = torch.nan_to_num(temp, nan=0.0, posinf=0.0, neginf=0.0) diff --git a/general_plots.py b/general_plots.py index ae59af8..32c1b87 100644 --- a/general_plots.py +++ b/general_plots.py @@ -18,12 +18,12 @@ lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias', 'fc3.bias'] for size in range(1, 8): - if size != 8: - plt.plot(eff_df['CIFAR']['BCNN'][size], - label='Efficiency size {}'.format(size)) - plt.plot(entropy_data['CIFAR']['BCNN'][size], - label='Entropy size {}'.format(size)) + # if size != 8: + plt.plot(eff_df['MNIST']['BCNN'][size], + label='Efficiency size {}'.format(size)) + plt.plot(entropy_data['MNIST']['BCNN'][size], + label='Entropy size {}'.format(size)) -# plt.legend(loc='upper right') -plt.legend(loc='lower right') +plt.legend(loc='upper right') +# plt.legend(loc='lower right') plt.show() diff --git a/get_entropy.py b/get_entropy.py index 2824295..4b00a2e 100644 --- a/get_entropy.py +++ b/get_entropy.py @@ -1,6 +1,8 @@ import functions as aux import statistics as st +alpha = 100000
+ models_bayes_cifar = aux.load_pickle("bayes_data_cifar.pkl") models_bayes_mnist = aux.load_pickle("bayes_data_mnist.pkl") models_lenet_cifar = aux.load_pickle("lenet_data_cifar.pkl") @@ -34,7 +36,6 @@ bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv1.bias_mu', 'conv1.bias_rho', lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias', 'fc1.weight', 'fc1.bias', 'fc2.weight', 'fc2.bias', 'fc3.weight', 'fc3.bias'] -""" bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv2.W_mu', 'conv2.W_rho', @@ -42,6 +43,15 @@ bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'fc2.W_mu', 'fc2.W_rho', 'fc3.W_mu', 'fc3.W_rho'] +""" + +bayes_keys = ['conv1.W_mu', + 'conv2.W_mu', + 'fc1.W_mu', + 'fc2.W_mu', + 'fc3.W_mu',] + + lenet_keys = ['conv1.weight', 'conv2.weight', 'fc1.weight', 'fc2.weight', 'fc3.weight'] @@ -67,7 +77,7 @@ for size in range(1, 8): temp_epoch.append( temp_mean ) - entropy_data['CIFAR']['BCNN'][size] = temp_epoch + entropy_data['CIFAR']['BCNN'][size] = [x / alpha for x in temp_epoch]# temp_epoch # aux.save_pickle("bayes_data_cifar_ne.pkl", models_bayes_cifar) del models_bayes_cifar @@ -94,7 +104,7 @@ for size in range(1, 8): temp_epoch.append( temp_mean ) - entropy_data['MNIST']['BCNN'][size] = temp_epoch + entropy_data['MNIST']['BCNN'][size] = [x / alpha for x in temp_epoch]# temp_epoch # aux.save_pickle("bayes_data_mnist_ne.pkl", models_bayes_mnist) del models_bayes_mnist @@ -121,7 +131,7 @@ for size in range(1, 8): temp_epoch.append( temp_mean ) - entropy_data['CIFAR']['LeNet'][size] = temp_epoch + entropy_data['CIFAR']['LeNet'][size] = [x / alpha for x in temp_epoch]# temp_epoch # aux.save_pickle("lenet_data_cifar_ne.pkl", models_lenet_cifar) del models_lenet_cifar @@ -148,7 +158,7 @@ for size in range(1, 8): temp_epoch.append( temp_mean ) - entropy_data['MNIST']['LeNet'][size] = temp_epoch + entropy_data['MNIST']['LeNet'][size] = [x / alpha for x in temp_epoch]# temp_epoch # aux.save_pickle("lenet_data_mnist_ne.pkl", models_lenet_mnist)