import functions as aux
import statistics as st

# Scaling factor applied to the per-epoch mean entropies before storage.
alpha = 10000
max_epoch = 30
max_size = 8

models_bayes_cifar = aux.load_pickle("bayes_data_cifar.pkl")
models_bayes_mnist = aux.load_pickle("bayes_data_mnist.pkl")
models_lenet_cifar = aux.load_pickle("lenet_data_cifar.pkl")
models_lenet_mnist = aux.load_pickle("lenet_data_mnist.pkl")

# One slot per model size (1 .. max_size - 1), filled in below.
entropy_data = {
    dataset: {arch: {size: None for size in range(1, max_size)}
              for arch in ('BCNN', 'LeNet')}
    for dataset in ('CIFAR', 'MNIST')
}

# Earlier key sets (biases and rho parameters included), kept for reference:
# bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv1.bias_mu', 'conv1.bias_rho',
#               'conv2.W_mu', 'conv2.W_rho', 'conv2.bias_mu', 'conv2.bias_rho',
#               'fc1.W_mu', 'fc1.W_rho', 'fc1.bias_mu', 'fc1.bias_rho',
#               'fc2.W_mu', 'fc2.W_rho', 'fc2.bias_mu', 'fc2.bias_rho',
#               'fc3.W_mu', 'fc3.W_rho', 'fc3.bias_mu', 'fc3.bias_rho']
# lenet_keys = ['conv1.weight', 'conv1.bias', 'conv2.weight', 'conv2.bias',
#               'fc1.weight', 'fc1.bias', 'fc2.weight', 'fc2.bias',
#               'fc3.weight', 'fc3.bias']
# bayes_keys = ['conv1.W_mu', 'conv1.W_rho', 'conv2.W_mu', 'conv2.W_rho',
#               'fc1.W_mu', 'fc1.W_rho', 'fc2.W_mu', 'fc2.W_rho',
#               'fc3.W_mu', 'fc3.W_rho']

# Only the weight means (BCNN) and weight matrices (LeNet) are analysed.
bayes_keys = ['conv1.W_mu', 'conv2.W_mu', 'fc1.W_mu', 'fc2.W_mu', 'fc3.W_mu']
lenet_keys = ['conv1.weight', 'conv2.weight',
              'fc1.weight', 'fc2.weight', 'fc3.weight']


def entropy_per_size(models, keys):
    """Replace every layer tensor in `models` with its von Neumann entropy
    and return, per model size, the list of per-epoch mean entropies
    (averaged over the layers in `keys` and scaled by 1 / alpha)."""
    result = {}
    for size in range(1, max_size):
        per_epoch = []
        for epoch in range(max_epoch):
            for k in keys:
                models[size][epoch][k] = aux.neumann_entropy(
                    aux.square_matrix(models[size][epoch][k])
                )
            per_epoch.append(
                st.mean(models[size][epoch][k].item() for k in keys)
            )
        result[size] = [x / alpha for x in per_epoch]
    return result
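# For context: `square_matrix` and `neumann_entropy` come from the local
# `functions` module. A minimal sketch of what they are assumed to compute
# (hypothetical reference implementations, not the actual module code):
# `square_matrix` is assumed to turn a weight tensor W into the symmetric
# matrix W @ W.T after flattening, and `neumann_entropy` is assumed to
# return the von Neumann entropy -tr(rho * log(rho)) of the trace-normalised
# result, e.g. with torch:
#
#   def square_matrix(w):
#       w = w.reshape(w.shape[0], -1)         # flatten to 2-D
#       return w @ w.T                        # positive semi-definite
#
#   def neumann_entropy(m):
#       rho = m / torch.trace(m)              # normalise to unit trace
#       eigvals = torch.linalg.eigvalsh(rho)  # real spectrum of rho
#       eigvals = eigvals[eigvals > 0]        # treat 0 * log(0) as 0
#       return -(eigvals * eigvals.log()).sum()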
# Each dataset/architecture combination is processed the same way; the raw
# model data is deleted as soon as its entropies have been extracted.
entropy_data['CIFAR']['BCNN'] = entropy_per_size(models_bayes_cifar, bayes_keys)
# aux.save_pickle("bayes_data_cifar_ne.pkl", models_bayes_cifar)
del models_bayes_cifar

entropy_data['MNIST']['BCNN'] = entropy_per_size(models_bayes_mnist, bayes_keys)
# aux.save_pickle("bayes_data_mnist_ne.pkl", models_bayes_mnist)
del models_bayes_mnist

entropy_data['CIFAR']['LeNet'] = entropy_per_size(models_lenet_cifar, lenet_keys)
# aux.save_pickle("lenet_data_cifar_ne.pkl", models_lenet_cifar)
del models_lenet_cifar

entropy_data['MNIST']['LeNet'] = entropy_per_size(models_lenet_mnist, lenet_keys)
# aux.save_pickle("lenet_data_mnist_ne.pkl", models_lenet_mnist)
del models_lenet_mnist

aux.save_pickle("entropy_data.pkl", entropy_data)
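# The saved pickle mirrors the structure of `entropy_data` above; for
# example (assuming `load_pickle` is the inverse of `save_pickle`):
#
#   data = aux.load_pickle("entropy_data.pkl")
#   data['CIFAR']['BCNN'][1]   # list of max_epoch scaled mean entropies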