diff --git a/efficiency_computations.py b/efficiency_computations.py
index 61530ce..70a7af1 100644
--- a/efficiency_computations.py
+++ b/efficiency_computations.py
@@ -62,11 +62,15 @@ for data in data_types:
 eff_data = dict(gpu_ene_data)
 for data in data_types:
     for model in model_types:
+        if model == 'fre':
+            alpha = 100
+        elif model == 'bay':
+            alpha = 1000
         for size in range(1, max_size):
             for i in range(0, max_epoch):
                 eff_data[data][model][size][i] = \
                     (gpu_exp_data[data][model][size]['acc'][i] /
-                     spl_ene_data[data][model][size][i]) * 100
+                     spl_ene_data[data][model][size][i]) * alpha
 
 
 for data, o_data in zip(data_types, o_data_types):
diff --git a/general_plots_noisy.py b/general_plots_noisy.py
index ff17bd7..3cccbd8 100644
--- a/general_plots_noisy.py
+++ b/general_plots_noisy.py
@@ -1,8 +1,8 @@
 import matplotlib.pyplot as plt
 import functions as aux
 
-model_type = 'BCNN' # BCNN or LeNet
-dataset = 'MNIST' # MNIST or CIFAR
+model_type = 'LeNet' # BCNN or LeNet
+dataset = 'CIFAR' # MNIST or CIFAR
 
 eff_df = aux.load_pickle("efficiency_data.pkl")
 
@@ -19,10 +19,10 @@ lenet_keys = ['conv1.weight', 'conv1.bias',
               'conv2.weight', 'conv2.bias', 'fc1.weight', 'fc1.bias',
               'fc2.weight', 'fc2.bias', 'fc3.weight', 'fc3.bias']
 
-all_noises = [0.1, 0.25, 0.5, 0.75, 0.99]
+all_noises = [0.1, 0.25, 0.5, 0.75, 0.99, 'raleigh', 'erlang', 'exponential', 'uniform', 'impulse']
 
 for size in range(1, 2):
-    plt.plot(eff_df['MNIST']['LeNet'][size],
+    plt.plot(eff_df[dataset][model_type][size],
              label='Efficiency')
     plt.plot(entropy_data[dataset][model_type][size],
              label='Entropy at noise 0.0')
diff --git a/get_entropy_noisy.py b/get_entropy_noisy.py
index e60398c..a4980e3 100644
--- a/get_entropy_noisy.py
+++ b/get_entropy_noisy.py
@@ -11,18 +11,30 @@ models_lenet_mnist = aux.load_pickle("lenet_data_mnist_noisy.pkl")
 
 
 entropy_data = {'CIFAR': {'BCNN': {0.1: None, 0.25: None,
-                                   0.5: None, 0.75: None, 0.99: None},
+                                   0.5: None, 0.75: None, 0.99: None,
+                                   'raleigh': None, 'erlang': None,
+                                   'exponential': None, 'uniform': None,
+                                   'impulse': None},
 
                           'LeNet': {0.1: None, 0.25: None,
-                                    0.5: None, 0.75: None, 0.99: None},
+                                    0.5: None, 0.75: None, 0.99: None,
+                                    'raleigh': None, 'erlang': None,
+                                    'exponential': None, 'uniform': None,
+                                    'impulse': None},
                           },
 
 
                 'MNIST': {'BCNN': {0.1: None, 0.25: None,
-                                   0.5: None, 0.75: None, 0.99: None},
+                                   0.5: None, 0.75: None, 0.99: None,
+                                   'raleigh': None, 'erlang': None,
+                                   'exponential': None, 'uniform': None,
+                                   'impulse': None},
 
                           'LeNet': {0.1: None, 0.25: None,
-                                    0.5: None, 0.75: None, 0.99: None},
+                                    0.5: None, 0.75: None, 0.99: None,
+                                    'raleigh': None, 'erlang': None,
+                                    'exponential': None, 'uniform': None,
+                                    'impulse': None},
                           },
                 }
 
@@ -45,7 +57,7 @@ bayes_keys = ['conv1.W_mu', 'conv1.W_rho',
 """
 
 
-noise_levels = [0.1, 0.25, 0.5, 0.75, 0.99]
+noise_levels = [0.1, 0.25, 0.5, 0.75, 0.99, 'raleigh', 'erlang', 'exponential', 'uniform', 'impulse']
 
 
 bayes_keys = ['conv1.W_mu', 'conv2.W_mu',