# Experiments-Data-Processing/proc_saved_dnns.py
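"""Load the per-epoch checkpoints saved under EXPERIMENT 2 (Efficiency
stopping) for BCNN and LeNet models of sizes 1-7 on MNIST and CIFAR10,
and pickle the collected checkpoint data for downstream processing."""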

import pickle
import torch
from find_epochs import get_experiment_epochs
# import warnings
# import numpy as np

# Example checkpoint filename: model_lenet_1_epoch_4.pt

# Experiment number; can be 1, 2 or 3
exp_no = 2
# Model size; can be 1 to 7 (shadowed by the loop variable in __main__)
model_size = 1
EXPERIMENT_2_PATH = "./EXPERIMENT_2_DATA"
EXPERIMENT_3_PATH = "./EXPERIMENT_3_DATA"
crit = "Efficiency"
experiment_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
    "100": "/100_epoch",
    "Accuracy": "/Accuracy_Bound",
    "Energy": "/Energy_Bound",
}
bcnn_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/bayesian",
    "mid_path_mnist": "/model_lenet_lrt_softplus_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/bayesian",
    "mid_path_cifar": "/model_lenet_lrt_softplus_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}
lenet_path = {
    "prefix_path_mnist": "/checkpoints/MNIST/frequentist",
    "mid_path_mnist": "/model_lenet_",
    "prefix_path_cifar": "/checkpoints/CIFAR10/frequentist",
    "mid_path_cifar": "/model_lenet_",
    "end_path": "_epoch_",
    "noise_path": "_noise_",
    "suffix_path": ".pt",
}
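# The fragments above assemble into checkpoint paths such as (illustrative
# size/epoch values; the noise tag is interpolated verbatim):
#   ./EXPERIMENT_2_DATA/Efficiency_Stopping/checkpoints/CIFAR10/bayesian/model_lenet_lrt_softplus_3_epoch_5_noise_None.pt
#   ./EXPERIMENT_2_DATA/Efficiency_Stopping/checkpoints/MNIST/frequentist/model_lenet_3_epoch_5_noise_None.pt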
experiment_2_criterias = {
    "Early": "/Early_Stopping",
    "Efficiency": "/Efficiency_Stopping",
}
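# Noise tag; interpolated verbatim into filenames, so None selects the
# "..._noise_None.pt" checkpoints.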
model_noise = None
datasets = ["MNIST", "CIFAR10"]
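# Parameter names as they appear in the saved state dicts. For the Bayesian
# layers, _mu/_rho are presumably the variational mean and the
# softplus-parameterized scale of each weight/bias.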
bcnn_layers = [
    "conv1.W_mu",
    "conv1.W_rho",
    "conv1.bias_mu",
    "conv1.bias_rho",
    "conv2.W_mu",
    "conv2.W_rho",
    "conv2.bias_mu",
    "conv2.bias_rho",
    "fc1.W_mu",
    "fc1.W_rho",
    "fc1.bias_mu",
    "fc1.bias_rho",
    "fc2.W_mu",
    "fc2.W_rho",
    "fc2.bias_mu",
    "fc2.bias_rho",
    "fc3.W_mu",
    "fc3.W_rho",
    "fc3.bias_mu",
    "fc3.bias_rho",
]
lenet_layers = [
    "conv1.weight",
    "conv1.bias",
    "conv2.weight",
    "conv2.bias",
    "fc1.weight",
    "fc1.bias",
    "fc2.weight",
    "fc2.bias",
    "fc3.weight",
    "fc3.bias",
]
# Per-layer templates keyed by the state-dict names above.
bcnn_base = {layer: None for layer in bcnn_layers}
lenet_base = {layer: None for layer in lenet_layers}
bcnn_cifar_size_data = {}
bcnn_mnist_size_data = {}
lenet_cifar_size_data = {}
lenet_mnist_size_data = {}
# CUDA settings
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
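# Note: __main__ below always passes "cpu" to load_model, so checkpoints
# are loaded on the CPU regardless of this device.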
def load_model(save_path, gpu):
    """Load a checkpoint from save_path onto the requested device."""
    gpu = str(gpu)
    if gpu == "cuda:0":
        # warnings.warn("I'm in gpu")
        return torch.load(save_path, weights_only=True)
    elif gpu == "cpu":
        # warnings.warn("I'm in cpu")
        return torch.load(
            save_path, map_location=torch.device("cpu"), weights_only=True
        )
    # Any other device string is unsupported.
    raise ValueError(f"Unsupported device string: {gpu}")
if __name__ == "__main__":
    for model_size in range(1, 8):
        # BCNN CIFAR10 (BCNN checkpoints are numbered from epoch 0)
        bcnn_cifar = []
        for model_epoch in range(
            get_experiment_epochs(experiment_number=2, size=7, model_type="bcnn", data_type="cifar")[
                model_size
            ]
        ):
            bcnn_cifar.append(
                load_model(
                    f"{EXPERIMENT_2_PATH}{experiment_2_criterias['Efficiency']}"
                    f"{bcnn_path['prefix_path_cifar']}{bcnn_path['mid_path_cifar']}"
                    f"{model_size}{bcnn_path['end_path']}{model_epoch}{bcnn_path['noise_path']}"
                    f"{model_noise}{bcnn_path['suffix_path']}",
                    "cpu",
                )
            )
        bcnn_cifar_size_data[model_size] = dict(enumerate(bcnn_cifar))
        del bcnn_cifar
        # LeNet CIFAR10 (LeNet checkpoints are numbered from epoch 1)
        lenet_cifar = []
        for model_epoch in range(
            get_experiment_epochs(experiment_number=2, size=7, model_type="lenet", data_type="cifar")[
                model_size
            ]
        ):
            lenet_cifar.append(
                load_model(
                    f"{EXPERIMENT_2_PATH}{experiment_2_criterias['Efficiency']}"
                    f"{lenet_path['prefix_path_cifar']}{lenet_path['mid_path_cifar']}"
                    f"{model_size}{lenet_path['end_path']}{model_epoch + 1}{lenet_path['noise_path']}"
                    f"{model_noise}{lenet_path['suffix_path']}",
                    "cpu",
                )
            )
        lenet_cifar_size_data[model_size] = dict(enumerate(lenet_cifar))
        del lenet_cifar
        # BCNN MNIST
        bcnn_mnist = []
        for model_epoch in range(
            get_experiment_epochs(experiment_number=2, size=7, model_type="bcnn", data_type="mnist")[
                model_size
            ]
        ):
            bcnn_mnist.append(
                load_model(
                    f"{EXPERIMENT_2_PATH}{experiment_2_criterias['Efficiency']}"
                    f"{bcnn_path['prefix_path_mnist']}{bcnn_path['mid_path_mnist']}"
                    f"{model_size}{bcnn_path['end_path']}{model_epoch}{bcnn_path['noise_path']}"
                    f"{model_noise}{bcnn_path['suffix_path']}",
                    "cpu",
                )
            )
        bcnn_mnist_size_data[model_size] = dict(enumerate(bcnn_mnist))
        del bcnn_mnist
        # LeNet MNIST
        lenet_mnist = []
        for model_epoch in range(
            get_experiment_epochs(experiment_number=2, size=7, model_type="lenet", data_type="mnist")[
                model_size
            ]
        ):
            lenet_mnist.append(
                load_model(
                    f"{EXPERIMENT_2_PATH}{experiment_2_criterias['Efficiency']}"
                    f"{lenet_path['prefix_path_mnist']}{lenet_path['mid_path_mnist']}"
                    f"{model_size}{lenet_path['end_path']}{model_epoch + 1}{lenet_path['noise_path']}"
                    f"{model_noise}{lenet_path['suffix_path']}",
                    "cpu",
                )
            )
        lenet_mnist_size_data[model_size] = dict(enumerate(lenet_mnist))
        del lenet_mnist
    # Saving all here
    with open("bayes_data_cifar.pkl", "wb") as f:
        pickle.dump(bcnn_cifar_size_data, f)
    with open("lenet_data_cifar.pkl", "wb") as f:
        pickle.dump(lenet_cifar_size_data, f)
    with open("bayes_data_mnist.pkl", "wb") as f:
        pickle.dump(bcnn_mnist_size_data, f)
    with open("lenet_data_mnist.pkl", "wb") as f:
        pickle.dump(lenet_mnist_size_data, f)
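
# Reading the pickles back: a minimal sketch, assuming this script has run
# and that each .pt file holds a state dict (as the layer-name lists above
# suggest). The nesting is data[model_size][epoch_index].
#
#   import pickle
#   with open("lenet_data_cifar.pkl", "rb") as f:
#       lenet_cifar = pickle.load(f)
#   sd = lenet_cifar[3][0]  # size-3 LeNet, first saved epoch
#   print(sd["conv1.weight"].shape)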